ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199731,
                "file_path": "results/Huggy/Huggy/Huggy-199731.onnx",
                "reward": 3.117367534868179,
                "creation_time": 1671801444.5429518,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199731.pt"
                ]
            },
            {
                "steps": 399847,
                "file_path": "results/Huggy/Huggy/Huggy-399847.onnx",
                "reward": 3.4485505633056164,
                "creation_time": 1671801691.0134096,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399847.pt"
                ]
            },
            {
                "steps": 599261,
                "file_path": "results/Huggy/Huggy/Huggy-599261.onnx",
                "reward": 3.610715873539448,
                "creation_time": 1671801941.5474608,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599261.pt"
                ]
            },
            {
                "steps": 799938,
                "file_path": "results/Huggy/Huggy/Huggy-799938.onnx",
                "reward": 3.5881458661385945,
                "creation_time": 1671802191.332617,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799938.pt"
                ]
            },
            {
                "steps": 999980,
                "file_path": "results/Huggy/Huggy/Huggy-999980.onnx",
                "reward": 4.0510111828239594,
                "creation_time": 1671802443.1285434,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999980.pt"
                ]
            },
            {
                "steps": 1199256,
                "file_path": "results/Huggy/Huggy/Huggy-1199256.onnx",
                "reward": 3.924452391266823,
                "creation_time": 1671802696.160974,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199256.pt"
                ]
            },
            {
                "steps": 1399922,
                "file_path": "results/Huggy/Huggy/Huggy-1399922.onnx",
                "reward": 3.7576204809532348,
                "creation_time": 1671802940.8225813,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399922.pt"
                ]
            },
            {
                "steps": 1599981,
                "file_path": "results/Huggy/Huggy/Huggy-1599981.onnx",
                "reward": 3.891991335954239,
                "creation_time": 1671803190.1449516,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599981.pt"
                ]
            },
            {
                "steps": 1799965,
                "file_path": "results/Huggy/Huggy/Huggy-1799965.onnx",
                "reward": 3.7882893044373085,
                "creation_time": 1671803439.634791,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799965.pt"
                ]
            },
            {
                "steps": 1999958,
                "file_path": "results/Huggy/Huggy/Huggy-1999958.onnx",
                "reward": 3.613110296165242,
                "creation_time": 1671803685.612402,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999958.pt"
                ]
            },
            {
                "steps": 2000041,
                "file_path": "results/Huggy/Huggy/Huggy-2000041.onnx",
                "reward": 3.620495166694909,
                "creation_time": 1671803685.742681,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000041.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000041,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.620495166694909,
            "creation_time": 1671803685.742681,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000041.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
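
For quick inspection, a minimal Python sketch that loads this status file and summarizes its checkpoints. The relative path `run_logs/training_status.json` and the behavior name `"Huggy"` are taken from this repository's layout; the script itself is illustrative, not part of ML-Agents.

```python
import json

# Load the ML-Agents training status file (path assumed from this repo's layout).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]  # behavior name as recorded in the file above

# Print step count, mean reward, and exported ONNX path per intermediate checkpoint.
for ckpt in behavior["checkpoints"]:
    print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

# The final checkpoint is stored separately and points at the final exported model.
final = behavior["final_checkpoint"]
print(f"final {final['steps']:>7}  reward {final['reward']:.3f}  {final['file_path']}")

print("mlagents version:", status["metadata"]["mlagents_version"])
```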