{
"Huggy": {
"checkpoints": [
{
"steps": 199967,
"file_path": "results/Huggy2/Huggy/Huggy-199967.onnx",
"reward": 3.325483771808007,
"creation_time": 1716821177.4266808,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399942,
"file_path": "results/Huggy2/Huggy/Huggy-399942.onnx",
"reward": 3.858892744442202,
"creation_time": 1716821704.0894024,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399942.pt"
]
},
{
"steps": 599964,
"file_path": "results/Huggy2/Huggy/Huggy-599964.onnx",
"reward": 5.156614303588867,
"creation_time": 1716822256.9699504,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599964.pt"
]
},
{
"steps": 799980,
"file_path": "results/Huggy2/Huggy/Huggy-799980.onnx",
"reward": 3.7796707809346275,
"creation_time": 1716822754.8074856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799980.pt"
]
},
{
"steps": 999866,
"file_path": "results/Huggy2/Huggy/Huggy-999866.onnx",
"reward": 4.106272386556322,
"creation_time": 1716823283.2664094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999866.pt"
]
},
{
"steps": 1199960,
"file_path": "results/Huggy2/Huggy/Huggy-1199960.onnx",
"reward": 3.71445276260376,
"creation_time": 1716823808.3647292,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199960.pt"
]
},
{
"steps": 1399727,
"file_path": "results/Huggy2/Huggy/Huggy-1399727.onnx",
"reward": 3.6980174326234394,
"creation_time": 1716824325.256869,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399727.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy2/Huggy/Huggy-1599944.onnx",
"reward": 3.9727997559088246,
"creation_time": 1716824853.19519,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799844,
"file_path": "results/Huggy2/Huggy/Huggy-1799844.onnx",
"reward": 3.5638032671230944,
"creation_time": 1716825378.3797755,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799844.pt"
]
},
{
"steps": 1999684,
"file_path": "results/Huggy2/Huggy/Huggy-1999684.onnx",
"reward": 3.9270406704581244,
"creation_time": 1716825891.753089,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999684.pt"
]
},
{
"steps": 2000434,
"file_path": "results/Huggy2/Huggy/Huggy-2000434.onnx",
"reward": 3.8963356830261566,
"creation_time": 1716825892.0588822,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000434.pt"
]
}
],
"final_checkpoint": {
"steps": 2000434,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8963356830261566,
"creation_time": 1716825892.0588822,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000434.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}