{
"Huggy": {
"checkpoints": [
{
"steps": 199917,
"file_path": "results/Huggy2/Huggy/Huggy-199917.onnx",
"reward": 3.545702797751273,
"creation_time": 1738342800.4870648,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199917.pt"
]
},
{
"steps": 399801,
"file_path": "results/Huggy2/Huggy/Huggy-399801.onnx",
"reward": 3.456133166948954,
"creation_time": 1738343041.4639442,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399801.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy2/Huggy/Huggy-599966.onnx",
"reward": 3.89725631972154,
"creation_time": 1738343284.4002903,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799949,
"file_path": "results/Huggy2/Huggy/Huggy-799949.onnx",
"reward": 3.979352740525025,
"creation_time": 1738343530.6852376,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799949.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy2/Huggy/Huggy-999972.onnx",
"reward": 3.80673729088492,
"creation_time": 1738343781.1651216,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199951,
"file_path": "results/Huggy2/Huggy/Huggy-1199951.onnx",
"reward": 3.8897436354533736,
"creation_time": 1738344040.0152435,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199951.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 3.4985432837690627,
"creation_time": 1738344295.0656161,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599854,
"file_path": "results/Huggy2/Huggy/Huggy-1599854.onnx",
"reward": 3.780238839607794,
"creation_time": 1738344546.060805,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599854.pt"
]
},
{
"steps": 1799971,
"file_path": "results/Huggy2/Huggy/Huggy-1799971.onnx",
"reward": 3.93973322556569,
"creation_time": 1738344803.9781365,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799971.pt"
]
},
{
"steps": 1999963,
"file_path": "results/Huggy2/Huggy/Huggy-1999963.onnx",
"reward": 3.9886966809420517,
"creation_time": 1738345059.277329,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999963.pt"
]
},
{
"steps": 2000090,
"file_path": "results/Huggy2/Huggy/Huggy-2000090.onnx",
"reward": 4.038583628005451,
"creation_time": 1738345059.3954182,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
],
"final_checkpoint": {
"steps": 2000090,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.038583628005451,
"creation_time": 1738345059.3954182,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}