{
"Huggy": {
"checkpoints": [
{
"steps": 199721,
"file_path": "results/Huggy2/Huggy/Huggy-199721.onnx",
"reward": 3.1430818984064004,
"creation_time": 1728572727.1846714,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199721.pt"
]
},
{
"steps": 399939,
"file_path": "results/Huggy2/Huggy/Huggy-399939.onnx",
"reward": 3.5262319905716075,
"creation_time": 1728572986.44572,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399939.pt"
]
},
{
"steps": 599991,
"file_path": "results/Huggy2/Huggy/Huggy-599991.onnx",
"reward": 3.5050937086343765,
"creation_time": 1728573244.189476,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599991.pt"
]
},
{
"steps": 799988,
"file_path": "results/Huggy2/Huggy/Huggy-799988.onnx",
"reward": 4.098956580320116,
"creation_time": 1728573517.9091253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799988.pt"
]
},
{
"steps": 999928,
"file_path": "results/Huggy2/Huggy/Huggy-999928.onnx",
"reward": 3.984107193908071,
"creation_time": 1728573802.942381,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999928.pt"
]
},
{
"steps": 1199977,
"file_path": "results/Huggy2/Huggy/Huggy-1199977.onnx",
"reward": 4.014243315136622,
"creation_time": 1728574078.2140887,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199977.pt"
]
},
{
"steps": 1399906,
"file_path": "results/Huggy2/Huggy/Huggy-1399906.onnx",
"reward": 5.130318999290466,
"creation_time": 1728574352.9464197,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399906.pt"
]
},
{
"steps": 1599926,
"file_path": "results/Huggy2/Huggy/Huggy-1599926.onnx",
"reward": 3.5986345166990743,
"creation_time": 1728574616.343499,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599926.pt"
]
},
{
"steps": 1799683,
"file_path": "results/Huggy2/Huggy/Huggy-1799683.onnx",
"reward": 3.542311499413756,
"creation_time": 1728574872.3273218,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799683.pt"
]
},
{
"steps": 1999412,
"file_path": "results/Huggy2/Huggy/Huggy-1999412.onnx",
"reward": 3.3147306191293815,
"creation_time": 1728575126.726236,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999412.pt"
]
},
{
"steps": 2000162,
"file_path": "results/Huggy2/Huggy/Huggy-2000162.onnx",
"reward": 2.976955032348633,
"creation_time": 1728575126.8212204,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000162.pt"
]
}
],
"final_checkpoint": {
"steps": 2000162,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.976955032348633,
"creation_time": 1728575126.8212204,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000162.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0",
"torch_version": "2.2.0a0+81ea7a4"
}
}