rl-ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199812,
                "file_path": "results/Huggy/Huggy/Huggy-199812.onnx",
                "reward": 3.457989297566875,
                "creation_time": 1671708417.3296072,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199812.pt"
                ]
            },
            {
                "steps": 399930,
                "file_path": "results/Huggy/Huggy/Huggy-399930.onnx",
                "reward": 3.5939913988113403,
                "creation_time": 1671708654.7566645,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399930.pt"
                ]
            },
            {
                "steps": 599933,
                "file_path": "results/Huggy/Huggy/Huggy-599933.onnx",
                "reward": 3.596273728779384,
                "creation_time": 1671708887.320109,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599933.pt"
                ]
            },
            {
                "steps": 799969,
                "file_path": "results/Huggy/Huggy/Huggy-799969.onnx",
                "reward": 4.00394890950695,
                "creation_time": 1671709118.5493343,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799969.pt"
                ]
            },
            {
                "steps": 999937,
                "file_path": "results/Huggy/Huggy/Huggy-999937.onnx",
                "reward": 3.98662221061517,
                "creation_time": 1671709350.656788,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999937.pt"
                ]
            },
            {
                "steps": 1199801,
                "file_path": "results/Huggy/Huggy/Huggy-1199801.onnx",
                "reward": 4.139042194171618,
                "creation_time": 1671709584.78607,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199801.pt"
                ]
            },
            {
                "steps": 1399977,
                "file_path": "results/Huggy/Huggy/Huggy-1399977.onnx",
                "reward": 4.222872253259023,
                "creation_time": 1671709817.199542,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399977.pt"
                ]
            },
            {
                "steps": 1599937,
                "file_path": "results/Huggy/Huggy/Huggy-1599937.onnx",
                "reward": 3.96832212635449,
                "creation_time": 1671710046.375555,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599937.pt"
                ]
            },
            {
                "steps": 1799952,
                "file_path": "results/Huggy/Huggy/Huggy-1799952.onnx",
                "reward": 3.755134836075798,
                "creation_time": 1671710278.9018204,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799952.pt"
                ]
            },
            {
                "steps": 1999970,
                "file_path": "results/Huggy/Huggy/Huggy-1999970.onnx",
                "reward": 3.7496339102922858,
                "creation_time": 1671710509.661335,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999970.pt"
                ]
            },
            {
                "steps": 2000108,
                "file_path": "results/Huggy/Huggy/Huggy-2000108.onnx",
                "reward": 3.742860241014449,
                "creation_time": 1671710509.9333847,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000108.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000108,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.742860241014449,
            "creation_time": 1671710509.9333847,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000108.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
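
For reference, a minimal sketch of how a status log like this might be inspected, assuming standard-library Python and that the file lives at run_logs/training_status.json; the path and the summarize helper are illustrative, not part of ML-Agents:

import json
from datetime import datetime, timezone

# Hypothetical location of the log; adjust to where the repo is checked out.
STATUS_PATH = "run_logs/training_status.json"

def summarize(path: str) -> None:
    """Print steps, mean reward, and creation time for each Huggy checkpoint."""
    with open(path) as f:
        status = json.load(f)

    checkpoints = status["Huggy"]["checkpoints"]
    for ckpt in checkpoints:
        # creation_time is a Unix timestamp in seconds since the epoch.
        created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
        print(f"{ckpt['steps']:>9} steps  reward={ckpt['reward']:.3f}  "
              f"{created:%Y-%m-%d %H:%M:%S} UTC")

    # Note the key really is spelled "auxillary_file_paths" in this format.
    best = max(checkpoints, key=lambda c: c["reward"])
    print(f"best checkpoint: {best['file_path']} (reward={best['reward']:.3f})")

if __name__ == "__main__":
    summarize(STATUS_PATH)

Run against the data above, this would list the eleven checkpoints and report results/Huggy/Huggy/Huggy-1399977.onnx (reward of about 4.223) as the highest-reward checkpoint, while the final exported model is the last one, results/Huggy/Huggy.onnx.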