poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5365769863128662,
"min": 1.4813590049743652,
"max": 1.808916687965393,
"count": 848
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 30338.17578125,
"min": 26535.064453125,
"max": 39762.046875,
"count": 848
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 72.51470588235294,
"min": 44.32727272727273,
"max": 96.13461538461539,
"count": 848
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19724.0,
"min": 17756.0,
"max": 21392.0,
"count": 848
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1622.0647398806539,
"min": 1521.8086100971377,
"max": 1629.4448554804812,
"count": 848
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 220600.80462376893,
"min": 165666.44573914562,
"max": 342997.56245919503,
"count": 848
},
"SoccerTwos.Step.mean": {
"value": 49999973.0,
"min": 41529980.0,
"max": 49999973.0,
"count": 848
},
"SoccerTwos.Step.sum": {
"value": 49999973.0,
"min": 41529980.0,
"max": 49999973.0,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.04019450023770332,
"min": -0.12426722049713135,
"max": 0.07426580041646957,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 5.506646633148193,
"min": -20.752626419067383,
"max": 11.214136123657227,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.040437038987874985,
"min": -0.12619948387145996,
"max": 0.07686888426542282,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 5.53987455368042,
"min": -21.075313568115234,
"max": 11.60720157623291,
"count": 848
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 848
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.14372262728475307,
"min": -0.3546271589067247,
"max": 0.4209885707923344,
"count": 848
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 19.68999993801117,
"min": -57.449599742889404,
"max": 58.93839991092682,
"count": 848
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.14372262728475307,
"min": -0.3546271589067247,
"max": 0.4209885707923344,
"count": 848
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 19.68999993801117,
"min": -57.449599742889404,
"max": 58.93839991092682,
"count": 848
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 848
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 848
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01726939525397029,
"min": 0.010818473714122472,
"max": 0.024776815770504376,
"count": 411
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01726939525397029,
"min": 0.010818473714122472,
"max": 0.024776815770504376,
"count": 411
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0959845520555973,
"min": 0.07766435618201892,
"max": 0.12066736668348313,
"count": 411
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0959845520555973,
"min": 0.07766435618201892,
"max": 0.12066736668348313,
"count": 411
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09715440968672434,
"min": 0.07839207574725152,
"max": 0.12248187363147736,
"count": 411
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09715440968672434,
"min": 0.07839207574725152,
"max": 0.12248187363147736,
"count": 411
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 411
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 411
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 411
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 411
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 411
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 411
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1743095433",
"python_version": "3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0]",
"command_line_arguments": "/home/kraz/Documents/DeepRLCourse/selfplay/VENV/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --resume",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1743108821"
},
"total": 13388.648106151999,
"count": 1,
"self": 0.31980500999816286,
"children": {
"run_training.setup": {
"total": 0.014941138000040155,
"count": 1,
"self": 0.014941138000040155
},
"TrainerController.start_learning": {
"total": 13388.313360004,
"count": 1,
"self": 7.981224752884373,
"children": {
"TrainerController._reset_env": {
"total": 5.233504972000446,
"count": 44,
"self": 5.233504972000446
},
"TrainerController.advance": {
"total": 13374.967367083114,
"count": 584115,
"self": 7.10108697205942,
"children": {
"env_step": {
"total": 10706.45407931988,
"count": 584115,
"self": 8444.909337357909,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2256.47032713609,
"count": 584115,
"self": 44.79360782636559,
"children": {
"TorchPolicy.evaluate": {
"total": 2211.6767193097244,
"count": 1064266,
"self": 2211.6767193097244
}
}
},
"workers": {
"total": 5.07441482588024,
"count": 584115,
"self": 0.0,
"children": {
"worker_root": {
"total": 13372.792390264352,
"count": 584115,
"is_parallel": true,
"self": 5914.796352574395,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.011833296999952836,
"count": 2,
"is_parallel": true,
"self": 0.009410772000023826,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.00242252499992901,
"count": 8,
"is_parallel": true,
"self": 0.00242252499992901
}
}
},
"UnityEnvironment.step": {
"total": 0.03231245300003138,
"count": 1,
"is_parallel": true,
"self": 0.0009449970000332542,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0006791370000200914,
"count": 1,
"is_parallel": true,
"self": 0.0006791370000200914
},
"communicator.exchange": {
"total": 0.02807312200002343,
"count": 1,
"is_parallel": true,
"self": 0.02807312200002343
},
"steps_from_proto": {
"total": 0.0026151969999546054,
"count": 2,
"is_parallel": true,
"self": 0.0004390960002638167,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0021761009996907887,
"count": 8,
"is_parallel": true,
"self": 0.0021761009996907887
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.10110702099120772,
"count": 86,
"is_parallel": true,
"self": 0.016134994990238738,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.08497202600096898,
"count": 344,
"is_parallel": true,
"self": 0.08497202600096898
}
}
},
"UnityEnvironment.step": {
"total": 7457.894930668966,
"count": 584114,
"is_parallel": true,
"self": 477.0810506252719,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 306.737595483266,
"count": 584114,
"is_parallel": true,
"self": 306.737595483266
},
"communicator.exchange": {
"total": 5379.600042634662,
"count": 584114,
"is_parallel": true,
"self": 5379.600042634662
},
"steps_from_proto": {
"total": 1294.4762419257654,
"count": 1168228,
"is_parallel": true,
"self": 208.49837002703043,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1085.977871898735,
"count": 4672912,
"is_parallel": true,
"self": 1085.977871898735
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 2661.4122007911765,
"count": 584115,
"self": 58.074566016771314,
"children": {
"process_trajectory": {
"total": 1202.7350175344188,
"count": 584115,
"self": 1200.3584681514194,
"children": {
"RLTrainer._checkpoint": {
"total": 2.3765493829994284,
"count": 17,
"self": 2.3765493829994284
}
}
},
"_update_policy": {
"total": 1400.6026172399866,
"count": 411,
"self": 839.4586819019712,
"children": {
"TorchPOCAOptimizer.update": {
"total": 561.1439353380154,
"count": 12330,
"self": 561.1439353380154
}
}
}
}
}
}
},
"trainer_threads": {
"total": 6.310001481324434e-07,
"count": 1,
"self": 6.310001481324434e-07
},
"TrainerController._save_models": {
"total": 0.13126256500072486,
"count": 1,
"self": 0.0018535050003265496,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1294090600003983,
"count": 1,
"self": 0.1294090600003983
}
}
}
}
}
}
}
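
For reference, here is a minimal sketch of how this file could be inspected offline. It only relies on the fields visible above: the "gauges" entries (value/min/max/count) and the nested timer nodes (total/count/self/children). The path and script name are assumptions taken from the page title; this is a standalone helper, not part of the ML-Agents tooling.

```python
import json

# Assumed location, based on the repo path shown above (run_logs/timers.json).
TIMERS_PATH = "run_logs/timers.json"


def print_gauges(gauges: dict) -> None:
    """Print each gauge's latest value together with its min/max/count."""
    for name, stats in sorted(gauges.items()):
        print(f"{name}: value={stats['value']:.6g} "
              f"(min={stats['min']:.6g}, max={stats['max']:.6g}, n={stats['count']})")


def print_timers(node: dict, name: str = "root", depth: int = 0,
                 root_total: float | None = None) -> None:
    """Recursively print the timer tree: seconds, call count, and share of root time."""
    total = node.get("total", 0.0)
    if root_total is None:
        root_total = total or 1.0
    pct = 100.0 * total / root_total
    print(f"{'  ' * depth}{name}: {total:.1f}s  count={node.get('count', 0)}  ({pct:.1f}% of root)")
    for child_name, child in node.get("children", {}).items():
        print_timers(child, child_name, depth + 1, root_total)


def main() -> None:
    with open(TIMERS_PATH) as f:
        timers = json.load(f)

    print("== Gauges ==")
    print_gauges(timers.get("gauges", {}))

    print("\n== Timers ==")
    print_timers(timers)


if __name__ == "__main__":
    main()
```

Run from the repository root, this reproduces the breakdown recorded above, e.g. env_step at roughly 10706 s and _update_policy at roughly 1400 s out of the ~13388 s total.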