poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.6822625398635864,
"min": 1.6398881673812866,
"max": 1.842372179031372,
"count": 251
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 35690.8828125,
"min": 2177.860595703125,
"max": 40059.4921875,
"count": 251
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 57.04651162790697,
"min": 28.0,
"max": 74.60606060606061,
"count": 251
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19624.0,
"min": 224.0,
"max": 21012.0,
"count": 251
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1277.090876465809,
"min": 1183.1962961007646,
"max": 1278.9525609084717,
"count": 251
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 219659.63075211918,
"min": 4800.000263188352,
"max": 272948.30342207104,
"count": 251
},
"SoccerTwos.Step.mean": {
"value": 9999944.0,
"min": 7499994.0,
"max": 9999944.0,
"count": 251
},
"SoccerTwos.Step.sum": {
"value": 9999944.0,
"min": 7499994.0,
"max": 9999944.0,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.001799598685465753,
"min": -0.11391967535018921,
"max": 0.10982072353363037,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 0.30953097343444824,
"min": -22.03433609008789,
"max": 22.513248443603516,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.005159409716725349,
"min": -0.12060762941837311,
"max": 0.10789498686790466,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 0.8874185085296631,
"min": -22.232192993164062,
"max": 22.118473052978516,
"count": 251
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 251
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.015867442585701167,
"min": -0.3096150954564412,
"max": 0.31799999872843426,
"count": 251
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 2.7292001247406006,
"min": -52.772199869155884,
"max": 48.25259983539581,
"count": 251
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.015867442585701167,
"min": -0.3096150954564412,
"max": 0.31799999872843426,
"count": 251
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 2.7292001247406006,
"min": -52.772199869155884,
"max": 48.25259983539581,
"count": 251
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 251
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 251
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.021029965362201135,
"min": 0.012879154579907966,
"max": 0.025046016524235406,
"count": 121
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.021029965362201135,
"min": 0.012879154579907966,
"max": 0.025046016524235406,
"count": 121
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10496913641691208,
"min": 0.09255081589023272,
"max": 0.12506354277332624,
"count": 121
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10496913641691208,
"min": 0.09255081589023272,
"max": 0.12506354277332624,
"count": 121
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1063881886502107,
"min": 0.09390097285310427,
"max": 0.127565894027551,
"count": 121
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1063881886502107,
"min": 0.09390097285310427,
"max": 0.127565894027551,
"count": 121
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 121
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 121
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 121
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 121
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 121
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 121
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1675643849",
"python_version": "3.8.16 (default, Jan 17 2023, 23:13:24) \n[GCC 11.2.0]",
"command_line_arguments": "/opt/conda/envs/rl/bin/mlagents-learn ../data/SoccerTwos.yaml --env=../training-envs-executables/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.8.1+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1675656743"
},
"total": 12894.094235517987,
"count": 1,
"self": 1.5469279409880983,
"children": {
"run_training.setup": {
"total": 0.07375104300444946,
"count": 1,
"self": 0.07375104300444946
},
"TrainerController.start_learning": {
"total": 12892.473556533994,
"count": 1,
"self": 6.042133164562983,
"children": {
"TrainerController._reset_env": {
"total": 3.4907721130002756,
"count": 14,
"self": 3.4907721130002756
},
"TrainerController.advance": {
"total": 12881.093821607428,
"count": 174383,
"self": 7.125839299391373,
"children": {
"env_step": {
"total": 4457.228039254725,
"count": 174383,
"self": 3550.539422073867,
"children": {
"SubprocessEnvManager._take_step": {
"total": 902.6979490201629,
"count": 174383,
"self": 38.745676980295684,
"children": {
"TorchPolicy.evaluate": {
"total": 863.9522720398672,
"count": 313720,
"self": 863.9522720398672
}
}
},
"workers": {
"total": 3.99066816069535,
"count": 174383,
"self": 0.0,
"children": {
"worker_root": {
"total": 12868.206770986566,
"count": 174383,
"is_parallel": true,
"self": 10002.16133172765,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.003081321992794983,
"count": 2,
"is_parallel": true,
"self": 0.0006560630135936663,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002425258979201317,
"count": 8,
"is_parallel": true,
"self": 0.002425258979201317
}
}
},
"UnityEnvironment.step": {
"total": 0.04443297600664664,
"count": 1,
"is_parallel": true,
"self": 0.0006884790200274438,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00047651999921072274,
"count": 1,
"is_parallel": true,
"self": 0.00047651999921072274
},
"communicator.exchange": {
"total": 0.04036948499560822,
"count": 1,
"is_parallel": true,
"self": 0.04036948499560822
},
"steps_from_proto": {
"total": 0.002898491991800256,
"count": 2,
"is_parallel": true,
"self": 0.0005686439835699275,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0023298480082303286,
"count": 8,
"is_parallel": true,
"self": 0.0023298480082303286
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.02985909597191494,
"count": 26,
"is_parallel": true,
"self": 0.005891453954973258,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.02396764201694168,
"count": 104,
"is_parallel": true,
"self": 0.02396764201694168
}
}
},
"UnityEnvironment.step": {
"total": 2866.0155801629444,
"count": 174382,
"is_parallel": true,
"self": 143.22616853562067,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 98.19041134118743,
"count": 174382,
"is_parallel": true,
"self": 98.19041134118743
},
"communicator.exchange": {
"total": 2169.232469670169,
"count": 174382,
"is_parallel": true,
"self": 2169.232469670169
},
"steps_from_proto": {
"total": 455.36653061596735,
"count": 348764,
"is_parallel": true,
"self": 90.07770443204208,
"children": {
"_process_rank_one_or_two_observation": {
"total": 365.28882618392527,
"count": 1395056,
"is_parallel": true,
"self": 365.28882618392527
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 8416.739943053311,
"count": 174383,
"self": 37.67580802857992,
"children": {
"process_trajectory": {
"total": 1387.9469409556186,
"count": 174383,
"self": 1377.126402204638,
"children": {
"RLTrainer._checkpoint": {
"total": 10.820538750980631,
"count": 6,
"self": 10.820538750980631
}
}
},
"_update_policy": {
"total": 6991.117194069113,
"count": 121,
"self": 485.3654478910612,
"children": {
"TorchPOCAOptimizer.update": {
"total": 6505.751746178052,
"count": 3630,
"self": 6505.751746178052
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.7780112102627754e-06,
"count": 1,
"self": 1.7780112102627754e-06
},
"TrainerController._save_models": {
"total": 1.8468278709915467,
"count": 1,
"self": 0.1252153579989681,
"children": {
"RLTrainer._checkpoint": {
"total": 1.7216125129925786,
"count": 1,
"self": 1.7216125129925786
}
}
}
}
}
}
}
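
For reference, below is a minimal sketch of how this timers.json could be inspected offline. It assumes only the standard Python json module and that the file sits at run_logs/timers.json as in the path above; the key names used (gauges, value, min, max, count, total, self, children) are taken directly from the data shown.

import json

# Load the ML-Agents gauge/timer dump shown above.
# The path assumes the repository layout from the header (run_logs/timers.json);
# adjust it to wherever the file lives locally.
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Each gauge records the latest value plus min/max/count over the run.
for name, gauge in timers["gauges"].items():
    print(f"{name}: value={gauge['value']:.4f} "
          f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, n={gauge['count']})")

# The timing data is a tree: every node carries 'total' (seconds), 'count',
# 'self' (time not attributed to children), and optionally 'children'.
def walk(node, name="root", depth=0):
    print(f"{'  ' * depth}{name}: total={node['total']:.1f}s, count={node['count']}")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

walk(timers)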