{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.39236167073249817,
"min": 0.39236167073249817,
"max": 1.4935498237609863,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 11695.5166015625,
"min": 11695.5166015625,
"max": 45308.328125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989930.0,
"min": 29962.0,
"max": 989930.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989930.0,
"min": 29962.0,
"max": 989930.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5337557792663574,
"min": -0.1348305344581604,
"max": 0.5596157312393188,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 147.31658935546875,
"min": -32.08966827392578,
"max": 153.334716796875,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.16169095039367676,
"min": -0.34805065393447876,
"max": 0.15681646764278412,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -44.62670135498047,
"min": -95.36587524414062,
"max": 37.79277038574219,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06811334140537142,
"min": 0.06547812474032287,
"max": 0.07323475245558281,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9535867796751999,
"min": 0.49047698236964427,
"max": 1.0752853873532775,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.0180244758147655,
"min": 0.001364037501759713,
"max": 0.022849702598510442,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.252342661406717,
"min": 0.017732487522876268,
"max": 0.3198958363791462,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.59164032662857e-06,
"min": 7.59164032662857e-06,
"max": 0.00029523797301591427,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010628296457279998,
"min": 0.00010628296457279998,
"max": 0.0036092112969295996,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10253051428571429,
"min": 0.10253051428571429,
"max": 0.19841265714285714,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4354272000000001,
"min": 1.3888886,
"max": 2.5275016999999997,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002627983771428571,
"min": 0.0002627983771428571,
"max": 0.009841424448571428,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.00367917728,
"min": 0.00367917728,
"max": 0.12031673295999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011338028125464916,
"min": 0.011338028125464916,
"max": 0.33617809414863586,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.15873239934444427,
"min": 0.15873239934444427,
"max": 2.3532466888427734,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 358.76190476190476,
"min": 331.35632183908046,
"max": 986.5,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30136.0,
"min": 16505.0,
"max": 33810.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.56979046034671,
"min": -0.9254242947155779,
"max": 1.5996574555662857,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 131.86239866912365,
"min": -30.53900172561407,
"max": 139.17019863426685,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.56979046034671,
"min": -0.9254242947155779,
"max": 1.5996574555662857,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 131.86239866912365,
"min": -30.53900172561407,
"max": 139.17019863426685,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04215670825786739,
"min": 0.03957248334813028,
"max": 6.225102385725169,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.5411634936608607,
"min": 3.4428060512873344,
"max": 105.82674055732787,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1736929808",
"python_version": "3.10.12 (main, Nov 6 2024, 20:22:13) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./ml-agents/config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1736931975"
},
"total": 2166.829852895,
"count": 1,
"self": 0.4770183579994409,
"children": {
"run_training.setup": {
"total": 0.1072793040000306,
"count": 1,
"self": 0.1072793040000306
},
"TrainerController.start_learning": {
"total": 2166.2455552330002,
"count": 1,
"self": 1.3652455579749585,
"children": {
"TrainerController._reset_env": {
"total": 5.967830224000011,
"count": 1,
"self": 5.967830224000011
},
"TrainerController.advance": {
"total": 2158.8289077870254,
"count": 63733,
"self": 1.4177732519669917,
"children": {
"env_step": {
"total": 1465.8169747520426,
"count": 63733,
"self": 1317.0770374770862,
"children": {
"SubprocessEnvManager._take_step": {
"total": 147.94438495099206,
"count": 63733,
"self": 4.504026178036952,
"children": {
"TorchPolicy.evaluate": {
"total": 143.4403587729551,
"count": 62568,
"self": 143.4403587729551
}
}
},
"workers": {
"total": 0.7955523239643867,
"count": 63733,
"self": 0.0,
"children": {
"worker_root": {
"total": 2161.564255165001,
"count": 63733,
"is_parallel": true,
"self": 956.3367556089802,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0025466420000270773,
"count": 1,
"is_parallel": true,
"self": 0.0006635730001107731,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0018830689999163042,
"count": 8,
"is_parallel": true,
"self": 0.0018830689999163042
}
}
},
"UnityEnvironment.step": {
"total": 0.04606066600001668,
"count": 1,
"is_parallel": true,
"self": 0.0006850230000168267,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005034249999766871,
"count": 1,
"is_parallel": true,
"self": 0.0005034249999766871
},
"communicator.exchange": {
"total": 0.04327532600001405,
"count": 1,
"is_parallel": true,
"self": 0.04327532600001405
},
"steps_from_proto": {
"total": 0.0015968920000091202,
"count": 1,
"is_parallel": true,
"self": 0.0003393630000232406,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012575289999858796,
"count": 8,
"is_parallel": true,
"self": 0.0012575289999858796
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1205.227499556021,
"count": 63732,
"is_parallel": true,
"self": 33.01331578095824,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.616775073012946,
"count": 63732,
"is_parallel": true,
"self": 22.616775073012946
},
"communicator.exchange": {
"total": 1056.1542160870326,
"count": 63732,
"is_parallel": true,
"self": 1056.1542160870326
},
"steps_from_proto": {
"total": 93.44319261501721,
"count": 63732,
"is_parallel": true,
"self": 18.755360305883414,
"children": {
"_process_rank_one_or_two_observation": {
"total": 74.6878323091338,
"count": 509856,
"is_parallel": true,
"self": 74.6878323091338
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 691.5941597830157,
"count": 63733,
"self": 2.6953704130118012,
"children": {
"process_trajectory": {
"total": 131.04879736200223,
"count": 63733,
"self": 130.79655361400205,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2522437480001827,
"count": 2,
"self": 0.2522437480001827
}
}
},
"_update_policy": {
"total": 557.8499920080017,
"count": 457,
"self": 313.7344243169946,
"children": {
"TorchPPOOptimizer.update": {
"total": 244.11556769100707,
"count": 22827,
"self": 244.11556769100707
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.410000529896934e-07,
"count": 1,
"self": 9.410000529896934e-07
},
"TrainerController._save_models": {
"total": 0.08357072299986612,
"count": 1,
"self": 0.00148227199952089,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08208845100034523,
"count": 1,
"self": 0.08208845100034523
}
}
}
}
}
}
}