ppo-Pyramids / run_logs / timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5660867094993591,
"min": 0.554049015045166,
"max": 1.4703035354614258,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 16910.142578125,
"min": 16754.44140625,
"max": 44603.12890625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989982.0,
"min": 29874.0,
"max": 989982.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989982.0,
"min": 29874.0,
"max": 989982.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.026042358949780464,
"min": -0.10206277668476105,
"max": 0.06799918413162231,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 6.4064202308654785,
"min": -24.597129821777344,
"max": 16.727798461914062,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0075026145204901695,
"min": -0.0061217197217047215,
"max": 0.30238455533981323,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 1.845643162727356,
"min": -1.5059430599212646,
"max": 72.87467956542969,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06413571656588171,
"min": 0.06413571656588171,
"max": 0.07321951957525495,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.8979000319223439,
"min": 0.5000715416025814,
"max": 1.0388673787917164,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.005869294582623274,
"min": 0.00045276917492287995,
"max": 0.006556808983987138,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.08217012415672584,
"min": 0.00498046092415168,
"max": 0.09179532577581993,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 2.4270547159000002e-06,
"min": 2.4270547159000002e-06,
"max": 9.838354447360002e-05,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 3.3978766022600006e-05,
"min": 3.3978766022600006e-05,
"max": 0.0011691601308401,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10242695714285714,
"min": 0.10242695714285714,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4339774,
"min": 1.3886848,
"max": 2.5691599000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025245301857142866,
"min": 0.00025245301857142866,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003534342260000001,
"min": 0.003534342260000001,
"max": 0.11693907401000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.016322756186127663,
"min": 0.016322756186127663,
"max": 0.45837831497192383,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.22851857542991638,
"min": 0.22851857542991638,
"max": 3.208648204803467,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 801.09375,
"min": 757.1515151515151,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 25635.0,
"min": 16801.0,
"max": 33096.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.13601871114224195,
"min": -0.9999806972280625,
"max": 0.33352724372437503,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 4.3525987565517426,
"min": -30.99940161406994,
"max": 11.006399042904377,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.13601871114224195,
"min": -0.9999806972280625,
"max": 0.33352724372437503,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 4.3525987565517426,
"min": -30.99940161406994,
"max": 11.006399042904377,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.13575547446816927,
"min": 0.13575547446816927,
"max": 8.459434292772237,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.344175182981417,
"min": 4.344175182981417,
"max": 143.81038297712803,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1681928618",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training 2 --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1681930726"
},
"total": 2108.393649072,
"count": 1,
"self": 0.49360840300005293,
"children": {
"run_training.setup": {
"total": 0.1349265710000509,
"count": 1,
"self": 0.1349265710000509
},
"TrainerController.start_learning": {
"total": 2107.7651140979997,
"count": 1,
"self": 1.624797059906541,
"children": {
"TrainerController._reset_env": {
"total": 3.732865678999815,
"count": 1,
"self": 3.732865678999815
},
"TrainerController.advance": {
"total": 2102.3036475040935,
"count": 63237,
"self": 1.6385663731352906,
"children": {
"env_step": {
"total": 1462.634954482047,
"count": 63237,
"self": 1344.269540893071,
"children": {
"SubprocessEnvManager._take_step": {
"total": 117.43343352196166,
"count": 63237,
"self": 5.2410621559229185,
"children": {
"TorchPolicy.evaluate": {
"total": 112.19237136603874,
"count": 62559,
"self": 112.19237136603874
}
}
},
"workers": {
"total": 0.931980067014365,
"count": 63237,
"self": 0.0,
"children": {
"worker_root": {
"total": 2102.4071251949395,
"count": 63237,
"is_parallel": true,
"self": 880.8950439829671,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017498520001026918,
"count": 1,
"is_parallel": true,
"self": 0.0005726539995976054,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011771980005050864,
"count": 8,
"is_parallel": true,
"self": 0.0011771980005050864
}
}
},
"UnityEnvironment.step": {
"total": 0.0689854220001962,
"count": 1,
"is_parallel": true,
"self": 0.0006268870001804316,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005241779999778373,
"count": 1,
"is_parallel": true,
"self": 0.0005241779999778373
},
"communicator.exchange": {
"total": 0.06475146000002496,
"count": 1,
"is_parallel": true,
"self": 0.06475146000002496
},
"steps_from_proto": {
"total": 0.003082897000012963,
"count": 1,
"is_parallel": true,
"self": 0.0004352429996288265,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0026476540003841365,
"count": 8,
"is_parallel": true,
"self": 0.0026476540003841365
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1221.5120812119724,
"count": 63236,
"is_parallel": true,
"self": 33.609199948826245,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.028001143049096,
"count": 63236,
"is_parallel": true,
"self": 25.028001143049096
},
"communicator.exchange": {
"total": 1061.0630888740818,
"count": 63236,
"is_parallel": true,
"self": 1061.0630888740818
},
"steps_from_proto": {
"total": 101.81179124601522,
"count": 63236,
"is_parallel": true,
"self": 22.103558426230165,
"children": {
"_process_rank_one_or_two_observation": {
"total": 79.70823281978505,
"count": 505888,
"is_parallel": true,
"self": 79.70823281978505
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 638.0301266489114,
"count": 63237,
"self": 2.7955249619283222,
"children": {
"process_trajectory": {
"total": 111.75122368298184,
"count": 63237,
"self": 111.52800133398227,
"children": {
"RLTrainer._checkpoint": {
"total": 0.22322234899957039,
"count": 2,
"self": 0.22322234899957039
}
}
},
"_update_policy": {
"total": 523.4833780040012,
"count": 449,
"self": 336.5329811050178,
"children": {
"TorchPPOOptimizer.update": {
"total": 186.9503968989834,
"count": 22767,
"self": 186.9503968989834
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.260002116207033e-07,
"count": 1,
"self": 9.260002116207033e-07
},
"TrainerController._save_models": {
"total": 0.10380292899981214,
"count": 1,
"self": 0.0015779899999870395,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1022249389998251,
"count": 1,
"self": 0.1022249389998251
}
}
}
}
}
}
}
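
Below is a minimal sketch (not part of the original run logs) of how a timers.json like the one above could be loaded and summarized with Python's standard library. The local file name "timers.json" and the helper function names are assumptions made for illustration; they are not part of ML-Agents.

# Minimal sketch: load an ML-Agents timers.json and print a quick summary.
# Assumes the file above is saved locally as "timers.json"; the helper names
# below are illustrative only.
import json

def print_gauges(gauges):
    """Print each gauge's final value together with its min/max over the run."""
    for name, stats in gauges.items():
        print(f"{name}: value={stats['value']:.4g} "
              f"(min={stats['min']:.4g}, max={stats['max']:.4g}, count={stats['count']})")

def print_timer_tree(node, name="root", depth=0, root_total=None):
    """Recursively print the timer hierarchy with each node's share of the root total."""
    total = node.get("total", 0.0)
    if root_total is None:
        root_total = total or 1.0
    share = 100.0 * total / root_total
    print(f"{'  ' * depth}{name}: {total:.2f}s ({share:.1f}%), count={node.get('count', 0)}")
    for child_name, child in node.get("children", {}).items():
        print_timer_tree(child, child_name, depth + 1, root_total)

if __name__ == "__main__":
    with open("timers.json") as f:
        timers = json.load(f)
    print_gauges(timers["gauges"])
    print_timer_tree(timers, timers.get("name", "root"))

Note that timer nodes flagged "is_parallel": true (e.g. the worker_root branch) overlap in wall-clock time with the main thread, so the printed percentage shares for those branches are indicative rather than additive.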