{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.8366409540176392,
"min": 1.8191382884979248,
"max": 2.2652885913848877,
"count": 299
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 35674.9140625,
"min": 33638.78125,
"max": 51544.1953125,
"count": 299
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 53.62222222222222,
"min": 42.9375,
"max": 74.0,
"count": 299
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19304.0,
"min": 16328.0,
"max": 20732.0,
"count": 299
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1366.1017227198731,
"min": 1198.6892309915056,
"max": 1366.1017227198731,
"count": 299
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 245898.31008957716,
"min": 162906.44196233,
"max": 292160.6642892341,
"count": 299
},
"SoccerTwos.Step.mean": {
"value": 4999958.0,
"min": 2019995.0,
"max": 4999958.0,
"count": 299
},
"SoccerTwos.Step.sum": {
"value": 4999958.0,
"min": 2019995.0,
"max": 4999958.0,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.05829504877328873,
"min": -0.11307865381240845,
"max": 0.12428557127714157,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 10.493108749389648,
"min": -18.205663681030273,
"max": 20.06940460205078,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.05640903115272522,
"min": -0.12473158538341522,
"max": 0.12447022646665573,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 10.15362548828125,
"min": -20.081785202026367,
"max": 20.232776641845703,
"count": 299
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 299
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.12275777724054125,
"min": -0.3071284926803418,
"max": 0.34021734054378,
"count": 299
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 22.096399903297424,
"min": -55.9987998008728,
"max": 58.857599914073944,
"count": 299
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.12275777724054125,
"min": -0.3071284926803418,
"max": 0.34021734054378,
"count": 299
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 22.096399903297424,
"min": -55.9987998008728,
"max": 58.857599914073944,
"count": 299
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 299
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 299
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017318982321497364,
"min": 0.010958337449119426,
"max": 0.0258120413365153,
"count": 145
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017318982321497364,
"min": 0.010958337449119426,
"max": 0.0258120413365153,
"count": 145
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10514670958121618,
"min": 0.08302166983485222,
"max": 0.11510108535488446,
"count": 145
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10514670958121618,
"min": 0.08302166983485222,
"max": 0.11510108535488446,
"count": 145
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.1068361500898997,
"min": 0.0843203733364741,
"max": 0.1171369065841039,
"count": 145
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.1068361500898997,
"min": 0.0843203733364741,
"max": 0.1171369065841039,
"count": 145
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 145
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 145
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 145
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 145
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 145
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 145
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1703594557",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/linux/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.2+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1703601981"
},
"total": 7423.32795043,
"count": 1,
"self": 0.8187268939991554,
"children": {
"run_training.setup": {
"total": 0.052145051000479725,
"count": 1,
"self": 0.052145051000479725
},
"TrainerController.start_learning": {
"total": 7422.457078485,
"count": 1,
"self": 5.591342118945249,
"children": {
"TrainerController._reset_env": {
"total": 2.3483719270025176,
"count": 16,
"self": 2.3483719270025176
},
"TrainerController.advance": {
"total": 7414.170145946055,
"count": 207919,
"self": 5.943435315667557,
"children": {
"env_step": {
"total": 5753.48616998604,
"count": 207919,
"self": 4455.678779883493,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1294.3799755227737,
"count": 207919,
"self": 35.312345244953576,
"children": {
"TorchPolicy.evaluate": {
"total": 1259.0676302778202,
"count": 374958,
"self": 1259.0676302778202
}
}
},
"workers": {
"total": 3.42741457977354,
"count": 207919,
"self": 0.0,
"children": {
"worker_root": {
"total": 7405.979755484864,
"count": 207919,
"is_parallel": true,
"self": 3648.0395091438877,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.002977608000037435,
"count": 2,
"is_parallel": true,
"self": 0.0008099170008790679,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002167690999158367,
"count": 8,
"is_parallel": true,
"self": 0.002167690999158367
}
}
},
"UnityEnvironment.step": {
"total": 0.04280118200040306,
"count": 1,
"is_parallel": true,
"self": 0.0012480430013965815,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0009572049993948895,
"count": 1,
"is_parallel": true,
"self": 0.0009572049993948895
},
"communicator.exchange": {
"total": 0.036872446999950625,
"count": 1,
"is_parallel": true,
"self": 0.036872446999950625
},
"steps_from_proto": {
"total": 0.003723486999660963,
"count": 2,
"is_parallel": true,
"self": 0.0006542189985339064,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0030692680011270568,
"count": 8,
"is_parallel": true,
"self": 0.0030692680011270568
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.03471507300037047,
"count": 30,
"is_parallel": true,
"self": 0.007339705994127144,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.027375367006243323,
"count": 120,
"is_parallel": true,
"self": 0.027375367006243323
}
}
},
"UnityEnvironment.step": {
"total": 3757.9055312679757,
"count": 207918,
"is_parallel": true,
"self": 238.62076344725392,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 161.03608339312086,
"count": 207918,
"is_parallel": true,
"self": 161.03608339312086
},
"communicator.exchange": {
"total": 2605.5229129148893,
"count": 207918,
"is_parallel": true,
"self": 2605.5229129148893
},
"steps_from_proto": {
"total": 752.7257715127116,
"count": 415836,
"is_parallel": true,
"self": 125.75205440297304,
"children": {
"_process_rank_one_or_two_observation": {
"total": 626.9737171097386,
"count": 1663344,
"is_parallel": true,
"self": 626.9737171097386
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1654.7405406443468,
"count": 207919,
"self": 41.082131978872894,
"children": {
"process_trajectory": {
"total": 780.9657481374725,
"count": 207919,
"self": 779.4602844554738,
"children": {
"RLTrainer._checkpoint": {
"total": 1.5054636819986627,
"count": 6,
"self": 1.5054636819986627
}
}
},
"_update_policy": {
"total": 832.6926605280014,
"count": 145,
"self": 493.94772783309145,
"children": {
"TorchPOCAOptimizer.update": {
"total": 338.74493269490995,
"count": 4350,
"self": 338.74493269490995
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3449989637592807e-06,
"count": 1,
"self": 1.3449989637592807e-06
},
"TrainerController._save_models": {
"total": 0.34721714799889014,
"count": 1,
"self": 0.003982193999036099,
"children": {
"RLTrainer._checkpoint": {
"total": 0.34323495399985404,
"count": 1,
"self": 0.34323495399985404
}
}
}
}
}
}
}