{
"Huggy": {
"checkpoints": [
{
"steps": 199819,
"file_path": "results/Huggy/Huggy/Huggy-199819.onnx",
"reward": 3.477520963549614,
"creation_time": 1679273282.1555057,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199819.pt"
]
},
{
"steps": 399984,
"file_path": "results/Huggy/Huggy/Huggy-399984.onnx",
"reward": 3.6576561995915005,
"creation_time": 1679273506.8165672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399984.pt"
]
},
{
"steps": 599910,
"file_path": "results/Huggy/Huggy/Huggy-599910.onnx",
"reward": 4.134976801418123,
"creation_time": 1679273733.134396,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599910.pt"
]
},
{
"steps": 799977,
"file_path": "results/Huggy/Huggy/Huggy-799977.onnx",
"reward": 3.759302319707097,
"creation_time": 1679273962.0021515,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799977.pt"
]
},
{
"steps": 999645,
"file_path": "results/Huggy/Huggy/Huggy-999645.onnx",
"reward": 4.017336686299397,
"creation_time": 1679274193.8447587,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999645.pt"
]
},
{
"steps": 1199929,
"file_path": "results/Huggy/Huggy/Huggy-1199929.onnx",
"reward": 4.070781446364989,
"creation_time": 1679274426.82506,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199929.pt"
]
},
{
"steps": 1399883,
"file_path": "results/Huggy/Huggy/Huggy-1399883.onnx",
"reward": 3.7373433113098145,
"creation_time": 1679274659.7498496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399883.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy/Huggy/Huggy-1599992.onnx",
"reward": 3.5478816019452135,
"creation_time": 1679274888.9166965,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799899,
"file_path": "results/Huggy/Huggy/Huggy-1799899.onnx",
"reward": 3.603648239532403,
"creation_time": 1679275121.5575004,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799899.pt"
]
},
{
"steps": 1999990,
"file_path": "results/Huggy/Huggy/Huggy-1999990.onnx",
"reward": 3.732776892739673,
"creation_time": 1679275354.8151784,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999990.pt"
]
},
{
"steps": 2000022,
"file_path": "results/Huggy/Huggy/Huggy-2000022.onnx",
"reward": 3.7155421010472556,
"creation_time": 1679275354.9407132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
],
"final_checkpoint": {
"steps": 2000022,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7155421010472556,
"creation_time": 1679275354.9407132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}