{
"Huggy": {
"checkpoints": [
{
"steps": 199964,
"file_path": "results/Huggy-v2/Huggy/Huggy-199964.onnx",
"reward": 3.370545993532453,
"creation_time": 1678980774.124938,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-199964.pt"
]
},
{
"steps": 399919,
"file_path": "results/Huggy-v2/Huggy/Huggy-399919.onnx",
"reward": 3.779623644692557,
"creation_time": 1678981006.7119539,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-399919.pt"
]
},
{
"steps": 599999,
"file_path": "results/Huggy-v2/Huggy/Huggy-599999.onnx",
"reward": 3.786576762343898,
"creation_time": 1678981245.369736,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-599999.pt"
]
},
{
"steps": 799948,
"file_path": "results/Huggy-v2/Huggy/Huggy-799948.onnx",
"reward": 3.763545396909192,
"creation_time": 1678981483.5637214,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-799948.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy-v2/Huggy/Huggy-999969.onnx",
"reward": 3.9756217705792394,
"creation_time": 1678981721.2272112,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199979,
"file_path": "results/Huggy-v2/Huggy/Huggy-1199979.onnx",
"reward": 3.5360729274615434,
"creation_time": 1678981962.6327872,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-1199979.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy-v2/Huggy/Huggy-1399994.onnx",
"reward": 3.7768851735375146,
"creation_time": 1678982202.069383,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599912,
"file_path": "results/Huggy-v2/Huggy/Huggy-1599912.onnx",
"reward": 3.6333799480602442,
"creation_time": 1678982438.4444132,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-1599912.pt"
]
},
{
"steps": 1799860,
"file_path": "results/Huggy-v2/Huggy/Huggy-1799860.onnx",
"reward": 3.3031652747810663,
"creation_time": 1678982683.1549962,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-1799860.pt"
]
},
{
"steps": 1999452,
"file_path": "results/Huggy-v2/Huggy/Huggy-1999452.onnx",
"reward": 4.37043309211731,
"creation_time": 1678982924.2666938,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-1999452.pt"
]
},
{
"steps": 2000202,
"file_path": "results/Huggy-v2/Huggy/Huggy-2000202.onnx",
"reward": 1.903377374013265,
"creation_time": 1678982924.4070501,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-2000202.pt"
]
}
],
"final_checkpoint": {
"steps": 2000202,
"file_path": "results/Huggy-v2/Huggy.onnx",
"reward": 1.903377374013265,
"creation_time": 1678982924.4070501,
"auxillary_file_paths": [
"results/Huggy-v2/Huggy/Huggy-2000202.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}