{
"Huggy": {
"checkpoints": [
{
"steps": 199762,
"file_path": "results/Huggy/Huggy/Huggy-199762.onnx",
"reward": 3.567040846564553,
"creation_time": 1678923016.0021083,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199762.pt"
]
},
{
"steps": 399950,
"file_path": "results/Huggy/Huggy/Huggy-399950.onnx",
"reward": 3.695645197493131,
"creation_time": 1678923257.5959628,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399950.pt"
]
},
{
"steps": 599785,
"file_path": "results/Huggy/Huggy/Huggy-599785.onnx",
"reward": 4.241878572263215,
"creation_time": 1678923499.2159069,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599785.pt"
]
},
{
"steps": 799868,
"file_path": "results/Huggy/Huggy/Huggy-799868.onnx",
"reward": 3.730711823364474,
"creation_time": 1678923735.1486025,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799868.pt"
]
},
{
"steps": 999787,
"file_path": "results/Huggy/Huggy/Huggy-999787.onnx",
"reward": 3.5680794002963045,
"creation_time": 1678923976.2618802,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999787.pt"
]
},
{
"steps": 1199895,
"file_path": "results/Huggy/Huggy/Huggy-1199895.onnx",
"reward": 3.7703879795692585,
"creation_time": 1678924222.1930017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199895.pt"
]
},
{
"steps": 1399920,
"file_path": "results/Huggy/Huggy/Huggy-1399920.onnx",
"reward": 3.6131907999515533,
"creation_time": 1678924470.3355286,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399920.pt"
]
},
{
"steps": 1599300,
"file_path": "results/Huggy/Huggy/Huggy-1599300.onnx",
"reward": 3.322625729117063,
"creation_time": 1678924723.2177496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599300.pt"
]
},
{
"steps": 1799978,
"file_path": "results/Huggy/Huggy/Huggy-1799978.onnx",
"reward": 3.834745766260685,
"creation_time": 1678924974.3055916,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799978.pt"
]
},
{
"steps": 1999963,
"file_path": "results/Huggy/Huggy/Huggy-1999963.onnx",
"reward": 3.5918479720015584,
"creation_time": 1678925211.8451443,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999963.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy/Huggy/Huggy-2000030.onnx",
"reward": 3.5869729844339053,
"creation_time": 1678925211.9676847,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5869729844339053,
"creation_time": 1678925211.9676847,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}