{
"Huggy": {
"checkpoints": [
{
"steps": 199854,
"file_path": "results/Huggy2/Huggy/Huggy-199854.onnx",
"reward": 3.267268293433719,
"creation_time": 1714290020.3322008,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199854.pt"
]
},
{
"steps": 399708,
"file_path": "results/Huggy2/Huggy/Huggy-399708.onnx",
"reward": 3.555430279261824,
"creation_time": 1714290277.828571,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399708.pt"
]
},
{
"steps": 599904,
"file_path": "results/Huggy2/Huggy/Huggy-599904.onnx",
"reward": 3.103867991612508,
"creation_time": 1714290536.068678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599904.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy2/Huggy/Huggy-799959.onnx",
"reward": 3.97401627185552,
"creation_time": 1714290792.8421125,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999968,
"file_path": "results/Huggy2/Huggy/Huggy-999968.onnx",
"reward": 3.500433039946819,
"creation_time": 1714291055.6147742,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999968.pt"
]
},
{
"steps": 1199928,
"file_path": "results/Huggy2/Huggy/Huggy-1199928.onnx",
"reward": 3.71185430930211,
"creation_time": 1714291326.088032,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199928.pt"
]
},
{
"steps": 1399851,
"file_path": "results/Huggy2/Huggy/Huggy-1399851.onnx",
"reward": 3.703476547430723,
"creation_time": 1714291589.3786087,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399851.pt"
]
},
{
"steps": 1599956,
"file_path": "results/Huggy2/Huggy/Huggy-1599956.onnx",
"reward": 3.742423607722709,
"creation_time": 1714291855.1095085,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599956.pt"
]
},
{
"steps": 1799975,
"file_path": "results/Huggy2/Huggy/Huggy-1799975.onnx",
"reward": 4.01412067767028,
"creation_time": 1714292117.8955774,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799975.pt"
]
},
{
"steps": 1999901,
"file_path": "results/Huggy2/Huggy/Huggy-1999901.onnx",
"reward": 3.3925688525040942,
"creation_time": 1714292376.4072258,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999901.pt"
]
},
{
"steps": 2000029,
"file_path": "results/Huggy2/Huggy/Huggy-2000029.onnx",
"reward": 3.5423581619416513,
"creation_time": 1714292376.5334845,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000029.pt"
]
}
],
"final_checkpoint": {
"steps": 2000029,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5423581619416513,
"creation_time": 1714292376.5334845,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000029.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}