{
"Huggy": {
"checkpoints": [
{
"steps": 199896,
"file_path": "results/Huggy2/Huggy/Huggy-199896.onnx",
"reward": 3.4746068228374827,
"creation_time": 1740917086.055987,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199896.pt"
]
},
{
"steps": 399845,
"file_path": "results/Huggy2/Huggy/Huggy-399845.onnx",
"reward": 3.8225520903413948,
"creation_time": 1740917335.8771296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399845.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy2/Huggy/Huggy-599944.onnx",
"reward": 3.4943169128327143,
"creation_time": 1740917577.28815,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799908,
"file_path": "results/Huggy2/Huggy/Huggy-799908.onnx",
"reward": 3.7825774362592988,
"creation_time": 1740917814.0865612,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799908.pt"
]
},
{
"steps": 999272,
"file_path": "results/Huggy2/Huggy/Huggy-999272.onnx",
"reward": 3.83100329218684,
"creation_time": 1740918055.7257714,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999272.pt"
]
},
{
"steps": 1199962,
"file_path": "results/Huggy2/Huggy/Huggy-1199962.onnx",
"reward": 3.931835328946348,
"creation_time": 1740918298.525829,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199962.pt"
]
},
{
"steps": 1399535,
"file_path": "results/Huggy2/Huggy/Huggy-1399535.onnx",
"reward": 5.097894509633382,
"creation_time": 1740918543.348763,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399535.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy2/Huggy/Huggy-1599970.onnx",
"reward": 3.4755092288766587,
"creation_time": 1740918786.0524597,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799930,
"file_path": "results/Huggy2/Huggy/Huggy-1799930.onnx",
"reward": 3.468407556414604,
"creation_time": 1740919029.985525,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799930.pt"
]
},
{
"steps": 1999889,
"file_path": "results/Huggy2/Huggy/Huggy-1999889.onnx",
"reward": 3.6870160500208535,
"creation_time": 1740919276.569207,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999889.pt"
]
},
{
"steps": 2000639,
"file_path": "results/Huggy2/Huggy/Huggy-2000639.onnx",
"reward": 3.0046390175819395,
"creation_time": 1740919276.710654,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000639.pt"
]
}
],
"final_checkpoint": {
"steps": 2000639,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.0046390175819395,
"creation_time": 1740919276.710654,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000639.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}