{
"Huggy": {
"checkpoints": [
{
"steps": 199705,
"file_path": "results/Huggy/Huggy/Huggy-199705.onnx",
"reward": 3.1229912193515634,
"creation_time": 1679002658.6259167,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199705.pt"
]
},
{
"steps": 399938,
"file_path": "results/Huggy/Huggy/Huggy-399938.onnx",
"reward": 3.568470321484466,
"creation_time": 1679002910.9456985,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399938.pt"
]
},
{
"steps": 599945,
"file_path": "results/Huggy/Huggy/Huggy-599945.onnx",
"reward": 3.8678956769761585,
"creation_time": 1679003158.1032827,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599945.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
"reward": 3.7839637178844874,
"creation_time": 1679003413.9057908,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999953,
"file_path": "results/Huggy/Huggy/Huggy-999953.onnx",
"reward": 3.7644210993573908,
"creation_time": 1679003680.1959295,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999953.pt"
]
},
{
"steps": 1199943,
"file_path": "results/Huggy/Huggy/Huggy-1199943.onnx",
"reward": 4.16758476128088,
"creation_time": 1679003924.4311738,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199943.pt"
]
},
{
"steps": 1399990,
"file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
"reward": 3.779515476453872,
"creation_time": 1679004157.5488386,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399990.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 4.0852654563831985,
"creation_time": 1679004387.5257146,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799998,
"file_path": "results/Huggy/Huggy/Huggy-1799998.onnx",
"reward": 3.943627309214118,
"creation_time": 1679004622.0173514,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799998.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 3.825535504932863,
"creation_time": 1679004857.0498385,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000076,
"file_path": "results/Huggy/Huggy/Huggy-2000076.onnx",
"reward": 3.8386092420135225,
"creation_time": 1679004857.1716287,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000076.pt"
]
}
],
"final_checkpoint": {
"steps": 2000076,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8386092420135225,
"creation_time": 1679004857.1716287,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000076.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}