ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199946,
"file_path": "results/Huggy/Huggy/Huggy-199946.onnx",
"reward": 3.393103063106537,
"creation_time": 1697625140.0470142,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199946.pt"
]
},
{
"steps": 399935,
"file_path": "results/Huggy/Huggy/Huggy-399935.onnx",
"reward": 3.7773636832075606,
"creation_time": 1697625371.1938038,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399935.pt"
]
},
{
"steps": 599924,
"file_path": "results/Huggy/Huggy/Huggy-599924.onnx",
"reward": 4.388597935438156,
"creation_time": 1697625602.1889534,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599924.pt"
]
},
{
"steps": 799878,
"file_path": "results/Huggy/Huggy/Huggy-799878.onnx",
"reward": 3.8850113615030195,
"creation_time": 1697625832.5411048,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799878.pt"
]
},
{
"steps": 999926,
"file_path": "results/Huggy/Huggy/Huggy-999926.onnx",
"reward": 3.6673866438454596,
"creation_time": 1697626066.1212025,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999926.pt"
]
},
{
"steps": 1199455,
"file_path": "results/Huggy/Huggy/Huggy-1199455.onnx",
"reward": 3.505105506400673,
"creation_time": 1697626300.158672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199455.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy/Huggy/Huggy-1399969.onnx",
"reward": 3.81572201168328,
"creation_time": 1697626531.1465774,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599925,
"file_path": "results/Huggy/Huggy/Huggy-1599925.onnx",
"reward": 3.873357899793207,
"creation_time": 1697626764.985731,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599925.pt"
]
},
{
"steps": 1799942,
"file_path": "results/Huggy/Huggy/Huggy-1799942.onnx",
"reward": 3.6479006731647186,
"creation_time": 1697626997.5589693,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799942.pt"
]
},
{
"steps": 1999976,
"file_path": "results/Huggy/Huggy/Huggy-1999976.onnx",
"reward": 3.7922083996236324,
"creation_time": 1697627231.4589024,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999976.pt"
]
},
{
"steps": 2000080,
"file_path": "results/Huggy/Huggy/Huggy-2000080.onnx",
"reward": 3.7358654141426086,
"creation_time": 1697627231.5630848,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
],
"final_checkpoint": {
"steps": 2000080,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7358654141426086,
"creation_time": 1697627231.5630848,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.0.1+cu118"
}
}
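
A minimal sketch of how this checkpoint log might be consumed, assuming the file sits at run_logs/training_status.json relative to the working directory (the path is an assumption based on the repo layout above): load the JSON and pick the checkpoint with the highest recorded mean reward.

```python
# Sketch only: reads the training_status.json shown above and reports the
# checkpoint with the highest "reward" value. The file path is assumed.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]
best = max(checkpoints, key=lambda c: c["reward"])
print(f"Best checkpoint: {best['file_path']} "
      f"({best['steps']} steps, reward {best['reward']:.3f})")
```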