{
"Huggy": {
"checkpoints": [
{
"steps": 199770,
"file_path": "results/Huggy/Huggy/Huggy-199770.onnx",
"reward": 3.631713208505663,
"creation_time": 1683819737.7751646,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199770.pt"
]
},
{
"steps": 399894,
"file_path": "results/Huggy/Huggy/Huggy-399894.onnx",
"reward": 4.060420019011343,
"creation_time": 1683819967.7910883,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399894.pt"
]
},
{
"steps": 599974,
"file_path": "results/Huggy/Huggy/Huggy-599974.onnx",
"reward": 4.151750664961965,
"creation_time": 1683820199.9652004,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599974.pt"
]
},
{
"steps": 799973,
"file_path": "results/Huggy/Huggy/Huggy-799973.onnx",
"reward": 3.615187671055665,
"creation_time": 1683820432.4321713,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799973.pt"
]
},
{
"steps": 999938,
"file_path": "results/Huggy/Huggy/Huggy-999938.onnx",
"reward": 4.024213735197411,
"creation_time": 1683820667.7281137,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999938.pt"
]
},
{
"steps": 1199759,
"file_path": "results/Huggy/Huggy/Huggy-1199759.onnx",
"reward": 3.8020275682210922,
"creation_time": 1683820901.2514317,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199759.pt"
]
},
{
"steps": 1399930,
"file_path": "results/Huggy/Huggy/Huggy-1399930.onnx",
"reward": 3.7888139292361243,
"creation_time": 1683821131.4652562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399930.pt"
]
},
{
"steps": 1599617,
"file_path": "results/Huggy/Huggy/Huggy-1599617.onnx",
"reward": 3.8905157059899174,
"creation_time": 1683821364.84571,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599617.pt"
]
},
{
"steps": 1799953,
"file_path": "results/Huggy/Huggy/Huggy-1799953.onnx",
"reward": 3.6511306976278624,
"creation_time": 1683821607.4474437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799953.pt"
]
},
{
"steps": 1999930,
"file_path": "results/Huggy/Huggy/Huggy-1999930.onnx",
"reward": 4.0515340692118595,
"creation_time": 1683821843.4603624,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999930.pt"
]
},
{
"steps": 2000017,
"file_path": "results/Huggy/Huggy/Huggy-2000017.onnx",
"reward": 4.114608782988328,
"creation_time": 1683821843.6074064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
],
"final_checkpoint": {
"steps": 2000017,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.114608782988328,
"creation_time": 1683821843.6074064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}