{
"Huggy": {
"checkpoints": [
{
"steps": 199862,
"file_path": "results/Huggy/Huggy/Huggy-199862.onnx",
"reward": 3.2823599401642296,
"creation_time": 1687938852.6926434,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199862.pt"
]
},
{
"steps": 399885,
"file_path": "results/Huggy/Huggy/Huggy-399885.onnx",
"reward": 3.301360036899794,
"creation_time": 1687939126.9577196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399885.pt"
]
},
{
"steps": 599731,
"file_path": "results/Huggy/Huggy/Huggy-599731.onnx",
"reward": 4.001459869471463,
"creation_time": 1687939404.5929232,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599731.pt"
]
},
{
"steps": 799885,
"file_path": "results/Huggy/Huggy/Huggy-799885.onnx",
"reward": 3.7012405494848886,
"creation_time": 1687939673.3341436,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799885.pt"
]
},
{
"steps": 999941,
"file_path": "results/Huggy/Huggy/Huggy-999941.onnx",
"reward": 3.8042977699419347,
"creation_time": 1687939955.647276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999941.pt"
]
},
{
"steps": 1199929,
"file_path": "results/Huggy/Huggy/Huggy-1199929.onnx",
"reward": 3.879558963701129,
"creation_time": 1687940234.4832675,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199929.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy/Huggy/Huggy-1399980.onnx",
"reward": 3.6372030543667053,
"creation_time": 1687940511.7227495,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599907,
"file_path": "results/Huggy/Huggy/Huggy-1599907.onnx",
"reward": 3.758211594510388,
"creation_time": 1687940802.0788684,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599907.pt"
]
},
{
"steps": 1799933,
"file_path": "results/Huggy/Huggy/Huggy-1799933.onnx",
"reward": 3.4213316118395007,
"creation_time": 1687941089.8191683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799933.pt"
]
},
{
"steps": 1999923,
"file_path": "results/Huggy/Huggy/Huggy-1999923.onnx",
"reward": 4.461083224841526,
"creation_time": 1687941373.9869795,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999923.pt"
]
},
{
"steps": 2000001,
"file_path": "results/Huggy/Huggy/Huggy-2000001.onnx",
"reward": 4.530130847295125,
"creation_time": 1687941374.1146998,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000001.pt"
]
}
],
"final_checkpoint": {
"steps": 2000001,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.530130847295125,
"creation_time": 1687941374.1146998,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000001.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}