ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199775,
                "file_path": "results/Huggy/Huggy/Huggy-199775.onnx",
                "reward": 3.108896549607887,
                "creation_time": 1695310912.4697254,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199775.pt"
                ]
            },
            {
                "steps": 399868,
                "file_path": "results/Huggy/Huggy/Huggy-399868.onnx",
                "reward": 3.6617024602561163,
                "creation_time": 1695311160.7287571,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399868.pt"
                ]
            },
            {
                "steps": 599968,
                "file_path": "results/Huggy/Huggy/Huggy-599968.onnx",
                "reward": 4.008996119866004,
                "creation_time": 1695311412.1354678,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599968.pt"
                ]
            },
            {
                "steps": 799878,
                "file_path": "results/Huggy/Huggy/Huggy-799878.onnx",
                "reward": 3.839232999256679,
                "creation_time": 1695311661.1429129,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799878.pt"
                ]
            },
            {
                "steps": 999981,
                "file_path": "results/Huggy/Huggy/Huggy-999981.onnx",
                "reward": 3.863997501190578,
                "creation_time": 1695311919.5434368,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999981.pt"
                ]
            },
            {
                "steps": 1199930,
                "file_path": "results/Huggy/Huggy/Huggy-1199930.onnx",
                "reward": 3.689853769667605,
                "creation_time": 1695312171.9018548,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199930.pt"
                ]
            },
            {
                "steps": 1399982,
                "file_path": "results/Huggy/Huggy/Huggy-1399982.onnx",
                "reward": 3.8881292021424203,
                "creation_time": 1695312423.0971563,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399982.pt"
                ]
            },
            {
                "steps": 1599965,
                "file_path": "results/Huggy/Huggy/Huggy-1599965.onnx",
                "reward": 3.9099271879138717,
                "creation_time": 1695312679.0733972,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599965.pt"
                ]
            },
            {
                "steps": 1799991,
                "file_path": "results/Huggy/Huggy/Huggy-1799991.onnx",
                "reward": 3.9205065112595165,
                "creation_time": 1695312935.0664263,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799991.pt"
                ]
            },
            {
                "steps": 1999895,
                "file_path": "results/Huggy/Huggy/Huggy-1999895.onnx",
                "reward": 4.482464713208816,
                "creation_time": 1695313192.9473255,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999895.pt"
                ]
            },
            {
                "steps": 2000008,
                "file_path": "results/Huggy/Huggy/Huggy-2000008.onnx",
                "reward": 4.508846371514457,
                "creation_time": 1695313193.0819376,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000008.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000008,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.508846371514457,
            "creation_time": 1695313193.0819376,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000008.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
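
A minimal sketch of how this log can be consumed, assuming it is saved locally at run_logs/training_status.json (the path is taken from this repo's layout; adjust it to your checkout). It loads the JSON above and prints the reward recorded at each checkpoint, plus the final checkpoint:

import json

# Path is an assumption based on this repository's layout.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Each entry in "checkpoints" records the step count, exported .onnx path,
# mean reward, creation time, and auxiliary .pt file at that checkpoint.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

# "final_checkpoint" points at the exported final model (results/Huggy/Huggy.onnx).
final = huggy["final_checkpoint"]
print(f'final: {final["steps"]} steps, reward {final["reward"]:.3f}, model {final["file_path"]}')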