{
"Huggy": {
"checkpoints": [
{
"steps": 199659,
"file_path": "results/Huggy/Huggy/Huggy-199659.onnx",
"reward": 3.298839461976203,
"creation_time": 1672592010.6714315,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199659.pt"
]
},
{
"steps": 399665,
"file_path": "results/Huggy/Huggy/Huggy-399665.onnx",
"reward": 4.199782901796802,
"creation_time": 1672592239.9593925,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399665.pt"
]
},
{
"steps": 599980,
"file_path": "results/Huggy/Huggy/Huggy-599980.onnx",
"reward": 4.320159890434959,
"creation_time": 1672592468.0183399,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599980.pt"
]
},
{
"steps": 799905,
"file_path": "results/Huggy/Huggy/Huggy-799905.onnx",
"reward": 3.953115197519461,
"creation_time": 1672592693.9580467,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799905.pt"
]
},
{
"steps": 999929,
"file_path": "results/Huggy/Huggy/Huggy-999929.onnx",
"reward": 3.7291301904822425,
"creation_time": 1672592931.439383,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199327,
"file_path": "results/Huggy/Huggy/Huggy-1199327.onnx",
"reward": 3.9850062478672372,
"creation_time": 1672593163.982083,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199327.pt"
]
},
{
"steps": 1399947,
"file_path": "results/Huggy/Huggy/Huggy-1399947.onnx",
"reward": 3.770041862700848,
"creation_time": 1672593394.2040114,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399947.pt"
]
},
{
"steps": 1599934,
"file_path": "results/Huggy/Huggy/Huggy-1599934.onnx",
"reward": 3.7559536764788075,
"creation_time": 1672593623.2027495,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599934.pt"
]
},
{
"steps": 1799978,
"file_path": "results/Huggy/Huggy/Huggy-1799978.onnx",
"reward": 3.792676950154239,
"creation_time": 1672593848.4994662,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799978.pt"
]
},
{
"steps": 1999728,
"file_path": "results/Huggy/Huggy/Huggy-1999728.onnx",
"reward": 3.984845757484436,
"creation_time": 1672594073.743504,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999728.pt"
]
},
{
"steps": 2000478,
"file_path": "results/Huggy/Huggy/Huggy-2000478.onnx",
"reward": 3.609804142089117,
"creation_time": 1672594073.8826032,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000478.pt"
]
}
],
"final_checkpoint": {
"steps": 2000478,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.609804142089117,
"creation_time": 1672594073.8826032,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000478.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}