ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199970,
                "file_path": "results/Huggy/Huggy/Huggy-199970.onnx",
                "reward": 3.3860852579275766,
                "creation_time": 1685278373.4777029,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199970.pt"
                ]
            },
            {
                "steps": 399982,
                "file_path": "results/Huggy/Huggy/Huggy-399982.onnx",
                "reward": 3.5542998028838118,
                "creation_time": 1685278673.7008324,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399982.pt"
                ]
            },
            {
                "steps": 599972,
                "file_path": "results/Huggy/Huggy/Huggy-599972.onnx",
                "reward": 3.6044828414916994,
                "creation_time": 1685278980.0383346,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599972.pt"
                ]
            },
            {
                "steps": 799985,
                "file_path": "results/Huggy/Huggy/Huggy-799985.onnx",
                "reward": 3.675096958014317,
                "creation_time": 1685279280.1501548,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799985.pt"
                ]
            },
            {
                "steps": 999878,
                "file_path": "results/Huggy/Huggy/Huggy-999878.onnx",
                "reward": 3.6825531053980556,
                "creation_time": 1685279588.2315378,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999878.pt"
                ]
            },
            {
                "steps": 1199989,
                "file_path": "results/Huggy/Huggy/Huggy-1199989.onnx",
                "reward": 3.98025465988722,
                "creation_time": 1685279893.3766174,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199989.pt"
                ]
            },
            {
                "steps": 1399964,
                "file_path": "results/Huggy/Huggy/Huggy-1399964.onnx",
                "reward": 3.9768793907523716,
                "creation_time": 1685280195.267399,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399964.pt"
                ]
            },
            {
                "steps": 1599479,
                "file_path": "results/Huggy/Huggy/Huggy-1599479.onnx",
                "reward": 3.845507639905681,
                "creation_time": 1685280499.482534,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599479.pt"
                ]
            },
            {
                "steps": 1799966,
                "file_path": "results/Huggy/Huggy/Huggy-1799966.onnx",
                "reward": 3.627436438999554,
                "creation_time": 1685280809.0867422,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799966.pt"
                ]
            },
            {
                "steps": 1999977,
                "file_path": "results/Huggy/Huggy/Huggy-1999977.onnx",
                "reward": 3.1517107444150105,
                "creation_time": 1685281116.8964262,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999977.pt"
                ]
            },
            {
                "steps": 2000003,
                "file_path": "results/Huggy/Huggy/Huggy-2000003.onnx",
                "reward": 3.073740268575734,
                "creation_time": 1685281117.030092,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000003.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000003,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.073740268575734,
            "creation_time": 1685281117.030092,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000003.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
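
A minimal sketch of how this file might be consumed: it loads `run_logs/training_status.json` (the path from this repo) and prints the reward recorded at each checkpoint plus the final exported policy. The key names ("Huggy", "checkpoints", "steps", "reward", "file_path", "final_checkpoint") come directly from the JSON above; treating "reward" as the mean cumulative reward at checkpoint time is an assumption based on typical ML-Agents usage.

import json

# Load the ML-Agents training status file shown above.
# Assumes the file sits under run_logs/ as in this repository.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Reward trajectory recorded at each checkpoint
# (assumed to be the mean cumulative reward at save time).
for ckpt in behavior["checkpoints"]:
    print(f"step {ckpt['steps']:>9,}  reward {ckpt['reward']:.3f}  -> {ckpt['file_path']}")

# The final exported policy and its last recorded reward.
final = behavior["final_checkpoint"]
print(f"final model: {final['file_path']} (reward {final['reward']:.3f})")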