ppo-Huggy / run_logs / training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199661,
        "file_path": "results/Huggy/Huggy/Huggy-199661.onnx",
        "reward": 3.3730055283809053,
        "creation_time": 1703188456.8351347,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199661.pt"
        ]
      },
      {
        "steps": 399979,
        "file_path": "results/Huggy/Huggy/Huggy-399979.onnx",
        "reward": 3.6854015475994832,
        "creation_time": 1703188708.2654393,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399979.pt"
        ]
      },
      {
        "steps": 599567,
        "file_path": "results/Huggy/Huggy/Huggy-599567.onnx",
        "reward": 4.073660767596701,
        "creation_time": 1703188959.046621,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599567.pt"
        ]
      },
      {
        "steps": 799943,
        "file_path": "results/Huggy/Huggy/Huggy-799943.onnx",
        "reward": 3.8631714195898263,
        "creation_time": 1703189210.570625,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799943.pt"
        ]
      },
      {
        "steps": 999933,
        "file_path": "results/Huggy/Huggy/Huggy-999933.onnx",
        "reward": 3.808651665983529,
        "creation_time": 1703189467.1236074,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999933.pt"
        ]
      },
      {
        "steps": 1199937,
        "file_path": "results/Huggy/Huggy/Huggy-1199937.onnx",
        "reward": 3.9626186817464695,
        "creation_time": 1703189726.6546023,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199937.pt"
        ]
      },
      {
        "steps": 1399950,
        "file_path": "results/Huggy/Huggy/Huggy-1399950.onnx",
        "reward": 3.657960358787985,
        "creation_time": 1703189991.4802053,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399950.pt"
        ]
      },
      {
        "steps": 1599922,
        "file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
        "reward": 3.685807189941406,
        "creation_time": 1703190246.234072,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599922.pt"
        ]
      },
      {
        "steps": 1799706,
        "file_path": "results/Huggy/Huggy/Huggy-1799706.onnx",
        "reward": 3.895757733164607,
        "creation_time": 1703190501.888785,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799706.pt"
        ]
      },
      {
        "steps": 1999959,
        "file_path": "results/Huggy/Huggy/Huggy-1999959.onnx",
        "reward": 4.404570274628126,
        "creation_time": 1703190760.0916543,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999959.pt"
        ]
      },
      {
        "steps": 2000025,
        "file_path": "results/Huggy/Huggy/Huggy-2000025.onnx",
        "reward": 4.361772741911547,
        "creation_time": 1703190760.210216,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000025.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000025,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.361772741911547,
      "creation_time": 1703190760.210216,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000025.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.2+cu121"
  }
}
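For reference, the status file above is plain JSON and can be inspected with a few lines of Python. This is a minimal sketch, assuming the file is saved at run_logs/training_status.json relative to the working directory (adjust the path to your checkout); the keys ("Huggy", "checkpoints", "final_checkpoint", "steps", "reward", "file_path") come straight from the file above, and only the standard-library json module is used.

import json

# Path is an assumption; point it at wherever this run's
# run_logs directory actually lives.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print every saved checkpoint with its mean reward.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>9} steps  reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

# Checkpoint with the highest recorded reward (here the 1999959-step one, 4.405).
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print("best:", best["steps"], best["reward"])

# "final_checkpoint" points at the exported results/Huggy/Huggy.onnx.
print("final:", huggy["final_checkpoint"]["file_path"])

Note that the last two checkpoints share nearly the same creation_time: the 2000025-step entry is the extra checkpoint ML-Agents writes when the run hits its max_steps budget, and it is the one mirrored as final_checkpoint.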