{
"Huggy": {
"checkpoints": [
{
"steps": 199916,
"file_path": "results/Huggy/Huggy/Huggy-199916.onnx",
"reward": 3.6856163659338224,
"creation_time": 1695545176.1075628,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199916.pt"
]
},
{
"steps": 399923,
"file_path": "results/Huggy/Huggy/Huggy-399923.onnx",
"reward": 3.8222323962620326,
"creation_time": 1695545444.2984898,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399923.pt"
]
},
{
"steps": 599921,
"file_path": "results/Huggy/Huggy/Huggy-599921.onnx",
"reward": 4.3181161350674095,
"creation_time": 1695545743.2396011,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599921.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
"reward": 3.908118902603326,
"creation_time": 1695546022.722845,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999936,
"file_path": "results/Huggy/Huggy/Huggy-999936.onnx",
"reward": 3.6444451087663152,
"creation_time": 1695546298.3765202,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999936.pt"
]
},
{
"steps": 1199975,
"file_path": "results/Huggy/Huggy/Huggy-1199975.onnx",
"reward": 4.003651396794752,
"creation_time": 1695546579.02007,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199975.pt"
]
},
{
"steps": 1399676,
"file_path": "results/Huggy/Huggy/Huggy-1399676.onnx",
"reward": 3.946212816238403,
"creation_time": 1695546844.7877505,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399676.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
"reward": 3.9389091507712406,
"creation_time": 1695547108.8779683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799997,
"file_path": "results/Huggy/Huggy/Huggy-1799997.onnx",
"reward": 3.9667594364926786,
"creation_time": 1695547373.3485372,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799997.pt"
]
},
{
"steps": 1999912,
"file_path": "results/Huggy/Huggy/Huggy-1999912.onnx",
"reward": 3.452528745559469,
"creation_time": 1695547641.1152585,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999912.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
"reward": 3.462295523782571,
"creation_time": 1695547641.2569194,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.462295523782571,
"creation_time": 1695547641.2569194,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}