ppo-Huggy/run_logs/training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199771,
        "file_path": "results/Huggy/Huggy/Huggy-199771.onnx",
        "reward": 3.255059546134511,
        "creation_time": 1672036062.6422014,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199771.pt"
        ]
      },
      {
        "steps": 399922,
        "file_path": "results/Huggy/Huggy/Huggy-399922.onnx",
        "reward": 3.5943855095882804,
        "creation_time": 1672036277.271436,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399922.pt"
        ]
      },
      {
        "steps": 599985,
        "file_path": "results/Huggy/Huggy/Huggy-599985.onnx",
        "reward": 4.251606702804565,
        "creation_time": 1672036494.4737172,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599985.pt"
        ]
      },
      {
        "steps": 799985,
        "file_path": "results/Huggy/Huggy/Huggy-799985.onnx",
        "reward": 3.69952575018607,
        "creation_time": 1672036709.171366,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799985.pt"
        ]
      },
      {
        "steps": 999978,
        "file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
        "reward": 3.658444198695096,
        "creation_time": 1672036928.1017194,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999978.pt"
        ]
      },
      {
        "steps": 1199997,
        "file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
        "reward": 3.9532003509146825,
        "creation_time": 1672037147.4626176,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199997.pt"
        ]
      },
      {
        "steps": 1399983,
        "file_path": "results/Huggy/Huggy/Huggy-1399983.onnx",
        "reward": 3.7085350190268622,
        "creation_time": 1672037364.3411927,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399983.pt"
        ]
      },
      {
        "steps": 1599971,
        "file_path": "results/Huggy/Huggy/Huggy-1599971.onnx",
        "reward": 3.9897625450240937,
        "creation_time": 1672037580.6855705,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599971.pt"
        ]
      },
      {
        "steps": 1799637,
        "file_path": "results/Huggy/Huggy/Huggy-1799637.onnx",
        "reward": 3.672651980425182,
        "creation_time": 1672037798.9020574,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799637.pt"
        ]
      },
      {
        "steps": 1999930,
        "file_path": "results/Huggy/Huggy/Huggy-1999930.onnx",
        "reward": 4.049665626138449,
        "creation_time": 1672038018.8266692,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999930.pt"
        ]
      },
      {
        "steps": 2000004,
        "file_path": "results/Huggy/Huggy/Huggy-2000004.onnx",
        "reward": 4.126644159064574,
        "creation_time": 1672038018.9441438,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000004.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000004,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.126644159064574,
      "creation_time": 1672038018.9441438,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000004.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}
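
This is the checkpoint log that Unity ML-Agents writes during training: one entry per periodic checkpoint (roughly every 200k steps here), each recording the step count, the mean cumulative reward at save time, a Unix `creation_time` timestamp, and the exported `.onnx` / `.pt` file paths. A minimal sketch of reading it with Python's standard `json` module follows; the relative path `run_logs/training_status.json` is an assumption about where the file sits in your checkout, and the script itself is illustrative rather than part of ML-Agents.

```python
import json
from datetime import datetime, timezone

# Assumed path: adjust to wherever training_status.json lives locally.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# One line per checkpoint: step count, mean reward at save time, save timestamp.
# Note: "auxillary_file_paths" (sic) is the key as ML-Agents actually spells it.
for ckpt in huggy["checkpoints"]:
    saved = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"step {ckpt['steps']:>7}  reward {ckpt['reward']:.3f}  saved {saved:%Y-%m-%d %H:%M:%S} UTC")

final = huggy["final_checkpoint"]
print(f"final model: {final['file_path']} (reward {final['reward']:.3f})")
```

Run over this file, the loop shows the mean reward rising from about 3.26 at the first ~200k-step checkpoint to about 4.13 at the final 2,000,004-step checkpoint, with `final_checkpoint` pointing at the consolidated `results/Huggy/Huggy.onnx` export.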