ppo-Huggy / run_logs / training_status.json
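training_status.json is the checkpoint log that ML-Agents writes under run_logs/ during training: one entry per saved checkpoint (training step, exported .onnx path, the reward statistic recorded at save time, a Unix-epoch creation timestamp, and the matching .pt file), followed by the final exported model and version metadata.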
{
"Huggy": {
"checkpoints": [
{
"steps": 199942,
"file_path": "results/Huggy2/Huggy/Huggy-199942.onnx",
"reward": 3.4708274259405623,
"creation_time": 1710796941.5928707,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199942.pt"
]
},
{
"steps": 399977,
"file_path": "results/Huggy2/Huggy/Huggy-399977.onnx",
"reward": 3.878133241696791,
"creation_time": 1710797209.4099472,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399977.pt"
]
},
{
"steps": 599890,
"file_path": "results/Huggy2/Huggy/Huggy-599890.onnx",
"reward": 4.958312213420868,
"creation_time": 1710797482.3072526,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599890.pt"
]
},
{
"steps": 799466,
"file_path": "results/Huggy2/Huggy/Huggy-799466.onnx",
"reward": 3.6237554163546175,
"creation_time": 1710797750.116451,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799466.pt"
]
},
{
"steps": 999949,
"file_path": "results/Huggy2/Huggy/Huggy-999949.onnx",
"reward": 3.3892411395259527,
"creation_time": 1710798022.5362332,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999949.pt"
]
},
{
"steps": 1199889,
"file_path": "results/Huggy2/Huggy/Huggy-1199889.onnx",
"reward": 4.046838898407786,
"creation_time": 1710798288.492918,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199889.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy2/Huggy/Huggy-1399976.onnx",
"reward": 3.638733315832761,
"creation_time": 1710798556.0632527,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599795,
"file_path": "results/Huggy2/Huggy/Huggy-1599795.onnx",
"reward": 3.6397749395370482,
"creation_time": 1710798830.0807095,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599795.pt"
]
},
{
"steps": 1799961,
"file_path": "results/Huggy2/Huggy/Huggy-1799961.onnx",
"reward": 4.270651503042742,
"creation_time": 1710799113.4908905,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799961.pt"
]
},
{
"steps": 1999407,
"file_path": "results/Huggy2/Huggy/Huggy-1999407.onnx",
"reward": 3.8775840792278786,
"creation_time": 1710799400.557017,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999407.pt"
]
},
{
"steps": 2000157,
"file_path": "results/Huggy2/Huggy/Huggy-2000157.onnx",
"reward": 3.8408060988013664,
"creation_time": 1710799400.7141016,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000157.pt"
]
}
],
"final_checkpoint": {
"steps": 2000157,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8408060988013664,
"creation_time": 1710799400.7141016,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000157.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}
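Since the log is plain JSON, it can be inspected with a few lines of Python. Below is a minimal sketch, assuming the file has been downloaded locally to run_logs/training_status.json (the path shown above); the keys (Huggy, checkpoints, steps, reward, creation_time, final_checkpoint) are taken verbatim from the file.

import json
from datetime import datetime, timezone

# Path is an assumption: adjust to wherever training_status.json lives locally.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# One line per checkpoint: training step, reward at save time,
# and creation_time (Unix epoch seconds) rendered as a UTC date.
for ckpt in huggy["checkpoints"]:
    when = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"{ckpt['steps']:>9,} steps  reward={ckpt['reward']:.3f}  {when:%Y-%m-%d %H:%M} UTC")

# Highest-reward checkpoint; in this run that is Huggy-599890.onnx (~4.96),
# not the final 2000157-step export.
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print("best :", best["file_path"], f"(reward={best['reward']:.3f})")
print("final:", huggy["final_checkpoint"]["file_path"])

Two reading notes: the creation_time values are Unix timestamps that all fall on 2024-03-18 (UTC), and the misspelled auxillary_file_paths key is the spelling ML-Agents itself writes, so it should be read as-is rather than "corrected".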