ppo-Huggy / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199745,
"file_path": "results/Huggy/Huggy/Huggy-199745.onnx",
"reward": 3.5700685381889343,
"creation_time": 1691186618.2519276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199745.pt"
]
},
{
"steps": 399863,
"file_path": "results/Huggy/Huggy/Huggy-399863.onnx",
"reward": 3.843587638580636,
"creation_time": 1691186820.0568182,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399863.pt"
]
},
{
"steps": 599992,
"file_path": "results/Huggy/Huggy/Huggy-599992.onnx",
"reward": 4.235859111503318,
"creation_time": 1691187023.8816724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599992.pt"
]
},
{
"steps": 799997,
"file_path": "results/Huggy/Huggy/Huggy-799997.onnx",
"reward": 3.9073588962845958,
"creation_time": 1691187231.0356674,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799997.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy/Huggy/Huggy-999977.onnx",
"reward": 3.5823841073164124,
"creation_time": 1691187437.8128808,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy/Huggy/Huggy-1199978.onnx",
"reward": 3.7409497445888733,
"creation_time": 1691187647.6955416,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399956,
"file_path": "results/Huggy/Huggy/Huggy-1399956.onnx",
"reward": 3.1676555052399635,
"creation_time": 1691187865.3726077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399956.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 3.8617880256580457,
"creation_time": 1691188076.3325276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy/Huggy/Huggy-1799959.onnx",
"reward": 3.9781916432910496,
"creation_time": 1691188286.9378428,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999945,
"file_path": "results/Huggy/Huggy/Huggy-1999945.onnx",
"reward": 4.052117497148648,
"creation_time": 1691188499.4005198,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999945.pt"
]
},
{
"steps": 2000007,
"file_path": "results/Huggy/Huggy/Huggy-2000007.onnx",
"reward": 4.060706934995121,
"creation_time": 1691188499.5412827,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
],
"final_checkpoint": {
"steps": 2000007,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.060706934995121,
"creation_time": 1691188499.5412827,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}