{
"Huggy": {
"checkpoints": [
{
"steps": 199929,
"file_path": "results/Huggy/Huggy/Huggy-199929.onnx",
"reward": 3.443464928598546,
"creation_time": 1705789665.69143,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199929.pt"
]
},
{
"steps": 399784,
"file_path": "results/Huggy/Huggy/Huggy-399784.onnx",
"reward": 3.730005377217343,
"creation_time": 1705789931.9758408,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399784.pt"
]
},
{
"steps": 599936,
"file_path": "results/Huggy/Huggy/Huggy-599936.onnx",
"reward": 3.1311864179113638,
"creation_time": 1705790193.2266145,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599936.pt"
]
},
{
"steps": 799886,
"file_path": "results/Huggy/Huggy/Huggy-799886.onnx",
"reward": 3.9782784926066608,
"creation_time": 1705790448.4407947,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799886.pt"
]
},
{
"steps": 999967,
"file_path": "results/Huggy/Huggy/Huggy-999967.onnx",
"reward": 3.943503976771326,
"creation_time": 1705790712.3201704,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999967.pt"
]
},
{
"steps": 1199945,
"file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
"reward": 3.6691866982728243,
"creation_time": 1705790978.4075177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199945.pt"
]
},
{
"steps": 1399628,
"file_path": "results/Huggy/Huggy/Huggy-1399628.onnx",
"reward": 3.843163390469745,
"creation_time": 1705791239.020028,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399628.pt"
]
},
{
"steps": 1599967,
"file_path": "results/Huggy/Huggy/Huggy-1599967.onnx",
"reward": 3.7580219673328714,
"creation_time": 1705791503.9885368,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599967.pt"
]
},
{
"steps": 1799973,
"file_path": "results/Huggy/Huggy/Huggy-1799973.onnx",
"reward": 3.6474256743555484,
"creation_time": 1705791772.1991186,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799973.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 4.334669264351449,
"creation_time": 1705792041.6437607,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000127,
"file_path": "results/Huggy/Huggy/Huggy-2000127.onnx",
"reward": 4.35087186791176,
"creation_time": 1705792041.7757576,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000127.pt"
]
}
],
"final_checkpoint": {
"steps": 2000127,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.35087186791176,
"creation_time": 1705792041.7757576,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000127.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}