{
"Huggy": {
"checkpoints": [
{
"steps": 199825,
"file_path": "results/Huggy2/Huggy/Huggy-199825.onnx",
"reward": 3.026054295333656,
"creation_time": 1720775271.9918966,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199825.pt"
]
},
{
"steps": 399909,
"file_path": "results/Huggy2/Huggy/Huggy-399909.onnx",
"reward": 3.7382250234887406,
"creation_time": 1720775509.8101544,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399909.pt"
]
},
{
"steps": 599990,
"file_path": "results/Huggy2/Huggy/Huggy-599990.onnx",
"reward": 3.654277714816007,
"creation_time": 1720775750.2919662,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599990.pt"
]
},
{
"steps": 799889,
"file_path": "results/Huggy2/Huggy/Huggy-799889.onnx",
"reward": 3.927841119871938,
"creation_time": 1720775990.5375385,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799889.pt"
]
},
{
"steps": 999922,
"file_path": "results/Huggy2/Huggy/Huggy-999922.onnx",
"reward": 4.137243415628161,
"creation_time": 1720776232.964811,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999922.pt"
]
},
{
"steps": 1199994,
"file_path": "results/Huggy2/Huggy/Huggy-1199994.onnx",
"reward": 3.8003281354904175,
"creation_time": 1720776477.4707873,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199994.pt"
]
},
{
"steps": 1399998,
"file_path": "results/Huggy2/Huggy/Huggy-1399998.onnx",
"reward": 3.6468630112134495,
"creation_time": 1720776722.5293455,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399998.pt"
]
},
{
"steps": 1599310,
"file_path": "results/Huggy2/Huggy/Huggy-1599310.onnx",
"reward": 4.0617966812048385,
"creation_time": 1720776960.5994453,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599310.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy2/Huggy/Huggy-1799996.onnx",
"reward": 3.9909232489916744,
"creation_time": 1720777205.0925307,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999930,
"file_path": "results/Huggy2/Huggy/Huggy-1999930.onnx",
"reward": 3.636946828701557,
"creation_time": 1720777445.4824924,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999930.pt"
]
},
{
"steps": 2000011,
"file_path": "results/Huggy2/Huggy/Huggy-2000011.onnx",
"reward": 3.65676775235164,
"creation_time": 1720777445.6615107,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000011.pt"
]
}
],
"final_checkpoint": {
"steps": 2000011,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.65676775235164,
"creation_time": 1720777445.6615107,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000011.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}