ppo-Huggy / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199775,
"file_path": "results/Huggy2/Huggy/Huggy-199775.onnx",
"reward": 3.470437051671924,
"creation_time": 1719392965.7721858,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199775.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy2/Huggy/Huggy-399995.onnx",
"reward": 3.8281652416501726,
"creation_time": 1719393455.1037757,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599973,
"file_path": "results/Huggy2/Huggy/Huggy-599973.onnx",
"reward": 4.167521953582764,
"creation_time": 1719393949.3081565,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599973.pt"
]
},
{
"steps": 799933,
"file_path": "results/Huggy2/Huggy/Huggy-799933.onnx",
"reward": 3.6813112250689803,
"creation_time": 1719394430.1490173,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799933.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy2/Huggy/Huggy-999934.onnx",
"reward": 3.8881551887787564,
"creation_time": 1719394927.6854458,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199960,
"file_path": "results/Huggy2/Huggy/Huggy-1199960.onnx",
"reward": 3.5059717768846554,
"creation_time": 1719395427.615659,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199960.pt"
]
},
{
"steps": 1399918,
"file_path": "results/Huggy2/Huggy/Huggy-1399918.onnx",
"reward": 4.24440055734971,
"creation_time": 1719395964.0756173,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399918.pt"
]
},
{
"steps": 1599886,
"file_path": "results/Huggy2/Huggy/Huggy-1599886.onnx",
"reward": 3.6562260921129925,
"creation_time": 1719396449.7493174,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599886.pt"
]
},
{
"steps": 1799347,
"file_path": "results/Huggy2/Huggy/Huggy-1799347.onnx",
"reward": 3.899751428295584,
"creation_time": 1719396954.7836938,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799347.pt"
]
},
{
"steps": 1999962,
"file_path": "results/Huggy2/Huggy/Huggy-1999962.onnx",
"reward": 3.7330776807424186,
"creation_time": 1719397463.1540132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999962.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.772947211014597,
"creation_time": 1719397463.2734513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.772947211014597,
"creation_time": 1719397463.2734513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}
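
A minimal sketch of how this file could be consumed programmatically, assuming it has been saved locally as "training_status.json" (the filename and local path are assumptions, not part of the log itself). It simply loads the JSON and prints the steps, mean reward, and ONNX path recorded for each checkpoint, plus the final checkpoint.

import json

# Load the ML-Agents training status written during the PPO run on the Huggy behavior.
with open("training_status.json") as f:
    status = json.load(f)

# Each intermediate checkpoint records the step count, exported ONNX path, and mean reward.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint points at the consolidated model exported at the end of training.
final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]} steps  reward={final["reward"]:.3f}  {final["file_path"]}')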