{
"Huggy": {
"checkpoints": [
{
"steps": 199996,
"file_path": "results/Huggy2/Huggy/Huggy-199996.onnx",
"reward": 3.510707667895726,
"creation_time": 1718539351.1599782,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199996.pt"
]
},
{
"steps": 399939,
"file_path": "results/Huggy2/Huggy/Huggy-399939.onnx",
"reward": 3.9675709482223267,
"creation_time": 1718539603.6679964,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399939.pt"
]
},
{
"steps": 599940,
"file_path": "results/Huggy2/Huggy/Huggy-599940.onnx",
"reward": 3.9316606475756717,
"creation_time": 1718539860.1056142,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599940.pt"
]
},
{
"steps": 799974,
"file_path": "results/Huggy2/Huggy/Huggy-799974.onnx",
"reward": 3.7429497306945763,
"creation_time": 1718540114.2571661,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799974.pt"
]
},
{
"steps": 999909,
"file_path": "results/Huggy2/Huggy/Huggy-999909.onnx",
"reward": 4.134475994600009,
"creation_time": 1718540372.8807855,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999909.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy2/Huggy/Huggy-1199992.onnx",
"reward": 4.12822191248235,
"creation_time": 1718540637.8053067,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399999,
"file_path": "results/Huggy2/Huggy/Huggy-1399999.onnx",
"reward": 3.975245780944824,
"creation_time": 1718540903.9343638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399999.pt"
]
},
{
"steps": 1599946,
"file_path": "results/Huggy2/Huggy/Huggy-1599946.onnx",
"reward": 3.9458985981063464,
"creation_time": 1718541164.323291,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599946.pt"
]
},
{
"steps": 1799975,
"file_path": "results/Huggy2/Huggy/Huggy-1799975.onnx",
"reward": 4.025322965075893,
"creation_time": 1718541425.668832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799975.pt"
]
},
{
"steps": 1999417,
"file_path": "results/Huggy2/Huggy/Huggy-1999417.onnx",
"reward": 3.310473915265531,
"creation_time": 1718541687.1777084,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999417.pt"
]
},
{
"steps": 2000166,
"file_path": "results/Huggy2/Huggy/Huggy-2000166.onnx",
"reward": 3.152505499124527,
"creation_time": 1718541687.3428552,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000166.pt"
]
}
],
"final_checkpoint": {
"steps": 2000166,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.152505499124527,
"creation_time": 1718541687.3428552,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000166.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}