ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199848,
                "file_path": "results/Huggy2/Huggy/Huggy-199848.onnx",
                "reward": 3.6790433700268084,
                "creation_time": 1717962333.9892323,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199848.pt"
                ]
            },
            {
                "steps": 399851,
                "file_path": "results/Huggy2/Huggy/Huggy-399851.onnx",
                "reward": 3.6144204963656033,
                "creation_time": 1717962638.6392927,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399851.pt"
                ]
            },
            {
                "steps": 599922,
                "file_path": "results/Huggy2/Huggy/Huggy-599922.onnx",
                "reward": 4.261630792617797,
                "creation_time": 1717962917.787547,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599922.pt"
                ]
            },
            {
                "steps": 799977,
                "file_path": "results/Huggy2/Huggy/Huggy-799977.onnx",
                "reward": 3.7596695161311424,
                "creation_time": 1717963177.0934725,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799977.pt"
                ]
            },
            {
                "steps": 999997,
                "file_path": "results/Huggy2/Huggy/Huggy-999997.onnx",
                "reward": 3.8804548721519305,
                "creation_time": 1717963442.6181312,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999997.pt"
                ]
            },
            {
                "steps": 1199962,
                "file_path": "results/Huggy2/Huggy/Huggy-1199962.onnx",
                "reward": 3.6660657749801384,
                "creation_time": 1717963731.0969803,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199962.pt"
                ]
            },
            {
                "steps": 1399793,
                "file_path": "results/Huggy2/Huggy/Huggy-1399793.onnx",
                "reward": 3.7270431915918985,
                "creation_time": 1717964009.9716144,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399793.pt"
                ]
            },
            {
                "steps": 1599979,
                "file_path": "results/Huggy2/Huggy/Huggy-1599979.onnx",
                "reward": 3.5052187940199597,
                "creation_time": 1717964300.8170624,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599979.pt"
                ]
            },
            {
                "steps": 1799755,
                "file_path": "results/Huggy2/Huggy/Huggy-1799755.onnx",
                "reward": 3.6012883864600083,
                "creation_time": 1717964578.9233763,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799755.pt"
                ]
            },
            {
                "steps": 1999997,
                "file_path": "results/Huggy2/Huggy/Huggy-1999997.onnx",
                "reward": 3.274408940915708,
                "creation_time": 1717964839.4434476,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999997.pt"
                ]
            },
            {
                "steps": 2000022,
                "file_path": "results/Huggy2/Huggy/Huggy-2000022.onnx",
                "reward": 3.2079140714236667,
                "creation_time": 1717964839.6257582,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000022.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000022,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.2079140714236667,
            "creation_time": 1717964839.6257582,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000022.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.3.0+cu121"
    }
}
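
For reference, a minimal Python sketch of how a manifest like this can be inspected, using only the standard library. The behavior-name key ("Huggy") and the relative path run_logs/training_status.json come from the contents above; everything else (variable names, the printed format) is illustrative, not part of the ML-Agents API.

import json

# Load the checkpoint manifest written during training.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# List every saved checkpoint with its step count and recorded reward.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8}  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# Pick the checkpoint with the highest recorded reward
# (in the data above, the 599922-step checkpoint at ~4.26).
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print("best:", best["file_path"], best["reward"])

# The final exported policy is listed separately.
print("final:", huggy["final_checkpoint"]["file_path"])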