{
"Huggy": {
"checkpoints": [
{
"steps": 199902,
"file_path": "results/Huggy2/Huggy/Huggy-199902.onnx",
"reward": 3.4148158595479767,
"creation_time": 1711702998.8066657,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199902.pt"
]
},
{
"steps": 399880,
"file_path": "results/Huggy2/Huggy/Huggy-399880.onnx",
"reward": 3.491361126093797,
"creation_time": 1711703221.153264,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399880.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy2/Huggy/Huggy-599944.onnx",
"reward": 3.954073667526245,
"creation_time": 1711703449.4165502,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799979,
"file_path": "results/Huggy2/Huggy/Huggy-799979.onnx",
"reward": 3.9140217227642764,
"creation_time": 1711703674.579311,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799979.pt"
]
},
{
"steps": 999729,
"file_path": "results/Huggy2/Huggy/Huggy-999729.onnx",
"reward": 3.9151135393932686,
"creation_time": 1711703903.3334467,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999729.pt"
]
},
{
"steps": 1199954,
"file_path": "results/Huggy2/Huggy/Huggy-1199954.onnx",
"reward": 3.7137580717311187,
"creation_time": 1711704132.8425345,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199954.pt"
]
},
{
"steps": 1399873,
"file_path": "results/Huggy2/Huggy/Huggy-1399873.onnx",
"reward": 2.946698713302612,
"creation_time": 1711704362.3457327,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399873.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy2/Huggy/Huggy-1599992.onnx",
"reward": 3.410565658116046,
"creation_time": 1711704590.790912,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799926,
"file_path": "results/Huggy2/Huggy/Huggy-1799926.onnx",
"reward": 3.9546280770466247,
"creation_time": 1711704821.8431509,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799926.pt"
]
},
{
"steps": 1999614,
"file_path": "results/Huggy2/Huggy/Huggy-1999614.onnx",
"reward": 4.1377762739474955,
"creation_time": 1711705053.6473184,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999614.pt"
]
},
{
"steps": 2000364,
"file_path": "results/Huggy2/Huggy/Huggy-2000364.onnx",
"reward": 3.621174361024584,
"creation_time": 1711705053.7968078,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000364.pt"
]
}
],
"final_checkpoint": {
"steps": 2000364,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.621174361024584,
"creation_time": 1711705053.7968078,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000364.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}