{
"Huggy": {
"checkpoints": [
{
"steps": 199674,
"file_path": "results/Huggy2/Huggy/Huggy-199674.onnx",
"reward": 3.3955015266527897,
"creation_time": 1720079618.5861995,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199674.pt"
]
},
{
"steps": 399966,
"file_path": "results/Huggy2/Huggy/Huggy-399966.onnx",
"reward": 3.6392399139807257,
"creation_time": 1720079851.5885425,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399966.pt"
]
},
{
"steps": 599914,
"file_path": "results/Huggy2/Huggy/Huggy-599914.onnx",
"reward": 3.7819399178028106,
"creation_time": 1720080085.782302,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599914.pt"
]
},
{
"steps": 799926,
"file_path": "results/Huggy2/Huggy/Huggy-799926.onnx",
"reward": 3.734101342351249,
"creation_time": 1720080320.161497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799926.pt"
]
},
{
"steps": 999954,
"file_path": "results/Huggy2/Huggy/Huggy-999954.onnx",
"reward": 3.9863265886235593,
"creation_time": 1720080561.0240588,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999954.pt"
]
},
{
"steps": 1199986,
"file_path": "results/Huggy2/Huggy/Huggy-1199986.onnx",
"reward": 3.882412486606174,
"creation_time": 1720080801.395312,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199986.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 4.078696617713342,
"creation_time": 1720081042.2319906,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599979,
"file_path": "results/Huggy2/Huggy/Huggy-1599979.onnx",
"reward": 3.780160505196144,
"creation_time": 1720081279.5399528,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599979.pt"
]
},
{
"steps": 1799614,
"file_path": "results/Huggy2/Huggy/Huggy-1799614.onnx",
"reward": 3.7999916869746753,
"creation_time": 1720081517.1425498,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799614.pt"
]
},
{
"steps": 1999814,
"file_path": "results/Huggy2/Huggy/Huggy-1999814.onnx",
"reward": 3.1107556521892548,
"creation_time": 1720081755.599484,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999814.pt"
]
},
{
"steps": 2000564,
"file_path": "results/Huggy2/Huggy/Huggy-2000564.onnx",
"reward": 2.8827235481955786,
"creation_time": 1720081755.738521,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000564.pt"
]
}
],
"final_checkpoint": {
"steps": 2000564,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.8827235481955786,
"creation_time": 1720081755.738521,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000564.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}