ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 38276,
"file_path": "results/Huggy/Huggy/Huggy-38276.onnx",
"reward": 1.716653624041514,
"creation_time": 1701414312.6485565,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-38276.pt"
]
},
{
"steps": 199797,
"file_path": "results/Huggy/Huggy/Huggy-199797.onnx",
"reward": 3.087880155619453,
"creation_time": 1701414536.2514277,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199797.pt"
]
},
{
"steps": 399780,
"file_path": "results/Huggy/Huggy/Huggy-399780.onnx",
"reward": 3.7426033947202892,
"creation_time": 1701414776.27847,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399780.pt"
]
},
{
"steps": 599915,
"file_path": "results/Huggy/Huggy/Huggy-599915.onnx",
"reward": 4.054780111938227,
"creation_time": 1701415017.5839326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599915.pt"
]
},
{
"steps": 799939,
"file_path": "results/Huggy/Huggy/Huggy-799939.onnx",
"reward": 3.5283188919226327,
"creation_time": 1701415258.0470004,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799939.pt"
]
},
{
"steps": 999946,
"file_path": "results/Huggy/Huggy/Huggy-999946.onnx",
"reward": 3.9204499402466944,
"creation_time": 1701415494.061784,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999946.pt"
]
},
{
"steps": 1199315,
"file_path": "results/Huggy/Huggy/Huggy-1199315.onnx",
"reward": 3.7728352036613684,
"creation_time": 1701415735.526149,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199315.pt"
]
},
{
"steps": 1399250,
"file_path": "results/Huggy/Huggy/Huggy-1399250.onnx",
"reward": 3.838205490356837,
"creation_time": 1701415979.1156852,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399250.pt"
]
},
{
"steps": 1599986,
"file_path": "results/Huggy/Huggy/Huggy-1599986.onnx",
"reward": 3.9440655439033123,
"creation_time": 1701416229.2615817,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599986.pt"
]
},
{
"steps": 1799910,
"file_path": "results/Huggy/Huggy/Huggy-1799910.onnx",
"reward": 3.3939417218699037,
"creation_time": 1701416489.5200386,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799910.pt"
]
},
{
"steps": 1999958,
"file_path": "results/Huggy/Huggy/Huggy-1999958.onnx",
"reward": 4.037008889934473,
"creation_time": 1701416751.503517,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999958.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy/Huggy/Huggy-2000030.onnx",
"reward": 4.05058339752,
"creation_time": 1701416751.6167884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.05058339752,
"creation_time": 1701416751.6167884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}
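
A minimal sketch of how this file can be consumed, assuming it sits at run_logs/training_status.json relative to the working directory (the path and variable names below are illustrative, not part of the ML-Agents API; only the keys shown in the JSON above are relied on):

import json

# Load the training status that ML-Agents wrote during the PPO run.
with open("run_logs/training_status.json") as f:  # hypothetical path
    status = json.load(f)

huggy = status["Huggy"]

# Walk the checkpoint list and report steps vs. recorded reward.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}')

# The final checkpoint points at the exported policy (Huggy.onnx).
final = huggy["final_checkpoint"]
print(f'final: {final["file_path"]} (reward={final["reward"]:.3f})')

Printed in order, the checkpoint rewards trace the learning curve of this run: roughly 1.72 at 38k steps, climbing to about 4.05 by the 2M-step final checkpoint.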