{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199825,
                "file_path": "results/Huggy2/Huggy/Huggy-199825.onnx",
                "reward": 3.374353080579679,
                "creation_time": 1721032038.7599225,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199825.pt"
                ]
            },
            {
                "steps": 399896,
                "file_path": "results/Huggy2/Huggy/Huggy-399896.onnx",
                "reward": 3.631512754057583,
                "creation_time": 1721032279.7641053,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399896.pt"
                ]
            },
            {
                "steps": 599953,
                "file_path": "results/Huggy2/Huggy/Huggy-599953.onnx",
                "reward": 4.1925648286424835,
                "creation_time": 1721032525.8053946,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599953.pt"
                ]
            },
            {
                "steps": 609059,
                "file_path": "results/Huggy2/Huggy/Huggy-609059.onnx",
                "reward": 4.002884917892516,
                "creation_time": 1721032535.7389889,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-609059.pt"
                ]
            },
            {
                "steps": 799966,
                "file_path": "results/Huggy2/Huggy/Huggy-799966.onnx",
                "reward": 4.10767797032992,
                "creation_time": 1721032786.0846994,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799966.pt"
                ]
            },
            {
                "steps": 999929,
                "file_path": "results/Huggy2/Huggy/Huggy-999929.onnx",
                "reward": 3.5209803581237793,
                "creation_time": 1721033027.5796738,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999929.pt"
                ]
            },
            {
                "steps": 1199962,
                "file_path": "results/Huggy2/Huggy/Huggy-1199962.onnx",
                "reward": 3.853654679978729,
                "creation_time": 1721033280.3462408,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199962.pt"
                ]
            },
            {
                "steps": 1399921,
                "file_path": "results/Huggy2/Huggy/Huggy-1399921.onnx",
                "reward": 3.4973848510432886,
                "creation_time": 1721033547.722385,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399921.pt"
                ]
            },
            {
                "steps": 1599385,
                "file_path": "results/Huggy2/Huggy/Huggy-1599385.onnx",
                "reward": 4.2239556312561035,
                "creation_time": 1721033856.189452,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599385.pt"
                ]
            },
            {
                "steps": 1799944,
                "file_path": "results/Huggy2/Huggy/Huggy-1799944.onnx",
                "reward": 3.7030301817825864,
                "creation_time": 1721034126.1989725,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799944.pt"
                ]
            },
            {
                "steps": 1999959,
                "file_path": "results/Huggy2/Huggy/Huggy-1999959.onnx",
                "reward": 3.5029422815503746,
                "creation_time": 1721034393.596848,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999959.pt"
                ]
            },
            {
                "steps": 2000073,
                "file_path": "results/Huggy2/Huggy/Huggy-2000073.onnx",
                "reward": 3.540995903944565,
                "creation_time": 1721034393.725677,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000073.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000073,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.540995903944565,
            "creation_time": 1721034393.725677,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000073.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.3.0+cu121"
    }
}