{
"Huggy": {
"checkpoints": [
{
"steps": 199849,
"file_path": "results/Huggy/Huggy/Huggy-199849.onnx",
"reward": 3.2820197865366936,
"creation_time": 1694677996.5669806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199849.pt"
]
},
{
"steps": 399885,
"file_path": "results/Huggy/Huggy/Huggy-399885.onnx",
"reward": 3.9224041160196066,
"creation_time": 1694678239.3044298,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399885.pt"
]
},
{
"steps": 599964,
"file_path": "results/Huggy/Huggy/Huggy-599964.onnx",
"reward": 3.744783043861389,
"creation_time": 1694678488.919672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599964.pt"
]
},
{
"steps": 799977,
"file_path": "results/Huggy/Huggy/Huggy-799977.onnx",
"reward": 3.7249039405030633,
"creation_time": 1694678747.2617826,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799977.pt"
]
},
{
"steps": 999906,
"file_path": "results/Huggy/Huggy/Huggy-999906.onnx",
"reward": 3.8867893147672343,
"creation_time": 1694679010.7180817,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999906.pt"
]
},
{
"steps": 1199918,
"file_path": "results/Huggy/Huggy/Huggy-1199918.onnx",
"reward": 3.453761212527752,
"creation_time": 1694679265.8858745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199918.pt"
]
},
{
"steps": 1399881,
"file_path": "results/Huggy/Huggy/Huggy-1399881.onnx",
"reward": 3.7895976504472295,
"creation_time": 1694679521.3853385,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399881.pt"
]
},
{
"steps": 1599915,
"file_path": "results/Huggy/Huggy/Huggy-1599915.onnx",
"reward": 3.6789970768822564,
"creation_time": 1694679773.8023849,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599915.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 3.3777180846701276,
"creation_time": 1694680035.7768865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999998,
"file_path": "results/Huggy/Huggy/Huggy-1999998.onnx",
"reward": 3.4927824574547843,
"creation_time": 1694680298.444485,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999998.pt"
]
},
{
"steps": 2000139,
"file_path": "results/Huggy/Huggy/Huggy-2000139.onnx",
"reward": 3.562831847291244,
"creation_time": 1694680298.5713673,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000139.pt"
]
}
],
"final_checkpoint": {
"steps": 2000139,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.562831847291244,
"creation_time": 1694680298.5713673,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000139.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}