ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1672232106.6002028,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199964,
"file_path": "results/Huggy/Huggy/Huggy-199964.onnx",
"reward": 3.4936264285674463,
"creation_time": 1672232381.1457717,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199964.pt"
]
},
{
"steps": 399865,
"file_path": "results/Huggy/Huggy/Huggy-399865.onnx",
"reward": 3.598518904050191,
"creation_time": 1672232617.684166,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399865.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy/Huggy/Huggy-599938.onnx",
"reward": 4.4130578097842985,
"creation_time": 1672232858.3463402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799986,
"file_path": "results/Huggy/Huggy/Huggy-799986.onnx",
"reward": 3.7498993234756663,
"creation_time": 1672233099.5223637,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799986.pt"
]
},
{
"steps": 999607,
"file_path": "results/Huggy/Huggy/Huggy-999607.onnx",
"reward": 3.8210158069083033,
"creation_time": 1672233342.8945434,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999607.pt"
]
},
{
"steps": 1199919,
"file_path": "results/Huggy/Huggy/Huggy-1199919.onnx",
"reward": 3.8229327184312485,
"creation_time": 1672233584.1645029,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199919.pt"
]
},
{
"steps": 1399993,
"file_path": "results/Huggy/Huggy/Huggy-1399993.onnx",
"reward": 3.3780885219573973,
"creation_time": 1672233825.9426248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399993.pt"
]
},
{
"steps": 1599996,
"file_path": "results/Huggy/Huggy/Huggy-1599996.onnx",
"reward": 3.50539039157507,
"creation_time": 1672234061.4754586,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599996.pt"
]
},
{
"steps": 1799813,
"file_path": "results/Huggy/Huggy/Huggy-1799813.onnx",
"reward": 2.824856460094452,
"creation_time": 1672234302.2373319,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799813.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
"reward": 3.168665546637315,
"creation_time": 1672234541.9823532,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000056,
"file_path": "results/Huggy/Huggy/Huggy-2000056.onnx",
"reward": 3.1024941631725858,
"creation_time": 1672234542.119842,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000056.pt"
]
}
],
"final_checkpoint": {
"steps": 2000056,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.1024941631725858,
"creation_time": 1672234542.119842,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000056.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}
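
The file above is the checkpoint history that ML-Agents writes out during training: each entry under "checkpoints" records the step count, the exported .onnx policy, the reward logged at export time, a Unix creation timestamp, and the companion .pt file, while "final_checkpoint" points at the policy exported when training stopped at 2,000,056 steps. Below is a minimal sketch of reading that history with the Python standard library; the local path run_logs/training_status.json is an assumption based on the repo layout and may need adjusting.

# Minimal sketch: summarize the checkpoint history from training_status.json.
# The path below is assumed from the repo layout; adjust it for your setup.
import json
from datetime import datetime, timezone

with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Print one line per checkpoint: step count, reward, creation time, ONNX path.
for ckpt in checkpoints:
    reward = "n/a" if ckpt["reward"] is None else f"{ckpt['reward']:.3f}"
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"step {ckpt['steps']:>8}  reward {reward:>7}  "
          f"{created:%Y-%m-%d %H:%M} UTC  {ckpt['file_path']}")

# Compare the final exported policy with the best-scoring intermediate checkpoint.
final = status["Huggy"]["final_checkpoint"]
best = max((c for c in checkpoints if c["reward"] is not None),
           key=lambda c: c["reward"])
print("final:", final["file_path"], final["reward"])
print("best :", best["file_path"], best["reward"])

On the data above, the best-scoring intermediate checkpoint is Huggy-599938.onnx (reward about 4.41), while the final export at 2,000,056 steps carries a reward of about 3.10.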