{
"Huggy": {
"checkpoints": [
{
"steps": 199894,
"file_path": "results/Huggy/Huggy/Huggy-199894.onnx",
"reward": 3.419936793915769,
"creation_time": 1688396156.6679292,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199894.pt"
]
},
{
"steps": 399998,
"file_path": "results/Huggy/Huggy/Huggy-399998.onnx",
"reward": 3.814316924025373,
"creation_time": 1688396388.517391,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399998.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy/Huggy/Huggy-599933.onnx",
"reward": 2.530343929926554,
"creation_time": 1688396624.8762076,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799918,
"file_path": "results/Huggy/Huggy/Huggy-799918.onnx",
"reward": 3.622871805737902,
"creation_time": 1688396858.601711,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799918.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
"reward": 3.9664319665808425,
"creation_time": 1688397097.4261496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199993,
"file_path": "results/Huggy/Huggy/Huggy-1199993.onnx",
"reward": 4.4545183430115385,
"creation_time": 1688397336.8310432,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199993.pt"
]
},
{
"steps": 1399936,
"file_path": "results/Huggy/Huggy/Huggy-1399936.onnx",
"reward": 3.7061835756427364,
"creation_time": 1688397571.3089862,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399936.pt"
]
},
{
"steps": 1599458,
"file_path": "results/Huggy/Huggy/Huggy-1599458.onnx",
"reward": 3.8235666255156198,
"creation_time": 1688397810.47235,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599458.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 3.6107782125473022,
"creation_time": 1688398046.2368243,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999906,
"file_path": "results/Huggy/Huggy/Huggy-1999906.onnx",
"reward": 3.579855168124904,
"creation_time": 1688398280.5472698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999906.pt"
]
},
{
"steps": 2000015,
"file_path": "results/Huggy/Huggy/Huggy-2000015.onnx",
"reward": 3.581122413600162,
"creation_time": 1688398280.6621647,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
],
"final_checkpoint": {
"steps": 2000015,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.581122413600162,
"creation_time": 1688398280.6621647,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}