{
"Huggy": {
"checkpoints": [
{
"steps": 199945,
"file_path": "results/Huggy/Huggy/Huggy-199945.onnx",
"reward": 3.499273081348367,
"creation_time": 1677560977.6731813,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199945.pt"
]
},
{
"steps": 399820,
"file_path": "results/Huggy/Huggy/Huggy-399820.onnx",
"reward": 3.7213935405015945,
"creation_time": 1677561219.5850017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399820.pt"
]
},
{
"steps": 599939,
"file_path": "results/Huggy/Huggy/Huggy-599939.onnx",
"reward": 3.96330879514034,
"creation_time": 1677561467.842268,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599939.pt"
]
},
{
"steps": 799942,
"file_path": "results/Huggy/Huggy/Huggy-799942.onnx",
"reward": 3.9583519173027883,
"creation_time": 1677561714.6153765,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799942.pt"
]
},
{
"steps": 999973,
"file_path": "results/Huggy/Huggy/Huggy-999973.onnx",
"reward": 3.8728307826178416,
"creation_time": 1677561961.8417392,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999973.pt"
]
},
{
"steps": 1199972,
"file_path": "results/Huggy/Huggy/Huggy-1199972.onnx",
"reward": 4.1406648818482745,
"creation_time": 1677562208.119195,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199972.pt"
]
},
{
"steps": 1399984,
"file_path": "results/Huggy/Huggy/Huggy-1399984.onnx",
"reward": 3.5988506262118998,
"creation_time": 1677562452.877678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399984.pt"
]
},
{
"steps": 1599980,
"file_path": "results/Huggy/Huggy/Huggy-1599980.onnx",
"reward": 3.8784409273514706,
"creation_time": 1677562694.4169586,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599980.pt"
]
},
{
"steps": 1799909,
"file_path": "results/Huggy/Huggy/Huggy-1799909.onnx",
"reward": 3.8526897202402153,
"creation_time": 1677562938.7706313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799909.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 3.5327874447988425,
"creation_time": 1677563188.7320275,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000118,
"file_path": "results/Huggy/Huggy/Huggy-2000118.onnx",
"reward": 3.5857131932462964,
"creation_time": 1677563188.9164312,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000118.pt"
]
}
],
"final_checkpoint": {
"steps": 2000118,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5857131932462964,
"creation_time": 1677563188.9164312,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000118.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}