{
"Huggy": {
"checkpoints": [
{
"steps": 499849,
"file_path": "results\\Huggy_1\\Huggy\\Huggy-499849.onnx",
"reward": 3.8409776226166756,
"creation_time": 1650462766.9563007,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-499849.pt"
]
},
{
"steps": 999633,
"file_path": "results\\Huggy_1\\Huggy\\Huggy-999633.onnx",
"reward": 3.8536432950924606,
"creation_time": 1650463501.1990354,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-999633.pt"
]
},
{
"steps": 1499331,
"file_path": "results\\Huggy_1\\Huggy\\Huggy-1499331.onnx",
"reward": 3.8168014070605705,
"creation_time": 1650464251.328924,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-1499331.pt"
]
},
{
"steps": 1999977,
"file_path": "results\\Huggy_1\\Huggy\\Huggy-1999977.onnx",
"reward": 3.819167050448331,
"creation_time": 1650464973.5974445,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-1999977.pt"
]
},
{
"steps": 2000001,
"file_path": "results\\Huggy_1\\Huggy\\Huggy-2000001.onnx",
"reward": 3.7095100827839063,
"creation_time": 1650464973.7289164,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-2000001.pt"
]
}
],
"final_checkpoint": {
"steps": 2000001,
"file_path": "results\\Huggy_1\\Huggy.onnx",
"reward": 3.7095100827839063,
"creation_time": 1650464973.7289164,
"auxillary_file_paths": [
"results\\Huggy_1\\Huggy\\Huggy-2000001.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.7.1+cu110"
}
}