ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199979,
                "file_path": "results/Huggy/Huggy/Huggy-199979.onnx",
                "reward": 3.814997810583848,
                "creation_time": 1679664589.5018747,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199979.pt"
                ]
            },
            {
                "steps": 399750,
                "file_path": "results/Huggy/Huggy/Huggy-399750.onnx",
                "reward": 3.8152262400835752,
                "creation_time": 1679664841.6860323,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399750.pt"
                ]
            },
            {
                "steps": 599902,
                "file_path": "results/Huggy/Huggy/Huggy-599902.onnx",
                "reward": 3.6698243848739134,
                "creation_time": 1679665101.177638,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599902.pt"
                ]
            },
            {
                "steps": 799905,
                "file_path": "results/Huggy/Huggy/Huggy-799905.onnx",
                "reward": 3.8437989659497305,
                "creation_time": 1679665353.50877,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799905.pt"
                ]
            },
            {
                "steps": 999987,
                "file_path": "results/Huggy/Huggy/Huggy-999987.onnx",
                "reward": 3.9850060262268396,
                "creation_time": 1679665610.3547742,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999987.pt"
                ]
            },
            {
                "steps": 1199994,
                "file_path": "results/Huggy/Huggy/Huggy-1199994.onnx",
                "reward": 3.791697515200262,
                "creation_time": 1679665867.3664021,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199994.pt"
                ]
            },
            {
                "steps": 1399970,
                "file_path": "results/Huggy/Huggy/Huggy-1399970.onnx",
                "reward": 3.867976260185242,
                "creation_time": 1679666124.2404382,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399970.pt"
                ]
            },
            {
                "steps": 1599943,
                "file_path": "results/Huggy/Huggy/Huggy-1599943.onnx",
                "reward": 3.597625647355052,
                "creation_time": 1679666376.3845646,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599943.pt"
                ]
            },
            {
                "steps": 1799888,
                "file_path": "results/Huggy/Huggy/Huggy-1799888.onnx",
                "reward": 3.7450285947928994,
                "creation_time": 1679666630.799099,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799888.pt"
                ]
            },
            {
                "steps": 1999906,
                "file_path": "results/Huggy/Huggy/Huggy-1999906.onnx",
                "reward": 4.261201395988464,
                "creation_time": 1679666886.894878,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999906.pt"
                ]
            },
            {
                "steps": 2000014,
                "file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
                "reward": 4.290087349274579,
                "creation_time": 1679666887.0224364,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000014.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000014,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.290087349274579,
            "creation_time": 1679666887.0224364,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000014.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
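
For reference, this file is the checkpoint log ML-Agents writes during training: each entry under `checkpoints` records the training step at save time, the exported `.onnx` policy under `file_path`, the mean reward, a Unix-epoch `creation_time`, and the matching `.pt` weights under `auxillary_file_paths` (that spelling is copied verbatim from the file). Below is a minimal sketch of how one might inspect the log with only the standard library; it assumes the file sits at `run_logs/training_status.json` relative to the repo root, as shown above.

```python
import json

# Load the ML-Agents training status log (path assumed from this repo's layout).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Print each checkpoint's step count, mean reward, and exported policy path.
for ckpt in behavior["checkpoints"]:
    print(f"step {ckpt['steps']:>7}  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

# Checkpoint with the highest recorded reward (here the 2,000,014-step one).
best = max(behavior["checkpoints"], key=lambda c: c["reward"])
print("best:", best["steps"], round(best["reward"], 3))

# "final_checkpoint" points at the policy exported at the end of the run;
# the key name "auxillary_file_paths" is taken verbatim from the file.
final = behavior["final_checkpoint"]
print("final:", final["file_path"], final["auxillary_file_paths"])
```

Reading the numbers this way shows the mean reward hovering between roughly 3.6 and 4.0 for most of the run, then peaking at about 4.29 at the final 2,000,014-step checkpoint, which is the policy exported as `results/Huggy/Huggy.onnx`.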