{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199982,
        "file_path": "results/Huggy2/Huggy/Huggy-199982.onnx",
        "reward": 2.9644240220980858,
        "creation_time": 1732647877.519199,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199982.pt"
        ]
      },
      {
        "steps": 399985,
        "file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
        "reward": 3.2140926160509626,
        "creation_time": 1732648111.996119,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399985.pt"
        ]
      },
      {
        "steps": 599935,
        "file_path": "results/Huggy2/Huggy/Huggy-599935.onnx",
        "reward": 4.306666904025608,
        "creation_time": 1732648353.1430702,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599935.pt"
        ]
      },
      {
        "steps": 799952,
        "file_path": "results/Huggy2/Huggy/Huggy-799952.onnx",
        "reward": 3.7837998519624985,
        "creation_time": 1732648593.229356,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799952.pt"
        ]
      },
      {
        "steps": 999907,
        "file_path": "results/Huggy2/Huggy/Huggy-999907.onnx",
        "reward": 3.4878284870646894,
        "creation_time": 1732648836.5737946,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999907.pt"
        ]
      },
      {
        "steps": 1199983,
        "file_path": "results/Huggy2/Huggy/Huggy-1199983.onnx",
        "reward": 3.4196476296466938,
        "creation_time": 1732649082.082938,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199983.pt"
        ]
      },
      {
        "steps": 1399968,
        "file_path": "results/Huggy2/Huggy/Huggy-1399968.onnx",
        "reward": 3.8209108438359976,
        "creation_time": 1732649323.081717,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399968.pt"
        ]
      },
      {
        "steps": 1599995,
        "file_path": "results/Huggy2/Huggy/Huggy-1599995.onnx",
        "reward": 3.634313987780221,
        "creation_time": 1732649571.5310006,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599995.pt"
        ]
      },
      {
        "steps": 1799965,
        "file_path": "results/Huggy2/Huggy/Huggy-1799965.onnx",
        "reward": 3.441365860402584,
        "creation_time": 1732649825.7334495,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799965.pt"
        ]
      },
      {
        "steps": 1999957,
        "file_path": "results/Huggy2/Huggy/Huggy-1999957.onnx",
        "reward": 1.775277018547058,
        "creation_time": 1732650075.0480964,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999957.pt"
        ]
      },
      {
        "steps": 2000013,
        "file_path": "results/Huggy2/Huggy/Huggy-2000013.onnx",
        "reward": 1.9465234756469727,
        "creation_time": 1732650075.162895,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000013.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000013,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 1.9465234756469727,
      "creation_time": 1732650075.162895,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000013.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.5.1+cu121"
  }
}