ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199767,
                "file_path": "results/Huggy/Huggy/Huggy-199767.onnx",
                "reward": 3.425866288798196,
                "creation_time": 1673875005.7555587,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199767.pt"
                ]
            },
            {
                "steps": 399970,
                "file_path": "results/Huggy/Huggy/Huggy-399970.onnx",
                "reward": 3.3867288288616,
                "creation_time": 1673875225.7046475,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399970.pt"
                ]
            },
            {
                "steps": 599900,
                "file_path": "results/Huggy/Huggy/Huggy-599900.onnx",
                "reward": 3.703661119937897,
                "creation_time": 1673875448.468481,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599900.pt"
                ]
            },
            {
                "steps": 799630,
                "file_path": "results/Huggy/Huggy/Huggy-799630.onnx",
                "reward": 3.8106221480572477,
                "creation_time": 1673875666.954696,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799630.pt"
                ]
            },
            {
                "steps": 999301,
                "file_path": "results/Huggy/Huggy/Huggy-999301.onnx",
                "reward": 3.265008219570484,
                "creation_time": 1673875889.7347648,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999301.pt"
                ]
            },
            {
                "steps": 1199913,
                "file_path": "results/Huggy/Huggy/Huggy-1199913.onnx",
                "reward": 3.48712186719857,
                "creation_time": 1673876112.2797754,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199913.pt"
                ]
            },
            {
                "steps": 1399794,
                "file_path": "results/Huggy/Huggy/Huggy-1399794.onnx",
                "reward": 3.430021280946295,
                "creation_time": 1673876330.7859325,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399794.pt"
                ]
            },
            {
                "steps": 1599930,
                "file_path": "results/Huggy/Huggy/Huggy-1599930.onnx",
                "reward": 3.4146504831314086,
                "creation_time": 1673876553.3395386,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599930.pt"
                ]
            },
            {
                "steps": 1799627,
                "file_path": "results/Huggy/Huggy/Huggy-1799627.onnx",
                "reward": 3.4238061223711287,
                "creation_time": 1673876775.5805705,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799627.pt"
                ]
            },
            {
                "steps": 1999960,
                "file_path": "results/Huggy/Huggy/Huggy-1999960.onnx",
                "reward": 3.1731835454702377,
                "creation_time": 1673876994.0274851,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999960.pt"
                ]
            },
            {
                "steps": 2000060,
                "file_path": "results/Huggy/Huggy/Huggy-2000060.onnx",
                "reward": 3.1712760954848993,
                "creation_time": 1673876994.1598654,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000060.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000060,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.1712760954848993,
            "creation_time": 1673876994.1598654,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000060.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
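
Below is a minimal sketch of how this status file might be inspected programmatically, for example to find the checkpoint with the best recorded reward. It assumes the JSON has been saved locally as run_logs/training_status.json (the path is an assumption; adjust it to your checkout). The behavior name "Huggy" and all field names are taken from the file above; the printing logic is purely illustrative. Note that "auxillary_file_paths" is the key name as ML-Agents actually writes it.

```python
import json

# Assumed local path to the file shown above; adjust as needed.
STATUS_PATH = "run_logs/training_status.json"

with open(STATUS_PATH) as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Print step count, recorded reward, and ONNX path for every checkpoint.
for ckpt in checkpoints:
    print(f"step {ckpt['steps']:>9,}  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

# Checkpoint with the highest recorded reward (3.811 at step 799,630 in this run).
best = max(checkpoints, key=lambda c: c["reward"])
print("best:", best["steps"], best["file_path"])

# The final exported policy recorded under final_checkpoint.
final = status["Huggy"]["final_checkpoint"]
print("final:", final["steps"], final["file_path"])
```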