{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.46129450771226754,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "ep_loss": 4.7343,
      "epoch": 0.04,
      "learning_rate": 2.5e-05,
      "loss": 11.6218,
      "mlm_loss": 6.8875,
      "step": 500
    },
    {
      "ep_loss": 0.4315,
      "epoch": 0.08,
      "learning_rate": 5e-05,
      "loss": 3.0285,
      "mlm_loss": 2.5969,
      "step": 1000
    },
    {
      "ep_loss": 0.3811,
      "epoch": 0.12,
      "learning_rate": 7.5e-05,
      "loss": 2.2571,
      "mlm_loss": 1.8761,
      "step": 1500
    },
    {
      "ep_loss": 0.37,
      "epoch": 0.15,
      "learning_rate": 0.0001,
      "loss": 1.9695,
      "mlm_loss": 1.5994,
      "step": 2000
    },
    {
      "ep_loss": 0.3813,
      "epoch": 0.19,
      "learning_rate": 0.00012495,
      "loss": 1.5597,
      "mlm_loss": 1.1785,
      "step": 2500
    },
    {
      "ep_loss": 0.3624,
      "epoch": 0.23,
      "learning_rate": 0.00014995,
      "loss": 1.4228,
      "mlm_loss": 1.0604,
      "step": 3000
    },
    {
      "ep_loss": 0.338,
      "epoch": 0.27,
      "learning_rate": 0.00017495,
      "loss": 1.3108,
      "mlm_loss": 0.9729,
      "step": 3500
    },
    {
      "ep_loss": 0.3524,
      "epoch": 0.31,
      "learning_rate": 0.00019994999999999998,
      "loss": 1.2734,
      "mlm_loss": 0.921,
      "step": 4000
    },
    {
      "ep_loss": 0.3423,
      "epoch": 0.35,
      "learning_rate": 0.00022495000000000002,
      "loss": 1.217,
      "mlm_loss": 0.8747,
      "step": 4500
    },
    {
      "ep_loss": 0.3229,
      "epoch": 0.38,
      "learning_rate": 0.0002499,
      "loss": 1.1373,
      "mlm_loss": 0.8144,
      "step": 5000
    },
    {
      "ep_loss": 0.3326,
      "epoch": 0.42,
      "learning_rate": 0.00027489999999999996,
      "loss": 1.117,
      "mlm_loss": 0.7844,
      "step": 5500
    },
    {
      "ep_loss": 0.332,
      "epoch": 0.46,
      "learning_rate": 0.00029985,
      "loss": 1.0958,
      "mlm_loss": 0.7638,
      "step": 6000
    }
  ],
  "logging_steps": 500,
  "max_steps": 520240,
  "num_train_epochs": 40,
  "save_steps": 500,
  "total_flos": 5.728903631701279e+18,
  "trial_name": null,
  "trial_params": null
}