|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 75,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 20.00289492370743,
      "learning_rate": 0.0,
      "loss": 1.5756,
      "step": 1
    },
    {
      "epoch": 0.08,
      "grad_norm": 169.81482789708753,
      "learning_rate": 5e-05,
      "loss": 14.3496,
      "step": 2
    },
    {
      "epoch": 0.12,
      "grad_norm": 170.94221266882175,
      "learning_rate": 7.924812503605781e-05,
      "loss": 14.3495,
      "step": 3
    },
    {
      "epoch": 0.16,
      "grad_norm": 96.07266456886373,
      "learning_rate": 0.0001,
      "loss": 5.728,
      "step": 4
    },
    {
      "epoch": 0.2,
      "grad_norm": 74.89495923839695,
      "learning_rate": 0.0001,
      "loss": 3.198,
      "step": 5
    },
    {
      "epoch": 0.24,
      "grad_norm": 16.06625851910231,
      "learning_rate": 9.859154929577466e-05,
      "loss": 1.711,
      "step": 6
    },
    {
      "epoch": 0.28,
      "grad_norm": 11.432322037094815,
      "learning_rate": 9.718309859154931e-05,
      "loss": 1.1097,
      "step": 7
    },
    {
      "epoch": 0.32,
      "grad_norm": 10.90988949596415,
      "learning_rate": 9.577464788732394e-05,
      "loss": 1.1647,
      "step": 8
    },
    {
      "epoch": 0.36,
      "grad_norm": 5.166602608719267,
      "learning_rate": 9.43661971830986e-05,
      "loss": 0.6874,
      "step": 9
    },
    {
      "epoch": 0.4,
      "grad_norm": 5.063308573446352,
      "learning_rate": 9.295774647887325e-05,
      "loss": 0.6268,
      "step": 10
    },
    {
      "epoch": 0.44,
      "grad_norm": 7.227736963113414,
      "learning_rate": 9.15492957746479e-05,
      "loss": 0.5151,
      "step": 11
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.259327811156309,
      "learning_rate": 9.014084507042254e-05,
      "loss": 0.4389,
      "step": 12
    },
    {
      "epoch": 0.52,
      "grad_norm": 6.156983867098677,
      "learning_rate": 8.873239436619719e-05,
      "loss": 0.5211,
      "step": 13
    },
    {
      "epoch": 0.56,
      "grad_norm": 5.241850109217824,
      "learning_rate": 8.732394366197182e-05,
      "loss": 0.3401,
      "step": 14
    },
    {
      "epoch": 0.6,
      "grad_norm": 45.75950192245342,
      "learning_rate": 8.591549295774647e-05,
      "loss": 0.6114,
      "step": 15
    },
    {
      "epoch": 0.64,
      "grad_norm": 12.747985157681715,
      "learning_rate": 8.450704225352113e-05,
      "loss": 0.4109,
      "step": 16
    },
    {
      "epoch": 0.68,
      "grad_norm": 6.204387028637272,
      "learning_rate": 8.309859154929578e-05,
      "loss": 0.3238,
      "step": 17
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.784294966675183,
      "learning_rate": 8.169014084507043e-05,
      "loss": 0.2746,
      "step": 18
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.0640451461171732,
      "learning_rate": 8.028169014084508e-05,
      "loss": 0.2216,
      "step": 19
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.247533777438539,
      "learning_rate": 7.887323943661972e-05,
      "loss": 0.2169,
      "step": 20
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.5843446520161867,
      "learning_rate": 7.746478873239437e-05,
      "loss": 0.2063,
      "step": 21
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.6140022871361251,
      "learning_rate": 7.605633802816902e-05,
      "loss": 0.1924,
      "step": 22
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.6328264593401415,
      "learning_rate": 7.464788732394367e-05,
      "loss": 0.1687,
      "step": 23
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.47188590510134204,
      "learning_rate": 7.323943661971832e-05,
      "loss": 0.1502,
      "step": 24
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4569187747764233,
      "learning_rate": 7.183098591549297e-05,
      "loss": 0.1322,
      "step": 25
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.5022295511205257,
      "learning_rate": 7.042253521126761e-05,
      "loss": 0.1283,
      "step": 26
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.32747245962381083,
      "learning_rate": 6.901408450704226e-05,
      "loss": 0.105,
      "step": 27
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.4071591561018996,
      "learning_rate": 6.76056338028169e-05,
      "loss": 0.1127,
      "step": 28
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.4886479527811691,
      "learning_rate": 6.619718309859155e-05,
      "loss": 0.1097,
      "step": 29
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.34862267892547183,
      "learning_rate": 6.47887323943662e-05,
      "loss": 0.1007,
      "step": 30
    },
    {
      "epoch": 1.24,
      "grad_norm": 0.33123650236209157,
      "learning_rate": 6.338028169014085e-05,
      "loss": 0.0945,
      "step": 31
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.3246463353672584,
      "learning_rate": 6.197183098591549e-05,
      "loss": 0.0957,
      "step": 32
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.3503498413159293,
      "learning_rate": 6.056338028169014e-05,
      "loss": 0.1007,
      "step": 33
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 0.2994313557882548,
      "learning_rate": 5.915492957746479e-05,
      "loss": 0.0961,
      "step": 34
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.3098559906263102,
      "learning_rate": 5.774647887323944e-05,
      "loss": 0.0891,
      "step": 35
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.3320370344139516,
      "learning_rate": 5.633802816901409e-05,
      "loss": 0.093,
      "step": 36
    },
    {
      "epoch": 1.48,
      "grad_norm": 0.2975761075734135,
      "learning_rate": 5.492957746478874e-05,
      "loss": 0.0824,
      "step": 37
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.2609696051747955,
      "learning_rate": 5.352112676056338e-05,
      "loss": 0.0849,
      "step": 38
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.2746218271104525,
      "learning_rate": 5.2112676056338026e-05,
      "loss": 0.0812,
      "step": 39
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.27280756684282476,
      "learning_rate": 5.070422535211268e-05,
      "loss": 0.0857,
      "step": 40
    },
    {
      "epoch": 1.6400000000000001,
      "grad_norm": 0.3333732319784279,
      "learning_rate": 4.929577464788733e-05,
      "loss": 0.083,
      "step": 41
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.28187480716143415,
      "learning_rate": 4.788732394366197e-05,
      "loss": 0.0805,
      "step": 42
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.25874624247743283,
      "learning_rate": 4.647887323943662e-05,
      "loss": 0.0806,
      "step": 43
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.28417848530718226,
      "learning_rate": 4.507042253521127e-05,
      "loss": 0.0776,
      "step": 44
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.24698649624452082,
      "learning_rate": 4.366197183098591e-05,
      "loss": 0.0765,
      "step": 45
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 0.23537664520973256,
      "learning_rate": 4.225352112676056e-05,
      "loss": 0.0758,
      "step": 46
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.2534413438607467,
      "learning_rate": 4.0845070422535214e-05,
      "loss": 0.0804,
      "step": 47
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.20702488497520458,
      "learning_rate": 3.943661971830986e-05,
      "loss": 0.0715,
      "step": 48
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.2613262372736133,
      "learning_rate": 3.802816901408451e-05,
      "loss": 0.084,
      "step": 49
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.22492435603929029,
      "learning_rate": 3.661971830985916e-05,
      "loss": 0.0556,
      "step": 50
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.2611962185259755,
      "learning_rate": 3.5211267605633805e-05,
      "loss": 0.0481,
      "step": 51
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.15962947759230994,
      "learning_rate": 3.380281690140845e-05,
      "loss": 0.0449,
      "step": 52
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.1694677846392973,
      "learning_rate": 3.23943661971831e-05,
      "loss": 0.0482,
      "step": 53
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.1438746814861677,
      "learning_rate": 3.0985915492957744e-05,
      "loss": 0.0425,
      "step": 54
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.17430208968882352,
      "learning_rate": 2.9577464788732395e-05,
      "loss": 0.043,
      "step": 55
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.17207620385513733,
      "learning_rate": 2.8169014084507046e-05,
      "loss": 0.0449,
      "step": 56
    },
    {
      "epoch": 2.2800000000000002,
      "grad_norm": 0.18001842523178951,
      "learning_rate": 2.676056338028169e-05,
      "loss": 0.0407,
      "step": 57
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.20877963876068653,
      "learning_rate": 2.535211267605634e-05,
      "loss": 0.0418,
      "step": 58
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.2086206476860082,
      "learning_rate": 2.3943661971830986e-05,
      "loss": 0.0432,
      "step": 59
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.18489125316064906,
      "learning_rate": 2.2535211267605634e-05,
      "loss": 0.0443,
      "step": 60
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.18836568321121092,
      "learning_rate": 2.112676056338028e-05,
      "loss": 0.0416,
      "step": 61
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.18671273407693323,
      "learning_rate": 1.971830985915493e-05,
      "loss": 0.0392,
      "step": 62
    },
    {
      "epoch": 2.52,
      "grad_norm": 0.183141175658178,
      "learning_rate": 1.830985915492958e-05,
      "loss": 0.0406,
      "step": 63
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.19023565118928507,
      "learning_rate": 1.6901408450704224e-05,
      "loss": 0.0392,
      "step": 64
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.1734836339026565,
      "learning_rate": 1.5492957746478872e-05,
      "loss": 0.0408,
      "step": 65
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.2124529691628785,
      "learning_rate": 1.4084507042253523e-05,
      "loss": 0.0402,
      "step": 66
    },
    {
      "epoch": 2.68,
      "grad_norm": 0.16929391483607104,
      "learning_rate": 1.267605633802817e-05,
      "loss": 0.0422,
      "step": 67
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.1865969813928253,
      "learning_rate": 1.1267605633802817e-05,
      "loss": 0.0423,
      "step": 68
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.19199511508921327,
      "learning_rate": 9.859154929577465e-06,
      "loss": 0.0428,
      "step": 69
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.15471843235506497,
      "learning_rate": 8.450704225352112e-06,
      "loss": 0.0383,
      "step": 70
    },
    {
      "epoch": 2.84,
      "grad_norm": 0.17211726924185472,
      "learning_rate": 7.042253521126762e-06,
      "loss": 0.0422,
      "step": 71
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.16597891624795819,
      "learning_rate": 5.6338028169014084e-06,
      "loss": 0.0358,
      "step": 72
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.16696499746864643,
      "learning_rate": 4.225352112676056e-06,
      "loss": 0.0384,
      "step": 73
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.16841918706948866,
      "learning_rate": 2.8169014084507042e-06,
      "loss": 0.0399,
      "step": 74
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.12380367991746002,
      "learning_rate": 1.4084507042253521e-06,
      "loss": 0.0275,
      "step": 75
    },
    {
      "epoch": 3.0,
      "step": 75,
      "total_flos": 175054227243008.0,
      "train_loss": 0.6997585766762495,
      "train_runtime": 937.3587,
      "train_samples_per_second": 103.388,
      "train_steps_per_second": 0.08
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 75,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 175054227243008.0,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}
|
|