{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1148,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008710801393728223,
      "grad_norm": 96.23190307617188,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 10.808,
      "step": 10
    },
    {
      "epoch": 0.017421602787456445,
      "grad_norm": 463.5845642089844,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 10.8147,
      "step": 20
    },
    {
      "epoch": 0.02613240418118467,
      "grad_norm": 0.10578063130378723,
      "learning_rate": 6.000000000000001e-07,
      "loss": 10.8077,
      "step": 30
    },
    {
      "epoch": 0.03484320557491289,
      "grad_norm": 0.09780663996934891,
      "learning_rate": 8.000000000000001e-07,
      "loss": 10.8218,
      "step": 40
    },
    {
      "epoch": 0.04355400696864112,
      "grad_norm": 1469.5677490234375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 10.8079,
      "step": 50
    },
    {
      "epoch": 0.05226480836236934,
      "grad_norm": 0.09817048907279968,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 10.8204,
      "step": 60
    },
    {
      "epoch": 0.06097560975609756,
      "grad_norm": 0.09422990679740906,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 10.8159,
      "step": 70
    },
    {
      "epoch": 0.06968641114982578,
      "grad_norm": 0.09545309096574783,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 10.811,
      "step": 80
    },
    {
      "epoch": 0.078397212543554,
      "grad_norm": 0.08887256681919098,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 10.8048,
      "step": 90
    },
    {
      "epoch": 0.08710801393728224,
      "grad_norm": 0.0909484326839447,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 10.8126,
      "step": 100
    },
    {
      "epoch": 0.09581881533101046,
      "grad_norm": 0.09697296470403671,
      "learning_rate": 2.2e-06,
      "loss": 10.8056,
      "step": 110
    },
    {
      "epoch": 0.10452961672473868,
      "grad_norm": 0.09927204251289368,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 11.0541,
      "step": 120
    },
    {
      "epoch": 0.1132404181184669,
      "grad_norm": 0.09392143785953522,
      "learning_rate": 2.6e-06,
      "loss": 10.8073,
      "step": 130
    },
    {
      "epoch": 0.12195121951219512,
      "grad_norm": 0.09451345354318619,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 10.8041,
      "step": 140
    },
    {
      "epoch": 0.13066202090592335,
      "grad_norm": 0.09947695583105087,
      "learning_rate": 3e-06,
      "loss": 10.8141,
      "step": 150
    },
    {
      "epoch": 0.13937282229965156,
      "grad_norm": 0.09803537279367447,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 10.805,
      "step": 160
    },
    {
      "epoch": 0.1480836236933798,
      "grad_norm": 0.09703430533409119,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 10.8193,
      "step": 170
    },
    {
      "epoch": 0.156794425087108,
      "grad_norm": 0.09272942692041397,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 10.8151,
      "step": 180
    },
    {
      "epoch": 0.16550522648083624,
      "grad_norm": 0.0950935110449791,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 10.8051,
      "step": 190
    },
    {
      "epoch": 0.17421602787456447,
      "grad_norm": 0.10270467400550842,
      "learning_rate": 4.000000000000001e-06,
      "loss": 10.8043,
      "step": 200
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 0.09881923347711563,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 10.8044,
      "step": 210
    },
    {
      "epoch": 0.1916376306620209,
      "grad_norm": 0.10348394513130188,
      "learning_rate": 4.4e-06,
      "loss": 10.8036,
      "step": 220
    },
    {
      "epoch": 0.20034843205574912,
      "grad_norm": 0.09483418613672256,
      "learning_rate": 4.600000000000001e-06,
      "loss": 10.8046,
      "step": 230
    },
    {
      "epoch": 0.20905923344947736,
      "grad_norm": 0.09620082378387451,
      "learning_rate": 4.800000000000001e-06,
      "loss": 10.807,
      "step": 240
    },
    {
      "epoch": 0.21777003484320556,
      "grad_norm": 0.09709980338811874,
      "learning_rate": 5e-06,
      "loss": 10.8058,
      "step": 250
    },
    {
      "epoch": 0.2264808362369338,
      "grad_norm": 0.09373420476913452,
      "learning_rate": 5.2e-06,
      "loss": 10.8044,
      "step": 260
    },
    {
      "epoch": 0.23519163763066203,
      "grad_norm": 0.09700135141611099,
      "learning_rate": 5.400000000000001e-06,
      "loss": 10.8168,
      "step": 270
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 0.09450981020927429,
      "learning_rate": 5.600000000000001e-06,
      "loss": 10.8043,
      "step": 280
    },
    {
      "epoch": 0.25261324041811845,
      "grad_norm": 0.09983480721712112,
      "learning_rate": 5.8e-06,
      "loss": 10.8037,
      "step": 290
    },
    {
      "epoch": 0.2613240418118467,
      "grad_norm": 0.09153986722230911,
      "learning_rate": 6e-06,
      "loss": 10.8041,
      "step": 300
    },
    {
      "epoch": 0.2700348432055749,
      "grad_norm": 0.09664902836084366,
      "learning_rate": 6.200000000000001e-06,
      "loss": 10.805,
      "step": 310
    },
    {
      "epoch": 0.2787456445993031,
      "grad_norm": 0.08636524528265,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 10.8049,
      "step": 320
    },
    {
      "epoch": 0.2874564459930314,
      "grad_norm": 0.0945158526301384,
      "learning_rate": 6.600000000000001e-06,
      "loss": 10.8047,
      "step": 330
    },
    {
      "epoch": 0.2961672473867596,
      "grad_norm": 0.09447965025901794,
      "learning_rate": 6.800000000000001e-06,
      "loss": 10.8044,
      "step": 340
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 0.0949367955327034,
      "learning_rate": 7e-06,
      "loss": 10.8038,
      "step": 350
    },
    {
      "epoch": 0.313588850174216,
      "grad_norm": 0.10188119113445282,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 10.8037,
      "step": 360
    },
    {
      "epoch": 0.32229965156794427,
      "grad_norm": 0.09333271533250809,
      "learning_rate": 7.4e-06,
      "loss": 10.8029,
      "step": 370
    },
    {
      "epoch": 0.3310104529616725,
      "grad_norm": 0.10046978294849396,
      "learning_rate": 7.600000000000001e-06,
      "loss": 10.8037,
      "step": 380
    },
    {
      "epoch": 0.3397212543554007,
      "grad_norm": 0.09508766233921051,
      "learning_rate": 7.800000000000002e-06,
      "loss": 10.8065,
      "step": 390
    },
    {
      "epoch": 0.34843205574912894,
      "grad_norm": 0.09448560327291489,
      "learning_rate": 8.000000000000001e-06,
      "loss": 10.8058,
      "step": 400
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.09986421465873718,
      "learning_rate": 8.2e-06,
      "loss": 14.4299,
      "step": 410
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 0.10298220068216324,
      "learning_rate": 8.400000000000001e-06,
      "loss": 10.8035,
      "step": 420
    },
    {
      "epoch": 0.37456445993031356,
      "grad_norm": 0.09236953407526016,
      "learning_rate": 8.6e-06,
      "loss": 10.8309,
      "step": 430
    },
    {
      "epoch": 0.3832752613240418,
      "grad_norm": 0.09000707417726517,
      "learning_rate": 8.8e-06,
      "loss": 10.8027,
      "step": 440
    },
    {
      "epoch": 0.39198606271777003,
      "grad_norm": 0.09554922580718994,
      "learning_rate": 9e-06,
      "loss": 10.8045,
      "step": 450
    },
    {
      "epoch": 0.40069686411149824,
      "grad_norm": 130.35623168945312,
      "learning_rate": 9.200000000000002e-06,
      "loss": 10.8034,
      "step": 460
    },
    {
      "epoch": 0.4094076655052265,
      "grad_norm": 0.09580319374799728,
      "learning_rate": 9.4e-06,
      "loss": 10.8022,
      "step": 470
    },
    {
      "epoch": 0.4181184668989547,
      "grad_norm": 0.10328342765569687,
      "learning_rate": 9.600000000000001e-06,
      "loss": 10.8087,
      "step": 480
    },
    {
      "epoch": 0.4268292682926829,
      "grad_norm": 0.09383571892976761,
      "learning_rate": 9.800000000000001e-06,
      "loss": 10.8027,
      "step": 490
    },
    {
      "epoch": 0.4355400696864111,
      "grad_norm": 61532.49609375,
      "learning_rate": 1e-05,
      "loss": 15.4887,
      "step": 500
    },
    {
      "epoch": 0.4442508710801394,
      "grad_norm": 0.09285067766904831,
      "learning_rate": 9.966032608695653e-06,
      "loss": 10.8028,
      "step": 510
    },
    {
      "epoch": 0.4529616724738676,
      "grad_norm": 0.09329842031002045,
      "learning_rate": 9.932065217391306e-06,
      "loss": 10.8969,
      "step": 520
    },
    {
      "epoch": 0.4616724738675958,
      "grad_norm": 0.09399975091218948,
      "learning_rate": 9.898097826086957e-06,
      "loss": 10.8297,
      "step": 530
    },
    {
      "epoch": 0.47038327526132406,
      "grad_norm": 1130.7960205078125,
      "learning_rate": 9.86413043478261e-06,
      "loss": 10.8358,
      "step": 540
    },
    {
      "epoch": 0.47909407665505227,
      "grad_norm": 0.0971442461013794,
      "learning_rate": 9.830163043478262e-06,
      "loss": 10.8641,
      "step": 550
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 0.09969327598810196,
      "learning_rate": 9.796195652173915e-06,
      "loss": 10.8124,
      "step": 560
    },
    {
      "epoch": 0.4965156794425087,
      "grad_norm": 0.0888582393527031,
      "learning_rate": 9.762228260869566e-06,
      "loss": 10.9576,
      "step": 570
    },
    {
      "epoch": 0.5052264808362369,
      "grad_norm": 0.09847419708967209,
      "learning_rate": 9.728260869565218e-06,
      "loss": 10.8022,
      "step": 580
    },
    {
      "epoch": 0.5139372822299652,
      "grad_norm": 0.0947929099202156,
      "learning_rate": 9.694293478260869e-06,
      "loss": 10.8011,
      "step": 590
    },
    {
      "epoch": 0.5226480836236934,
      "grad_norm": 9.681638717651367,
      "learning_rate": 9.660326086956523e-06,
      "loss": 10.8015,
      "step": 600
    },
    {
      "epoch": 0.5313588850174216,
      "grad_norm": 0.09560558199882507,
      "learning_rate": 9.626358695652174e-06,
      "loss": 13.2261,
      "step": 610
    },
    {
      "epoch": 0.5400696864111498,
      "grad_norm": 0.09392021596431732,
      "learning_rate": 9.592391304347827e-06,
      "loss": 10.802,
      "step": 620
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 0.10044202208518982,
      "learning_rate": 9.558423913043478e-06,
      "loss": 10.8005,
      "step": 630
    },
    {
      "epoch": 0.5574912891986062,
      "grad_norm": 0.09005896002054214,
      "learning_rate": 9.524456521739132e-06,
      "loss": 10.802,
      "step": 640
    },
    {
      "epoch": 0.5662020905923345,
      "grad_norm": 0.09686450660228729,
      "learning_rate": 9.490489130434783e-06,
      "loss": 10.801,
      "step": 650
    },
    {
      "epoch": 0.5749128919860628,
      "grad_norm": 0.09321983903646469,
      "learning_rate": 9.456521739130436e-06,
      "loss": 10.8008,
      "step": 660
    },
    {
      "epoch": 0.5836236933797909,
      "grad_norm": 0.0884283185005188,
      "learning_rate": 9.422554347826087e-06,
      "loss": 10.801,
      "step": 670
    },
    {
      "epoch": 0.5923344947735192,
      "grad_norm": 0.09928812086582184,
      "learning_rate": 9.388586956521741e-06,
      "loss": 10.7998,
      "step": 680
    },
    {
      "epoch": 0.6010452961672473,
      "grad_norm": 0.09434370696544647,
      "learning_rate": 9.354619565217392e-06,
      "loss": 10.8007,
      "step": 690
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 0.09228064864873886,
      "learning_rate": 9.320652173913044e-06,
      "loss": 10.8002,
      "step": 700
    },
    {
      "epoch": 0.6184668989547039,
      "grad_norm": 0.09412040561437607,
      "learning_rate": 9.286684782608695e-06,
      "loss": 10.8009,
      "step": 710
    },
    {
      "epoch": 0.627177700348432,
      "grad_norm": 0.0975504145026207,
      "learning_rate": 9.25271739130435e-06,
      "loss": 10.8002,
      "step": 720
    },
    {
      "epoch": 0.6358885017421603,
      "grad_norm": 0.10332240164279938,
      "learning_rate": 9.21875e-06,
      "loss": 10.8001,
      "step": 730
    },
    {
      "epoch": 0.6445993031358885,
      "grad_norm": 0.09318797290325165,
      "learning_rate": 9.184782608695653e-06,
      "loss": 10.8001,
      "step": 740
    },
    {
      "epoch": 0.6533101045296167,
      "grad_norm": 0.09171754121780396,
      "learning_rate": 9.150815217391306e-06,
      "loss": 10.7999,
      "step": 750
    },
    {
      "epoch": 0.662020905923345,
      "grad_norm": 0.0921366959810257,
      "learning_rate": 9.116847826086958e-06,
      "loss": 10.8003,
      "step": 760
    },
    {
      "epoch": 0.6707317073170732,
      "grad_norm": 0.09661433100700378,
      "learning_rate": 9.08288043478261e-06,
      "loss": 10.8001,
      "step": 770
    },
    {
      "epoch": 0.6794425087108014,
      "grad_norm": 0.09323884546756744,
      "learning_rate": 9.048913043478262e-06,
      "loss": 10.7997,
      "step": 780
    },
    {
      "epoch": 0.6881533101045296,
      "grad_norm": 0.08752495795488358,
      "learning_rate": 9.014945652173914e-06,
      "loss": 10.8,
      "step": 790
    },
    {
      "epoch": 0.6968641114982579,
      "grad_norm": 0.09087631851434708,
      "learning_rate": 8.980978260869567e-06,
      "loss": 10.798,
      "step": 800
    },
    {
      "epoch": 0.705574912891986,
      "grad_norm": 0.09523475170135498,
      "learning_rate": 8.947010869565218e-06,
      "loss": 10.7994,
      "step": 810
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.09918010234832764,
      "learning_rate": 8.91304347826087e-06,
      "loss": 10.7996,
      "step": 820
    },
    {
      "epoch": 0.7229965156794426,
      "grad_norm": 0.09539210051298141,
      "learning_rate": 8.879076086956523e-06,
      "loss": 10.7993,
      "step": 830
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 0.09296409040689468,
      "learning_rate": 8.845108695652174e-06,
      "loss": 10.7993,
      "step": 840
    },
    {
      "epoch": 0.740418118466899,
      "grad_norm": 0.08845611661672592,
      "learning_rate": 8.811141304347827e-06,
      "loss": 10.7993,
      "step": 850
    },
    {
      "epoch": 0.7491289198606271,
      "grad_norm": 0.0954870954155922,
      "learning_rate": 8.77717391304348e-06,
      "loss": 10.7981,
      "step": 860
    },
    {
      "epoch": 0.7578397212543554,
      "grad_norm": 0.09371493011713028,
      "learning_rate": 8.743206521739132e-06,
      "loss": 10.7984,
      "step": 870
    },
    {
      "epoch": 0.7665505226480837,
      "grad_norm": 0.09903618693351746,
      "learning_rate": 8.709239130434783e-06,
      "loss": 10.7987,
      "step": 880
    },
    {
      "epoch": 0.7752613240418118,
      "grad_norm": 0.08991510421037674,
      "learning_rate": 8.675271739130435e-06,
      "loss": 10.7976,
      "step": 890
    },
    {
      "epoch": 0.7839721254355401,
      "grad_norm": 0.09141765534877777,
      "learning_rate": 8.641304347826088e-06,
      "loss": 10.799,
      "step": 900
    },
    {
      "epoch": 0.7926829268292683,
      "grad_norm": 0.09670023620128632,
      "learning_rate": 8.60733695652174e-06,
      "loss": 10.7973,
      "step": 910
    },
    {
      "epoch": 0.8013937282229965,
      "grad_norm": 0.09337509423494339,
      "learning_rate": 8.573369565217391e-06,
      "loss": 10.7985,
      "step": 920
    },
    {
      "epoch": 0.8101045296167247,
      "grad_norm": 0.09618163853883743,
      "learning_rate": 8.539402173913044e-06,
      "loss": 10.7989,
      "step": 930
    },
    {
      "epoch": 0.818815331010453,
      "grad_norm": 0.09537822753190994,
      "learning_rate": 8.505434782608697e-06,
      "loss": 10.7981,
      "step": 940
    },
    {
      "epoch": 0.8275261324041812,
      "grad_norm": 0.09312313050031662,
      "learning_rate": 8.47146739130435e-06,
      "loss": 10.7984,
      "step": 950
    },
    {
      "epoch": 0.8362369337979094,
      "grad_norm": 0.09118187427520752,
      "learning_rate": 8.4375e-06,
      "loss": 10.7981,
      "step": 960
    },
    {
      "epoch": 0.8449477351916377,
      "grad_norm": 0.0919727087020874,
      "learning_rate": 8.403532608695653e-06,
      "loss": 10.7978,
      "step": 970
    },
    {
      "epoch": 0.8536585365853658,
      "grad_norm": 0.09706488996744156,
      "learning_rate": 8.369565217391305e-06,
      "loss": 10.7979,
      "step": 980
    },
    {
      "epoch": 0.8623693379790941,
      "grad_norm": 0.0944119319319725,
      "learning_rate": 8.335597826086958e-06,
      "loss": 10.7983,
      "step": 990
    },
    {
      "epoch": 0.8710801393728222,
      "grad_norm": 0.1016387864947319,
      "learning_rate": 8.301630434782609e-06,
      "loss": 10.7973,
      "step": 1000
    },
    {
      "epoch": 0.8797909407665505,
      "grad_norm": 0.10198845714330673,
      "learning_rate": 8.267663043478261e-06,
      "loss": 10.7984,
      "step": 1010
    },
    {
      "epoch": 0.8885017421602788,
      "grad_norm": 0.10102390497922897,
      "learning_rate": 8.233695652173914e-06,
      "loss": 10.7971,
      "step": 1020
    },
    {
      "epoch": 0.8972125435540069,
      "grad_norm": 0.0972791463136673,
      "learning_rate": 8.199728260869567e-06,
      "loss": 10.7971,
      "step": 1030
    },
    {
      "epoch": 0.9059233449477352,
      "grad_norm": 0.09343789517879486,
      "learning_rate": 8.165760869565218e-06,
      "loss": 10.7978,
      "step": 1040
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 0.09384151548147202,
      "learning_rate": 8.13179347826087e-06,
      "loss": 10.797,
      "step": 1050
    },
    {
      "epoch": 0.9233449477351916,
      "grad_norm": 0.09324201196432114,
      "learning_rate": 8.097826086956523e-06,
      "loss": 10.7969,
      "step": 1060
    },
    {
      "epoch": 0.9320557491289199,
      "grad_norm": 0.09341388940811157,
      "learning_rate": 8.063858695652175e-06,
      "loss": 10.7972,
      "step": 1070
    },
    {
      "epoch": 0.9407665505226481,
      "grad_norm": 0.09563016146421432,
      "learning_rate": 8.029891304347826e-06,
      "loss": 10.7978,
      "step": 1080
    },
    {
      "epoch": 0.9494773519163763,
      "grad_norm": 0.09752058237791061,
      "learning_rate": 7.995923913043479e-06,
      "loss": 10.7962,
      "step": 1090
    },
    {
      "epoch": 0.9581881533101045,
      "grad_norm": 0.09652635455131531,
      "learning_rate": 7.961956521739131e-06,
      "loss": 10.7968,
      "step": 1100
    },
    {
      "epoch": 0.9668989547038328,
      "grad_norm": 0.097730852663517,
      "learning_rate": 7.927989130434784e-06,
      "loss": 10.7967,
      "step": 1110
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 0.09178122133016586,
      "learning_rate": 7.894021739130435e-06,
      "loss": 10.7963,
      "step": 1120
    },
    {
      "epoch": 0.9843205574912892,
      "grad_norm": 0.09718208760023117,
      "learning_rate": 7.860054347826088e-06,
      "loss": 10.7967,
      "step": 1130
    },
    {
      "epoch": 0.9930313588850174,
      "grad_norm": 0.09228435158729553,
      "learning_rate": 7.82608695652174e-06,
      "loss": 10.7973,
      "step": 1140
    }
  ],
  "logging_steps": 10,
  "max_steps": 3444,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 565862839314432.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}