{
  "best_metric": 0.9605053856292324,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-cancer/checkpoint-1547",
  "epoch": 0.9998384230085636,
  "eval_steps": 500,
  "global_step": 1547,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006463079657456778,
      "grad_norm": 7.331174850463867,
      "learning_rate": 3.225806451612903e-06,
      "loss": 0.7162,
      "step": 10
    },
    {
      "epoch": 0.012926159314913557,
      "grad_norm": 4.575641632080078,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.6444,
      "step": 20
    },
    {
      "epoch": 0.019389238972370333,
      "grad_norm": 3.2741119861602783,
      "learning_rate": 9.67741935483871e-06,
      "loss": 0.5618,
      "step": 30
    },
    {
      "epoch": 0.025852318629827113,
      "grad_norm": 4.564846515655518,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 0.4959,
      "step": 40
    },
    {
      "epoch": 0.03231539828728389,
      "grad_norm": 4.165364742279053,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.447,
      "step": 50
    },
    {
      "epoch": 0.038778477944740666,
      "grad_norm": 4.854116439819336,
      "learning_rate": 1.935483870967742e-05,
      "loss": 0.409,
      "step": 60
    },
    {
      "epoch": 0.045241557602197446,
      "grad_norm": 4.5222907066345215,
      "learning_rate": 2.258064516129032e-05,
      "loss": 0.3992,
      "step": 70
    },
    {
      "epoch": 0.051704637259654226,
      "grad_norm": 5.930300235748291,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 0.3832,
      "step": 80
    },
    {
      "epoch": 0.058167716917111006,
      "grad_norm": 6.286398410797119,
      "learning_rate": 2.9032258064516133e-05,
      "loss": 0.399,
      "step": 90
    },
    {
      "epoch": 0.06463079657456779,
      "grad_norm": 8.192097663879395,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.3631,
      "step": 100
    },
    {
      "epoch": 0.07109387623202457,
      "grad_norm": 7.482578754425049,
      "learning_rate": 3.548387096774194e-05,
      "loss": 0.3702,
      "step": 110
    },
    {
      "epoch": 0.07755695588948133,
      "grad_norm": 5.62371301651001,
      "learning_rate": 3.870967741935484e-05,
      "loss": 0.352,
      "step": 120
    },
    {
      "epoch": 0.08402003554693811,
      "grad_norm": 5.444067001342773,
      "learning_rate": 4.1935483870967746e-05,
      "loss": 0.3338,
      "step": 130
    },
    {
      "epoch": 0.09048311520439489,
      "grad_norm": 4.892695426940918,
      "learning_rate": 4.516129032258064e-05,
      "loss": 0.3463,
      "step": 140
    },
    {
      "epoch": 0.09694619486185167,
      "grad_norm": 5.656785488128662,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 0.3718,
      "step": 150
    },
    {
      "epoch": 0.10340927451930845,
      "grad_norm": 7.789802074432373,
      "learning_rate": 4.982040229885058e-05,
      "loss": 0.3412,
      "step": 160
    },
    {
      "epoch": 0.10987235417676523,
      "grad_norm": 7.198510646820068,
      "learning_rate": 4.946120689655172e-05,
      "loss": 0.3577,
      "step": 170
    },
    {
      "epoch": 0.11633543383422201,
      "grad_norm": 7.838411808013916,
      "learning_rate": 4.9102011494252875e-05,
      "loss": 0.3522,
      "step": 180
    },
    {
      "epoch": 0.12279851349167878,
      "grad_norm": 7.711742401123047,
      "learning_rate": 4.8742816091954026e-05,
      "loss": 0.3381,
      "step": 190
    },
    {
      "epoch": 0.12926159314913557,
      "grad_norm": 5.089247703552246,
      "learning_rate": 4.838362068965517e-05,
      "loss": 0.3507,
      "step": 200
    },
    {
      "epoch": 0.13572467280659234,
      "grad_norm": 4.4015913009643555,
      "learning_rate": 4.802442528735632e-05,
      "loss": 0.3162,
      "step": 210
    },
    {
      "epoch": 0.14218775246404913,
      "grad_norm": 4.59859561920166,
      "learning_rate": 4.7665229885057474e-05,
      "loss": 0.274,
      "step": 220
    },
    {
      "epoch": 0.1486508321215059,
      "grad_norm": 3.7790114879608154,
      "learning_rate": 4.730603448275862e-05,
      "loss": 0.3161,
      "step": 230
    },
    {
      "epoch": 0.15511391177896267,
      "grad_norm": 5.165287017822266,
      "learning_rate": 4.694683908045977e-05,
      "loss": 0.2878,
      "step": 240
    },
    {
      "epoch": 0.16157699143641946,
      "grad_norm": 6.651523590087891,
      "learning_rate": 4.658764367816092e-05,
      "loss": 0.3133,
      "step": 250
    },
    {
      "epoch": 0.16804007109387623,
      "grad_norm": 6.430370807647705,
      "learning_rate": 4.622844827586207e-05,
      "loss": 0.2744,
      "step": 260
    },
    {
      "epoch": 0.17450315075133302,
      "grad_norm": 3.7495827674865723,
      "learning_rate": 4.5869252873563225e-05,
      "loss": 0.2638,
      "step": 270
    },
    {
      "epoch": 0.18096623040878979,
      "grad_norm": 4.071202278137207,
      "learning_rate": 4.551005747126437e-05,
      "loss": 0.2698,
      "step": 280
    },
    {
      "epoch": 0.18742931006624658,
      "grad_norm": 5.824757099151611,
      "learning_rate": 4.515086206896552e-05,
      "loss": 0.2916,
      "step": 290
    },
    {
      "epoch": 0.19389238972370335,
      "grad_norm": 4.58206033706665,
      "learning_rate": 4.4791666666666673e-05,
      "loss": 0.2674,
      "step": 300
    },
    {
      "epoch": 0.2003554693811601,
      "grad_norm": 7.343913555145264,
      "learning_rate": 4.443247126436782e-05,
      "loss": 0.3069,
      "step": 310
    },
    {
      "epoch": 0.2068185490386169,
      "grad_norm": 4.5890302658081055,
      "learning_rate": 4.407327586206897e-05,
      "loss": 0.2889,
      "step": 320
    },
    {
      "epoch": 0.21328162869607367,
      "grad_norm": 6.601052761077881,
      "learning_rate": 4.371408045977012e-05,
      "loss": 0.2509,
      "step": 330
    },
    {
      "epoch": 0.21974470835353047,
      "grad_norm": 2.973667621612549,
      "learning_rate": 4.3354885057471266e-05,
      "loss": 0.2667,
      "step": 340
    },
    {
      "epoch": 0.22620778801098723,
      "grad_norm": 7.966582298278809,
      "learning_rate": 4.299568965517242e-05,
      "loss": 0.2888,
      "step": 350
    },
    {
      "epoch": 0.23267086766844403,
      "grad_norm": 2.4454495906829834,
      "learning_rate": 4.263649425287356e-05,
      "loss": 0.2638,
      "step": 360
    },
    {
      "epoch": 0.2391339473259008,
      "grad_norm": 4.780755996704102,
      "learning_rate": 4.2277298850574714e-05,
      "loss": 0.2771,
      "step": 370
    },
    {
      "epoch": 0.24559702698335756,
      "grad_norm": 7.205840587615967,
      "learning_rate": 4.1918103448275866e-05,
      "loss": 0.289,
      "step": 380
    },
    {
      "epoch": 0.25206010664081435,
      "grad_norm": 6.309414863586426,
      "learning_rate": 4.155890804597701e-05,
      "loss": 0.2471,
      "step": 390
    },
    {
      "epoch": 0.25852318629827115,
      "grad_norm": 4.736650466918945,
      "learning_rate": 4.119971264367816e-05,
      "loss": 0.2204,
      "step": 400
    },
    {
      "epoch": 0.2649862659557279,
      "grad_norm": 4.276492595672607,
      "learning_rate": 4.0840517241379314e-05,
      "loss": 0.2546,
      "step": 410
    },
    {
      "epoch": 0.2714493456131847,
      "grad_norm": 4.888199329376221,
      "learning_rate": 4.048132183908046e-05,
      "loss": 0.2343,
      "step": 420
    },
    {
      "epoch": 0.27791242527064147,
      "grad_norm": 3.0027356147766113,
      "learning_rate": 4.012212643678161e-05,
      "loss": 0.2644,
      "step": 430
    },
    {
      "epoch": 0.28437550492809827,
      "grad_norm": 3.5034611225128174,
      "learning_rate": 3.976293103448276e-05,
      "loss": 0.2318,
      "step": 440
    },
    {
      "epoch": 0.290838584585555,
      "grad_norm": 4.029373645782471,
      "learning_rate": 3.940373563218391e-05,
      "loss": 0.2393,
      "step": 450
    },
    {
      "epoch": 0.2973016642430118,
      "grad_norm": 2.631014585494995,
      "learning_rate": 3.904454022988506e-05,
      "loss": 0.2497,
      "step": 460
    },
    {
      "epoch": 0.3037647439004686,
      "grad_norm": 3.7511560916900635,
      "learning_rate": 3.86853448275862e-05,
      "loss": 0.2776,
      "step": 470
    },
    {
      "epoch": 0.31022782355792533,
      "grad_norm": 4.721391677856445,
      "learning_rate": 3.8326149425287355e-05,
      "loss": 0.2353,
      "step": 480
    },
    {
      "epoch": 0.3166909032153821,
      "grad_norm": 3.97963285446167,
      "learning_rate": 3.796695402298851e-05,
      "loss": 0.2549,
      "step": 490
    },
    {
      "epoch": 0.3231539828728389,
      "grad_norm": 4.646782398223877,
      "learning_rate": 3.760775862068966e-05,
      "loss": 0.2473,
      "step": 500
    },
    {
      "epoch": 0.3296170625302957,
      "grad_norm": 4.661077976226807,
      "learning_rate": 3.724856321839081e-05,
      "loss": 0.2581,
      "step": 510
    },
    {
      "epoch": 0.33608014218775245,
      "grad_norm": 7.082584381103516,
      "learning_rate": 3.6889367816091954e-05,
      "loss": 0.2427,
      "step": 520
    },
    {
      "epoch": 0.34254322184520924,
      "grad_norm": 4.724052906036377,
      "learning_rate": 3.6530172413793106e-05,
      "loss": 0.2683,
      "step": 530
    },
    {
      "epoch": 0.34900630150266604,
      "grad_norm": 3.6246917247772217,
      "learning_rate": 3.617097701149426e-05,
      "loss": 0.2636,
      "step": 540
    },
    {
      "epoch": 0.3554693811601228,
      "grad_norm": 9.237018585205078,
      "learning_rate": 3.58117816091954e-05,
      "loss": 0.2502,
      "step": 550
    },
    {
      "epoch": 0.36193246081757957,
      "grad_norm": 3.987290382385254,
      "learning_rate": 3.5452586206896554e-05,
      "loss": 0.2359,
      "step": 560
    },
    {
      "epoch": 0.36839554047503636,
      "grad_norm": 6.946052074432373,
      "learning_rate": 3.5093390804597706e-05,
      "loss": 0.2445,
      "step": 570
    },
    {
      "epoch": 0.37485862013249316,
      "grad_norm": 4.0834736824035645,
      "learning_rate": 3.473419540229885e-05,
      "loss": 0.2515,
      "step": 580
    },
    {
      "epoch": 0.3813216997899499,
      "grad_norm": 3.3558614253997803,
      "learning_rate": 3.4375e-05,
      "loss": 0.2511,
      "step": 590
    },
    {
      "epoch": 0.3877847794474067,
      "grad_norm": 4.314471244812012,
      "learning_rate": 3.4015804597701154e-05,
      "loss": 0.232,
      "step": 600
    },
    {
      "epoch": 0.3942478591048635,
      "grad_norm": 3.44812273979187,
      "learning_rate": 3.36566091954023e-05,
      "loss": 0.1993,
      "step": 610
    },
    {
      "epoch": 0.4007109387623202,
      "grad_norm": 4.297704219818115,
      "learning_rate": 3.329741379310345e-05,
      "loss": 0.2556,
      "step": 620
    },
    {
      "epoch": 0.407174018419777,
      "grad_norm": 2.1731884479522705,
      "learning_rate": 3.2938218390804595e-05,
      "loss": 0.2168,
      "step": 630
    },
    {
      "epoch": 0.4136370980772338,
      "grad_norm": 3.119593620300293,
      "learning_rate": 3.2579022988505747e-05,
      "loss": 0.2089,
      "step": 640
    },
    {
      "epoch": 0.4201001777346906,
      "grad_norm": 3.971031665802002,
      "learning_rate": 3.22198275862069e-05,
      "loss": 0.2373,
      "step": 650
    },
    {
      "epoch": 0.42656325739214734,
      "grad_norm": 3.3221435546875,
      "learning_rate": 3.186063218390804e-05,
      "loss": 0.243,
      "step": 660
    },
    {
      "epoch": 0.43302633704960414,
      "grad_norm": 3.1322009563446045,
      "learning_rate": 3.1501436781609195e-05,
      "loss": 0.2239,
      "step": 670
    },
    {
      "epoch": 0.43948941670706093,
      "grad_norm": 3.9616572856903076,
      "learning_rate": 3.1142241379310346e-05,
      "loss": 0.2316,
      "step": 680
    },
    {
      "epoch": 0.44595249636451767,
      "grad_norm": 2.667262077331543,
      "learning_rate": 3.078304597701149e-05,
      "loss": 0.204,
      "step": 690
    },
    {
      "epoch": 0.45241557602197446,
      "grad_norm": 5.354103088378906,
      "learning_rate": 3.042385057471265e-05,
      "loss": 0.2035,
      "step": 700
    },
    {
      "epoch": 0.45887865567943126,
      "grad_norm": 3.422032356262207,
      "learning_rate": 3.0064655172413798e-05,
      "loss": 0.2518,
      "step": 710
    },
    {
      "epoch": 0.46534173533688805,
      "grad_norm": 6.190507411956787,
      "learning_rate": 2.9705459770114946e-05,
      "loss": 0.2148,
      "step": 720
    },
    {
      "epoch": 0.4718048149943448,
      "grad_norm": 4.419785976409912,
      "learning_rate": 2.9346264367816094e-05,
      "loss": 0.2399,
      "step": 730
    },
    {
      "epoch": 0.4782678946518016,
      "grad_norm": 6.475925922393799,
      "learning_rate": 2.8987068965517246e-05,
      "loss": 0.2308,
      "step": 740
    },
    {
      "epoch": 0.4847309743092584,
      "grad_norm": 2.907485246658325,
      "learning_rate": 2.8627873563218394e-05,
      "loss": 0.2071,
      "step": 750
    },
    {
      "epoch": 0.4911940539667151,
      "grad_norm": 3.476051092147827,
      "learning_rate": 2.8268678160919542e-05,
      "loss": 0.2374,
      "step": 760
    },
    {
      "epoch": 0.4976571336241719,
      "grad_norm": 6.298080921173096,
      "learning_rate": 2.7909482758620694e-05,
      "loss": 0.2277,
      "step": 770
    },
    {
      "epoch": 0.5041202132816287,
      "grad_norm": 4.822377681732178,
      "learning_rate": 2.7550287356321842e-05,
      "loss": 0.2259,
      "step": 780
    },
    {
      "epoch": 0.5105832929390854,
      "grad_norm": 3.966501474380493,
      "learning_rate": 2.719109195402299e-05,
      "loss": 0.2212,
      "step": 790
    },
    {
      "epoch": 0.5170463725965423,
      "grad_norm": 2.599501848220825,
      "learning_rate": 2.6831896551724138e-05,
      "loss": 0.2202,
      "step": 800
    },
    {
      "epoch": 0.523509452253999,
      "grad_norm": 4.360517501831055,
      "learning_rate": 2.647270114942529e-05,
      "loss": 0.2482,
      "step": 810
    },
    {
      "epoch": 0.5299725319114558,
      "grad_norm": 4.807380199432373,
      "learning_rate": 2.6113505747126438e-05,
      "loss": 0.2222,
      "step": 820
    },
    {
      "epoch": 0.5364356115689126,
      "grad_norm": 3.3132829666137695,
      "learning_rate": 2.5754310344827586e-05,
      "loss": 0.2421,
      "step": 830
    },
    {
      "epoch": 0.5428986912263694,
      "grad_norm": 6.560469150543213,
      "learning_rate": 2.5395114942528734e-05,
      "loss": 0.196,
      "step": 840
    },
    {
      "epoch": 0.5493617708838261,
      "grad_norm": 7.168336391448975,
      "learning_rate": 2.5035919540229886e-05,
      "loss": 0.244,
      "step": 850
    },
    {
      "epoch": 0.5558248505412829,
      "grad_norm": 3.6738462448120117,
      "learning_rate": 2.4676724137931034e-05,
      "loss": 0.2361,
      "step": 860
    },
    {
      "epoch": 0.5622879301987397,
      "grad_norm": 4.0579833984375,
      "learning_rate": 2.4317528735632186e-05,
      "loss": 0.2195,
      "step": 870
    },
    {
      "epoch": 0.5687510098561965,
      "grad_norm": 5.091606616973877,
      "learning_rate": 2.3958333333333334e-05,
      "loss": 0.2128,
      "step": 880
    },
    {
      "epoch": 0.5752140895136533,
      "grad_norm": 7.0507354736328125,
      "learning_rate": 2.3599137931034486e-05,
      "loss": 0.2259,
      "step": 890
    },
    {
      "epoch": 0.58167716917111,
      "grad_norm": 4.719695568084717,
      "learning_rate": 2.3239942528735634e-05,
      "loss": 0.2119,
      "step": 900
    },
    {
      "epoch": 0.5881402488285669,
      "grad_norm": 4.4290289878845215,
      "learning_rate": 2.2880747126436782e-05,
      "loss": 0.2138,
      "step": 910
    },
    {
      "epoch": 0.5946033284860236,
      "grad_norm": 4.132259368896484,
      "learning_rate": 2.2521551724137934e-05,
      "loss": 0.2111,
      "step": 920
    },
    {
      "epoch": 0.6010664081434803,
      "grad_norm": 6.010085105895996,
      "learning_rate": 2.2162356321839082e-05,
      "loss": 0.2106,
      "step": 930
    },
    {
      "epoch": 0.6075294878009372,
      "grad_norm": 3.847933769226074,
      "learning_rate": 2.180316091954023e-05,
      "loss": 0.2146,
      "step": 940
    },
    {
      "epoch": 0.6139925674583939,
      "grad_norm": 4.39457893371582,
      "learning_rate": 2.144396551724138e-05,
      "loss": 0.2279,
      "step": 950
    },
    {
      "epoch": 0.6204556471158507,
      "grad_norm": 3.269622564315796,
      "learning_rate": 2.108477011494253e-05,
      "loss": 0.2259,
      "step": 960
    },
    {
      "epoch": 0.6269187267733075,
      "grad_norm": 3.5994322299957275,
      "learning_rate": 2.0725574712643678e-05,
      "loss": 0.1996,
      "step": 970
    },
    {
      "epoch": 0.6333818064307642,
      "grad_norm": 4.982938289642334,
      "learning_rate": 2.036637931034483e-05,
      "loss": 0.2311,
      "step": 980
    },
    {
      "epoch": 0.639844886088221,
      "grad_norm": 5.611133098602295,
      "learning_rate": 2.0007183908045978e-05,
      "loss": 0.2073,
      "step": 990
    },
    {
      "epoch": 0.6463079657456778,
      "grad_norm": 4.818994522094727,
      "learning_rate": 1.964798850574713e-05,
      "loss": 0.2233,
      "step": 1000
    },
    {
      "epoch": 0.6527710454031346,
      "grad_norm": 2.8192803859710693,
      "learning_rate": 1.9288793103448278e-05,
      "loss": 0.1841,
      "step": 1010
    },
    {
      "epoch": 0.6592341250605914,
      "grad_norm": 3.74814510345459,
      "learning_rate": 1.8929597701149426e-05,
      "loss": 0.1908,
      "step": 1020
    },
    {
      "epoch": 0.6656972047180482,
      "grad_norm": 5.619290351867676,
      "learning_rate": 1.8570402298850574e-05,
      "loss": 0.2135,
      "step": 1030
    },
    {
      "epoch": 0.6721602843755049,
      "grad_norm": 3.431131362915039,
      "learning_rate": 1.8211206896551726e-05,
      "loss": 0.2036,
      "step": 1040
    },
    {
      "epoch": 0.6786233640329618,
      "grad_norm": 4.3107781410217285,
      "learning_rate": 1.7852011494252874e-05,
      "loss": 0.2013,
      "step": 1050
    },
    {
      "epoch": 0.6850864436904185,
      "grad_norm": 2.6932215690612793,
      "learning_rate": 1.7492816091954022e-05,
      "loss": 0.1654,
      "step": 1060
    },
    {
      "epoch": 0.6915495233478752,
      "grad_norm": 5.07429313659668,
      "learning_rate": 1.7133620689655174e-05,
      "loss": 0.2128,
      "step": 1070
    },
    {
      "epoch": 0.6980126030053321,
      "grad_norm": 6.4850873947143555,
      "learning_rate": 1.6774425287356325e-05,
      "loss": 0.1926,
      "step": 1080
    },
    {
      "epoch": 0.7044756826627888,
      "grad_norm": 4.696744441986084,
      "learning_rate": 1.6415229885057474e-05,
      "loss": 0.2329,
      "step": 1090
    },
    {
      "epoch": 0.7109387623202456,
      "grad_norm": 7.928685188293457,
      "learning_rate": 1.6056034482758622e-05,
      "loss": 0.2494,
      "step": 1100
    },
    {
      "epoch": 0.7174018419777024,
      "grad_norm": 6.6810455322265625,
      "learning_rate": 1.569683908045977e-05,
      "loss": 0.2082,
      "step": 1110
    },
    {
      "epoch": 0.7238649216351591,
      "grad_norm": 6.868622303009033,
      "learning_rate": 1.533764367816092e-05,
      "loss": 0.2153,
      "step": 1120
    },
    {
      "epoch": 0.7303280012926159,
      "grad_norm": 4.763729095458984,
      "learning_rate": 1.497844827586207e-05,
      "loss": 0.1972,
      "step": 1130
    },
    {
      "epoch": 0.7367910809500727,
      "grad_norm": 3.208186149597168,
      "learning_rate": 1.461925287356322e-05,
      "loss": 0.201,
      "step": 1140
    },
    {
      "epoch": 0.7432541606075295,
      "grad_norm": 4.111704349517822,
      "learning_rate": 1.4260057471264368e-05,
      "loss": 0.2154,
      "step": 1150
    },
    {
      "epoch": 0.7497172402649863,
      "grad_norm": 6.802502632141113,
      "learning_rate": 1.3900862068965518e-05,
      "loss": 0.1819,
      "step": 1160
    },
    {
      "epoch": 0.7561803199224431,
      "grad_norm": 4.15958833694458,
      "learning_rate": 1.3541666666666666e-05,
      "loss": 0.1823,
      "step": 1170
    },
    {
      "epoch": 0.7626433995798998,
      "grad_norm": 2.806739568710327,
      "learning_rate": 1.3182471264367816e-05,
      "loss": 0.1963,
      "step": 1180
    },
    {
      "epoch": 0.7691064792373566,
      "grad_norm": 5.593130111694336,
      "learning_rate": 1.2823275862068968e-05,
      "loss": 0.1948,
      "step": 1190
    },
    {
      "epoch": 0.7755695588948134,
      "grad_norm": 7.160956859588623,
      "learning_rate": 1.2464080459770116e-05,
      "loss": 0.2249,
      "step": 1200
    },
    {
      "epoch": 0.7820326385522701,
      "grad_norm": 5.844871520996094,
      "learning_rate": 1.2104885057471264e-05,
      "loss": 0.2109,
      "step": 1210
    },
    {
      "epoch": 0.788495718209727,
      "grad_norm": 4.273647785186768,
      "learning_rate": 1.1745689655172416e-05,
      "loss": 0.1976,
      "step": 1220
    },
    {
      "epoch": 0.7949587978671837,
      "grad_norm": 5.624157428741455,
      "learning_rate": 1.1386494252873564e-05,
      "loss": 0.2017,
      "step": 1230
    },
    {
      "epoch": 0.8014218775246404,
      "grad_norm": 5.627654552459717,
      "learning_rate": 1.1027298850574714e-05,
      "loss": 0.2153,
      "step": 1240
    },
    {
      "epoch": 0.8078849571820973,
      "grad_norm": 4.900112628936768,
      "learning_rate": 1.0668103448275862e-05,
      "loss": 0.2436,
      "step": 1250
    },
    {
      "epoch": 0.814348036839554,
      "grad_norm": 4.697551727294922,
      "learning_rate": 1.0308908045977012e-05,
      "loss": 0.1912,
      "step": 1260
    },
    {
      "epoch": 0.8208111164970108,
      "grad_norm": 7.477961540222168,
      "learning_rate": 9.949712643678162e-06,
      "loss": 0.2109,
      "step": 1270
    },
    {
      "epoch": 0.8272741961544676,
      "grad_norm": 4.095799922943115,
      "learning_rate": 9.590517241379312e-06,
      "loss": 0.2129,
      "step": 1280
    },
    {
      "epoch": 0.8337372758119244,
      "grad_norm": 6.4529924392700195,
      "learning_rate": 9.23132183908046e-06,
      "loss": 0.2272,
      "step": 1290
    },
    {
      "epoch": 0.8402003554693812,
      "grad_norm": 5.603827476501465,
      "learning_rate": 8.87212643678161e-06,
      "loss": 0.1771,
      "step": 1300
    },
    {
      "epoch": 0.846663435126838,
      "grad_norm": 9.214598655700684,
      "learning_rate": 8.512931034482758e-06,
      "loss": 0.1775,
      "step": 1310
    },
    {
      "epoch": 0.8531265147842947,
      "grad_norm": 3.415295124053955,
      "learning_rate": 8.153735632183908e-06,
      "loss": 0.1973,
      "step": 1320
    },
    {
      "epoch": 0.8595895944417515,
      "grad_norm": 3.975978136062622,
      "learning_rate": 7.794540229885058e-06,
      "loss": 0.187,
      "step": 1330
    },
    {
      "epoch": 0.8660526740992083,
      "grad_norm": 2.804140090942383,
      "learning_rate": 7.435344827586208e-06,
      "loss": 0.2051,
      "step": 1340
    },
    {
      "epoch": 0.872515753756665,
      "grad_norm": 6.0080647468566895,
      "learning_rate": 7.076149425287357e-06,
      "loss": 0.216,
      "step": 1350
    },
    {
      "epoch": 0.8789788334141219,
      "grad_norm": 4.182724475860596,
      "learning_rate": 6.716954022988506e-06,
      "loss": 0.1982,
      "step": 1360
    },
    {
      "epoch": 0.8854419130715786,
      "grad_norm": 7.1342854499816895,
      "learning_rate": 6.357758620689655e-06,
      "loss": 0.1881,
      "step": 1370
    },
    {
      "epoch": 0.8919049927290353,
      "grad_norm": 4.865418434143066,
      "learning_rate": 5.998563218390805e-06,
      "loss": 0.2099,
      "step": 1380
    },
    {
      "epoch": 0.8983680723864922,
      "grad_norm": 4.020890235900879,
      "learning_rate": 5.639367816091954e-06,
      "loss": 0.1819,
      "step": 1390
    },
    {
      "epoch": 0.9048311520439489,
      "grad_norm": 3.826892852783203,
      "learning_rate": 5.280172413793104e-06,
      "loss": 0.1846,
      "step": 1400
    },
    {
      "epoch": 0.9112942317014057,
      "grad_norm": 5.284408092498779,
      "learning_rate": 4.920977011494253e-06,
      "loss": 0.1888,
      "step": 1410
    },
    {
      "epoch": 0.9177573113588625,
      "grad_norm": 4.285012722015381,
      "learning_rate": 4.561781609195403e-06,
      "loss": 0.1886,
      "step": 1420
    },
    {
      "epoch": 0.9242203910163193,
      "grad_norm": 3.0137135982513428,
      "learning_rate": 4.202586206896552e-06,
      "loss": 0.1976,
      "step": 1430
    },
    {
      "epoch": 0.9306834706737761,
      "grad_norm": 7.759015083312988,
      "learning_rate": 3.843390804597701e-06,
      "loss": 0.1891,
      "step": 1440
    },
    {
      "epoch": 0.9371465503312328,
      "grad_norm": 4.454775810241699,
      "learning_rate": 3.484195402298851e-06,
      "loss": 0.2085,
      "step": 1450
    },
    {
      "epoch": 0.9436096299886896,
      "grad_norm": 4.0864949226379395,
      "learning_rate": 3.125e-06,
      "loss": 0.1859,
      "step": 1460
    },
    {
      "epoch": 0.9500727096461464,
      "grad_norm": 6.020027160644531,
      "learning_rate": 2.7658045977011496e-06,
      "loss": 0.1994,
      "step": 1470
    },
    {
      "epoch": 0.9565357893036032,
      "grad_norm": 5.507518768310547,
      "learning_rate": 2.4066091954022987e-06,
      "loss": 0.1908,
      "step": 1480
    },
    {
      "epoch": 0.9629988689610599,
      "grad_norm": 4.609635829925537,
      "learning_rate": 2.047413793103448e-06,
      "loss": 0.2239,
      "step": 1490
    },
    {
      "epoch": 0.9694619486185168,
      "grad_norm": 5.80496072769165,
      "learning_rate": 1.6882183908045979e-06,
      "loss": 0.1824,
      "step": 1500
    },
    {
      "epoch": 0.9759250282759735,
      "grad_norm": 4.583664417266846,
      "learning_rate": 1.3290229885057471e-06,
      "loss": 0.1866,
      "step": 1510
    },
    {
      "epoch": 0.9823881079334302,
      "grad_norm": 5.140854358673096,
      "learning_rate": 9.698275862068966e-07,
      "loss": 0.1693,
      "step": 1520
    },
    {
      "epoch": 0.9888511875908871,
      "grad_norm": 4.870802402496338,
      "learning_rate": 6.106321839080461e-07,
      "loss": 0.1786,
      "step": 1530
    },
    {
      "epoch": 0.9953142672483438,
      "grad_norm": 4.085971832275391,
      "learning_rate": 2.514367816091954e-07,
      "loss": 0.1819,
      "step": 1540
    },
    {
      "epoch": 0.9998384230085636,
      "eval_accuracy": 0.9605053856292324,
      "eval_loss": 0.11349575221538544,
      "eval_runtime": 169.4025,
      "eval_samples_per_second": 129.886,
      "eval_steps_per_second": 4.061,
      "step": 1547
    },
    {
      "epoch": 0.9998384230085636,
      "step": 1547,
      "total_flos": 4.921883515023262e+18,
      "train_loss": 0.25020766805354133,
      "train_runtime": 3368.4805,
      "train_samples_per_second": 58.787,
      "train_steps_per_second": 0.459
    }
  ],
  "logging_steps": 10,
  "max_steps": 1547,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 4.921883515023262e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}