|
{
  "best_metric": 69.00867976244861,
  "best_model_checkpoint": "/scratch/mrahma45/pixel/finetuned_models/canine/bert-base-finetuned-parsing-ud-Coptic-Scriptorium/checkpoint-9000",
  "epoch": 294.87179487179486,
  "global_step": 11500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 2.56,
      "learning_rate": 7.680000000000001e-05,
      "loss": 4.7843,
      "step": 100
    },
    {
      "epoch": 5.13,
      "learning_rate": 7.948456375838927e-05,
      "loss": 2.0692,
      "step": 200
    },
    {
      "epoch": 7.69,
      "learning_rate": 7.894765100671142e-05,
      "loss": 1.2357,
      "step": 300
    },
    {
      "epoch": 10.26,
      "learning_rate": 7.841073825503357e-05,
      "loss": 0.8102,
      "step": 400
    },
    {
      "epoch": 12.82,
      "learning_rate": 7.78738255033557e-05,
      "loss": 0.5577,
      "step": 500
    },
    {
      "epoch": 12.82,
      "eval_las": 61.26998629511192,
      "eval_loss": 2.625490188598633,
      "eval_runtime": 2.5145,
      "eval_samples_per_second": 151.518,
      "eval_steps_per_second": 19.089,
      "eval_uas": 69.51119232526267,
      "step": 500
    },
    {
      "epoch": 15.38,
      "learning_rate": 7.733691275167786e-05,
      "loss": 0.4067,
      "step": 600
    },
    {
      "epoch": 17.95,
      "learning_rate": 7.680000000000001e-05,
      "loss": 0.31,
      "step": 700
    },
    {
      "epoch": 20.51,
      "learning_rate": 7.626308724832216e-05,
      "loss": 0.2329,
      "step": 800
    },
    {
      "epoch": 23.08,
      "learning_rate": 7.57261744966443e-05,
      "loss": 0.2022,
      "step": 900
    },
    {
      "epoch": 25.64,
      "learning_rate": 7.518926174496645e-05,
      "loss": 0.1537,
      "step": 1000
    },
    {
      "epoch": 25.64,
      "eval_las": 64.12060301507537,
      "eval_loss": 3.8098294734954834,
      "eval_runtime": 2.5064,
      "eval_samples_per_second": 152.01,
      "eval_steps_per_second": 19.151,
      "eval_uas": 72.4988579259936,
      "step": 1000
    },
    {
      "epoch": 28.21,
      "learning_rate": 7.46523489932886e-05,
      "loss": 0.1318,
      "step": 1100
    },
    {
      "epoch": 30.77,
      "learning_rate": 7.411543624161075e-05,
      "loss": 0.1154,
      "step": 1200
    },
    {
      "epoch": 33.33,
      "learning_rate": 7.35785234899329e-05,
      "loss": 0.0959,
      "step": 1300
    },
    {
      "epoch": 35.9,
      "learning_rate": 7.304161073825505e-05,
      "loss": 0.09,
      "step": 1400
    },
    {
      "epoch": 38.46,
      "learning_rate": 7.25046979865772e-05,
      "loss": 0.08,
      "step": 1500
    },
    {
      "epoch": 38.46,
      "eval_las": 65.40886249428964,
      "eval_loss": 4.147989273071289,
      "eval_runtime": 2.512,
      "eval_samples_per_second": 151.674,
      "eval_steps_per_second": 19.108,
      "eval_uas": 73.27546825034263,
      "step": 1500
    },
    {
      "epoch": 41.03,
      "learning_rate": 7.196778523489934e-05,
      "loss": 0.0696,
      "step": 1600
    },
    {
      "epoch": 43.59,
      "learning_rate": 7.143087248322148e-05,
      "loss": 0.0632,
      "step": 1700
    },
    {
      "epoch": 46.15,
      "learning_rate": 7.089395973154363e-05,
      "loss": 0.0566,
      "step": 1800
    },
    {
      "epoch": 48.72,
      "learning_rate": 7.035704697986578e-05,
      "loss": 0.0528,
      "step": 1900
    },
    {
      "epoch": 51.28,
      "learning_rate": 6.982013422818792e-05,
      "loss": 0.0523,
      "step": 2000
    },
    {
      "epoch": 51.28,
      "eval_las": 66.48698035632708,
      "eval_loss": 4.477940559387207,
      "eval_runtime": 2.5091,
      "eval_samples_per_second": 151.846,
      "eval_steps_per_second": 19.13,
      "eval_uas": 74.16171767930561,
      "step": 2000
    },
    {
      "epoch": 53.85,
      "learning_rate": 6.928322147651007e-05,
      "loss": 0.0455,
      "step": 2100
    },
    {
      "epoch": 56.41,
      "learning_rate": 6.874630872483222e-05,
      "loss": 0.0436,
      "step": 2200
    },
    {
      "epoch": 58.97,
      "learning_rate": 6.820939597315437e-05,
      "loss": 0.042,
      "step": 2300
    },
    {
      "epoch": 61.54,
      "learning_rate": 6.767248322147652e-05,
      "loss": 0.0379,
      "step": 2400
    },
    {
      "epoch": 64.1,
      "learning_rate": 6.713557046979866e-05,
      "loss": 0.0374,
      "step": 2500
    },
    {
      "epoch": 64.1,
      "eval_las": 66.45043398812243,
      "eval_loss": 4.416062831878662,
      "eval_runtime": 2.5116,
      "eval_samples_per_second": 151.694,
      "eval_steps_per_second": 19.111,
      "eval_uas": 74.2439470077661,
      "step": 2500
    },
    {
      "epoch": 66.67,
      "learning_rate": 6.659865771812081e-05,
      "loss": 0.0337,
      "step": 2600
    },
    {
      "epoch": 69.23,
      "learning_rate": 6.606174496644296e-05,
      "loss": 0.0321,
      "step": 2700
    },
    {
      "epoch": 71.79,
      "learning_rate": 6.55248322147651e-05,
      "loss": 0.0283,
      "step": 2800
    },
    {
      "epoch": 74.36,
      "learning_rate": 6.498791946308724e-05,
      "loss": 0.0292,
      "step": 2900
    },
    {
      "epoch": 76.92,
      "learning_rate": 6.445100671140939e-05,
      "loss": 0.0259,
      "step": 3000
    },
    {
      "epoch": 76.92,
      "eval_las": 66.7976244860667,
      "eval_loss": 4.949475288391113,
      "eval_runtime": 2.5044,
      "eval_samples_per_second": 152.134,
      "eval_steps_per_second": 19.166,
      "eval_uas": 74.59113750571036,
      "step": 3000
    },
    {
      "epoch": 79.49,
      "learning_rate": 6.391409395973154e-05,
      "loss": 0.0247,
      "step": 3100
    },
    {
      "epoch": 82.05,
      "learning_rate": 6.337718120805369e-05,
      "loss": 0.0259,
      "step": 3200
    },
    {
      "epoch": 84.62,
      "learning_rate": 6.284026845637584e-05,
      "loss": 0.0226,
      "step": 3300
    },
    {
      "epoch": 87.18,
      "learning_rate": 6.230335570469799e-05,
      "loss": 0.0247,
      "step": 3400
    },
    {
      "epoch": 89.74,
      "learning_rate": 6.176644295302013e-05,
      "loss": 0.0191,
      "step": 3500
    },
    {
      "epoch": 89.74,
      "eval_las": 66.78848789401553,
      "eval_loss": 5.227412700653076,
      "eval_runtime": 2.5046,
      "eval_samples_per_second": 152.118,
      "eval_steps_per_second": 19.164,
      "eval_uas": 74.36272270443125,
      "step": 3500
    },
    {
      "epoch": 92.31,
      "learning_rate": 6.122953020134228e-05,
      "loss": 0.0206,
      "step": 3600
    },
    {
      "epoch": 94.87,
      "learning_rate": 6.069261744966444e-05,
      "loss": 0.0203,
      "step": 3700
    },
    {
      "epoch": 97.44,
      "learning_rate": 6.0155704697986585e-05,
      "loss": 0.0196,
      "step": 3800
    },
    {
      "epoch": 100.0,
      "learning_rate": 5.9618791946308734e-05,
      "loss": 0.0176,
      "step": 3900
    },
    {
      "epoch": 102.56,
      "learning_rate": 5.9081879194630875e-05,
      "loss": 0.0181,
      "step": 4000
    },
    {
      "epoch": 102.56,
      "eval_las": 67.46459570580173,
      "eval_loss": 5.025106906890869,
      "eval_runtime": 2.5067,
      "eval_samples_per_second": 151.992,
      "eval_steps_per_second": 19.149,
      "eval_uas": 74.9200548195523,
      "step": 4000
    },
    {
      "epoch": 105.13,
      "learning_rate": 5.854496644295302e-05,
      "loss": 0.0164,
      "step": 4100
    },
    {
      "epoch": 107.69,
      "learning_rate": 5.800805369127517e-05,
      "loss": 0.0177,
      "step": 4200
    },
    {
      "epoch": 110.26,
      "learning_rate": 5.747114093959732e-05,
      "loss": 0.0151,
      "step": 4300
    },
    {
      "epoch": 112.82,
      "learning_rate": 5.693422818791947e-05,
      "loss": 0.0159,
      "step": 4400
    },
    {
      "epoch": 115.38,
      "learning_rate": 5.6397315436241616e-05,
      "loss": 0.0148,
      "step": 4500
    },
    {
      "epoch": 115.38,
      "eval_las": 67.21790772042027,
      "eval_loss": 5.194654941558838,
      "eval_runtime": 2.5047,
      "eval_samples_per_second": 152.112,
      "eval_steps_per_second": 19.164,
      "eval_uas": 74.71904979442668,
      "step": 4500
    },
    {
      "epoch": 117.95,
      "learning_rate": 5.5860402684563764e-05,
      "loss": 0.0158,
      "step": 4600
    },
    {
      "epoch": 120.51,
      "learning_rate": 5.532348993288591e-05,
      "loss": 0.0127,
      "step": 4700
    },
    {
      "epoch": 123.08,
      "learning_rate": 5.478657718120806e-05,
      "loss": 0.0141,
      "step": 4800
    },
    {
      "epoch": 125.64,
      "learning_rate": 5.424966442953021e-05,
      "loss": 0.012,
      "step": 4900
    },
    {
      "epoch": 128.21,
      "learning_rate": 5.371275167785236e-05,
      "loss": 0.012,
      "step": 5000
    },
    {
      "epoch": 128.21,
      "eval_las": 67.5011420740064,
      "eval_loss": 5.243346214294434,
      "eval_runtime": 2.5045,
      "eval_samples_per_second": 152.124,
      "eval_steps_per_second": 19.165,
      "eval_uas": 75.23983554134308,
      "step": 5000
    },
    {
      "epoch": 130.77,
      "learning_rate": 5.31758389261745e-05,
      "loss": 0.0118,
      "step": 5100
    },
    {
      "epoch": 133.33,
      "learning_rate": 5.263892617449665e-05,
      "loss": 0.0112,
      "step": 5200
    },
    {
      "epoch": 135.9,
      "learning_rate": 5.2102013422818795e-05,
      "loss": 0.0094,
      "step": 5300
    },
    {
      "epoch": 138.46,
      "learning_rate": 5.1565100671140944e-05,
      "loss": 0.0133,
      "step": 5400
    },
    {
      "epoch": 141.03,
      "learning_rate": 5.102818791946309e-05,
      "loss": 0.0098,
      "step": 5500
    },
    {
      "epoch": 141.03,
      "eval_las": 67.54682503426223,
      "eval_loss": 5.314740180969238,
      "eval_runtime": 2.5216,
      "eval_samples_per_second": 151.093,
      "eval_steps_per_second": 19.035,
      "eval_uas": 74.99314755596163,
      "step": 5500
    },
    {
      "epoch": 143.59,
      "learning_rate": 5.049127516778524e-05,
      "loss": 0.0096,
      "step": 5600
    },
    {
      "epoch": 146.15,
      "learning_rate": 4.995436241610739e-05,
      "loss": 0.0104,
      "step": 5700
    },
    {
      "epoch": 148.72,
      "learning_rate": 4.941744966442954e-05,
      "loss": 0.0095,
      "step": 5800
    },
    {
      "epoch": 151.28,
      "learning_rate": 4.8880536912751685e-05,
      "loss": 0.0078,
      "step": 5900
    },
    {
      "epoch": 153.85,
      "learning_rate": 4.834362416107383e-05,
      "loss": 0.0083,
      "step": 6000
    },
    {
      "epoch": 153.85,
      "eval_las": 67.90315212425764,
      "eval_loss": 5.257903575897217,
      "eval_runtime": 2.5051,
      "eval_samples_per_second": 152.089,
      "eval_steps_per_second": 19.161,
      "eval_uas": 75.16674280493376,
      "step": 6000
    },
    {
      "epoch": 156.41,
      "learning_rate": 4.780671140939598e-05,
      "loss": 0.0086,
      "step": 6100
    },
    {
      "epoch": 158.97,
      "learning_rate": 4.726979865771813e-05,
      "loss": 0.0088,
      "step": 6200
    },
    {
      "epoch": 161.54,
      "learning_rate": 4.673288590604027e-05,
      "loss": 0.0073,
      "step": 6300
    },
    {
      "epoch": 164.1,
      "learning_rate": 4.619597315436242e-05,
      "loss": 0.0075,
      "step": 6400
    },
    {
      "epoch": 166.67,
      "learning_rate": 4.565906040268457e-05,
      "loss": 0.0075,
      "step": 6500
    },
    {
      "epoch": 166.67,
      "eval_las": 67.38236637734126,
      "eval_loss": 5.439788818359375,
      "eval_runtime": 2.5045,
      "eval_samples_per_second": 152.128,
      "eval_steps_per_second": 19.166,
      "eval_uas": 74.9200548195523,
      "step": 6500
    },
    {
      "epoch": 169.23,
      "learning_rate": 4.5122147651006716e-05,
      "loss": 0.0063,
      "step": 6600
    },
    {
      "epoch": 171.79,
      "learning_rate": 4.4585234899328864e-05,
      "loss": 0.0072,
      "step": 6700
    },
    {
      "epoch": 174.36,
      "learning_rate": 4.404832214765101e-05,
      "loss": 0.0062,
      "step": 6800
    },
    {
      "epoch": 176.92,
      "learning_rate": 4.351140939597316e-05,
      "loss": 0.0063,
      "step": 6900
    },
    {
      "epoch": 179.49,
      "learning_rate": 4.297449664429531e-05,
      "loss": 0.008,
      "step": 7000
    },
    {
      "epoch": 179.49,
      "eval_las": 67.61078117862037,
      "eval_loss": 5.429704189300537,
      "eval_runtime": 2.5048,
      "eval_samples_per_second": 152.105,
      "eval_steps_per_second": 19.163,
      "eval_uas": 75.58702603928734,
      "step": 7000
    },
    {
      "epoch": 182.05,
      "learning_rate": 4.243758389261746e-05,
      "loss": 0.0064,
      "step": 7100
    },
    {
      "epoch": 184.62,
      "learning_rate": 4.1900671140939605e-05,
      "loss": 0.0056,
      "step": 7200
    },
    {
      "epoch": 187.18,
      "learning_rate": 4.1363758389261754e-05,
      "loss": 0.0058,
      "step": 7300
    },
    {
      "epoch": 189.74,
      "learning_rate": 4.0826845637583895e-05,
      "loss": 0.0058,
      "step": 7400
    },
    {
      "epoch": 192.31,
      "learning_rate": 4.028993288590604e-05,
      "loss": 0.0064,
      "step": 7500
    },
    {
      "epoch": 192.31,
      "eval_las": 68.15897670169026,
      "eval_loss": 5.503659725189209,
      "eval_runtime": 2.5084,
      "eval_samples_per_second": 151.887,
      "eval_steps_per_second": 19.135,
      "eval_uas": 75.56875285518501,
      "step": 7500
    },
    {
      "epoch": 194.87,
      "learning_rate": 3.975302013422819e-05,
      "loss": 0.0067,
      "step": 7600
    },
    {
      "epoch": 197.44,
      "learning_rate": 3.921610738255034e-05,
      "loss": 0.0046,
      "step": 7700
    },
    {
      "epoch": 200.0,
      "learning_rate": 3.867919463087249e-05,
      "loss": 0.0045,
      "step": 7800
    },
    {
      "epoch": 202.56,
      "learning_rate": 3.8142281879194636e-05,
      "loss": 0.005,
      "step": 7900
    },
    {
      "epoch": 205.13,
      "learning_rate": 3.7605369127516784e-05,
      "loss": 0.0049,
      "step": 8000
    },
    {
      "epoch": 205.13,
      "eval_las": 67.86660575605299,
      "eval_loss": 5.539348125457764,
      "eval_runtime": 2.5096,
      "eval_samples_per_second": 151.815,
      "eval_steps_per_second": 19.126,
      "eval_uas": 75.4682503426222,
      "step": 8000
    },
    {
      "epoch": 207.69,
      "learning_rate": 3.706845637583893e-05,
      "loss": 0.0052,
      "step": 8100
    },
    {
      "epoch": 210.26,
      "learning_rate": 3.653154362416108e-05,
      "loss": 0.0043,
      "step": 8200
    },
    {
      "epoch": 212.82,
      "learning_rate": 3.599463087248322e-05,
      "loss": 0.0043,
      "step": 8300
    },
    {
      "epoch": 215.38,
      "learning_rate": 3.545771812080537e-05,
      "loss": 0.0041,
      "step": 8400
    },
    {
      "epoch": 217.95,
      "learning_rate": 3.492080536912752e-05,
      "loss": 0.0053,
      "step": 8500
    },
    {
      "epoch": 217.95,
      "eval_las": 68.07674737322978,
      "eval_loss": 5.689121246337891,
      "eval_runtime": 2.5046,
      "eval_samples_per_second": 152.121,
      "eval_steps_per_second": 19.165,
      "eval_uas": 75.678391959799,
      "step": 8500
    },
    {
      "epoch": 220.51,
      "learning_rate": 3.438389261744967e-05,
      "loss": 0.0039,
      "step": 8600
    },
    {
      "epoch": 223.08,
      "learning_rate": 3.3846979865771815e-05,
      "loss": 0.004,
      "step": 8700
    },
    {
      "epoch": 225.64,
      "learning_rate": 3.3310067114093964e-05,
      "loss": 0.0038,
      "step": 8800
    },
    {
      "epoch": 228.21,
      "learning_rate": 3.277315436241611e-05,
      "loss": 0.0038,
      "step": 8900
    },
    {
      "epoch": 230.77,
      "learning_rate": 3.223624161073826e-05,
      "loss": 0.0033,
      "step": 9000
    },
    {
      "epoch": 230.77,
      "eval_las": 69.00867976244861,
      "eval_loss": 5.695150852203369,
      "eval_runtime": 2.503,
      "eval_samples_per_second": 152.215,
      "eval_steps_per_second": 19.177,
      "eval_uas": 76.09867519415258,
      "step": 9000
    },
    {
      "epoch": 233.33,
      "learning_rate": 3.169932885906041e-05,
      "loss": 0.0038,
      "step": 9100
    },
    {
      "epoch": 235.9,
      "learning_rate": 3.1162416107382557e-05,
      "loss": 0.0034,
      "step": 9200
    },
    {
      "epoch": 238.46,
      "learning_rate": 3.0625503355704705e-05,
      "loss": 0.0036,
      "step": 9300
    },
    {
      "epoch": 241.03,
      "learning_rate": 3.0088590604026846e-05,
      "loss": 0.0037,
      "step": 9400
    },
    {
      "epoch": 243.59,
      "learning_rate": 2.9551677852348995e-05,
      "loss": 0.0033,
      "step": 9500
    },
    {
      "epoch": 243.59,
      "eval_las": 68.76199177706715,
      "eval_loss": 5.625730037689209,
      "eval_runtime": 2.5076,
      "eval_samples_per_second": 151.938,
      "eval_steps_per_second": 19.142,
      "eval_uas": 75.96162631338511,
      "step": 9500
    },
    {
      "epoch": 246.15,
      "learning_rate": 2.9014765100671143e-05,
      "loss": 0.0025,
      "step": 9600
    },
    {
      "epoch": 248.72,
      "learning_rate": 2.847785234899329e-05,
      "loss": 0.0029,
      "step": 9700
    },
    {
      "epoch": 251.28,
      "learning_rate": 2.794093959731544e-05,
      "loss": 0.0036,
      "step": 9800
    },
    {
      "epoch": 253.85,
      "learning_rate": 2.7404026845637588e-05,
      "loss": 0.0034,
      "step": 9900
    },
    {
      "epoch": 256.41,
      "learning_rate": 2.6867114093959732e-05,
      "loss": 0.0032,
      "step": 10000
    },
    {
      "epoch": 256.41,
      "eval_las": 68.68889904065783,
      "eval_loss": 5.689762115478516,
      "eval_runtime": 2.5074,
      "eval_samples_per_second": 151.949,
      "eval_steps_per_second": 19.143,
      "eval_uas": 75.68752855185016,
      "step": 10000
    },
    {
      "epoch": 258.97,
      "learning_rate": 2.633020134228188e-05,
      "loss": 0.0028,
      "step": 10100
    },
    {
      "epoch": 261.54,
      "learning_rate": 2.579328859060403e-05,
      "loss": 0.0027,
      "step": 10200
    },
    {
      "epoch": 264.1,
      "learning_rate": 2.5256375838926177e-05,
      "loss": 0.002,
      "step": 10300
    },
    {
      "epoch": 266.67,
      "learning_rate": 2.4719463087248325e-05,
      "loss": 0.0023,
      "step": 10400
    },
    {
      "epoch": 269.23,
      "learning_rate": 2.4182550335570474e-05,
      "loss": 0.002,
      "step": 10500
    },
    {
      "epoch": 269.23,
      "eval_las": 68.93558702603929,
      "eval_loss": 6.083931922912598,
      "eval_runtime": 2.511,
      "eval_samples_per_second": 151.731,
      "eval_steps_per_second": 19.116,
      "eval_uas": 76.0438556418456,
      "step": 10500
    },
    {
      "epoch": 271.79,
      "learning_rate": 2.364563758389262e-05,
      "loss": 0.0016,
      "step": 10600
    },
    {
      "epoch": 274.36,
      "learning_rate": 2.3108724832214767e-05,
      "loss": 0.0016,
      "step": 10700
    },
    {
      "epoch": 276.92,
      "learning_rate": 2.2571812080536915e-05,
      "loss": 0.0021,
      "step": 10800
    },
    {
      "epoch": 279.49,
      "learning_rate": 2.2034899328859063e-05,
      "loss": 0.0021,
      "step": 10900
    },
    {
      "epoch": 282.05,
      "learning_rate": 2.149798657718121e-05,
      "loss": 0.0019,
      "step": 11000
    },
    {
      "epoch": 282.05,
      "eval_las": 68.63407948835084,
      "eval_loss": 6.047039031982422,
      "eval_runtime": 2.5036,
      "eval_samples_per_second": 152.181,
      "eval_steps_per_second": 19.172,
      "eval_uas": 75.85198720877113,
      "step": 11000
    },
    {
      "epoch": 284.62,
      "learning_rate": 2.0961073825503356e-05,
      "loss": 0.0016,
      "step": 11100
    },
    {
      "epoch": 287.18,
      "learning_rate": 2.0424161073825505e-05,
      "loss": 0.0021,
      "step": 11200
    },
    {
      "epoch": 289.74,
      "learning_rate": 1.9887248322147653e-05,
      "loss": 0.0024,
      "step": 11300
    },
    {
      "epoch": 292.31,
      "learning_rate": 1.93503355704698e-05,
      "loss": 0.0017,
      "step": 11400
    },
    {
      "epoch": 294.87,
      "learning_rate": 1.881342281879195e-05,
      "loss": 0.0019,
      "step": 11500
    },
    {
      "epoch": 294.87,
      "eval_las": 68.79853814527182,
      "eval_loss": 6.113554000854492,
      "eval_runtime": 2.505,
      "eval_samples_per_second": 152.095,
      "eval_steps_per_second": 19.162,
      "eval_uas": 76.23572407492006,
      "step": 11500
    },
    {
      "epoch": 294.87,
      "step": 11500,
      "total_flos": 7.223115436230451e+16,
      "train_loss": 0.10975235502227493,
      "train_runtime": 5024.1647,
      "train_samples_per_second": 95.538,
      "train_steps_per_second": 2.986
    }
  ],
  "max_steps": 15000,
  "num_train_epochs": 385,
  "total_flos": 7.223115436230451e+16,
  "trial_name": null,
  "trial_params": null
}