{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.999775432292836,
"global_step": 27830,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 0.0001,
"loss": 0.9801,
"step": 50
},
|
|
{
|
|
"epoch": 0.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.9375,
|
|
"step": 100
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8924,
|
|
"step": 150
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8901,
|
|
"step": 200
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8608,
|
|
"step": 250
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8033,
|
|
"step": 300
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.826,
|
|
"step": 350
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8897,
|
|
"step": 400
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8379,
|
|
"step": 450
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8547,
|
|
"step": 500
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8092,
|
|
"step": 550
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8155,
|
|
"step": 600
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7923,
|
|
"step": 650
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8267,
|
|
"step": 700
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8055,
|
|
"step": 750
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8084,
|
|
"step": 800
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8232,
|
|
"step": 850
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7805,
|
|
"step": 900
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8009,
|
|
"step": 950
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7566,
|
|
"step": 1000
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7745,
|
|
"step": 1050
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7858,
|
|
"step": 1100
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7508,
|
|
"step": 1150
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8148,
|
|
"step": 1200
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8054,
|
|
"step": 1250
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8149,
|
|
"step": 1300
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7903,
|
|
"step": 1350
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7937,
|
|
"step": 1400
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7489,
|
|
"step": 1450
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7783,
|
|
"step": 1500
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7757,
|
|
"step": 1550
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7746,
|
|
"step": 1600
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.778,
|
|
"step": 1650
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7951,
|
|
"step": 1700
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7764,
|
|
"step": 1750
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7385,
|
|
"step": 1800
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7503,
|
|
"step": 1850
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7673,
|
|
"step": 1900
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7718,
|
|
"step": 1950
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7988,
|
|
"step": 2000
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7394,
|
|
"step": 2050
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7757,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7381,
|
|
"step": 2150
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7857,
|
|
"step": 2200
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.798,
|
|
"step": 2250
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7987,
|
|
"step": 2300
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.74,
|
|
"step": 2350
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7832,
|
|
"step": 2400
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7566,
|
|
"step": 2450
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7863,
|
|
"step": 2500
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7419,
|
|
"step": 2550
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7553,
|
|
"step": 2600
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7749,
|
|
"step": 2650
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8235,
|
|
"step": 2700
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7366,
|
|
"step": 2750
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.802,
|
|
"step": 2800
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7669,
|
|
"step": 2850
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7899,
|
|
"step": 2900
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7651,
|
|
"step": 2950
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7202,
|
|
"step": 3000
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7423,
|
|
"step": 3050
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7732,
|
|
"step": 3100
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7635,
|
|
"step": 3150
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7664,
|
|
"step": 3200
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7623,
|
|
"step": 3250
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7787,
|
|
"step": 3300
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7563,
|
|
"step": 3350
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7318,
|
|
"step": 3400
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.762,
|
|
"step": 3450
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.782,
|
|
"step": 3500
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.8114,
|
|
"step": 3550
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7182,
|
|
"step": 3600
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7546,
|
|
"step": 3650
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7953,
|
|
"step": 3700
|
|
},
|
|
{
|
|
"epoch": 0.67,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7772,
|
|
"step": 3750
|
|
},
|
|
{
|
|
"epoch": 0.68,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7737,
|
|
"step": 3800
|
|
},
|
|
{
|
|
"epoch": 0.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7334,
|
|
"step": 3850
|
|
},
|
|
{
|
|
"epoch": 0.7,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7713,
|
|
"step": 3900
|
|
},
|
|
{
|
|
"epoch": 0.71,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.772,
|
|
"step": 3950
|
|
},
|
|
{
|
|
"epoch": 0.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7964,
|
|
"step": 4000
|
|
},
|
|
{
|
|
"epoch": 0.73,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7997,
|
|
"step": 4050
|
|
},
|
|
{
|
|
"epoch": 0.74,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7341,
|
|
"step": 4100
|
|
},
|
|
{
|
|
"epoch": 0.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7837,
|
|
"step": 4150
|
|
},
|
|
{
|
|
"epoch": 0.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7973,
|
|
"step": 4200
|
|
},
|
|
{
|
|
"epoch": 0.76,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.775,
|
|
"step": 4250
|
|
},
|
|
{
|
|
"epoch": 0.77,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7599,
|
|
"step": 4300
|
|
},
|
|
{
|
|
"epoch": 0.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7406,
|
|
"step": 4350
|
|
},
|
|
{
|
|
"epoch": 0.79,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7875,
|
|
"step": 4400
|
|
},
|
|
{
|
|
"epoch": 0.8,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7264,
|
|
"step": 4450
|
|
},
|
|
{
|
|
"epoch": 0.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7318,
|
|
"step": 4500
|
|
},
|
|
{
|
|
"epoch": 0.82,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7532,
|
|
"step": 4550
|
|
},
|
|
{
|
|
"epoch": 0.83,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7386,
|
|
"step": 4600
|
|
},
|
|
{
|
|
"epoch": 0.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7729,
|
|
"step": 4650
|
|
},
|
|
{
|
|
"epoch": 0.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7428,
|
|
"step": 4700
|
|
},
|
|
{
|
|
"epoch": 0.85,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7561,
|
|
"step": 4750
|
|
},
|
|
{
|
|
"epoch": 0.86,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7798,
|
|
"step": 4800
|
|
},
|
|
{
|
|
"epoch": 0.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7675,
|
|
"step": 4850
|
|
},
|
|
{
|
|
"epoch": 0.88,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7443,
|
|
"step": 4900
|
|
},
|
|
{
|
|
"epoch": 0.89,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7992,
|
|
"step": 4950
|
|
},
|
|
{
|
|
"epoch": 0.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7841,
|
|
"step": 5000
|
|
},
|
|
{
|
|
"epoch": 0.91,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7626,
|
|
"step": 5050
|
|
},
|
|
{
|
|
"epoch": 0.92,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7272,
|
|
"step": 5100
|
|
},
|
|
{
|
|
"epoch": 0.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7563,
|
|
"step": 5150
|
|
},
|
|
{
|
|
"epoch": 0.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.732,
|
|
"step": 5200
|
|
},
|
|
{
|
|
"epoch": 0.94,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7397,
|
|
"step": 5250
|
|
},
|
|
{
|
|
"epoch": 0.95,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.723,
|
|
"step": 5300
|
|
},
|
|
{
|
|
"epoch": 0.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7764,
|
|
"step": 5350
|
|
},
|
|
{
|
|
"epoch": 0.97,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7605,
|
|
"step": 5400
|
|
},
|
|
{
|
|
"epoch": 0.98,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7118,
|
|
"step": 5450
|
|
},
|
|
{
|
|
"epoch": 0.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7386,
|
|
"step": 5500
|
|
},
|
|
{
|
|
"epoch": 1.0,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.7196,
|
|
"step": 5550
|
|
},
{
"epoch": 1.0,
"eval_loss": 0.7622227668762207,
"eval_runtime": 209.9187,
"eval_samples_per_second": 15.306,
"eval_steps_per_second": 0.958,
"step": 5566
},
|
|
{
|
|
"epoch": 1.01,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6938,
|
|
"step": 5600
|
|
},
|
|
{
|
|
"epoch": 1.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6087,
|
|
"step": 5650
|
|
},
|
|
{
|
|
"epoch": 1.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6136,
|
|
"step": 5700
|
|
},
|
|
{
|
|
"epoch": 1.03,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6061,
|
|
"step": 5750
|
|
},
|
|
{
|
|
"epoch": 1.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6124,
|
|
"step": 5800
|
|
},
|
|
{
|
|
"epoch": 1.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5955,
|
|
"step": 5850
|
|
},
|
|
{
|
|
"epoch": 1.06,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6269,
|
|
"step": 5900
|
|
},
|
|
{
|
|
"epoch": 1.07,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6517,
|
|
"step": 5950
|
|
},
|
|
{
|
|
"epoch": 1.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6251,
|
|
"step": 6000
|
|
},
|
|
{
|
|
"epoch": 1.09,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6702,
|
|
"step": 6050
|
|
},
|
|
{
|
|
"epoch": 1.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6072,
|
|
"step": 6100
|
|
},
|
|
{
|
|
"epoch": 1.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6186,
|
|
"step": 6150
|
|
},
|
|
{
|
|
"epoch": 1.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6037,
|
|
"step": 6200
|
|
},
|
|
{
|
|
"epoch": 1.12,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5849,
|
|
"step": 6250
|
|
},
|
|
{
|
|
"epoch": 1.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5903,
|
|
"step": 6300
|
|
},
|
|
{
|
|
"epoch": 1.14,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6206,
|
|
"step": 6350
|
|
},
|
|
{
|
|
"epoch": 1.15,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6017,
|
|
"step": 6400
|
|
},
|
|
{
|
|
"epoch": 1.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5683,
|
|
"step": 6450
|
|
},
|
|
{
|
|
"epoch": 1.17,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6193,
|
|
"step": 6500
|
|
},
|
|
{
|
|
"epoch": 1.18,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.619,
|
|
"step": 6550
|
|
},
|
|
{
|
|
"epoch": 1.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5784,
|
|
"step": 6600
|
|
},
|
|
{
|
|
"epoch": 1.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6097,
|
|
"step": 6650
|
|
},
|
|
{
|
|
"epoch": 1.2,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.598,
|
|
"step": 6700
|
|
},
|
|
{
|
|
"epoch": 1.21,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5999,
|
|
"step": 6750
|
|
},
|
|
{
|
|
"epoch": 1.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6172,
|
|
"step": 6800
|
|
},
|
|
{
|
|
"epoch": 1.23,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.677,
|
|
"step": 6850
|
|
},
|
|
{
|
|
"epoch": 1.24,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6057,
|
|
"step": 6900
|
|
},
|
|
{
|
|
"epoch": 1.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6055,
|
|
"step": 6950
|
|
},
|
|
{
|
|
"epoch": 1.26,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6023,
|
|
"step": 7000
|
|
},
|
|
{
|
|
"epoch": 1.27,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6063,
|
|
"step": 7050
|
|
},
|
|
{
|
|
"epoch": 1.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6092,
|
|
"step": 7100
|
|
},
|
|
{
|
|
"epoch": 1.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.622,
|
|
"step": 7150
|
|
},
|
|
{
|
|
"epoch": 1.29,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5788,
|
|
"step": 7200
|
|
},
|
|
{
|
|
"epoch": 1.3,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6234,
|
|
"step": 7250
|
|
},
|
|
{
|
|
"epoch": 1.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.566,
|
|
"step": 7300
|
|
},
|
|
{
|
|
"epoch": 1.32,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6285,
|
|
"step": 7350
|
|
},
|
|
{
|
|
"epoch": 1.33,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5961,
|
|
"step": 7400
|
|
},
|
|
{
|
|
"epoch": 1.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5837,
|
|
"step": 7450
|
|
},
|
|
{
|
|
"epoch": 1.35,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6118,
|
|
"step": 7500
|
|
},
|
|
{
|
|
"epoch": 1.36,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6089,
|
|
"step": 7550
|
|
},
|
|
{
|
|
"epoch": 1.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6041,
|
|
"step": 7600
|
|
},
|
|
{
|
|
"epoch": 1.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.607,
|
|
"step": 7650
|
|
},
|
|
{
|
|
"epoch": 1.38,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6613,
|
|
"step": 7700
|
|
},
|
|
{
|
|
"epoch": 1.39,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6176,
|
|
"step": 7750
|
|
},
|
|
{
|
|
"epoch": 1.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5791,
|
|
"step": 7800
|
|
},
|
|
{
|
|
"epoch": 1.41,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6092,
|
|
"step": 7850
|
|
},
|
|
{
|
|
"epoch": 1.42,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6262,
|
|
"step": 7900
|
|
},
|
|
{
|
|
"epoch": 1.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6086,
|
|
"step": 7950
|
|
},
|
|
{
|
|
"epoch": 1.44,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5735,
|
|
"step": 8000
|
|
},
|
|
{
|
|
"epoch": 1.45,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6109,
|
|
"step": 8050
|
|
},
|
|
{
|
|
"epoch": 1.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6125,
|
|
"step": 8100
|
|
},
|
|
{
|
|
"epoch": 1.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6218,
|
|
"step": 8150
|
|
},
|
|
{
|
|
"epoch": 1.47,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6117,
|
|
"step": 8200
|
|
},
|
|
{
|
|
"epoch": 1.48,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5824,
|
|
"step": 8250
|
|
},
|
|
{
|
|
"epoch": 1.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6448,
|
|
"step": 8300
|
|
},
|
|
{
|
|
"epoch": 1.5,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6563,
|
|
"step": 8350
|
|
},
|
|
{
|
|
"epoch": 1.51,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6707,
|
|
"step": 8400
|
|
},
|
|
{
|
|
"epoch": 1.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6097,
|
|
"step": 8450
|
|
},
|
|
{
|
|
"epoch": 1.53,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5972,
|
|
"step": 8500
|
|
},
|
|
{
|
|
"epoch": 1.54,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6104,
|
|
"step": 8550
|
|
},
|
|
{
|
|
"epoch": 1.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.601,
|
|
"step": 8600
|
|
},
|
|
{
|
|
"epoch": 1.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.581,
|
|
"step": 8650
|
|
},
|
|
{
|
|
"epoch": 1.56,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6065,
|
|
"step": 8700
|
|
},
|
|
{
|
|
"epoch": 1.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5787,
|
|
"step": 8750
|
|
},
|
|
{
|
|
"epoch": 1.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6347,
|
|
"step": 8800
|
|
},
|
|
{
|
|
"epoch": 1.59,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6461,
|
|
"step": 8850
|
|
},
|
|
{
|
|
"epoch": 1.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6046,
|
|
"step": 8900
|
|
},
|
|
{
|
|
"epoch": 1.61,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6498,
|
|
"step": 8950
|
|
},
|
|
{
|
|
"epoch": 1.62,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6283,
|
|
"step": 9000
|
|
},
|
|
{
|
|
"epoch": 1.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6221,
|
|
"step": 9050
|
|
},
|
|
{
|
|
"epoch": 1.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6324,
|
|
"step": 9100
|
|
},
|
|
{
|
|
"epoch": 1.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6142,
|
|
"step": 9150
|
|
},
|
|
{
|
|
"epoch": 1.65,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5834,
|
|
"step": 9200
|
|
},
|
|
{
|
|
"epoch": 1.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6326,
|
|
"step": 9250
|
|
},
|
|
{
|
|
"epoch": 1.67,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5982,
|
|
"step": 9300
|
|
},
|
|
{
|
|
"epoch": 1.68,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5965,
|
|
"step": 9350
|
|
},
|
|
{
|
|
"epoch": 1.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6052,
|
|
"step": 9400
|
|
},
|
|
{
|
|
"epoch": 1.7,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6658,
|
|
"step": 9450
|
|
},
|
|
{
|
|
"epoch": 1.71,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6163,
|
|
"step": 9500
|
|
},
|
|
{
|
|
"epoch": 1.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6627,
|
|
"step": 9550
|
|
},
|
|
{
|
|
"epoch": 1.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6203,
|
|
"step": 9600
|
|
},
|
|
{
|
|
"epoch": 1.73,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.633,
|
|
"step": 9650
|
|
},
|
|
{
|
|
"epoch": 1.74,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.651,
|
|
"step": 9700
|
|
},
|
|
{
|
|
"epoch": 1.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5837,
|
|
"step": 9750
|
|
},
|
|
{
|
|
"epoch": 1.76,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6166,
|
|
"step": 9800
|
|
},
|
|
{
|
|
"epoch": 1.77,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5995,
|
|
"step": 9850
|
|
},
|
|
{
|
|
"epoch": 1.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5883,
|
|
"step": 9900
|
|
},
|
|
{
|
|
"epoch": 1.79,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5918,
|
|
"step": 9950
|
|
},
|
|
{
|
|
"epoch": 1.8,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5785,
|
|
"step": 10000
|
|
},
|
|
{
|
|
"epoch": 1.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6557,
|
|
"step": 10050
|
|
},
|
|
{
|
|
"epoch": 1.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5912,
|
|
"step": 10100
|
|
},
|
|
{
|
|
"epoch": 1.82,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6109,
|
|
"step": 10150
|
|
},
|
|
{
|
|
"epoch": 1.83,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.599,
|
|
"step": 10200
|
|
},
|
|
{
|
|
"epoch": 1.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6325,
|
|
"step": 10250
|
|
},
|
|
{
|
|
"epoch": 1.85,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6339,
|
|
"step": 10300
|
|
},
|
|
{
|
|
"epoch": 1.86,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6074,
|
|
"step": 10350
|
|
},
|
|
{
|
|
"epoch": 1.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5943,
|
|
"step": 10400
|
|
},
|
|
{
|
|
"epoch": 1.88,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6133,
|
|
"step": 10450
|
|
},
|
|
{
|
|
"epoch": 1.89,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5984,
|
|
"step": 10500
|
|
},
|
|
{
|
|
"epoch": 1.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6695,
|
|
"step": 10550
|
|
},
|
|
{
|
|
"epoch": 1.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.591,
|
|
"step": 10600
|
|
},
|
|
{
|
|
"epoch": 1.91,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6208,
|
|
"step": 10650
|
|
},
|
|
{
|
|
"epoch": 1.92,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5744,
|
|
"step": 10700
|
|
},
|
|
{
|
|
"epoch": 1.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5873,
|
|
"step": 10750
|
|
},
|
|
{
|
|
"epoch": 1.94,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6261,
|
|
"step": 10800
|
|
},
|
|
{
|
|
"epoch": 1.95,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.617,
|
|
"step": 10850
|
|
},
|
|
{
|
|
"epoch": 1.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6151,
|
|
"step": 10900
|
|
},
|
|
{
|
|
"epoch": 1.97,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5936,
|
|
"step": 10950
|
|
},
|
|
{
|
|
"epoch": 1.98,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6366,
|
|
"step": 11000
|
|
},
|
|
{
|
|
"epoch": 1.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.644,
|
|
"step": 11050
|
|
},
|
|
{
|
|
"epoch": 1.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.6302,
|
|
"step": 11100
|
|
},
{
"epoch": 2.0,
"eval_loss": 0.7252166271209717,
"eval_runtime": 201.6817,
"eval_samples_per_second": 15.931,
"eval_steps_per_second": 0.997,
"step": 11132
},
|
|
{
|
|
"epoch": 2.0,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5489,
|
|
"step": 11150
|
|
},
|
|
{
|
|
"epoch": 2.01,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5112,
|
|
"step": 11200
|
|
},
|
|
{
|
|
"epoch": 2.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4868,
|
|
"step": 11250
|
|
},
|
|
{
|
|
"epoch": 2.03,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.483,
|
|
"step": 11300
|
|
},
|
|
{
|
|
"epoch": 2.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4569,
|
|
"step": 11350
|
|
},
|
|
{
|
|
"epoch": 2.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4638,
|
|
"step": 11400
|
|
},
|
|
{
|
|
"epoch": 2.06,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4764,
|
|
"step": 11450
|
|
},
|
|
{
|
|
"epoch": 2.07,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4984,
|
|
"step": 11500
|
|
},
|
|
{
|
|
"epoch": 2.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4797,
|
|
"step": 11550
|
|
},
|
|
{
|
|
"epoch": 2.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4924,
|
|
"step": 11600
|
|
},
|
|
{
|
|
"epoch": 2.09,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4921,
|
|
"step": 11650
|
|
},
|
|
{
|
|
"epoch": 2.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4652,
|
|
"step": 11700
|
|
},
|
|
{
|
|
"epoch": 2.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4842,
|
|
"step": 11750
|
|
},
|
|
{
|
|
"epoch": 2.12,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4982,
|
|
"step": 11800
|
|
},
|
|
{
|
|
"epoch": 2.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4682,
|
|
"step": 11850
|
|
},
|
|
{
|
|
"epoch": 2.14,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4642,
|
|
"step": 11900
|
|
},
|
|
{
|
|
"epoch": 2.15,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4982,
|
|
"step": 11950
|
|
},
|
|
{
|
|
"epoch": 2.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5336,
|
|
"step": 12000
|
|
},
|
|
{
|
|
"epoch": 2.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4739,
|
|
"step": 12050
|
|
},
|
|
{
|
|
"epoch": 2.17,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4938,
|
|
"step": 12100
|
|
},
|
|
{
|
|
"epoch": 2.18,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5114,
|
|
"step": 12150
|
|
},
|
|
{
|
|
"epoch": 2.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4893,
|
|
"step": 12200
|
|
},
|
|
{
|
|
"epoch": 2.2,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4924,
|
|
"step": 12250
|
|
},
|
|
{
|
|
"epoch": 2.21,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4826,
|
|
"step": 12300
|
|
},
|
|
{
|
|
"epoch": 2.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4651,
|
|
"step": 12350
|
|
},
|
|
{
|
|
"epoch": 2.23,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5192,
|
|
"step": 12400
|
|
},
|
|
{
|
|
"epoch": 2.24,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5012,
|
|
"step": 12450
|
|
},
|
|
{
|
|
"epoch": 2.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4913,
|
|
"step": 12500
|
|
},
|
|
{
|
|
"epoch": 2.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4999,
|
|
"step": 12550
|
|
},
|
|
{
|
|
"epoch": 2.26,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5245,
|
|
"step": 12600
|
|
},
|
|
{
|
|
"epoch": 2.27,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5264,
|
|
"step": 12650
|
|
},
|
|
{
|
|
"epoch": 2.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5132,
|
|
"step": 12700
|
|
},
|
|
{
|
|
"epoch": 2.29,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4598,
|
|
"step": 12750
|
|
},
|
|
{
|
|
"epoch": 2.3,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4883,
|
|
"step": 12800
|
|
},
|
|
{
|
|
"epoch": 2.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5397,
|
|
"step": 12850
|
|
},
|
|
{
|
|
"epoch": 2.32,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5253,
|
|
"step": 12900
|
|
},
|
|
{
|
|
"epoch": 2.33,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5067,
|
|
"step": 12950
|
|
},
|
|
{
|
|
"epoch": 2.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4831,
|
|
"step": 13000
|
|
},
|
|
{
|
|
"epoch": 2.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.482,
|
|
"step": 13050
|
|
},
|
|
{
|
|
"epoch": 2.35,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5039,
|
|
"step": 13100
|
|
},
|
|
{
|
|
"epoch": 2.36,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.473,
|
|
"step": 13150
|
|
},
|
|
{
|
|
"epoch": 2.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5094,
|
|
"step": 13200
|
|
},
|
|
{
|
|
"epoch": 2.38,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4866,
|
|
"step": 13250
|
|
},
|
|
{
|
|
"epoch": 2.39,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4881,
|
|
"step": 13300
|
|
},
|
|
{
|
|
"epoch": 2.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5268,
|
|
"step": 13350
|
|
},
|
|
{
|
|
"epoch": 2.41,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5188,
|
|
"step": 13400
|
|
},
|
|
{
|
|
"epoch": 2.42,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.501,
|
|
"step": 13450
|
|
},
|
|
{
|
|
"epoch": 2.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5135,
|
|
"step": 13500
|
|
},
|
|
{
|
|
"epoch": 2.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5056,
|
|
"step": 13550
|
|
},
|
|
{
|
|
"epoch": 2.44,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5062,
|
|
"step": 13600
|
|
},
|
|
{
|
|
"epoch": 2.45,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4823,
|
|
"step": 13650
|
|
},
|
|
{
|
|
"epoch": 2.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4636,
|
|
"step": 13700
|
|
},
|
|
{
|
|
"epoch": 2.47,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.507,
|
|
"step": 13750
|
|
},
|
|
{
|
|
"epoch": 2.48,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4648,
|
|
"step": 13800
|
|
},
|
|
{
|
|
"epoch": 2.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5061,
|
|
"step": 13850
|
|
},
|
|
{
|
|
"epoch": 2.5,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4872,
|
|
"step": 13900
|
|
},
|
|
{
|
|
"epoch": 2.51,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4897,
|
|
"step": 13950
|
|
},
|
|
{
|
|
"epoch": 2.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5166,
|
|
"step": 14000
|
|
},
|
|
{
|
|
"epoch": 2.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4764,
|
|
"step": 14050
|
|
},
|
|
{
|
|
"epoch": 2.53,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5094,
|
|
"step": 14100
|
|
},
|
|
{
|
|
"epoch": 2.54,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4821,
|
|
"step": 14150
|
|
},
|
|
{
|
|
"epoch": 2.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5222,
|
|
"step": 14200
|
|
},
|
|
{
|
|
"epoch": 2.56,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4904,
|
|
"step": 14250
|
|
},
|
|
{
|
|
"epoch": 2.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5085,
|
|
"step": 14300
|
|
},
|
|
{
|
|
"epoch": 2.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4719,
|
|
"step": 14350
|
|
},
|
|
{
|
|
"epoch": 2.59,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5112,
|
|
"step": 14400
|
|
},
|
|
{
|
|
"epoch": 2.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5052,
|
|
"step": 14450
|
|
},
|
|
{
|
|
"epoch": 2.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5457,
|
|
"step": 14500
|
|
},
|
|
{
|
|
"epoch": 2.61,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5034,
|
|
"step": 14550
|
|
},
|
|
{
|
|
"epoch": 2.62,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5023,
|
|
"step": 14600
|
|
},
|
|
{
|
|
"epoch": 2.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4967,
|
|
"step": 14650
|
|
},
|
|
{
|
|
"epoch": 2.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5124,
|
|
"step": 14700
|
|
},
|
|
{
|
|
"epoch": 2.65,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4987,
|
|
"step": 14750
|
|
},
|
|
{
|
|
"epoch": 2.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5234,
|
|
"step": 14800
|
|
},
|
|
{
|
|
"epoch": 2.67,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4957,
|
|
"step": 14850
|
|
},
|
|
{
|
|
"epoch": 2.68,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.516,
|
|
"step": 14900
|
|
},
|
|
{
|
|
"epoch": 2.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.479,
|
|
"step": 14950
|
|
},
|
|
{
|
|
"epoch": 2.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5057,
|
|
"step": 15000
|
|
},
|
|
{
|
|
"epoch": 2.7,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5224,
|
|
"step": 15050
|
|
},
|
|
{
|
|
"epoch": 2.71,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5428,
|
|
"step": 15100
|
|
},
|
|
{
|
|
"epoch": 2.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4645,
|
|
"step": 15150
|
|
},
|
|
{
|
|
"epoch": 2.73,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5114,
|
|
"step": 15200
|
|
},
|
|
{
|
|
"epoch": 2.74,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5074,
|
|
"step": 15250
|
|
},
|
|
{
|
|
"epoch": 2.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5103,
|
|
"step": 15300
|
|
},
|
|
{
|
|
"epoch": 2.76,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5326,
|
|
"step": 15350
|
|
},
|
|
{
|
|
"epoch": 2.77,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5023,
|
|
"step": 15400
|
|
},
|
|
{
|
|
"epoch": 2.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.467,
|
|
"step": 15450
|
|
},
|
|
{
|
|
"epoch": 2.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4833,
|
|
"step": 15500
|
|
},
|
|
{
|
|
"epoch": 2.79,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5119,
|
|
"step": 15550
|
|
},
|
|
{
|
|
"epoch": 2.8,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5181,
|
|
"step": 15600
|
|
},
|
|
{
|
|
"epoch": 2.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4917,
|
|
"step": 15650
|
|
},
|
|
{
|
|
"epoch": 2.82,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5281,
|
|
"step": 15700
|
|
},
|
|
{
|
|
"epoch": 2.83,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4971,
|
|
"step": 15750
|
|
},
|
|
{
|
|
"epoch": 2.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.483,
|
|
"step": 15800
|
|
},
|
|
{
|
|
"epoch": 2.85,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5254,
|
|
"step": 15850
|
|
},
|
|
{
|
|
"epoch": 2.86,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5838,
|
|
"step": 15900
|
|
},
|
|
{
|
|
"epoch": 2.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5201,
|
|
"step": 15950
|
|
},
|
|
{
|
|
"epoch": 2.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5232,
|
|
"step": 16000
|
|
},
|
|
{
|
|
"epoch": 2.88,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5249,
|
|
"step": 16050
|
|
},
|
|
{
|
|
"epoch": 2.89,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4707,
|
|
"step": 16100
|
|
},
|
|
{
|
|
"epoch": 2.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5058,
|
|
"step": 16150
|
|
},
|
|
{
|
|
"epoch": 2.91,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4838,
|
|
"step": 16200
|
|
},
|
|
{
|
|
"epoch": 2.92,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5012,
|
|
"step": 16250
|
|
},
|
|
{
|
|
"epoch": 2.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5369,
|
|
"step": 16300
|
|
},
|
|
{
|
|
"epoch": 2.94,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5372,
|
|
"step": 16350
|
|
},
|
|
{
|
|
"epoch": 2.95,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5441,
|
|
"step": 16400
|
|
},
|
|
{
|
|
"epoch": 2.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5506,
|
|
"step": 16450
|
|
},
|
|
{
|
|
"epoch": 2.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5412,
|
|
"step": 16500
|
|
},
|
|
{
|
|
"epoch": 2.97,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4903,
|
|
"step": 16550
|
|
},
|
|
{
|
|
"epoch": 2.98,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5145,
|
|
"step": 16600
|
|
},
|
|
{
|
|
"epoch": 2.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5237,
|
|
"step": 16650
|
|
},
{
"epoch": 3.0,
"eval_loss": 0.7354088425636292,
"eval_runtime": 207.8886,
"eval_samples_per_second": 15.455,
"eval_steps_per_second": 0.967,
"step": 16698
},
|
|
{
|
|
"epoch": 3.0,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.5086,
|
|
"step": 16700
|
|
},
|
|
{
|
|
"epoch": 3.01,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4071,
|
|
"step": 16750
|
|
},
|
|
{
|
|
"epoch": 3.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4299,
|
|
"step": 16800
|
|
},
|
|
{
|
|
"epoch": 3.03,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3996,
|
|
"step": 16850
|
|
},
|
|
{
|
|
"epoch": 3.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.412,
|
|
"step": 16900
|
|
},
|
|
{
|
|
"epoch": 3.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4214,
|
|
"step": 16950
|
|
},
|
|
{
|
|
"epoch": 3.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3986,
|
|
"step": 17000
|
|
},
|
|
{
|
|
"epoch": 3.06,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4177,
|
|
"step": 17050
|
|
},
|
|
{
|
|
"epoch": 3.07,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3833,
|
|
"step": 17100
|
|
},
|
|
{
|
|
"epoch": 3.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4054,
|
|
"step": 17150
|
|
},
|
|
{
|
|
"epoch": 3.09,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3802,
|
|
"step": 17200
|
|
},
|
|
{
|
|
"epoch": 3.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4142,
|
|
"step": 17250
|
|
},
|
|
{
|
|
"epoch": 3.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3996,
|
|
"step": 17300
|
|
},
|
|
{
|
|
"epoch": 3.12,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4025,
|
|
"step": 17350
|
|
},
|
|
{
|
|
"epoch": 3.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3554,
|
|
"step": 17400
|
|
},
|
|
{
|
|
"epoch": 3.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3842,
|
|
"step": 17450
|
|
},
|
|
{
|
|
"epoch": 3.14,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3796,
|
|
"step": 17500
|
|
},
|
|
{
|
|
"epoch": 3.15,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3765,
|
|
"step": 17550
|
|
},
|
|
{
|
|
"epoch": 3.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4065,
|
|
"step": 17600
|
|
},
|
|
{
|
|
"epoch": 3.17,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3981,
|
|
"step": 17650
|
|
},
|
|
{
|
|
"epoch": 3.18,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4013,
|
|
"step": 17700
|
|
},
|
|
{
|
|
"epoch": 3.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4043,
|
|
"step": 17750
|
|
},
|
|
{
|
|
"epoch": 3.2,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.422,
|
|
"step": 17800
|
|
},
|
|
{
|
|
"epoch": 3.21,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4058,
|
|
"step": 17850
|
|
},
|
|
{
|
|
"epoch": 3.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3772,
|
|
"step": 17900
|
|
},
|
|
{
|
|
"epoch": 3.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3926,
|
|
"step": 17950
|
|
},
|
|
{
|
|
"epoch": 3.23,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3846,
|
|
"step": 18000
|
|
},
|
|
{
|
|
"epoch": 3.24,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4259,
|
|
"step": 18050
|
|
},
|
|
{
|
|
"epoch": 3.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3775,
|
|
"step": 18100
|
|
},
|
|
{
|
|
"epoch": 3.26,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4124,
|
|
"step": 18150
|
|
},
|
|
{
|
|
"epoch": 3.27,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.404,
|
|
"step": 18200
|
|
},
|
|
{
|
|
"epoch": 3.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4017,
|
|
"step": 18250
|
|
},
|
|
{
|
|
"epoch": 3.29,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4419,
|
|
"step": 18300
|
|
},
|
|
{
|
|
"epoch": 3.3,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4284,
|
|
"step": 18350
|
|
},
|
|
{
|
|
"epoch": 3.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4295,
|
|
"step": 18400
|
|
},
|
|
{
|
|
"epoch": 3.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4021,
|
|
"step": 18450
|
|
},
|
|
{
|
|
"epoch": 3.32,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4314,
|
|
"step": 18500
|
|
},
|
|
{
|
|
"epoch": 3.33,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4332,
|
|
"step": 18550
|
|
},
|
|
{
|
|
"epoch": 3.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3919,
|
|
"step": 18600
|
|
},
|
|
{
|
|
"epoch": 3.35,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4467,
|
|
"step": 18650
|
|
},
|
|
{
|
|
"epoch": 3.36,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3978,
|
|
"step": 18700
|
|
},
|
|
{
|
|
"epoch": 3.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4264,
|
|
"step": 18750
|
|
},
|
|
{
|
|
"epoch": 3.38,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.421,
|
|
"step": 18800
|
|
},
|
|
{
|
|
"epoch": 3.39,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4217,
|
|
"step": 18850
|
|
},
|
|
{
|
|
"epoch": 3.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4244,
|
|
"step": 18900
|
|
},
|
|
{
|
|
"epoch": 3.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3918,
|
|
"step": 18950
|
|
},
|
|
{
|
|
"epoch": 3.41,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4048,
|
|
"step": 19000
|
|
},
|
|
{
|
|
"epoch": 3.42,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4142,
|
|
"step": 19050
|
|
},
|
|
{
|
|
"epoch": 3.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4071,
|
|
"step": 19100
|
|
},
|
|
{
|
|
"epoch": 3.44,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.39,
|
|
"step": 19150
|
|
},
|
|
{
|
|
"epoch": 3.45,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3983,
|
|
"step": 19200
|
|
},
|
|
{
|
|
"epoch": 3.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4342,
|
|
"step": 19250
|
|
},
|
|
{
|
|
"epoch": 3.47,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4071,
|
|
"step": 19300
|
|
},
|
|
{
|
|
"epoch": 3.48,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4294,
|
|
"step": 19350
|
|
},
|
|
{
|
|
"epoch": 3.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3989,
|
|
"step": 19400
|
|
},
|
|
{
|
|
"epoch": 3.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4115,
|
|
"step": 19450
|
|
},
|
|
{
|
|
"epoch": 3.5,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4238,
|
|
"step": 19500
|
|
},
|
|
{
|
|
"epoch": 3.51,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3964,
|
|
"step": 19550
|
|
},
|
|
{
|
|
"epoch": 3.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4103,
|
|
"step": 19600
|
|
},
|
|
{
|
|
"epoch": 3.53,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.416,
|
|
"step": 19650
|
|
},
|
|
{
|
|
"epoch": 3.54,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4362,
|
|
"step": 19700
|
|
},
|
|
{
|
|
"epoch": 3.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4094,
|
|
"step": 19750
|
|
},
|
|
{
|
|
"epoch": 3.56,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4167,
|
|
"step": 19800
|
|
},
|
|
{
|
|
"epoch": 3.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4025,
|
|
"step": 19850
|
|
},
|
|
{
|
|
"epoch": 3.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3804,
|
|
"step": 19900
|
|
},
|
|
{
|
|
"epoch": 3.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4188,
|
|
"step": 19950
|
|
},
|
|
{
|
|
"epoch": 3.59,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4333,
|
|
"step": 20000
|
|
},
|
|
{
|
|
"epoch": 3.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4001,
|
|
"step": 20050
|
|
},
|
|
{
|
|
"epoch": 3.61,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3774,
|
|
"step": 20100
|
|
},
|
|
{
|
|
"epoch": 3.62,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4332,
|
|
"step": 20150
|
|
},
|
|
{
|
|
"epoch": 3.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4342,
|
|
"step": 20200
|
|
},
|
|
{
|
|
"epoch": 3.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4147,
|
|
"step": 20250
|
|
},
|
|
{
|
|
"epoch": 3.65,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4372,
|
|
"step": 20300
|
|
},
|
|
{
|
|
"epoch": 3.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3962,
|
|
"step": 20350
|
|
},
|
|
{
|
|
"epoch": 3.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4262,
|
|
"step": 20400
|
|
},
|
|
{
|
|
"epoch": 3.67,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4362,
|
|
"step": 20450
|
|
},
|
|
{
|
|
"epoch": 3.68,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4112,
|
|
"step": 20500
|
|
},
|
|
{
|
|
"epoch": 3.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4011,
|
|
"step": 20550
|
|
},
|
|
{
|
|
"epoch": 3.7,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3906,
|
|
"step": 20600
|
|
},
|
|
{
|
|
"epoch": 3.71,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4094,
|
|
"step": 20650
|
|
},
|
|
{
|
|
"epoch": 3.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4302,
|
|
"step": 20700
|
|
},
|
|
{
|
|
"epoch": 3.73,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4143,
|
|
"step": 20750
|
|
},
|
|
{
|
|
"epoch": 3.74,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.398,
|
|
"step": 20800
|
|
},
|
|
{
|
|
"epoch": 3.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.43,
|
|
"step": 20850
|
|
},
|
|
{
|
|
"epoch": 3.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4124,
|
|
"step": 20900
|
|
},
|
|
{
|
|
"epoch": 3.76,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4167,
|
|
"step": 20950
|
|
},
|
|
{
|
|
"epoch": 3.77,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4219,
|
|
"step": 21000
|
|
},
|
|
{
|
|
"epoch": 3.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4264,
|
|
"step": 21050
|
|
},
|
|
{
|
|
"epoch": 3.79,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.435,
|
|
"step": 21100
|
|
},
|
|
{
|
|
"epoch": 3.8,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4449,
|
|
"step": 21150
|
|
},
|
|
{
|
|
"epoch": 3.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4595,
|
|
"step": 21200
|
|
},
|
|
{
|
|
"epoch": 3.82,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4202,
|
|
"step": 21250
|
|
},
|
|
{
|
|
"epoch": 3.83,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3981,
|
|
"step": 21300
|
|
},
|
|
{
|
|
"epoch": 3.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4261,
|
|
"step": 21350
|
|
},
|
|
{
|
|
"epoch": 3.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4412,
|
|
"step": 21400
|
|
},
|
|
{
|
|
"epoch": 3.85,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3918,
|
|
"step": 21450
|
|
},
|
|
{
|
|
"epoch": 3.86,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4354,
|
|
"step": 21500
|
|
},
|
|
{
|
|
"epoch": 3.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4373,
|
|
"step": 21550
|
|
},
|
|
{
|
|
"epoch": 3.88,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4302,
|
|
"step": 21600
|
|
},
|
|
{
|
|
"epoch": 3.89,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4113,
|
|
"step": 21650
|
|
},
|
|
{
|
|
"epoch": 3.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.392,
|
|
"step": 21700
|
|
},
|
|
{
|
|
"epoch": 3.91,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4335,
|
|
"step": 21750
|
|
},
|
|
{
|
|
"epoch": 3.92,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4211,
|
|
"step": 21800
|
|
},
|
|
{
|
|
"epoch": 3.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4178,
|
|
"step": 21850
|
|
},
|
|
{
|
|
"epoch": 3.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4333,
|
|
"step": 21900
|
|
},
|
|
{
|
|
"epoch": 3.94,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4438,
|
|
"step": 21950
|
|
},
|
|
{
|
|
"epoch": 3.95,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4118,
|
|
"step": 22000
|
|
},
|
|
{
|
|
"epoch": 3.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4256,
|
|
"step": 22050
|
|
},
|
|
{
|
|
"epoch": 3.97,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4394,
|
|
"step": 22100
|
|
},
|
|
{
|
|
"epoch": 3.98,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4329,
|
|
"step": 22150
|
|
},
|
|
{
|
|
"epoch": 3.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4481,
|
|
"step": 22200
|
|
},
|
|
{
|
|
"epoch": 4.0,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.4215,
|
|
"step": 22250
|
|
},
{
"epoch": 4.0,
"eval_loss": 0.7574929594993591,
"eval_runtime": 207.1872,
"eval_samples_per_second": 15.508,
"eval_steps_per_second": 0.97,
"step": 22265
},
|
|
{
|
|
"epoch": 4.01,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3323,
|
|
"step": 22300
|
|
},
|
|
{
|
|
"epoch": 4.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3289,
|
|
"step": 22350
|
|
},
|
|
{
|
|
"epoch": 4.02,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3332,
|
|
"step": 22400
|
|
},
|
|
{
|
|
"epoch": 4.03,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3404,
|
|
"step": 22450
|
|
},
|
|
{
|
|
"epoch": 4.04,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3108,
|
|
"step": 22500
|
|
},
|
|
{
|
|
"epoch": 4.05,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3362,
|
|
"step": 22550
|
|
},
|
|
{
|
|
"epoch": 4.06,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3105,
|
|
"step": 22600
|
|
},
|
|
{
|
|
"epoch": 4.07,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3056,
|
|
"step": 22650
|
|
},
|
|
{
|
|
"epoch": 4.08,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3253,
|
|
"step": 22700
|
|
},
|
|
{
|
|
"epoch": 4.09,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3151,
|
|
"step": 22750
|
|
},
|
|
{
|
|
"epoch": 4.1,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3482,
|
|
"step": 22800
|
|
},
|
|
{
|
|
"epoch": 4.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3216,
|
|
"step": 22850
|
|
},
|
|
{
|
|
"epoch": 4.11,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3492,
|
|
"step": 22900
|
|
},
|
|
{
|
|
"epoch": 4.12,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3172,
|
|
"step": 22950
|
|
},
|
|
{
|
|
"epoch": 4.13,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3397,
|
|
"step": 23000
|
|
},
|
|
{
|
|
"epoch": 4.14,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3569,
|
|
"step": 23050
|
|
},
|
|
{
|
|
"epoch": 4.15,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3143,
|
|
"step": 23100
|
|
},
|
|
{
|
|
"epoch": 4.16,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.313,
|
|
"step": 23150
|
|
},
|
|
{
|
|
"epoch": 4.17,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3394,
|
|
"step": 23200
|
|
},
|
|
{
|
|
"epoch": 4.18,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3434,
|
|
"step": 23250
|
|
},
|
|
{
|
|
"epoch": 4.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3217,
|
|
"step": 23300
|
|
},
|
|
{
|
|
"epoch": 4.19,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3066,
|
|
"step": 23350
|
|
},
|
|
{
|
|
"epoch": 4.2,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3317,
|
|
"step": 23400
|
|
},
|
|
{
|
|
"epoch": 4.21,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3496,
|
|
"step": 23450
|
|
},
|
|
{
|
|
"epoch": 4.22,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3313,
|
|
"step": 23500
|
|
},
|
|
{
|
|
"epoch": 4.23,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3496,
|
|
"step": 23550
|
|
},
|
|
{
|
|
"epoch": 4.24,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3218,
|
|
"step": 23600
|
|
},
|
|
{
|
|
"epoch": 4.25,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3474,
|
|
"step": 23650
|
|
},
|
|
{
|
|
"epoch": 4.26,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3447,
|
|
"step": 23700
|
|
},
|
|
{
|
|
"epoch": 4.27,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3166,
|
|
"step": 23750
|
|
},
|
|
{
|
|
"epoch": 4.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3571,
|
|
"step": 23800
|
|
},
|
|
{
|
|
"epoch": 4.28,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3497,
|
|
"step": 23850
|
|
},
|
|
{
|
|
"epoch": 4.29,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3384,
|
|
"step": 23900
|
|
},
|
|
{
|
|
"epoch": 4.3,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3305,
|
|
"step": 23950
|
|
},
|
|
{
|
|
"epoch": 4.31,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3372,
|
|
"step": 24000
|
|
},
|
|
{
|
|
"epoch": 4.32,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3429,
|
|
"step": 24050
|
|
},
|
|
{
|
|
"epoch": 4.33,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3241,
|
|
"step": 24100
|
|
},
|
|
{
|
|
"epoch": 4.34,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3267,
|
|
"step": 24150
|
|
},
|
|
{
|
|
"epoch": 4.35,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3426,
|
|
"step": 24200
|
|
},
|
|
{
|
|
"epoch": 4.36,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3445,
|
|
"step": 24250
|
|
},
|
|
{
|
|
"epoch": 4.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3387,
|
|
"step": 24300
|
|
},
|
|
{
|
|
"epoch": 4.37,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3388,
|
|
"step": 24350
|
|
},
|
|
{
|
|
"epoch": 4.38,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.343,
|
|
"step": 24400
|
|
},
|
|
{
|
|
"epoch": 4.39,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3596,
|
|
"step": 24450
|
|
},
|
|
{
|
|
"epoch": 4.4,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3675,
|
|
"step": 24500
|
|
},
|
|
{
|
|
"epoch": 4.41,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3494,
|
|
"step": 24550
|
|
},
|
|
{
|
|
"epoch": 4.42,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3435,
|
|
"step": 24600
|
|
},
|
|
{
|
|
"epoch": 4.43,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3374,
|
|
"step": 24650
|
|
},
|
|
{
|
|
"epoch": 4.44,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3486,
|
|
"step": 24700
|
|
},
|
|
{
|
|
"epoch": 4.45,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3753,
|
|
"step": 24750
|
|
},
|
|
{
|
|
"epoch": 4.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3417,
|
|
"step": 24800
|
|
},
|
|
{
|
|
"epoch": 4.46,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3431,
|
|
"step": 24850
|
|
},
|
|
{
|
|
"epoch": 4.47,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3354,
|
|
"step": 24900
|
|
},
|
|
{
|
|
"epoch": 4.48,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3351,
|
|
"step": 24950
|
|
},
|
|
{
|
|
"epoch": 4.49,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3239,
|
|
"step": 25000
|
|
},
|
|
{
|
|
"epoch": 4.5,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3404,
|
|
"step": 25050
|
|
},
|
|
{
|
|
"epoch": 4.51,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3418,
|
|
"step": 25100
|
|
},
|
|
{
|
|
"epoch": 4.52,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3809,
|
|
"step": 25150
|
|
},
|
|
{
|
|
"epoch": 4.53,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3399,
|
|
"step": 25200
|
|
},
|
|
{
|
|
"epoch": 4.54,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3327,
|
|
"step": 25250
|
|
},
|
|
{
|
|
"epoch": 4.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3445,
|
|
"step": 25300
|
|
},
|
|
{
|
|
"epoch": 4.55,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3208,
|
|
"step": 25350
|
|
},
|
|
{
|
|
"epoch": 4.56,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3522,
|
|
"step": 25400
|
|
},
|
|
{
|
|
"epoch": 4.57,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3467,
|
|
"step": 25450
|
|
},
|
|
{
|
|
"epoch": 4.58,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.34,
|
|
"step": 25500
|
|
},
|
|
{
|
|
"epoch": 4.59,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3376,
|
|
"step": 25550
|
|
},
|
|
{
|
|
"epoch": 4.6,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.329,
|
|
"step": 25600
|
|
},
|
|
{
|
|
"epoch": 4.61,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.333,
|
|
"step": 25650
|
|
},
|
|
{
|
|
"epoch": 4.62,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3547,
|
|
"step": 25700
|
|
},
|
|
{
|
|
"epoch": 4.63,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.345,
|
|
"step": 25750
|
|
},
|
|
{
|
|
"epoch": 4.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3507,
|
|
"step": 25800
|
|
},
|
|
{
|
|
"epoch": 4.64,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3705,
|
|
"step": 25850
|
|
},
|
|
{
|
|
"epoch": 4.65,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3417,
|
|
"step": 25900
|
|
},
|
|
{
|
|
"epoch": 4.66,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3667,
|
|
"step": 25950
|
|
},
|
|
{
|
|
"epoch": 4.67,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3328,
|
|
"step": 26000
|
|
},
|
|
{
|
|
"epoch": 4.68,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3337,
|
|
"step": 26050
|
|
},
|
|
{
|
|
"epoch": 4.69,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3462,
|
|
"step": 26100
|
|
},
|
|
{
|
|
"epoch": 4.7,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3463,
|
|
"step": 26150
|
|
},
|
|
{
|
|
"epoch": 4.71,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3293,
|
|
"step": 26200
|
|
},
|
|
{
|
|
"epoch": 4.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3444,
|
|
"step": 26250
|
|
},
|
|
{
|
|
"epoch": 4.72,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3554,
|
|
"step": 26300
|
|
},
|
|
{
|
|
"epoch": 4.73,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3538,
|
|
"step": 26350
|
|
},
|
|
{
|
|
"epoch": 4.74,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3365,
|
|
"step": 26400
|
|
},
|
|
{
|
|
"epoch": 4.75,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3396,
|
|
"step": 26450
|
|
},
|
|
{
|
|
"epoch": 4.76,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3481,
|
|
"step": 26500
|
|
},
|
|
{
|
|
"epoch": 4.77,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3623,
|
|
"step": 26550
|
|
},
|
|
{
|
|
"epoch": 4.78,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3395,
|
|
"step": 26600
|
|
},
|
|
{
|
|
"epoch": 4.79,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3763,
|
|
"step": 26650
|
|
},
|
|
{
|
|
"epoch": 4.8,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3368,
|
|
"step": 26700
|
|
},
|
|
{
|
|
"epoch": 4.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3409,
|
|
"step": 26750
|
|
},
|
|
{
|
|
"epoch": 4.81,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3473,
|
|
"step": 26800
|
|
},
|
|
{
|
|
"epoch": 4.82,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3595,
|
|
"step": 26850
|
|
},
|
|
{
|
|
"epoch": 4.83,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3463,
|
|
"step": 26900
|
|
},
|
|
{
|
|
"epoch": 4.84,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3486,
|
|
"step": 26950
|
|
},
|
|
{
|
|
"epoch": 4.85,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3689,
|
|
"step": 27000
|
|
},
|
|
{
|
|
"epoch": 4.86,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3721,
|
|
"step": 27050
|
|
},
|
|
{
|
|
"epoch": 4.87,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.377,
|
|
"step": 27100
|
|
},
|
|
{
|
|
"epoch": 4.88,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3337,
|
|
"step": 27150
|
|
},
|
|
{
|
|
"epoch": 4.89,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3574,
|
|
"step": 27200
|
|
},
|
|
{
|
|
"epoch": 4.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3342,
|
|
"step": 27250
|
|
},
|
|
{
|
|
"epoch": 4.9,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3368,
|
|
"step": 27300
|
|
},
|
|
{
|
|
"epoch": 4.91,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3494,
|
|
"step": 27350
|
|
},
|
|
{
|
|
"epoch": 4.92,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3333,
|
|
"step": 27400
|
|
},
|
|
{
|
|
"epoch": 4.93,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3729,
|
|
"step": 27450
|
|
},
|
|
{
|
|
"epoch": 4.94,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3522,
|
|
"step": 27500
|
|
},
|
|
{
|
|
"epoch": 4.95,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3498,
|
|
"step": 27550
|
|
},
|
|
{
|
|
"epoch": 4.96,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3494,
|
|
"step": 27600
|
|
},
|
|
{
|
|
"epoch": 4.97,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.336,
|
|
"step": 27650
|
|
},
|
|
{
|
|
"epoch": 4.98,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3659,
|
|
"step": 27700
|
|
},
|
|
{
|
|
"epoch": 4.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3807,
|
|
"step": 27750
|
|
},
|
|
{
|
|
"epoch": 4.99,
|
|
"learning_rate": 0.0001,
|
|
"loss": 0.3693,
|
|
"step": 27800
|
|
},
{
"epoch": 5.0,
"eval_loss": 0.7859576940536499,
"eval_runtime": 205.7789,
"eval_samples_per_second": 15.614,
"eval_steps_per_second": 0.977,
"step": 27830
}
],
"max_steps": 27830,
"num_train_epochs": 5,
"total_flos": 14931153948672.0,
"trial_name": null,
"trial_params": null
}