{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2002,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004995004995004995,
      "grad_norm": 1.1599862583346459,
      "learning_rate": 9.950248756218906e-08,
      "loss": 2.4381,
      "step": 1
    },
    {
      "epoch": 0.0024975024975024975,
      "grad_norm": 1.3646452392145616,
      "learning_rate": 4.975124378109453e-07,
      "loss": 2.3925,
      "step": 5
    },
    {
      "epoch": 0.004995004995004995,
      "grad_norm": 1.050547128080392,
      "learning_rate": 9.950248756218907e-07,
      "loss": 2.3899,
      "step": 10
    },
    {
      "epoch": 0.007492507492507493,
      "grad_norm": 1.1106843586026567,
      "learning_rate": 1.4925373134328358e-06,
      "loss": 2.3606,
      "step": 15
    },
    {
      "epoch": 0.00999000999000999,
      "grad_norm": 1.0868632779387772,
      "learning_rate": 1.9900497512437813e-06,
      "loss": 2.3782,
      "step": 20
    },
    {
      "epoch": 0.012487512487512488,
      "grad_norm": 0.8822872185142195,
      "learning_rate": 2.4875621890547264e-06,
      "loss": 2.3681,
      "step": 25
    },
    {
      "epoch": 0.014985014985014986,
      "grad_norm": 0.6990472283962801,
      "learning_rate": 2.9850746268656716e-06,
      "loss": 2.3332,
      "step": 30
    },
    {
      "epoch": 0.017482517482517484,
      "grad_norm": 0.6640215974826378,
      "learning_rate": 3.4825870646766175e-06,
      "loss": 2.3265,
      "step": 35
    },
    {
      "epoch": 0.01998001998001998,
      "grad_norm": 0.6451421174329706,
      "learning_rate": 3.980099502487563e-06,
      "loss": 2.2609,
      "step": 40
    },
    {
      "epoch": 0.022477522477522476,
      "grad_norm": 0.4613452178440462,
      "learning_rate": 4.477611940298508e-06,
      "loss": 2.1835,
      "step": 45
    },
    {
      "epoch": 0.024975024975024976,
      "grad_norm": 0.36216302524794514,
      "learning_rate": 4.975124378109453e-06,
      "loss": 2.1793,
      "step": 50
    },
    {
      "epoch": 0.027472527472527472,
      "grad_norm": 0.29388450685747625,
      "learning_rate": 5.472636815920398e-06,
      "loss": 2.1195,
      "step": 55
    },
    {
      "epoch": 0.029970029970029972,
      "grad_norm": 0.25381832457288017,
      "learning_rate": 5.970149253731343e-06,
      "loss": 2.065,
      "step": 60
    },
    {
      "epoch": 0.032467532467532464,
      "grad_norm": 0.23481350483234162,
      "learning_rate": 6.46766169154229e-06,
      "loss": 2.1047,
      "step": 65
    },
    {
      "epoch": 0.03496503496503497,
      "grad_norm": 0.20628569151997012,
      "learning_rate": 6.965174129353235e-06,
      "loss": 2.0828,
      "step": 70
    },
    {
      "epoch": 0.037462537462537464,
      "grad_norm": 0.19709869224218293,
      "learning_rate": 7.46268656716418e-06,
      "loss": 2.0265,
      "step": 75
    },
    {
      "epoch": 0.03996003996003996,
      "grad_norm": 0.2006237162245652,
      "learning_rate": 7.960199004975125e-06,
      "loss": 2.0299,
      "step": 80
    },
    {
      "epoch": 0.042457542457542456,
      "grad_norm": 0.18543831140294761,
      "learning_rate": 8.45771144278607e-06,
      "loss": 2.0171,
      "step": 85
    },
    {
      "epoch": 0.04495504495504495,
      "grad_norm": 0.18480210061309715,
      "learning_rate": 8.955223880597016e-06,
      "loss": 2.0388,
      "step": 90
    },
    {
      "epoch": 0.047452547452547456,
      "grad_norm": 0.18002159323152245,
      "learning_rate": 9.45273631840796e-06,
      "loss": 2.0161,
      "step": 95
    },
    {
      "epoch": 0.04995004995004995,
      "grad_norm": 0.18038747124195287,
      "learning_rate": 9.950248756218906e-06,
      "loss": 1.9666,
      "step": 100
    },
    {
      "epoch": 0.05244755244755245,
      "grad_norm": 0.18508558771562467,
      "learning_rate": 1.0447761194029851e-05,
      "loss": 1.9788,
      "step": 105
    },
    {
      "epoch": 0.054945054945054944,
      "grad_norm": 0.1753326294106231,
      "learning_rate": 1.0945273631840796e-05,
      "loss": 1.9935,
      "step": 110
    },
    {
      "epoch": 0.05744255744255744,
      "grad_norm": 0.17338505913330995,
      "learning_rate": 1.1442786069651741e-05,
      "loss": 1.982,
      "step": 115
    },
    {
      "epoch": 0.059940059940059943,
      "grad_norm": 0.17069633894083674,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 1.9677,
      "step": 120
    },
    {
      "epoch": 0.06243756243756244,
      "grad_norm": 0.17033240645669973,
      "learning_rate": 1.2437810945273631e-05,
      "loss": 1.9579,
      "step": 125
    },
    {
      "epoch": 0.06493506493506493,
      "grad_norm": 0.16801885256509916,
      "learning_rate": 1.293532338308458e-05,
      "loss": 1.9328,
      "step": 130
    },
    {
      "epoch": 0.06743256743256744,
      "grad_norm": 0.16854179300669697,
      "learning_rate": 1.3432835820895525e-05,
      "loss": 1.9424,
      "step": 135
    },
    {
      "epoch": 0.06993006993006994,
      "grad_norm": 0.16787976921679182,
      "learning_rate": 1.393034825870647e-05,
      "loss": 1.9229,
      "step": 140
    },
    {
      "epoch": 0.07242757242757243,
      "grad_norm": 0.167368866372539,
      "learning_rate": 1.4427860696517415e-05,
      "loss": 1.9537,
      "step": 145
    },
    {
      "epoch": 0.07492507492507493,
      "grad_norm": 0.16524535846346908,
      "learning_rate": 1.492537313432836e-05,
      "loss": 1.911,
      "step": 150
    },
    {
      "epoch": 0.07742257742257742,
      "grad_norm": 0.16714889916639722,
      "learning_rate": 1.5422885572139307e-05,
      "loss": 1.9399,
      "step": 155
    },
    {
      "epoch": 0.07992007992007992,
      "grad_norm": 0.16617978712852743,
      "learning_rate": 1.592039800995025e-05,
      "loss": 1.9203,
      "step": 160
    },
    {
      "epoch": 0.08241758241758242,
      "grad_norm": 0.16512217331592868,
      "learning_rate": 1.6417910447761197e-05,
      "loss": 1.923,
      "step": 165
    },
    {
      "epoch": 0.08491508491508491,
      "grad_norm": 0.16496696956998044,
      "learning_rate": 1.691542288557214e-05,
      "loss": 1.9442,
      "step": 170
    },
    {
      "epoch": 0.08741258741258741,
      "grad_norm": 0.16376024451930163,
      "learning_rate": 1.7412935323383088e-05,
      "loss": 1.931,
      "step": 175
    },
    {
      "epoch": 0.0899100899100899,
      "grad_norm": 0.1640420678366522,
      "learning_rate": 1.791044776119403e-05,
      "loss": 1.908,
      "step": 180
    },
    {
      "epoch": 0.0924075924075924,
      "grad_norm": 0.160666099150629,
      "learning_rate": 1.8407960199004978e-05,
      "loss": 1.9139,
      "step": 185
    },
    {
      "epoch": 0.09490509490509491,
      "grad_norm": 0.16238948145763205,
      "learning_rate": 1.890547263681592e-05,
      "loss": 1.8926,
      "step": 190
    },
    {
      "epoch": 0.09740259740259741,
      "grad_norm": 0.163531659071721,
      "learning_rate": 1.9402985074626868e-05,
      "loss": 1.899,
      "step": 195
    },
    {
      "epoch": 0.0999000999000999,
      "grad_norm": 0.16286090035138232,
      "learning_rate": 1.990049751243781e-05,
      "loss": 1.9091,
      "step": 200
    },
    {
      "epoch": 0.1023976023976024,
      "grad_norm": 0.16060757439288406,
      "learning_rate": 1.9999756577597317e-05,
      "loss": 1.8851,
      "step": 205
    },
    {
      "epoch": 0.1048951048951049,
      "grad_norm": 0.16221564755663112,
      "learning_rate": 1.9998767694397236e-05,
      "loss": 1.8779,
      "step": 210
    },
    {
      "epoch": 0.10739260739260739,
      "grad_norm": 0.158639070485282,
      "learning_rate": 1.99970182116654e-05,
      "loss": 1.8574,
      "step": 215
    },
    {
      "epoch": 0.10989010989010989,
      "grad_norm": 0.1602764625642043,
      "learning_rate": 1.9994508262483786e-05,
      "loss": 1.8777,
      "step": 220
    },
    {
      "epoch": 0.11238761238761238,
      "grad_norm": 0.16307835969521423,
      "learning_rate": 1.999123803778254e-05,
      "loss": 1.9083,
      "step": 225
    },
    {
      "epoch": 0.11488511488511488,
      "grad_norm": 0.1628647640219108,
      "learning_rate": 1.998720778632546e-05,
      "loss": 1.8748,
      "step": 230
    },
    {
      "epoch": 0.11738261738261738,
      "grad_norm": 0.16061369451851806,
      "learning_rate": 1.9982417814691048e-05,
      "loss": 1.8514,
      "step": 235
    },
    {
      "epoch": 0.11988011988011989,
      "grad_norm": 0.1623492271600646,
      "learning_rate": 1.997686848724924e-05,
      "loss": 1.8334,
      "step": 240
    },
    {
      "epoch": 0.12237762237762238,
      "grad_norm": 0.16052653488024277,
      "learning_rate": 1.997056022613363e-05,
      "loss": 1.8828,
      "step": 245
    },
    {
      "epoch": 0.12487512487512488,
      "grad_norm": 0.16186662472616486,
      "learning_rate": 1.9963493511209405e-05,
      "loss": 1.8689,
      "step": 250
    },
    {
      "epoch": 0.12737262737262736,
      "grad_norm": 0.15814062813935195,
      "learning_rate": 1.9955668880036812e-05,
      "loss": 1.8633,
      "step": 255
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 0.16042937756824743,
      "learning_rate": 1.9947086927830277e-05,
      "loss": 1.859,
      "step": 260
    },
    {
      "epoch": 0.13236763236763235,
      "grad_norm": 0.15749320226952399,
      "learning_rate": 1.9937748307413134e-05,
      "loss": 1.8324,
      "step": 265
    },
    {
      "epoch": 0.13486513486513488,
      "grad_norm": 0.15932775031704177,
      "learning_rate": 1.9927653729167957e-05,
      "loss": 1.8403,
      "step": 270
    },
    {
      "epoch": 0.13736263736263737,
      "grad_norm": 0.15539081264737326,
      "learning_rate": 1.9916803960982518e-05,
      "loss": 1.8352,
      "step": 275
    },
    {
      "epoch": 0.13986013986013987,
      "grad_norm": 0.1616390225384804,
      "learning_rate": 1.9905199828191385e-05,
      "loss": 1.8703,
      "step": 280
    },
    {
      "epoch": 0.14235764235764237,
      "grad_norm": 0.1608186724233932,
      "learning_rate": 1.9892842213513135e-05,
      "loss": 1.8503,
      "step": 285
    },
    {
      "epoch": 0.14485514485514486,
      "grad_norm": 0.15963816595377592,
      "learning_rate": 1.98797320569832e-05,
      "loss": 1.8459,
      "step": 290
    },
    {
      "epoch": 0.14735264735264736,
      "grad_norm": 0.1582839014078666,
      "learning_rate": 1.986587035588237e-05,
      "loss": 1.8553,
      "step": 295
    },
    {
      "epoch": 0.14985014985014986,
      "grad_norm": 0.15947984158782932,
      "learning_rate": 1.985125816466092e-05,
      "loss": 1.8418,
      "step": 300
    },
    {
      "epoch": 0.15234765234765235,
      "grad_norm": 0.15926277299896913,
      "learning_rate": 1.9835896594858405e-05,
      "loss": 1.8398,
      "step": 305
    },
    {
      "epoch": 0.15484515484515485,
      "grad_norm": 0.1619303456318674,
      "learning_rate": 1.9819786815019108e-05,
      "loss": 1.8453,
      "step": 310
    },
    {
      "epoch": 0.15734265734265734,
      "grad_norm": 0.15663126843033132,
      "learning_rate": 1.9802930050603143e-05,
      "loss": 1.7968,
      "step": 315
    },
    {
      "epoch": 0.15984015984015984,
      "grad_norm": 0.15428142491915428,
      "learning_rate": 1.9785327583893233e-05,
      "loss": 1.7914,
      "step": 320
    },
    {
      "epoch": 0.16233766233766234,
      "grad_norm": 0.1535269165426423,
      "learning_rate": 1.9766980753897186e-05,
      "loss": 1.8398,
      "step": 325
    },
    {
      "epoch": 0.16483516483516483,
      "grad_norm": 0.1566922978711067,
      "learning_rate": 1.974789095624601e-05,
      "loss": 1.8197,
      "step": 330
    },
    {
      "epoch": 0.16733266733266733,
      "grad_norm": 0.16077008857885935,
      "learning_rate": 1.972805964308778e-05,
      "loss": 1.827,
      "step": 335
    },
    {
      "epoch": 0.16983016983016982,
      "grad_norm": 0.1595092032589525,
      "learning_rate": 1.9707488322977137e-05,
      "loss": 1.8202,
      "step": 340
    },
    {
      "epoch": 0.17232767232767232,
      "grad_norm": 0.16187621193128984,
      "learning_rate": 1.968617856076056e-05,
      "loss": 1.8462,
      "step": 345
    },
    {
      "epoch": 0.17482517482517482,
      "grad_norm": 0.15740467422342347,
      "learning_rate": 1.9664131977457334e-05,
      "loss": 1.8285,
      "step": 350
    },
    {
      "epoch": 0.1773226773226773,
      "grad_norm": 0.15653103603511576,
      "learning_rate": 1.964135025013621e-05,
      "loss": 1.8524,
      "step": 355
    },
    {
      "epoch": 0.1798201798201798,
      "grad_norm": 0.16162833026081047,
      "learning_rate": 1.9617835111787867e-05,
      "loss": 1.8384,
      "step": 360
    },
    {
      "epoch": 0.1823176823176823,
      "grad_norm": 0.15473723241970383,
      "learning_rate": 1.9593588351193052e-05,
      "loss": 1.8139,
      "step": 365
    },
    {
      "epoch": 0.1848151848151848,
      "grad_norm": 0.15885275250786435,
      "learning_rate": 1.9568611812786532e-05,
      "loss": 1.8218,
      "step": 370
    },
    {
      "epoch": 0.18731268731268733,
      "grad_norm": 0.1568218347795679,
      "learning_rate": 1.9542907396516785e-05,
      "loss": 1.8099,
      "step": 375
    },
    {
      "epoch": 0.18981018981018982,
      "grad_norm": 0.15705793055606954,
      "learning_rate": 1.9516477057701466e-05,
      "loss": 1.8264,
      "step": 380
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.15732378505813635,
      "learning_rate": 1.9489322806878656e-05,
      "loss": 1.7763,
      "step": 385
    },
    {
      "epoch": 0.19480519480519481,
      "grad_norm": 0.16123492252157817,
      "learning_rate": 1.9461446709653957e-05,
      "loss": 1.8205,
      "step": 390
    },
    {
      "epoch": 0.1973026973026973,
      "grad_norm": 0.15737920869870928,
      "learning_rate": 1.9432850886543326e-05,
      "loss": 1.8145,
      "step": 395
    },
    {
      "epoch": 0.1998001998001998,
      "grad_norm": 0.15899572770709094,
      "learning_rate": 1.9403537512811787e-05,
      "loss": 1.8273,
      "step": 400
    },
    {
      "epoch": 0.2022977022977023,
      "grad_norm": 0.15657272543340842,
      "learning_rate": 1.9373508818307968e-05,
      "loss": 1.7994,
      "step": 405
    },
    {
      "epoch": 0.2047952047952048,
      "grad_norm": 0.15774961567754203,
      "learning_rate": 1.934276708729445e-05,
      "loss": 1.8152,
      "step": 410
    },
    {
      "epoch": 0.2072927072927073,
      "grad_norm": 0.1597456026863612,
      "learning_rate": 1.931131465827403e-05,
      "loss": 1.8067,
      "step": 415
    },
    {
      "epoch": 0.2097902097902098,
      "grad_norm": 0.15874324914863305,
      "learning_rate": 1.927915392381183e-05,
      "loss": 1.8113,
      "step": 420
    },
    {
      "epoch": 0.2122877122877123,
      "grad_norm": 0.15739095253831112,
      "learning_rate": 1.924628733035327e-05,
      "loss": 1.7572,
      "step": 425
    },
    {
      "epoch": 0.21478521478521478,
      "grad_norm": 0.15745037871244488,
      "learning_rate": 1.921271737803802e-05,
      "loss": 1.7815,
      "step": 430
    },
    {
      "epoch": 0.21728271728271728,
      "grad_norm": 0.1574617354946401,
      "learning_rate": 1.9178446620509762e-05,
      "loss": 1.8072,
      "step": 435
    },
    {
      "epoch": 0.21978021978021978,
      "grad_norm": 0.16217729499687356,
      "learning_rate": 1.9143477664721958e-05,
      "loss": 1.8034,
      "step": 440
    },
    {
      "epoch": 0.22227772227772227,
      "grad_norm": 0.16044737049383317,
      "learning_rate": 1.910781317073956e-05,
      "loss": 1.8369,
      "step": 445
    },
    {
      "epoch": 0.22477522477522477,
      "grad_norm": 0.1573797424088386,
      "learning_rate": 1.9071455851536624e-05,
      "loss": 1.7681,
      "step": 450
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 0.15944418619813316,
      "learning_rate": 1.9034408472789966e-05,
      "loss": 1.8018,
      "step": 455
    },
    {
      "epoch": 0.22977022977022976,
      "grad_norm": 0.1567957354310453,
      "learning_rate": 1.899667385266876e-05,
      "loss": 1.8262,
      "step": 460
    },
    {
      "epoch": 0.23226773226773226,
      "grad_norm": 0.15732076579865928,
      "learning_rate": 1.8958254861620175e-05,
      "loss": 1.8065,
      "step": 465
    },
    {
      "epoch": 0.23476523476523475,
      "grad_norm": 0.155336816664159,
      "learning_rate": 1.891915442215101e-05,
      "loss": 1.8053,
      "step": 470
    },
    {
      "epoch": 0.23726273726273725,
      "grad_norm": 0.1597669648163907,
      "learning_rate": 1.88793755086054e-05,
      "loss": 1.8183,
      "step": 475
    },
    {
      "epoch": 0.23976023976023977,
      "grad_norm": 0.15788615176295445,
      "learning_rate": 1.8838921146938523e-05,
      "loss": 1.8216,
      "step": 480
    },
    {
      "epoch": 0.24225774225774227,
      "grad_norm": 0.15554518751652616,
      "learning_rate": 1.8797794414486464e-05,
      "loss": 1.8177,
      "step": 485
    },
    {
      "epoch": 0.24475524475524477,
      "grad_norm": 0.15599244982971106,
      "learning_rate": 1.8755998439732092e-05,
      "loss": 1.7595,
      "step": 490
    },
    {
      "epoch": 0.24725274725274726,
      "grad_norm": 0.1565896274442155,
      "learning_rate": 1.8713536402067084e-05,
      "loss": 1.7927,
      "step": 495
    },
    {
      "epoch": 0.24975024975024976,
      "grad_norm": 0.15811536697664205,
      "learning_rate": 1.8670411531550078e-05,
      "loss": 1.774,
      "step": 500
    },
    {
      "epoch": 0.25224775224775225,
      "grad_norm": 0.156281445500983,
      "learning_rate": 1.8626627108660967e-05,
      "loss": 1.7896,
      "step": 505
    },
    {
      "epoch": 0.2547452547452547,
      "grad_norm": 0.16199506703702074,
      "learning_rate": 1.858218646405134e-05,
      "loss": 1.7957,
      "step": 510
    },
    {
      "epoch": 0.25724275724275725,
      "grad_norm": 0.15409864113956373,
      "learning_rate": 1.853709297829113e-05,
      "loss": 1.7681,
      "step": 515
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.15595897376396248,
      "learning_rate": 1.8491350081611476e-05,
      "loss": 1.7821,
      "step": 520
    },
    {
      "epoch": 0.26223776223776224,
      "grad_norm": 0.15981459835452744,
      "learning_rate": 1.8444961253643735e-05,
      "loss": 1.8,
      "step": 525
    },
    {
      "epoch": 0.2647352647352647,
      "grad_norm": 0.1598175484696389,
      "learning_rate": 1.839793002315486e-05,
      "loss": 1.798,
      "step": 530
    },
    {
      "epoch": 0.26723276723276723,
      "grad_norm": 0.15880648153398907,
      "learning_rate": 1.8350259967778904e-05,
      "loss": 1.8151,
      "step": 535
    },
    {
      "epoch": 0.26973026973026976,
      "grad_norm": 0.16237848244779185,
      "learning_rate": 1.8301954713744913e-05,
      "loss": 1.8267,
      "step": 540
    },
    {
      "epoch": 0.2722277722277722,
      "grad_norm": 0.15519746128863732,
      "learning_rate": 1.825301793560106e-05,
      "loss": 1.7678,
      "step": 545
    },
    {
      "epoch": 0.27472527472527475,
      "grad_norm": 0.15889605012099542,
      "learning_rate": 1.8203453355935138e-05,
      "loss": 1.7552,
      "step": 550
    },
    {
      "epoch": 0.2772227772227772,
      "grad_norm": 0.15715544258244915,
      "learning_rate": 1.8153264745091376e-05,
      "loss": 1.7906,
      "step": 555
    },
    {
      "epoch": 0.27972027972027974,
      "grad_norm": 0.15630976676944236,
      "learning_rate": 1.8102455920883638e-05,
      "loss": 1.7698,
      "step": 560
    },
    {
      "epoch": 0.2822177822177822,
      "grad_norm": 0.15690924431151737,
      "learning_rate": 1.8051030748304995e-05,
      "loss": 1.7676,
      "step": 565
    },
    {
      "epoch": 0.28471528471528473,
      "grad_norm": 0.15783500931857442,
      "learning_rate": 1.7998993139233733e-05,
      "loss": 1.768,
      "step": 570
    },
    {
      "epoch": 0.2872127872127872,
      "grad_norm": 0.1567537339747436,
      "learning_rate": 1.7946347052135765e-05,
      "loss": 1.766,
      "step": 575
    },
    {
      "epoch": 0.2897102897102897,
      "grad_norm": 0.16054044236045276,
      "learning_rate": 1.7893096491763516e-05,
      "loss": 1.7679,
      "step": 580
    },
    {
      "epoch": 0.2922077922077922,
      "grad_norm": 0.1559143042109544,
      "learning_rate": 1.783924550885129e-05,
      "loss": 1.7702,
      "step": 585
    },
    {
      "epoch": 0.2947052947052947,
      "grad_norm": 0.1576160782660945,
      "learning_rate": 1.7784798199807128e-05,
      "loss": 1.7615,
      "step": 590
    },
    {
      "epoch": 0.2972027972027972,
      "grad_norm": 0.15642650102699018,
      "learning_rate": 1.7729758706401198e-05,
      "loss": 1.7765,
      "step": 595
    },
    {
      "epoch": 0.2997002997002997,
      "grad_norm": 0.1559306633293477,
      "learning_rate": 1.7674131215450737e-05,
      "loss": 1.771,
      "step": 600
    },
    {
      "epoch": 0.3021978021978022,
      "grad_norm": 0.15795527293903494,
      "learning_rate": 1.761791995850156e-05,
      "loss": 1.7941,
      "step": 605
    },
    {
      "epoch": 0.3046953046953047,
      "grad_norm": 0.15804826082822157,
      "learning_rate": 1.756112921150616e-05,
      "loss": 1.7846,
      "step": 610
    },
    {
      "epoch": 0.30719280719280717,
      "grad_norm": 0.15566746006494162,
      "learning_rate": 1.7503763294498457e-05,
      "loss": 1.7756,
      "step": 615
    },
    {
      "epoch": 0.3096903096903097,
      "grad_norm": 0.1575042305833722,
      "learning_rate": 1.744582657126517e-05,
      "loss": 1.7905,
      "step": 620
    },
    {
      "epoch": 0.31218781218781216,
      "grad_norm": 0.16032862889097663,
      "learning_rate": 1.7387323449013847e-05,
      "loss": 1.7862,
      "step": 625
    },
    {
      "epoch": 0.3146853146853147,
      "grad_norm": 0.15809033799977093,
      "learning_rate": 1.732825837803765e-05,
      "loss": 1.7702,
      "step": 630
    },
    {
      "epoch": 0.31718281718281716,
      "grad_norm": 0.15672979108179055,
      "learning_rate": 1.7268635851376785e-05,
      "loss": 1.76,
      "step": 635
    },
    {
      "epoch": 0.3196803196803197,
      "grad_norm": 0.1554900829598635,
      "learning_rate": 1.7208460404476745e-05,
      "loss": 1.7411,
      "step": 640
    },
    {
      "epoch": 0.3221778221778222,
      "grad_norm": 0.15739315261539163,
      "learning_rate": 1.7147736614843297e-05,
      "loss": 1.7954,
      "step": 645
    },
    {
      "epoch": 0.3246753246753247,
      "grad_norm": 0.15852054226953874,
      "learning_rate": 1.7086469101694256e-05,
      "loss": 1.7627,
      "step": 650
    },
    {
      "epoch": 0.3271728271728272,
      "grad_norm": 0.1596482505865284,
      "learning_rate": 1.7024662525608138e-05,
      "loss": 1.751,
      "step": 655
    },
    {
      "epoch": 0.32967032967032966,
      "grad_norm": 0.15871675517321407,
      "learning_rate": 1.6962321588169598e-05,
      "loss": 1.7434,
      "step": 660
    },
    {
      "epoch": 0.3321678321678322,
      "grad_norm": 0.15766920664878184,
      "learning_rate": 1.6899451031611814e-05,
      "loss": 1.7639,
      "step": 665
    },
    {
      "epoch": 0.33466533466533466,
      "grad_norm": 0.155044210346668,
      "learning_rate": 1.6836055638455722e-05,
      "loss": 1.7703,
      "step": 670
    },
    {
      "epoch": 0.3371628371628372,
      "grad_norm": 0.1554119105318693,
      "learning_rate": 1.677214023114623e-05,
      "loss": 1.75,
      "step": 675
    },
    {
      "epoch": 0.33966033966033965,
      "grad_norm": 0.15802877535499618,
      "learning_rate": 1.670770967168537e-05,
      "loss": 1.7771,
      "step": 680
    },
    {
      "epoch": 0.3421578421578422,
      "grad_norm": 0.15628665794752747,
      "learning_rate": 1.664276886126246e-05,
      "loss": 1.7738,
      "step": 685
    },
    {
      "epoch": 0.34465534465534464,
      "grad_norm": 0.15746939136953078,
      "learning_rate": 1.6577322739881255e-05,
      "loss": 1.76,
      "step": 690
    },
    {
      "epoch": 0.34715284715284717,
      "grad_norm": 0.1563776125400448,
      "learning_rate": 1.6511376285984178e-05,
      "loss": 1.7815,
      "step": 695
    },
    {
      "epoch": 0.34965034965034963,
      "grad_norm": 0.15612599803803687,
      "learning_rate": 1.6444934516073616e-05,
      "loss": 1.7325,
      "step": 700
    },
    {
      "epoch": 0.35214785214785216,
      "grad_norm": 0.15539555825333878,
      "learning_rate": 1.6378002484330302e-05,
      "loss": 1.7267,
      "step": 705
    },
    {
      "epoch": 0.3546453546453546,
      "grad_norm": 0.15742718718893856,
      "learning_rate": 1.631058528222886e-05,
      "loss": 1.7677,
      "step": 710
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.15722376923699383,
      "learning_rate": 1.6242688038150506e-05,
      "loss": 1.7593,
      "step": 715
    },
    {
      "epoch": 0.3596403596403596,
      "grad_norm": 0.15692354412727902,
      "learning_rate": 1.617431591699291e-05,
      "loss": 1.7681,
      "step": 720
    },
    {
      "epoch": 0.36213786213786214,
      "grad_norm": 0.15962277561047927,
      "learning_rate": 1.610547411977734e-05,
      "loss": 1.75,
      "step": 725
    },
    {
      "epoch": 0.3646353646353646,
      "grad_norm": 0.15692940824376528,
      "learning_rate": 1.6036167883252988e-05,
      "loss": 1.7444,
      "step": 730
    },
    {
      "epoch": 0.36713286713286714,
      "grad_norm": 0.15649627153834378,
      "learning_rate": 1.5966402479498642e-05,
      "loss": 1.7191,
      "step": 735
    },
    {
      "epoch": 0.3696303696303696,
      "grad_norm": 0.1573682637472189,
      "learning_rate": 1.589618321552163e-05,
      "loss": 1.78,
      "step": 740
    },
    {
      "epoch": 0.37212787212787213,
      "grad_norm": 0.15762121053782066,
      "learning_rate": 1.5825515432854125e-05,
      "loss": 1.7795,
      "step": 745
    },
    {
      "epoch": 0.37462537462537465,
      "grad_norm": 0.15776485071561994,
      "learning_rate": 1.575440450714681e-05,
      "loss": 1.7656,
      "step": 750
    },
    {
      "epoch": 0.3771228771228771,
      "grad_norm": 0.15837437348496275,
      "learning_rate": 1.5682855847759965e-05,
      "loss": 1.7684,
      "step": 755
    },
    {
      "epoch": 0.37962037962037964,
      "grad_norm": 0.16037425169330838,
      "learning_rate": 1.561087489735197e-05,
      "loss": 1.7368,
      "step": 760
    },
    {
      "epoch": 0.3821178821178821,
      "grad_norm": 0.15513596949144706,
      "learning_rate": 1.55384671314653e-05,
      "loss": 1.7818,
      "step": 765
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.15507828031304494,
      "learning_rate": 1.5465638058109998e-05,
      "loss": 1.7796,
      "step": 770
    },
    {
      "epoch": 0.3871128871128871,
      "grad_norm": 0.1554815328421005,
      "learning_rate": 1.5392393217344666e-05,
      "loss": 1.755,
      "step": 775
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 0.15688136225119317,
      "learning_rate": 1.5318738180855073e-05,
      "loss": 1.7387,
      "step": 780
    },
    {
      "epoch": 0.3921078921078921,
      "grad_norm": 0.1582625776474904,
      "learning_rate": 1.5244678551530294e-05,
      "loss": 1.7603,
      "step": 785
    },
    {
      "epoch": 0.3946053946053946,
      "grad_norm": 0.15701117307543766,
      "learning_rate": 1.5170219963036501e-05,
      "loss": 1.7182,
      "step": 790
    },
    {
      "epoch": 0.3971028971028971,
      "grad_norm": 0.15766319963371506,
      "learning_rate": 1.5095368079388433e-05,
      "loss": 1.7295,
      "step": 795
    },
    {
      "epoch": 0.3996003996003996,
      "grad_norm": 0.1587394631264011,
      "learning_rate": 1.5020128594518521e-05,
      "loss": 1.7914,
      "step": 800
    },
    {
      "epoch": 0.4020979020979021,
      "grad_norm": 0.15654000030100182,
      "learning_rate": 1.4944507231843756e-05,
      "loss": 1.7202,
      "step": 805
    },
    {
      "epoch": 0.4045954045954046,
      "grad_norm": 0.15292942125756898,
      "learning_rate": 1.4868509743830332e-05,
      "loss": 1.7901,
      "step": 810
    },
    {
      "epoch": 0.4070929070929071,
      "grad_norm": 0.15560456619792792,
      "learning_rate": 1.4792141911556027e-05,
      "loss": 1.7716,
      "step": 815
    },
    {
      "epoch": 0.4095904095904096,
      "grad_norm": 0.15543386661085493,
      "learning_rate": 1.4715409544270462e-05,
      "loss": 1.7203,
      "step": 820
    },
    {
      "epoch": 0.41208791208791207,
      "grad_norm": 0.15809116648034297,
      "learning_rate": 1.4638318478953202e-05,
      "loss": 1.7382,
      "step": 825
    },
    {
      "epoch": 0.4145854145854146,
      "grad_norm": 0.1557252652086651,
      "learning_rate": 1.4560874579869716e-05,
      "loss": 1.7729,
      "step": 830
    },
    {
      "epoch": 0.41708291708291706,
      "grad_norm": 0.1558675093074507,
      "learning_rate": 1.4483083738125312e-05,
      "loss": 1.7453,
      "step": 835
    },
    {
      "epoch": 0.4195804195804196,
      "grad_norm": 0.15507446312985207,
      "learning_rate": 1.440495187121698e-05,
      "loss": 1.7678,
      "step": 840
    },
    {
      "epoch": 0.42207792207792205,
      "grad_norm": 0.1553337193710422,
      "learning_rate": 1.4326484922583277e-05,
      "loss": 1.7709,
      "step": 845
    },
    {
      "epoch": 0.4245754245754246,
      "grad_norm": 0.15761636621288116,
      "learning_rate": 1.4247688861152195e-05,
      "loss": 1.7315,
      "step": 850
    },
    {
      "epoch": 0.4270729270729271,
      "grad_norm": 0.15356881599814767,
      "learning_rate": 1.4168569680887115e-05,
      "loss": 1.6987,
      "step": 855
    },
    {
      "epoch": 0.42957042957042957,
      "grad_norm": 0.15728107221945523,
      "learning_rate": 1.4089133400330851e-05,
      "loss": 1.741,
      "step": 860
    },
    {
      "epoch": 0.4320679320679321,
      "grad_norm": 0.1548348784145128,
      "learning_rate": 1.4009386062147829e-05,
      "loss": 1.7323,
      "step": 865
    },
    {
      "epoch": 0.43456543456543456,
      "grad_norm": 0.15572598457585934,
      "learning_rate": 1.3929333732664405e-05,
      "loss": 1.727,
      "step": 870
    },
    {
      "epoch": 0.4370629370629371,
      "grad_norm": 0.15491152138136058,
      "learning_rate": 1.3848982501407433e-05,
      "loss": 1.7137,
      "step": 875
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.15288889483520682,
      "learning_rate": 1.376833848064102e-05,
      "loss": 1.7241,
      "step": 880
    },
    {
      "epoch": 0.4420579420579421,
      "grad_norm": 0.15423204666405854,
      "learning_rate": 1.3687407804901562e-05,
      "loss": 1.7536,
      "step": 885
    },
    {
      "epoch": 0.44455544455544455,
      "grad_norm": 0.15495163216179897,
      "learning_rate": 1.3606196630531125e-05,
      "loss": 1.7184,
      "step": 890
    },
    {
      "epoch": 0.44705294705294707,
      "grad_norm": 0.15761280747235365,
      "learning_rate": 1.3524711135209106e-05,
      "loss": 1.7751,
      "step": 895
    },
    {
      "epoch": 0.44955044955044954,
      "grad_norm": 0.15841771000224297,
      "learning_rate": 1.344295751748231e-05,
      "loss": 1.7514,
      "step": 900
    },
    {
      "epoch": 0.45204795204795206,
      "grad_norm": 0.15428205907298273,
      "learning_rate": 1.3360941996293439e-05,
      "loss": 1.716,
      "step": 905
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.15303631590090105,
      "learning_rate": 1.3278670810508016e-05,
      "loss": 1.7083,
      "step": 910
    },
    {
      "epoch": 0.45704295704295705,
      "grad_norm": 0.152891460359696,
      "learning_rate": 1.3196150218439791e-05,
      "loss": 1.711,
      "step": 915
    },
    {
      "epoch": 0.4595404595404595,
      "grad_norm": 0.1541097644604054,
      "learning_rate": 1.3113386497374691e-05,
      "loss": 1.7352,
      "step": 920
    },
    {
      "epoch": 0.46203796203796205,
      "grad_norm": 0.1581983359025658,
      "learning_rate": 1.303038594309329e-05,
      "loss": 1.7367,
      "step": 925
    },
    {
      "epoch": 0.4645354645354645,
      "grad_norm": 0.15475095934873914,
      "learning_rate": 1.294715486939192e-05,
      "loss": 1.7169,
      "step": 930
    },
    {
      "epoch": 0.46703296703296704,
      "grad_norm": 0.15844968557859115,
      "learning_rate": 1.2863699607602358e-05,
      "loss": 1.7208,
      "step": 935
    },
    {
      "epoch": 0.4695304695304695,
      "grad_norm": 0.15639170445946177,
      "learning_rate": 1.2780026506110225e-05,
      "loss": 1.7242,
      "step": 940
    },
    {
      "epoch": 0.47202797202797203,
      "grad_norm": 0.15698004903219,
      "learning_rate": 1.2696141929872064e-05,
      "loss": 1.754,
      "step": 945
    },
    {
      "epoch": 0.4745254745254745,
      "grad_norm": 0.15510251460198132,
      "learning_rate": 1.2612052259931146e-05,
      "loss": 1.7437,
      "step": 950
    },
    {
      "epoch": 0.477022977022977,
      "grad_norm": 0.1569119958304332,
      "learning_rate": 1.2527763892932098e-05,
      "loss": 1.7334,
      "step": 955
    },
    {
      "epoch": 0.47952047952047955,
      "grad_norm": 0.15742453447805246,
      "learning_rate": 1.2443283240634295e-05,
      "loss": 1.7468,
      "step": 960
    },
    {
      "epoch": 0.482017982017982,
      "grad_norm": 0.15752206416220757,
      "learning_rate": 1.2358616729424113e-05,
      "loss": 1.7285,
      "step": 965
    },
    {
      "epoch": 0.48451548451548454,
      "grad_norm": 0.15404918669113854,
      "learning_rate": 1.2273770799826104e-05,
      "loss": 1.6984,
      "step": 970
    },
    {
      "epoch": 0.487012987012987,
      "grad_norm": 0.15592357368342188,
      "learning_rate": 1.2188751906013054e-05,
      "loss": 1.7329,
      "step": 975
    },
    {
      "epoch": 0.48951048951048953,
      "grad_norm": 0.156094008256334,
      "learning_rate": 1.2103566515315015e-05,
      "loss": 1.7513,
      "step": 980
    },
    {
      "epoch": 0.492007992007992,
      "grad_norm": 0.1563990934117787,
      "learning_rate": 1.2018221107727348e-05,
      "loss": 1.7353,
      "step": 985
    },
    {
      "epoch": 0.4945054945054945,
      "grad_norm": 0.1562707992028513,
      "learning_rate": 1.1932722175417796e-05,
      "loss": 1.7572,
      "step": 990
    },
    {
      "epoch": 0.497002997002997,
      "grad_norm": 0.1535470008052979,
      "learning_rate": 1.1847076222232614e-05,
      "loss": 1.7176,
      "step": 995
    },
    {
      "epoch": 0.4995004995004995,
      "grad_norm": 0.1582614864687129,
      "learning_rate": 1.1761289763201843e-05,
      "loss": 1.7784,
      "step": 1000
    },
    {
      "epoch": 0.501998001998002,
      "grad_norm": 0.155758931237908,
      "learning_rate": 1.1675369324043711e-05,
      "loss": 1.7144,
      "step": 1005
    },
    {
      "epoch": 0.5044955044955045,
      "grad_norm": 0.15984802366682394,
      "learning_rate": 1.1589321440668215e-05,
      "loss": 1.7144,
      "step": 1010
    },
    {
      "epoch": 0.506993006993007,
      "grad_norm": 0.1593119494177456,
      "learning_rate": 1.150315265867996e-05,
      "loss": 1.6995,
      "step": 1015
    },
    {
      "epoch": 0.5094905094905094,
      "grad_norm": 0.15572494789848285,
      "learning_rate": 1.1416869532880219e-05,
      "loss": 1.7289,
      "step": 1020
    },
    {
      "epoch": 0.511988011988012,
      "grad_norm": 0.15289377290260284,
      "learning_rate": 1.1330478626768322e-05,
      "loss": 1.7166,
      "step": 1025
    },
    {
      "epoch": 0.5144855144855145,
      "grad_norm": 0.15398889595667828,
      "learning_rate": 1.1243986512042377e-05,
      "loss": 1.7163,
      "step": 1030
    },
    {
      "epoch": 0.516983016983017,
      "grad_norm": 0.15490392246805634,
      "learning_rate": 1.1157399768099366e-05,
      "loss": 1.7134,
      "step": 1035
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.15500679881154095,
      "learning_rate": 1.1070724981534647e-05,
      "loss": 1.7396,
      "step": 1040
    },
    {
      "epoch": 0.521978021978022,
      "grad_norm": 0.1560235269544757,
      "learning_rate": 1.0983968745640923e-05,
      "loss": 1.6969,
      "step": 1045
    },
    {
      "epoch": 0.5244755244755245,
      "grad_norm": 0.15453453742882894,
      "learning_rate": 1.0897137659906688e-05,
      "loss": 1.7426,
      "step": 1050
    },
    {
      "epoch": 0.526973026973027,
      "grad_norm": 0.15599081676361193,
      "learning_rate": 1.0810238329514225e-05,
      "loss": 1.7457,
      "step": 1055
    },
    {
      "epoch": 0.5294705294705294,
      "grad_norm": 0.15738103416797297,
      "learning_rate": 1.072327736483713e-05,
      "loss": 1.7159,
      "step": 1060
    },
    {
      "epoch": 0.531968031968032,
      "grad_norm": 0.15854287836067726,
      "learning_rate": 1.0636261380937483e-05,
      "loss": 1.7684,
      "step": 1065
    },
    {
      "epoch": 0.5344655344655345,
      "grad_norm": 0.15666913409437308,
      "learning_rate": 1.0549196997062636e-05,
      "loss": 1.7533,
      "step": 1070
    },
    {
      "epoch": 0.5369630369630369,
      "grad_norm": 0.15513752052678667,
      "learning_rate": 1.0462090836141705e-05,
      "loss": 1.7485,
      "step": 1075
    },
    {
      "epoch": 0.5394605394605395,
      "grad_norm": 0.15606650567355096,
      "learning_rate": 1.0374949524281741e-05,
      "loss": 1.7112,
      "step": 1080
    },
    {
      "epoch": 0.541958041958042,
      "grad_norm": 0.15696280237552776,
      "learning_rate": 1.0287779690263718e-05,
      "loss": 1.7299,
      "step": 1085
    },
    {
      "epoch": 0.5444555444555444,
      "grad_norm": 0.15466947191164923,
      "learning_rate": 1.020058796503826e-05,
      "loss": 1.7416,
      "step": 1090
    },
    {
      "epoch": 0.5469530469530469,
      "grad_norm": 0.15659516724914946,
      "learning_rate": 1.0113380981221251e-05,
      "loss": 1.7309,
      "step": 1095
    },
    {
      "epoch": 0.5494505494505495,
      "grad_norm": 0.15698307016673457,
      "learning_rate": 1.0026165372589278e-05,
      "loss": 1.7416,
      "step": 1100
    },
    {
      "epoch": 0.551948051948052,
      "grad_norm": 0.15747483213576682,
      "learning_rate": 9.938947773575014e-06,
      "loss": 1.7354,
      "step": 1105
    },
    {
      "epoch": 0.5544455544455544,
      "grad_norm": 0.1548555078024705,
      "learning_rate": 9.85173481876254e-06,
      "loss": 1.7285,
      "step": 1110
    },
    {
      "epoch": 0.5569430569430569,
      "grad_norm": 0.1576329932512269,
      "learning_rate": 9.76453314238266e-06,
      "loss": 1.7295,
      "step": 1115
    },
    {
      "epoch": 0.5594405594405595,
      "grad_norm": 0.15518997199188608,
      "learning_rate": 9.677349377808223e-06,
      "loss": 1.7252,
      "step": 1120
    },
    {
      "epoch": 0.561938061938062,
      "grad_norm": 0.15573012190773938,
      "learning_rate": 9.59019015704955e-06,
      "loss": 1.7257,
      "step": 1125
    },
    {
      "epoch": 0.5644355644355644,
      "grad_norm": 0.15100601160295765,
      "learning_rate": 9.50306211024993e-06,
      "loss": 1.6836,
      "step": 1130
    },
    {
      "epoch": 0.5669330669330669,
      "grad_norm": 0.15501908738813838,
      "learning_rate": 9.415971865181262e-06,
      "loss": 1.7429,
      "step": 1135
    },
    {
      "epoch": 0.5694305694305695,
      "grad_norm": 0.15812904061992633,
      "learning_rate": 9.328926046739899e-06,
      "loss": 1.7084,
      "step": 1140
    },
    {
      "epoch": 0.5719280719280719,
      "grad_norm": 0.15627757158949415,
      "learning_rate": 9.241931276442692e-06,
      "loss": 1.7214,
      "step": 1145
    },
    {
      "epoch": 0.5744255744255744,
      "grad_norm": 0.1558930669628705,
      "learning_rate": 9.154994171923285e-06,
      "loss": 1.7109,
      "step": 1150
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.15441900101502826,
      "learning_rate": 9.068121346428735e-06,
      "loss": 1.7376,
      "step": 1155
    },
    {
      "epoch": 0.5794205794205795,
      "grad_norm": 0.15623145805295774,
      "learning_rate": 8.981319408316435e-06,
      "loss": 1.736,
      "step": 1160
    },
    {
      "epoch": 0.5819180819180819,
      "grad_norm": 0.15421994986436063,
      "learning_rate": 8.894594960551417e-06,
      "loss": 1.7484,
      "step": 1165
    },
    {
      "epoch": 0.5844155844155844,
      "grad_norm": 0.15562604285522325,
      "learning_rate": 8.807954600204079e-06,
      "loss": 1.7608,
      "step": 1170
    },
    {
      "epoch": 0.586913086913087,
      "grad_norm": 0.15592693727619433,
      "learning_rate": 8.721404917948343e-06,
      "loss": 1.7358,
      "step": 1175
    },
    {
      "epoch": 0.5894105894105894,
      "grad_norm": 0.15213420323940668,
      "learning_rate": 8.634952497560311e-06,
      "loss": 1.6824,
      "step": 1180
    },
    {
      "epoch": 0.5919080919080919,
      "grad_norm": 0.15353479909518586,
      "learning_rate": 8.548603915417445e-06,
      "loss": 1.7362,
      "step": 1185
    },
    {
      "epoch": 0.5944055944055944,
      "grad_norm": 0.1568536405873683,
      "learning_rate": 8.462365739998293e-06,
      "loss": 1.7144,
      "step": 1190
    },
    {
      "epoch": 0.596903096903097,
      "grad_norm": 0.15792171997932242,
      "learning_rate": 8.37624453138284e-06,
      "loss": 1.7364,
      "step": 1195
    },
    {
      "epoch": 0.5994005994005994,
      "grad_norm": 0.1559386534693386,
      "learning_rate": 8.29024684075349e-06,
      "loss": 1.7593,
      "step": 1200
    },
    {
      "epoch": 0.6018981018981019,
      "grad_norm": 0.15715611634818757,
      "learning_rate": 8.204379209896712e-06,
      "loss": 1.7032,
      "step": 1205
    },
    {
      "epoch": 0.6043956043956044,
      "grad_norm": 0.15372344898616952,
      "learning_rate": 8.118648170705418e-06,
      "loss": 1.6978,
      "step": 1210
    },
    {
      "epoch": 0.6068931068931069,
      "grad_norm": 0.15810145378733176,
      "learning_rate": 8.033060244682079e-06,
      "loss": 1.7396,
      "step": 1215
    },
    {
      "epoch": 0.6093906093906094,
      "grad_norm": 0.1557299319735277,
      "learning_rate": 7.947621942442651e-06,
      "loss": 1.7232,
      "step": 1220
    },
    {
      "epoch": 0.6118881118881119,
      "grad_norm": 0.1557147874512252,
      "learning_rate": 7.86233976322131e-06,
      "loss": 1.71,
      "step": 1225
    },
    {
      "epoch": 0.6143856143856143,
      "grad_norm": 0.1554308477187568,
      "learning_rate": 7.777220194376047e-06,
      "loss": 1.7177,
      "step": 1230
    },
    {
      "epoch": 0.6168831168831169,
      "grad_norm": 0.1564865748636126,
      "learning_rate": 7.692269710895204e-06,
      "loss": 1.705,
      "step": 1235
    },
    {
      "epoch": 0.6193806193806194,
      "grad_norm": 0.1540720670994077,
      "learning_rate": 7.6074947749049085e-06,
      "loss": 1.7039,
      "step": 1240
    },
    {
      "epoch": 0.6218781218781219,
      "grad_norm": 0.15756506692256836,
      "learning_rate": 7.522901835177506e-06,
      "loss": 1.7217,
      "step": 1245
    },
    {
      "epoch": 0.6243756243756243,
      "grad_norm": 0.15371879475088404,
      "learning_rate": 7.438497326641012e-06,
      "loss": 1.6895,
      "step": 1250
    },
    {
      "epoch": 0.6268731268731269,
      "grad_norm": 0.15945775556289293,
      "learning_rate": 7.354287669889606e-06,
      "loss": 1.7366,
      "step": 1255
    },
    {
      "epoch": 0.6293706293706294,
      "grad_norm": 0.15778707821642424,
      "learning_rate": 7.270279270695224e-06,
      "loss": 1.7303,
      "step": 1260
    },
    {
      "epoch": 0.6318681318681318,
      "grad_norm": 0.1586505017667703,
      "learning_rate": 7.186478519520279e-06,
      "loss": 1.7147,
      "step": 1265
    },
    {
      "epoch": 0.6343656343656343,
      "grad_norm": 0.15789741519311937,
      "learning_rate": 7.10289179103153e-06,
      "loss": 1.7146,
      "step": 1270
    },
    {
      "epoch": 0.6368631368631369,
      "grad_norm": 0.15184374761336888,
      "learning_rate": 7.019525443615181e-06,
      "loss": 1.7195,
      "step": 1275
    },
    {
      "epoch": 0.6393606393606394,
      "grad_norm": 0.15637777552677476,
      "learning_rate": 6.936385818893197e-06,
      "loss": 1.7471,
      "step": 1280
    },
    {
      "epoch": 0.6418581418581418,
      "grad_norm": 0.1567686671030996,
      "learning_rate": 6.853479241240895e-06,
      "loss": 1.7436,
      "step": 1285
    },
    {
      "epoch": 0.6443556443556444,
      "grad_norm": 0.15591409064699058,
      "learning_rate": 6.770812017305862e-06,
      "loss": 1.7252,
      "step": 1290
    },
    {
      "epoch": 0.6468531468531469,
      "grad_norm": 0.15804342302479574,
      "learning_rate": 6.688390435528209e-06,
      "loss": 1.7334,
      "step": 1295
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 0.15436002266775298,
      "learning_rate": 6.606220765662209e-06,
      "loss": 1.7165,
      "step": 1300
    },
    {
      "epoch": 0.6518481518481518,
      "grad_norm": 0.15531296312132536,
      "learning_rate": 6.524309258299368e-06,
      "loss": 1.7605,
      "step": 1305
    },
    {
      "epoch": 0.6543456543456543,
      "grad_norm": 0.1565211383258821,
      "learning_rate": 6.4426621443929505e-06,
      "loss": 1.7061,
      "step": 1310
    },
    {
      "epoch": 0.6568431568431568,
      "grad_norm": 0.1582362713382269,
      "learning_rate": 6.361285634783976e-06,
      "loss": 1.7196,
      "step": 1315
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 0.15521707003590374,
      "learning_rate": 6.280185919728784e-06,
      "loss": 1.6963,
      "step": 1320
    },
    {
      "epoch": 0.6618381618381618,
      "grad_norm": 0.15705101318113973,
      "learning_rate": 6.199369168428143e-06,
      "loss": 1.742,
      "step": 1325
    },
    {
      "epoch": 0.6643356643356644,
      "grad_norm": 0.15356334771300495,
      "learning_rate": 6.11884152855795e-06,
      "loss": 1.7011,
      "step": 1330
    },
    {
      "epoch": 0.6668331668331668,
      "grad_norm": 0.15700312151436902,
      "learning_rate": 6.0386091258015965e-06,
      "loss": 1.6928,
      "step": 1335
    },
    {
      "epoch": 0.6693306693306693,
      "grad_norm": 0.15784510931828966,
      "learning_rate": 5.95867806338398e-06,
      "loss": 1.7009,
      "step": 1340
    },
    {
      "epoch": 0.6718281718281718,
      "grad_norm": 0.15746608991343575,
      "learning_rate": 5.879054421607248e-06,
      "loss": 1.7397,
      "step": 1345
    },
    {
      "epoch": 0.6743256743256744,
      "grad_norm": 0.15620423042692408,
      "learning_rate": 5.79974425738826e-06,
      "loss": 1.7045,
      "step": 1350
    },
    {
      "epoch": 0.6768231768231768,
      "grad_norm": 0.15370013592929113,
      "learning_rate": 5.720753603797855e-06,
      "loss": 1.7015,
      "step": 1355
    },
    {
      "epoch": 0.6793206793206793,
      "grad_norm": 0.15590449648570684,
      "learning_rate": 5.6420884696019085e-06,
      "loss": 1.6937,
      "step": 1360
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.15510936819450175,
      "learning_rate": 5.563754838804252e-06,
      "loss": 1.7019,
      "step": 1365
    },
    {
      "epoch": 0.6843156843156843,
      "grad_norm": 0.1527676787399584,
      "learning_rate": 5.485758670191486e-06,
      "loss": 1.7051,
      "step": 1370
    },
    {
      "epoch": 0.6868131868131868,
      "grad_norm": 0.15546653572707683,
      "learning_rate": 5.408105896879684e-06,
      "loss": 1.7296,
      "step": 1375
    },
    {
      "epoch": 0.6893106893106893,
      "grad_norm": 0.15309923322143176,
      "learning_rate": 5.330802425863064e-06,
      "loss": 1.6989,
      "step": 1380
    },
    {
      "epoch": 0.6918081918081919,
      "grad_norm": 0.15308524124271006,
      "learning_rate": 5.2538541375646675e-06,
      "loss": 1.6826,
      "step": 1385
    },
    {
      "epoch": 0.6943056943056943,
      "grad_norm": 0.1538371045270346,
      "learning_rate": 5.177266885389016e-06,
      "loss": 1.6983,
      "step": 1390
    },
    {
      "epoch": 0.6968031968031968,
      "grad_norm": 0.15413826922713267,
      "learning_rate": 5.101046495276852e-06,
      "loss": 1.724,
      "step": 1395
    },
    {
      "epoch": 0.6993006993006993,
      "grad_norm": 0.15297221612329867,
      "learning_rate": 5.025198765261981e-06,
      "loss": 1.7201,
      "step": 1400
    },
    {
      "epoch": 0.7017982017982018,
      "grad_norm": 0.15512908248946458,
      "learning_rate": 4.949729465030193e-06,
      "loss": 1.6795,
      "step": 1405
    },
    {
      "epoch": 0.7042957042957043,
      "grad_norm": 0.15505906851804813,
      "learning_rate": 4.874644335480383e-06,
      "loss": 1.707,
      "step": 1410
    },
    {
      "epoch": 0.7067932067932068,
      "grad_norm": 0.15397133300096347,
      "learning_rate": 4.799949088287849e-06,
      "loss": 1.7008,
      "step": 1415
    },
    {
      "epoch": 0.7092907092907093,
      "grad_norm": 0.15813016475329747,
      "learning_rate": 4.725649405469786e-06,
      "loss": 1.7325,
      "step": 1420
    },
    {
      "epoch": 0.7117882117882118,
      "grad_norm": 0.15446637709842673,
      "learning_rate": 4.65175093895308e-06,
      "loss": 1.7215,
      "step": 1425
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.1561588030182068,
      "learning_rate": 4.57825931014437e-06,
      "loss": 1.7184,
      "step": 1430
    },
    {
      "epoch": 0.7167832167832168,
      "grad_norm": 0.15772322144743187,
      "learning_rate": 4.50518010950241e-06,
      "loss": 1.7166,
      "step": 1435
    },
    {
      "epoch": 0.7192807192807192,
      "grad_norm": 0.15727583874213139,
      "learning_rate": 4.432518896112831e-06,
      "loss": 1.7034,
      "step": 1440
    },
    {
      "epoch": 0.7217782217782218,
      "grad_norm": 0.153411598788086,
      "learning_rate": 4.360281197265249e-06,
      "loss": 1.695,
      "step": 1445
    },
    {
      "epoch": 0.7242757242757243,
      "grad_norm": 0.15655819818705863,
      "learning_rate": 4.2884725080328245e-06,
      "loss": 1.7411,
      "step": 1450
    },
    {
      "epoch": 0.7267732267732268,
      "grad_norm": 0.15649221553246048,
      "learning_rate": 4.217098290854234e-06,
      "loss": 1.6852,
      "step": 1455
    },
    {
      "epoch": 0.7292707292707292,
      "grad_norm": 0.15608752112425878,
      "learning_rate": 4.146163975118154e-06,
      "loss": 1.7158,
      "step": 1460
    },
    {
      "epoch": 0.7317682317682318,
      "grad_norm": 0.156378012901712,
      "learning_rate": 4.0756749567502704e-06,
      "loss": 1.7141,
      "step": 1465
    },
    {
      "epoch": 0.7342657342657343,
      "grad_norm": 0.15573008458523765,
      "learning_rate": 4.005636597802785e-06,
      "loss": 1.7316,
      "step": 1470
    },
    {
      "epoch": 0.7367632367632367,
      "grad_norm": 0.15624579822166534,
      "learning_rate": 3.936054226046539e-06,
      "loss": 1.7275,
      "step": 1475
    },
    {
      "epoch": 0.7392607392607392,
      "grad_norm": 0.15708652587903585,
      "learning_rate": 3.866933134565747e-06,
      "loss": 1.7146,
      "step": 1480
    },
    {
      "epoch": 0.7417582417582418,
      "grad_norm": 0.15685542138951347,
      "learning_rate": 3.7982785813553335e-06,
      "loss": 1.7009,
      "step": 1485
    },
    {
      "epoch": 0.7442557442557443,
      "grad_norm": 0.15672884263951484,
      "learning_rate": 3.730095788920969e-06,
      "loss": 1.7273,
      "step": 1490
    },
    {
      "epoch": 0.7467532467532467,
      "grad_norm": 0.15717150554726544,
      "learning_rate": 3.662389943881811e-06,
      "loss": 1.7223,
      "step": 1495
    },
    {
      "epoch": 0.7492507492507493,
      "grad_norm": 0.153594301470774,
      "learning_rate": 3.5951661965759376e-06,
      "loss": 1.7232,
      "step": 1500
    },
    {
      "epoch": 0.7517482517482518,
      "grad_norm": 0.15489817985281926,
      "learning_rate": 3.5284296606685788e-06,
      "loss": 1.7198,
      "step": 1505
    },
    {
      "epoch": 0.7542457542457542,
      "grad_norm": 0.15650041684511018,
      "learning_rate": 3.4621854127631293e-06,
      "loss": 1.7427,
      "step": 1510
    },
    {
      "epoch": 0.7567432567432567,
      "grad_norm": 0.15414035664901124,
      "learning_rate": 3.3964384920149574e-06,
      "loss": 1.6846,
      "step": 1515
    },
    {
      "epoch": 0.7592407592407593,
      "grad_norm": 0.15583094131363653,
      "learning_rate": 3.331193899748091e-06,
      "loss": 1.726,
      "step": 1520
    },
    {
      "epoch": 0.7617382617382618,
      "grad_norm": 0.1554155623994627,
      "learning_rate": 3.2664565990747733e-06,
      "loss": 1.7206,
      "step": 1525
    },
    {
      "epoch": 0.7642357642357642,
      "grad_norm": 0.15305845088804462,
      "learning_rate": 3.202231514517913e-06,
      "loss": 1.7028,
      "step": 1530
    },
    {
      "epoch": 0.7667332667332667,
      "grad_norm": 0.15291334769441534,
      "learning_rate": 3.1385235316364805e-06,
      "loss": 1.7322,
      "step": 1535
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.15661260187424764,
      "learning_rate": 3.0753374966538807e-06,
      "loss": 1.6848,
      "step": 1540
    },
    {
      "epoch": 0.7717282717282717,
      "grad_norm": 0.15809358800271614,
      "learning_rate": 3.012678216089281e-06,
      "loss": 1.6997,
      "step": 1545
    },
    {
      "epoch": 0.7742257742257742,
      "grad_norm": 0.155978034855165,
      "learning_rate": 2.9505504563920005e-06,
      "loss": 1.7356,
      "step": 1550
    },
    {
      "epoch": 0.7767232767232767,
      "grad_norm": 0.15604151355576693,
      "learning_rate": 2.888958943578919e-06,
      "loss": 1.7165,
      "step": 1555
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.1550171550409355,
      "learning_rate": 2.827908362874986e-06,
      "loss": 1.7025,
      "step": 1560
    },
    {
      "epoch": 0.7817182817182817,
      "grad_norm": 0.15527890651905346,
      "learning_rate": 2.7674033583568004e-06,
      "loss": 1.687,
      "step": 1565
    },
    {
      "epoch": 0.7842157842157842,
      "grad_norm": 0.15537639424177968,
      "learning_rate": 2.7074485325993482e-06,
      "loss": 1.7126,
      "step": 1570
    },
    {
      "epoch": 0.7867132867132867,
      "grad_norm": 0.15244961389707662,
      "learning_rate": 2.648048446325894e-06,
      "loss": 1.6943,
      "step": 1575
    },
    {
      "epoch": 0.7892107892107892,
      "grad_norm": 0.15407223657879413,
      "learning_rate": 2.5892076180610372e-06,
      "loss": 1.7077,
      "step": 1580
    },
    {
      "epoch": 0.7917082917082917,
      "grad_norm": 0.15395383095089893,
      "learning_rate": 2.5309305237869953e-06,
      "loss": 1.7428,
      "step": 1585
    },
    {
      "epoch": 0.7942057942057942,
      "grad_norm": 0.15693221036515126,
      "learning_rate": 2.473221596603127e-06,
      "loss": 1.7003,
      "step": 1590
    },
    {
      "epoch": 0.7967032967032966,
      "grad_norm": 0.15202864145333894,
      "learning_rate": 2.416085226388699e-06,
      "loss": 1.7062,
      "step": 1595
    },
    {
      "epoch": 0.7992007992007992,
      "grad_norm": 0.15438777267790824,
      "learning_rate": 2.3595257594689504e-06,
      "loss": 1.716,
      "step": 1600
    },
    {
      "epoch": 0.8016983016983017,
      "grad_norm": 0.15408101244601666,
      "learning_rate": 2.303547498284483e-06,
      "loss": 1.74,
      "step": 1605
    },
    {
      "epoch": 0.8041958041958042,
      "grad_norm": 0.1529673037596131,
      "learning_rate": 2.2481547010639648e-06,
      "loss": 1.724,
      "step": 1610
    },
    {
      "epoch": 0.8066933066933067,
      "grad_norm": 0.15429371566399036,
      "learning_rate": 2.1933515815002115e-06,
      "loss": 1.7058,
      "step": 1615
    },
    {
      "epoch": 0.8091908091908092,
      "grad_norm": 0.1553802340727305,
      "learning_rate": 2.1391423084296627e-06,
      "loss": 1.7044,
      "step": 1620
    },
    {
      "epoch": 0.8116883116883117,
      "grad_norm": 0.15552250374029558,
      "learning_rate": 2.08553100551525e-06,
      "loss": 1.7294,
      "step": 1625
    },
    {
      "epoch": 0.8141858141858141,
      "grad_norm": 0.15556388073981073,
      "learning_rate": 2.0325217509327145e-06,
      "loss": 1.6825,
      "step": 1630
    },
    {
      "epoch": 0.8166833166833167,
      "grad_norm": 0.1534237780794405,
      "learning_rate": 1.980118577060397e-06,
      "loss": 1.7258,
      "step": 1635
    },
    {
      "epoch": 0.8191808191808192,
      "grad_norm": 0.1556601065527979,
      "learning_rate": 1.9283254701724742e-06,
      "loss": 1.6938,
      "step": 1640
    },
    {
      "epoch": 0.8216783216783217,
      "grad_norm": 0.156089780593886,
      "learning_rate": 1.8771463701357428e-06,
      "loss": 1.7357,
      "step": 1645
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 0.15554888194693464,
      "learning_rate": 1.8265851701099146e-06,
      "loss": 1.7063,
      "step": 1650
    },
    {
      "epoch": 0.8266733266733267,
      "grad_norm": 0.15466801254433934,
      "learning_rate": 1.7766457162514594e-06,
      "loss": 1.7435,
      "step": 1655
    },
    {
      "epoch": 0.8291708291708292,
      "grad_norm": 0.15476788949201128,
      "learning_rate": 1.7273318074210298e-06,
      "loss": 1.7059,
      "step": 1660
    },
    {
      "epoch": 0.8316683316683317,
      "grad_norm": 0.15219152100759303,
      "learning_rate": 1.6786471948944994e-06,
      "loss": 1.6964,
      "step": 1665
    },
    {
      "epoch": 0.8341658341658341,
      "grad_norm": 0.1548313560211368,
      "learning_rate": 1.630595582077591e-06,
      "loss": 1.7,
      "step": 1670
    },
    {
      "epoch": 0.8366633366633367,
      "grad_norm": 0.15585812651840628,
      "learning_rate": 1.5831806242241632e-06,
      "loss": 1.7028,
      "step": 1675
    },
    {
      "epoch": 0.8391608391608392,
      "grad_norm": 0.15443911839682758,
      "learning_rate": 1.5364059281581566e-06,
      "loss": 1.7176,
      "step": 1680
    },
    {
      "epoch": 0.8416583416583416,
      "grad_norm": 0.1605017036197551,
      "learning_rate": 1.4902750519992392e-06,
      "loss": 1.7156,
      "step": 1685
    },
    {
      "epoch": 0.8441558441558441,
      "grad_norm": 0.1538258207387918,
      "learning_rate": 1.4447915048921224e-06,
      "loss": 1.6744,
      "step": 1690
    },
    {
      "epoch": 0.8466533466533467,
      "grad_norm": 0.15406565385641532,
      "learning_rate": 1.3999587467396336e-06,
      "loss": 1.6946,
      "step": 1695
    },
    {
      "epoch": 0.8491508491508492,
      "grad_norm": 0.1536094454245134,
      "learning_rate": 1.3557801879395283e-06,
      "loss": 1.7148,
      "step": 1700
    },
    {
      "epoch": 0.8516483516483516,
      "grad_norm": 0.15544099947612952,
      "learning_rate": 1.3122591891250492e-06,
      "loss": 1.6875,
      "step": 1705
    },
    {
      "epoch": 0.8541458541458542,
      "grad_norm": 0.16188709049635025,
      "learning_rate": 1.2693990609092965e-06,
      "loss": 1.7387,
      "step": 1710
    },
    {
      "epoch": 0.8566433566433567,
      "grad_norm": 0.15363848054893653,
      "learning_rate": 1.227203063633393e-06,
      "loss": 1.7277,
      "step": 1715
    },
    {
      "epoch": 0.8591408591408591,
      "grad_norm": 0.15554467350943088,
      "learning_rate": 1.185674407118461e-06,
      "loss": 1.6892,
      "step": 1720
    },
    {
      "epoch": 0.8616383616383616,
      "grad_norm": 0.15618845824953714,
      "learning_rate": 1.1448162504214621e-06,
      "loss": 1.7413,
      "step": 1725
    },
    {
      "epoch": 0.8641358641358642,
      "grad_norm": 0.15459131494726566,
      "learning_rate": 1.1046317015948904e-06,
      "loss": 1.7133,
      "step": 1730
    },
    {
      "epoch": 0.8666333666333667,
      "grad_norm": 0.1552140715983614,
      "learning_rate": 1.0651238174503408e-06,
      "loss": 1.7147,
      "step": 1735
    },
    {
      "epoch": 0.8691308691308691,
      "grad_norm": 0.15520848465047085,
      "learning_rate": 1.0262956033259775e-06,
      "loss": 1.7367,
      "step": 1740
    },
    {
      "epoch": 0.8716283716283716,
      "grad_norm": 0.15402498238996434,
      "learning_rate": 9.881500128579314e-07,
      "loss": 1.6949,
      "step": 1745
    },
    {
      "epoch": 0.8741258741258742,
      "grad_norm": 0.15820953048710004,
      "learning_rate": 9.506899477556042e-07,
      "loss": 1.6936,
      "step": 1750
    },
    {
      "epoch": 0.8766233766233766,
      "grad_norm": 0.15475714836078286,
      "learning_rate": 9.139182575809446e-07,
      "loss": 1.7216,
      "step": 1755
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.15320851876078712,
      "learning_rate": 8.778377395316906e-07,
      "loss": 1.6875,
      "step": 1760
    },
    {
      "epoch": 0.8816183816183816,
      "grad_norm": 0.1559111566296162,
      "learning_rate": 8.424511382285749e-07,
      "loss": 1.7169,
      "step": 1765
    },
    {
      "epoch": 0.8841158841158842,
      "grad_norm": 0.15776692471495612,
      "learning_rate": 8.077611455065493e-07,
      "loss": 1.7276,
      "step": 1770
    },
    {
      "epoch": 0.8866133866133866,
      "grad_norm": 0.15457624124158628,
      "learning_rate": 7.737704002100255e-07,
      "loss": 1.6813,
      "step": 1775
    },
    {
      "epoch": 0.8891108891108891,
      "grad_norm": 0.1542971837033228,
      "learning_rate": 7.404814879921296e-07,
      "loss": 1.7033,
      "step": 1780
    },
    {
      "epoch": 0.8916083916083916,
      "grad_norm": 0.15193831756852316,
      "learning_rate": 7.078969411180159e-07,
      "loss": 1.6647,
      "step": 1785
    },
    {
      "epoch": 0.8941058941058941,
      "grad_norm": 0.15247095933972296,
      "learning_rate": 6.760192382722485e-07,
      "loss": 1.7022,
      "step": 1790
    },
    {
      "epoch": 0.8966033966033966,
      "grad_norm": 0.15342741598547197,
      "learning_rate": 6.44850804370234e-07,
      "loss": 1.7011,
      "step": 1795
    },
    {
      "epoch": 0.8991008991008991,
      "grad_norm": 0.15441584662597227,
      "learning_rate": 6.143940103737689e-07,
      "loss": 1.7223,
      "step": 1800
    },
    {
      "epoch": 0.9015984015984015,
      "grad_norm": 0.15544328956589934,
      "learning_rate": 5.846511731106864e-07,
      "loss": 1.7176,
      "step": 1805
    },
    {
      "epoch": 0.9040959040959041,
      "grad_norm": 0.15335771344533347,
      "learning_rate": 5.556245550986051e-07,
      "loss": 1.7273,
      "step": 1810
    },
    {
      "epoch": 0.9065934065934066,
      "grad_norm": 0.15533326348626406,
      "learning_rate": 5.273163643728296e-07,
      "loss": 1.6976,
      "step": 1815
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.1516521943992359,
      "learning_rate": 4.997287543183815e-07,
      "loss": 1.6956,
      "step": 1820
    },
    {
      "epoch": 0.9115884115884116,
      "grad_norm": 0.15363897561993292,
|
"learning_rate": 4.728638235062022e-07, |
|
"loss": 1.6807, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.9140859140859141, |
|
"grad_norm": 0.15398450709114828, |
|
"learning_rate": 4.4672361553350307e-07, |
|
"loss": 1.7005, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.9165834165834166, |
|
"grad_norm": 0.15239411554319313, |
|
"learning_rate": 4.213101188683155e-07, |
|
"loss": 1.6957, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.919080919080919, |
|
"grad_norm": 0.15568622874699023, |
|
"learning_rate": 3.9662526669823954e-07, |
|
"loss": 1.7054, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.9215784215784216, |
|
"grad_norm": 0.15485180274139432, |
|
"learning_rate": 3.7267093678336807e-07, |
|
"loss": 1.7272, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.9240759240759241, |
|
"grad_norm": 0.15780700441318615, |
|
"learning_rate": 3.4944895131346355e-07, |
|
"loss": 1.6873, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.9265734265734266, |
|
"grad_norm": 0.15379026482093777, |
|
"learning_rate": 3.2696107676933874e-07, |
|
"loss": 1.6611, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.929070929070929, |
|
"grad_norm": 0.1548358187558438, |
|
"learning_rate": 3.052090237884808e-07, |
|
"loss": 1.7334, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.9315684315684316, |
|
"grad_norm": 0.15404474230711981, |
|
"learning_rate": 2.841944470349256e-07, |
|
"loss": 1.7395, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.9340659340659341, |
|
"grad_norm": 0.15245851102095165, |
|
"learning_rate": 2.6391894507339036e-07, |
|
"loss": 1.6845, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.9365634365634365, |
|
"grad_norm": 0.15457664788214828, |
|
"learning_rate": 2.443840602476666e-07, |
|
"loss": 1.7197, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.939060939060939, |
|
"grad_norm": 0.15619921990085028, |
|
"learning_rate": 2.2559127856330187e-07, |
|
"loss": 1.7138, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.9415584415584416, |
|
"grad_norm": 0.15465495381686764, |
|
"learning_rate": 2.075420295745567e-07, |
|
"loss": 1.7034, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.9440559440559441, |
|
"grad_norm": 0.15413725828913954, |
|
"learning_rate": 1.902376862756583e-07, |
|
"loss": 1.7132, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.9465534465534465, |
|
"grad_norm": 0.1553239253397568, |
|
"learning_rate": 1.7367956499635963e-07, |
|
"loss": 1.7267, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.949050949050949, |
|
"grad_norm": 0.15270107237353941, |
|
"learning_rate": 1.578689253018062e-07, |
|
"loss": 1.7124, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.9515484515484516, |
|
"grad_norm": 0.15613663746353207, |
|
"learning_rate": 1.4280696989672383e-07, |
|
"loss": 1.6997, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.954045954045954, |
|
"grad_norm": 0.15490386612035345, |
|
"learning_rate": 1.2849484453392624e-07, |
|
"loss": 1.7035, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.9565434565434565, |
|
"grad_norm": 0.15328450998495585, |
|
"learning_rate": 1.1493363792716262e-07, |
|
"loss": 1.7039, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.9590409590409591, |
|
"grad_norm": 0.15356789205414245, |
|
"learning_rate": 1.0212438166829375e-07, |
|
"loss": 1.7214, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.9615384615384616, |
|
"grad_norm": 0.15318911423373824, |
|
"learning_rate": 9.006805014882825e-08, |
|
"loss": 1.6622, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.964035964035964, |
|
"grad_norm": 0.15361081252860728, |
|
"learning_rate": 7.876556048579287e-08, |
|
"loss": 1.6955, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.9665334665334665, |
|
"grad_norm": 0.15466222218440107, |
|
"learning_rate": 6.82177724519717e-08, |
|
"loss": 1.6808, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.9690309690309691, |
|
"grad_norm": 0.1551486540401332, |
|
"learning_rate": 5.8425488410505107e-08, |
|
"loss": 1.7217, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.9715284715284715, |
|
"grad_norm": 0.15270102301709607, |
|
"learning_rate": 4.9389453253850806e-08, |
|
"loss": 1.674, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.974025974025974, |
|
"grad_norm": 0.15535594613189269, |
|
"learning_rate": 4.111035434712585e-08, |
|
"loss": 1.7045, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.9765234765234765, |
|
"grad_norm": 0.15346069144811603, |
|
"learning_rate": 3.35888214758151e-08, |
|
"loss": 1.714, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.9790209790209791, |
|
"grad_norm": 0.1535377723271678, |
|
"learning_rate": 2.682542679786071e-08, |
|
"loss": 1.7105, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.9815184815184815, |
|
"grad_norm": 0.153091026718829, |
|
"learning_rate": 2.0820684800147983e-08, |
|
"loss": 1.6672, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.984015984015984, |
|
"grad_norm": 0.15163180759905762, |
|
"learning_rate": 1.557505225936118e-08, |
|
"loss": 1.7388, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.9865134865134865, |
|
"grad_norm": 0.15480094009362919, |
|
"learning_rate": 1.1088928207236837e-08, |
|
"loss": 1.6973, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.989010989010989, |
|
"grad_norm": 0.15658829069949848, |
|
"learning_rate": 7.362653900215844e-09, |
|
"loss": 1.7533, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.9915084915084915, |
|
"grad_norm": 0.15216671113258667, |
|
"learning_rate": 4.396512793475305e-09, |
|
"loss": 1.6998, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.994005994005994, |
|
"grad_norm": 0.15475133859641596, |
|
"learning_rate": 2.1907305193757944e-09, |
|
"loss": 1.7031, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.9965034965034965, |
|
"grad_norm": 0.15399579527373794, |
|
"learning_rate": 7.454748702895309e-10, |
|
"loss": 1.6964, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.999000999000999, |
|
"grad_norm": 0.157143946266898, |
|
"learning_rate": 6.085578584280604e-11, |
|
"loss": 1.7337, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.7156749963760376, |
|
"eval_runtime": 110.9741, |
|
"eval_samples_per_second": 127.76, |
|
"eval_steps_per_second": 2.0, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2002, |
|
"total_flos": 145100101386240.0, |
|
"train_loss": 1.7762367073948924, |
|
"train_runtime": 4136.0894, |
|
"train_samples_per_second": 30.978, |
|
"train_steps_per_second": 0.484 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2002, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 145100101386240.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
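
The object above appears to follow the trainer_state.json layout written by the Hugging Face transformers Trainer. The periodic entries in "log_history" can be read back directly for a quick look at the run; below is a minimal Python sketch, assuming the file is saved locally as trainer_state.json (a hypothetical path) and that matplotlib is installed. It is an illustration, not part of the logged state.

import json
import matplotlib.pyplot as plt

# Load the saved trainer state (path is an assumption for this sketch).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic training logs (they carry a "loss" key); the last
# two entries hold eval metrics ("eval_loss") and run totals ("train_loss").
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

# Plot the training-loss curve and the learning-rate schedule by step.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
plt.show()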