{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999775734469612,
  "eval_steps": 500,
  "global_step": 2229,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008970621215519175,
      "grad_norm": 1.0645267963409424,
      "learning_rate": 5.970149253731343e-06,
      "loss": 4.2454,
      "step": 20
    },
    {
      "epoch": 0.01794124243103835,
      "grad_norm": 0.974229633808136,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 3.1584,
      "step": 40
    },
    {
      "epoch": 0.026911863646557524,
      "grad_norm": 0.9352247714996338,
      "learning_rate": 1.791044776119403e-05,
      "loss": 3.0759,
      "step": 60
    },
    {
      "epoch": 0.0358824848620767,
      "grad_norm": 0.7863051891326904,
      "learning_rate": 1.999821584672887e-05,
      "loss": 3.0341,
      "step": 80
    },
    {
      "epoch": 0.04485310607759587,
      "grad_norm": 0.8577287197113037,
      "learning_rate": 1.998850515736159e-05,
      "loss": 3.0815,
      "step": 100
    },
    {
      "epoch": 0.05382372729311505,
      "grad_norm": 0.6335000395774841,
      "learning_rate": 1.9970358823117534e-05,
      "loss": 3.0384,
      "step": 120
    },
    {
      "epoch": 0.06279434850863422,
      "grad_norm": 0.7152234315872192,
      "learning_rate": 1.994379216921594e-05,
      "loss": 2.9225,
      "step": 140
    },
    {
      "epoch": 0.0717649697241534,
      "grad_norm": 0.6568615436553955,
      "learning_rate": 1.990882763213298e-05,
      "loss": 2.9516,
      "step": 160
    },
    {
      "epoch": 0.08073559093967257,
      "grad_norm": 0.6041061282157898,
      "learning_rate": 1.986549474065333e-05,
      "loss": 3.0125,
      "step": 180
    },
    {
      "epoch": 0.08970621215519174,
      "grad_norm": 0.6791430115699768,
      "learning_rate": 1.98138300909321e-05,
      "loss": 3.0675,
      "step": 200
    },
    {
      "epoch": 0.09867683337071093,
      "grad_norm": 0.5899327397346497,
      "learning_rate": 1.9753877315588072e-05,
      "loss": 2.7601,
      "step": 220
    },
    {
      "epoch": 0.1076474545862301,
      "grad_norm": 0.6127994656562805,
      "learning_rate": 1.9685687046854415e-05,
      "loss": 2.8789,
      "step": 240
    },
    {
      "epoch": 0.11661807580174927,
      "grad_norm": 0.5294649004936218,
      "learning_rate": 1.9609316873817992e-05,
      "loss": 2.9753,
      "step": 260
    },
    {
      "epoch": 0.12558869701726844,
      "grad_norm": 0.5529225468635559,
      "learning_rate": 1.952483129378333e-05,
      "loss": 2.9229,
      "step": 280
    },
    {
      "epoch": 0.13455931823278763,
      "grad_norm": 0.8343173861503601,
      "learning_rate": 1.9432301657802378e-05,
      "loss": 3.0116,
      "step": 300
    },
    {
      "epoch": 0.1435299394483068,
      "grad_norm": 0.528243899345398,
      "learning_rate": 1.9331806110416027e-05,
      "loss": 2.9695,
      "step": 320
    },
    {
      "epoch": 0.15250056066382597,
      "grad_norm": 0.5646138191223145,
      "learning_rate": 1.922342952365829e-05,
      "loss": 2.8888,
      "step": 340
    },
    {
      "epoch": 0.16147118187934514,
      "grad_norm": 0.5159996151924133,
      "learning_rate": 1.9107263425378873e-05,
      "loss": 2.9894,
      "step": 360
    },
    {
      "epoch": 0.1704418030948643,
      "grad_norm": 0.6501027941703796,
      "learning_rate": 1.8983405921944686e-05,
      "loss": 2.8444,
      "step": 380
    },
    {
      "epoch": 0.17941242431038348,
      "grad_norm": 0.5916054248809814,
      "learning_rate": 1.8851961615385542e-05,
      "loss": 2.96,
      "step": 400
    },
    {
      "epoch": 0.18838304552590268,
      "grad_norm": 0.5567501783370972,
      "learning_rate": 1.8713041515054065e-05,
      "loss": 2.9343,
      "step": 420
    },
    {
      "epoch": 0.19735366674142185,
      "grad_norm": 0.5652680397033691,
      "learning_rate": 1.8566762943874376e-05,
      "loss": 2.8886,
      "step": 440
    },
    {
      "epoch": 0.20632428795694102,
      "grad_norm": 0.5061436295509338,
      "learning_rate": 1.8413249439258743e-05,
      "loss": 2.9397,
      "step": 460
    },
    {
      "epoch": 0.2152949091724602,
      "grad_norm": 0.592443585395813,
      "learning_rate": 1.8252630648775874e-05,
      "loss": 2.9407,
      "step": 480
    },
    {
      "epoch": 0.22426553038797936,
      "grad_norm": 0.6554358601570129,
      "learning_rate": 1.8085042220658993e-05,
      "loss": 2.8923,
      "step": 500
    },
    {
      "epoch": 0.23323615160349853,
      "grad_norm": 0.5817515850067139,
      "learning_rate": 1.791062568924609e-05,
      "loss": 2.905,
      "step": 520
    },
    {
      "epoch": 0.24220677281901773,
      "grad_norm": 0.5327040553092957,
      "learning_rate": 1.7729528355449214e-05,
      "loss": 2.8576,
      "step": 540
    },
    {
      "epoch": 0.25117739403453687,
      "grad_norm": 0.6554744839668274,
      "learning_rate": 1.7541903162353638e-05,
      "loss": 2.9158,
      "step": 560
    },
    {
      "epoch": 0.26014801525005604,
      "grad_norm": 0.5069458484649658,
      "learning_rate": 1.734790856605204e-05,
      "loss": 2.8801,
      "step": 580
    },
    {
      "epoch": 0.26911863646557527,
      "grad_norm": 0.8996289372444153,
      "learning_rate": 1.714770840182273e-05,
      "loss": 2.8207,
      "step": 600
    },
    {
      "epoch": 0.27808925768109444,
      "grad_norm": 0.6184943318367004,
      "learning_rate": 1.6941471745764996e-05,
      "loss": 2.8421,
      "step": 620
    },
    {
      "epoch": 0.2870598788966136,
      "grad_norm": 1.6989350318908691,
      "learning_rate": 1.672937277200837e-05,
      "loss": 2.9348,
      "step": 640
    },
    {
      "epoch": 0.2960305001121328,
      "grad_norm": 0.5281040668487549,
      "learning_rate": 1.6511590605616423e-05,
      "loss": 2.8391,
      "step": 660
    },
    {
      "epoch": 0.30500112132765195,
      "grad_norm": 0.8458319306373596,
      "learning_rate": 1.628830917130935e-05,
      "loss": 2.9132,
      "step": 680
    },
    {
      "epoch": 0.3139717425431711,
      "grad_norm": 0.61818927526474,
      "learning_rate": 1.6059717038133038e-05,
      "loss": 2.9718,
      "step": 700
    },
    {
      "epoch": 0.3229423637586903,
      "grad_norm": 0.608075737953186,
      "learning_rate": 1.5826007260205868e-05,
      "loss": 2.8189,
      "step": 720
    },
    {
      "epoch": 0.33191298497420946,
      "grad_norm": 0.5290448069572449,
      "learning_rate": 1.5587377213677705e-05,
      "loss": 2.8194,
      "step": 740
    },
    {
      "epoch": 0.3408836061897286,
      "grad_norm": 1.4047750234603882,
      "learning_rate": 1.5344028430038764e-05,
      "loss": 2.849,
      "step": 760
    },
    {
      "epoch": 0.3498542274052478,
      "grad_norm": 0.5588248372077942,
      "learning_rate": 1.5096166425919176e-05,
      "loss": 2.811,
      "step": 780
    },
    {
      "epoch": 0.35882484862076697,
      "grad_norm": 0.5888757109642029,
      "learning_rate": 1.4844000529522942e-05,
      "loss": 2.8002,
      "step": 800
    },
    {
      "epoch": 0.36779546983628614,
      "grad_norm": 0.5068212747573853,
      "learning_rate": 1.458774370384287e-05,
      "loss": 2.9016,
      "step": 820
    },
    {
      "epoch": 0.37676609105180536,
      "grad_norm": 0.5036651492118835,
      "learning_rate": 1.4327612366805832e-05,
      "loss": 2.8372,
      "step": 840
    },
    {
      "epoch": 0.38573671226732453,
      "grad_norm": 0.5104559659957886,
      "learning_rate": 1.4063826208500182e-05,
      "loss": 2.9339,
      "step": 860
    },
    {
      "epoch": 0.3947073334828437,
      "grad_norm": 0.5798824429512024,
      "learning_rate": 1.3796608005639738e-05,
      "loss": 2.9184,
      "step": 880
    },
    {
      "epoch": 0.4036779546983629,
      "grad_norm": 0.48436641693115234,
      "learning_rate": 1.352618343342098e-05,
      "loss": 2.8423,
      "step": 900
    },
    {
      "epoch": 0.41264857591388204,
      "grad_norm": 0.5358359217643738,
      "learning_rate": 1.3252780874932395e-05,
      "loss": 2.8338,
      "step": 920
    },
    {
      "epoch": 0.4216191971294012,
      "grad_norm": 0.5757357478141785,
      "learning_rate": 1.2976631228276894e-05,
      "loss": 2.851,
      "step": 940
    },
    {
      "epoch": 0.4305898183449204,
      "grad_norm": 0.662520706653595,
      "learning_rate": 1.2697967711570243e-05,
      "loss": 2.7493,
      "step": 960
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.464627206325531,
      "learning_rate": 1.2417025665980114e-05,
      "loss": 2.8057,
      "step": 980
    },
    {
      "epoch": 0.4485310607759587,
      "grad_norm": 0.5200120806694031,
      "learning_rate": 1.2134042356972175e-05,
      "loss": 2.9126,
      "step": 1000
    },
    {
      "epoch": 0.4575016819914779,
      "grad_norm": 0.530265748500824,
      "learning_rate": 1.1849256773931058e-05,
      "loss": 2.6896,
      "step": 1020
    },
    {
      "epoch": 0.46647230320699706,
      "grad_norm": 0.4869816303253174,
      "learning_rate": 1.156290942832536e-05,
      "loss": 2.7893,
      "step": 1040
    },
    {
      "epoch": 0.47544292442251623,
      "grad_norm": 0.5833351612091064,
      "learning_rate": 1.1275242150587254e-05,
      "loss": 2.8435,
      "step": 1060
    },
    {
      "epoch": 0.48441354563803546,
      "grad_norm": 0.520775556564331,
      "learning_rate": 1.0986497885878145e-05,
      "loss": 2.781,
      "step": 1080
    },
    {
      "epoch": 0.49338416685355463,
      "grad_norm": 0.5047538876533508,
      "learning_rate": 1.0696920488912923e-05,
      "loss": 2.8266,
      "step": 1100
    },
    {
      "epoch": 0.5023547880690737,
      "grad_norm": 0.7905083894729614,
      "learning_rate": 1.0406754518016047e-05,
      "loss": 2.9204,
      "step": 1120
    },
    {
      "epoch": 0.5113254092845929,
      "grad_norm": 0.6023253798484802,
      "learning_rate": 1.0116245028583418e-05,
      "loss": 2.8986,
      "step": 1140
    },
    {
      "epoch": 0.5202960305001121,
      "grad_norm": 0.4778802692890167,
      "learning_rate": 9.825637366124458e-06,
      "loss": 2.7699,
      "step": 1160
    },
    {
      "epoch": 0.5292666517156313,
      "grad_norm": 0.5715895295143127,
      "learning_rate": 9.535176959059171e-06,
      "loss": 2.837,
      "step": 1180
    },
    {
      "epoch": 0.5382372729311505,
      "grad_norm": 0.523801863193512,
      "learning_rate": 9.245109111445189e-06,
      "loss": 2.7792,
      "step": 1200
    },
    {
      "epoch": 0.5472078941466697,
      "grad_norm": 0.7714831233024597,
      "learning_rate": 8.95567879580984e-06,
      "loss": 2.7714,
      "step": 1220
    },
    {
      "epoch": 0.5561785153621889,
      "grad_norm": 0.7075884342193604,
      "learning_rate": 8.667130446262214e-06,
      "loss": 2.7973,
      "step": 1240
    },
    {
      "epoch": 0.565149136577708,
      "grad_norm": 0.6289039850234985,
      "learning_rate": 8.379707752059932e-06,
      "loss": 2.8021,
      "step": 1260
    },
    {
      "epoch": 0.5741197577932272,
      "grad_norm": 0.5410972237586975,
      "learning_rate": 8.093653451804987e-06,
      "loss": 2.9025,
      "step": 1280
    },
    {
      "epoch": 0.5830903790087464,
      "grad_norm": 0.5992820262908936,
      "learning_rate": 7.809209128442408e-06,
      "loss": 2.8603,
      "step": 1300
    },
    {
      "epoch": 0.5920610002242656,
      "grad_norm": 0.5674196481704712,
      "learning_rate": 7.52661500523497e-06,
      "loss": 2.827,
      "step": 1320
    },
    {
      "epoch": 0.6010316214397847,
      "grad_norm": 0.6220895051956177,
      "learning_rate": 7.246109742886156e-06,
      "loss": 2.8788,
      "step": 1340
    },
    {
      "epoch": 0.6100022426553039,
      "grad_norm": 0.5512953996658325,
      "learning_rate": 6.967930237982793e-06,
      "loss": 2.7649,
      "step": 1360
    },
    {
      "epoch": 0.6189728638708231,
      "grad_norm": 0.6244585514068604,
      "learning_rate": 6.692311422927515e-06,
      "loss": 2.792,
      "step": 1380
    },
    {
      "epoch": 0.6279434850863422,
      "grad_norm": 0.5638495683670044,
      "learning_rate": 6.4194860675300695e-06,
      "loss": 2.8633,
      "step": 1400
    },
    {
      "epoch": 0.6369141063018614,
      "grad_norm": 0.5531652569770813,
      "learning_rate": 6.149684582425013e-06,
      "loss": 2.8242,
      "step": 1420
    },
    {
      "epoch": 0.6458847275173806,
      "grad_norm": 0.6662128567695618,
      "learning_rate": 5.883134824481786e-06,
      "loss": 2.8038,
      "step": 1440
    },
    {
      "epoch": 0.6548553487328997,
      "grad_norm": 0.5970107913017273,
      "learning_rate": 5.620061904371565e-06,
      "loss": 2.7767,
      "step": 1460
    },
    {
      "epoch": 0.6638259699484189,
      "grad_norm": 0.7101424932479858,
      "learning_rate": 5.360687996453348e-06,
      "loss": 2.8163,
      "step": 1480
    },
    {
      "epoch": 0.6727965911639381,
      "grad_norm": 0.6316853761672974,
      "learning_rate": 5.105232151139895e-06,
      "loss": 2.8411,
      "step": 1500
    },
    {
      "epoch": 0.6817672123794573,
      "grad_norm": 0.6361038684844971,
      "learning_rate": 4.853910109901901e-06,
      "loss": 2.7593,
      "step": 1520
    },
    {
      "epoch": 0.6907378335949764,
      "grad_norm": 0.6812238097190857,
      "learning_rate": 4.606934123066739e-06,
      "loss": 2.7211,
      "step": 1540
    },
    {
      "epoch": 0.6997084548104956,
      "grad_norm": 0.6737092733383179,
      "learning_rate": 4.3645127705655654e-06,
      "loss": 2.8697,
      "step": 1560
    },
    {
      "epoch": 0.7086790760260148,
      "grad_norm": 0.5926268696784973,
      "learning_rate": 4.126850785780199e-06,
      "loss": 2.9391,
      "step": 1580
    },
    {
      "epoch": 0.7176496972415339,
      "grad_norm": 0.6012536287307739,
      "learning_rate": 3.8941488826385855e-06,
      "loss": 2.8052,
      "step": 1600
    },
    {
      "epoch": 0.7266203184570531,
      "grad_norm": 1.8011324405670166,
      "learning_rate": 3.6666035861047744e-06,
      "loss": 2.8488,
      "step": 1620
    },
    {
      "epoch": 0.7355909396725723,
      "grad_norm": 0.6892098784446716,
      "learning_rate": 3.444407066206692e-06,
      "loss": 2.7448,
      "step": 1640
    },
    {
      "epoch": 0.7445615608880914,
      "grad_norm": 0.5641138553619385,
      "learning_rate": 3.2277469757417403e-06,
      "loss": 2.7845,
      "step": 1660
    },
    {
      "epoch": 0.7535321821036107,
      "grad_norm": 0.623548150062561,
      "learning_rate": 3.0168062917974173e-06,
      "loss": 2.7557,
      "step": 1680
    },
    {
      "epoch": 0.7625028033191299,
      "grad_norm": 0.553068995475769,
      "learning_rate": 2.8117631612207084e-06,
      "loss": 2.787,
      "step": 1700
    },
    {
      "epoch": 0.7714734245346491,
      "grad_norm": 0.6410458087921143,
      "learning_rate": 2.6127907501667726e-06,
      "loss": 2.8642,
      "step": 1720
    },
    {
      "epoch": 0.7804440457501682,
      "grad_norm": 0.6185659766197205,
      "learning_rate": 2.420057097854046e-06,
      "loss": 2.7798,
      "step": 1740
    },
    {
      "epoch": 0.7894146669656874,
      "grad_norm": 0.5815610885620117,
      "learning_rate": 2.2337249746491695e-06,
      "loss": 2.7624,
      "step": 1760
    },
    {
      "epoch": 0.7983852881812066,
      "grad_norm": 0.549314558506012,
      "learning_rate": 2.0539517446016975e-06,
      "loss": 2.7385,
      "step": 1780
    },
    {
      "epoch": 0.8073559093967257,
      "grad_norm": 0.6471444368362427,
      "learning_rate": 1.880889232544585e-06,
      "loss": 2.8196,
      "step": 1800
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 0.5903030633926392,
      "learning_rate": 1.714683595872777e-06,
      "loss": 2.7618,
      "step": 1820
    },
    {
      "epoch": 0.8252971518277641,
      "grad_norm": 0.5994915962219238,
      "learning_rate": 1.5554752011081332e-06,
      "loss": 2.7856,
      "step": 1840
    },
    {
      "epoch": 0.8342677730432833,
      "grad_norm": 0.6510009169578552,
      "learning_rate": 1.4033985053549425e-06,
      "loss": 2.7116,
      "step": 1860
    },
    {
      "epoch": 0.8432383942588024,
      "grad_norm": 0.6316107511520386,
      "learning_rate": 1.2585819427461564e-06,
      "loss": 2.7599,
      "step": 1880
    },
    {
      "epoch": 0.8522090154743216,
      "grad_norm": 0.6176051497459412,
      "learning_rate": 1.121147815976248e-06,
      "loss": 2.7122,
      "step": 1900
    },
    {
      "epoch": 0.8611796366898408,
      "grad_norm": 0.552987813949585,
      "learning_rate": 9.912121930122542e-07,
      "loss": 2.7253,
      "step": 1920
    },
    {
      "epoch": 0.8701502579053599,
      "grad_norm": 0.6738405227661133,
      "learning_rate": 8.688848090702928e-07,
      "loss": 2.7408,
      "step": 1940
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.6588544249534607,
      "learning_rate": 7.542689739403097e-07,
      "loss": 2.7972,
      "step": 1960
    },
    {
      "epoch": 0.8880915003363983,
      "grad_norm": 0.58263099193573,
      "learning_rate": 6.474614847373051e-07,
      "loss": 2.7572,
      "step": 1980
    },
    {
      "epoch": 0.8970621215519174,
      "grad_norm": 0.7105110287666321,
      "learning_rate": 5.485525441527651e-07,
      "loss": 2.7105,
      "step": 2000
    },
    {
      "epoch": 0.9060327427674366,
      "grad_norm": 0.6308173537254333,
      "learning_rate": 4.5762568427529795e-07,
      "loss": 2.7918,
      "step": 2020
    },
    {
      "epoch": 0.9150033639829558,
      "grad_norm": 0.5979055166244507,
      "learning_rate": 3.747576960448551e-07,
      "loss": 2.7782,
      "step": 2040
    },
    {
      "epoch": 0.923973985198475,
      "grad_norm": 1.7976534366607666,
      "learning_rate": 3.0001856440005307e-07,
      "loss": 2.7122,
      "step": 2060
    },
    {
      "epoch": 0.9329446064139941,
      "grad_norm": 0.6443936228752136,
      "learning_rate": 2.3347140917344579e-07,
      "loss": 2.884,
      "step": 2080
    },
    {
      "epoch": 0.9419152276295133,
      "grad_norm": 0.6675403118133545,
      "learning_rate": 1.7517243178458486e-07,
      "loss": 2.799,
      "step": 2100
    },
    {
      "epoch": 0.9508858488450325,
      "grad_norm": 0.6668349504470825,
      "learning_rate": 1.2517086777594112e-07,
      "loss": 2.7874,
      "step": 2120
    },
    {
      "epoch": 0.9598564700605517,
      "grad_norm": 0.5564482808113098,
      "learning_rate": 8.35089452317639e-08,
      "loss": 2.6968,
      "step": 2140
    },
    {
      "epoch": 0.9688270912760709,
      "grad_norm": 0.6463912129402161,
      "learning_rate": 5.022184911495864e-08,
      "loss": 2.7258,
      "step": 2160
    },
    {
      "epoch": 0.9777977124915901,
      "grad_norm": 0.7440292835235596,
      "learning_rate": 2.5337691552156372e-08,
      "loss": 2.7671,
      "step": 2180
    },
    {
      "epoch": 0.9867683337071093,
      "grad_norm": 0.9270328283309937,
      "learning_rate": 8.877488092022823e-09,
      "loss": 2.7775,
      "step": 2200
    },
    {
      "epoch": 0.9957389549226284,
      "grad_norm": 0.6276254057884216,
      "learning_rate": 8.551399568945684e-10,
      "loss": 2.7756,
      "step": 2220
    },
    {
      "epoch": 0.999775734469612,
      "step": 2229,
      "total_flos": 3.285867140355195e+17,
      "train_loss": 2.858161311879079,
      "train_runtime": 1835.3961,
      "train_samples_per_second": 77.727,
      "train_steps_per_second": 1.214
    }
  ],
  "logging_steps": 20,
  "max_steps": 2229,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.285867140355195e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}