CodeLlama-13B-QML / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 20,
"global_step": 11621,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0017211333663217229,
"grad_norm": 0.512730062007904,
"learning_rate": 0.0002,
"loss": 1.7072,
"step": 20
},
{
"epoch": 0.0034422667326434457,
"grad_norm": 0.3541152775287628,
"learning_rate": 0.0002,
"loss": 1.3871,
"step": 40
},
{
"epoch": 0.005163400098965169,
"grad_norm": 0.40835729241371155,
"learning_rate": 0.0002,
"loss": 1.2637,
"step": 60
},
{
"epoch": 0.0068845334652868915,
"grad_norm": 0.3723342716693878,
"learning_rate": 0.0002,
"loss": 1.2378,
"step": 80
},
{
"epoch": 0.008605666831608615,
"grad_norm": 0.3391265571117401,
"learning_rate": 0.0002,
"loss": 1.1991,
"step": 100
},
{
"epoch": 0.010326800197930338,
"grad_norm": 0.3430226445198059,
"learning_rate": 0.0002,
"loss": 1.1191,
"step": 120
},
{
"epoch": 0.01204793356425206,
"grad_norm": 0.39266398549079895,
"learning_rate": 0.0002,
"loss": 1.1358,
"step": 140
},
{
"epoch": 0.013769066930573783,
"grad_norm": 0.38184505701065063,
"learning_rate": 0.0002,
"loss": 1.0851,
"step": 160
},
{
"epoch": 0.015490200296895506,
"grad_norm": 0.3323766887187958,
"learning_rate": 0.0002,
"loss": 1.0519,
"step": 180
},
{
"epoch": 0.01721133366321723,
"grad_norm": 0.3520768880844116,
"learning_rate": 0.0002,
"loss": 0.9788,
"step": 200
},
{
"epoch": 0.018932467029538953,
"grad_norm": 0.4066319465637207,
"learning_rate": 0.0002,
"loss": 0.9647,
"step": 220
},
{
"epoch": 0.020653600395860675,
"grad_norm": 0.3871042728424072,
"learning_rate": 0.0002,
"loss": 0.9981,
"step": 240
},
{
"epoch": 0.022374733762182398,
"grad_norm": 0.3759310841560364,
"learning_rate": 0.0002,
"loss": 0.9527,
"step": 260
},
{
"epoch": 0.02409586712850412,
"grad_norm": 0.411364883184433,
"learning_rate": 0.0002,
"loss": 1.0079,
"step": 280
},
{
"epoch": 0.025817000494825843,
"grad_norm": 0.38831627368927,
"learning_rate": 0.0002,
"loss": 1.0129,
"step": 300
},
{
"epoch": 0.027538133861147566,
"grad_norm": 0.36452218890190125,
"learning_rate": 0.0002,
"loss": 0.9021,
"step": 320
},
{
"epoch": 0.02925926722746929,
"grad_norm": 0.40188145637512207,
"learning_rate": 0.0002,
"loss": 0.9138,
"step": 340
},
{
"epoch": 0.03098040059379101,
"grad_norm": 0.3994237184524536,
"learning_rate": 0.0002,
"loss": 0.9164,
"step": 360
},
{
"epoch": 0.03270153396011274,
"grad_norm": 0.4226379692554474,
"learning_rate": 0.0002,
"loss": 0.8986,
"step": 380
},
{
"epoch": 0.03442266732643446,
"grad_norm": 0.4331601560115814,
"learning_rate": 0.0002,
"loss": 0.8443,
"step": 400
},
{
"epoch": 0.03614380069275618,
"grad_norm": 0.373415470123291,
"learning_rate": 0.0002,
"loss": 0.8437,
"step": 420
},
{
"epoch": 0.037864934059077905,
"grad_norm": 0.49005845189094543,
"learning_rate": 0.0002,
"loss": 0.8387,
"step": 440
},
{
"epoch": 0.03958606742539963,
"grad_norm": 0.5034841895103455,
"learning_rate": 0.0002,
"loss": 0.8108,
"step": 460
},
{
"epoch": 0.04130720079172135,
"grad_norm": 0.3401569426059723,
"learning_rate": 0.0002,
"loss": 0.7975,
"step": 480
},
{
"epoch": 0.04302833415804307,
"grad_norm": 0.37207746505737305,
"learning_rate": 0.0002,
"loss": 0.8135,
"step": 500
},
{
"epoch": 0.044749467524364796,
"grad_norm": 0.39017221331596375,
"learning_rate": 0.0002,
"loss": 0.7053,
"step": 520
},
{
"epoch": 0.04647060089068652,
"grad_norm": 0.42643848061561584,
"learning_rate": 0.0002,
"loss": 0.8119,
"step": 540
},
{
"epoch": 0.04819173425700824,
"grad_norm": 0.4266549050807953,
"learning_rate": 0.0002,
"loss": 0.7676,
"step": 560
},
{
"epoch": 0.049912867623329964,
"grad_norm": 0.37006089091300964,
"learning_rate": 0.0002,
"loss": 0.7675,
"step": 580
},
{
"epoch": 0.051634000989651686,
"grad_norm": 0.3692554235458374,
"learning_rate": 0.0002,
"loss": 0.7843,
"step": 600
},
{
"epoch": 0.05335513435597341,
"grad_norm": 0.41451218724250793,
"learning_rate": 0.0002,
"loss": 0.7111,
"step": 620
},
{
"epoch": 0.05507626772229513,
"grad_norm": 0.46577128767967224,
"learning_rate": 0.0002,
"loss": 0.7248,
"step": 640
},
{
"epoch": 0.056797401088616854,
"grad_norm": 0.4749889373779297,
"learning_rate": 0.0002,
"loss": 0.7454,
"step": 660
},
{
"epoch": 0.05851853445493858,
"grad_norm": 0.44663748145103455,
"learning_rate": 0.0002,
"loss": 0.6829,
"step": 680
},
{
"epoch": 0.0602396678212603,
"grad_norm": 0.3434062600135803,
"learning_rate": 0.0002,
"loss": 0.6625,
"step": 700
},
{
"epoch": 0.06196080118758202,
"grad_norm": 0.45600661635398865,
"learning_rate": 0.0002,
"loss": 0.6623,
"step": 720
},
{
"epoch": 0.06368193455390375,
"grad_norm": 0.41260451078414917,
"learning_rate": 0.0002,
"loss": 0.6497,
"step": 740
},
{
"epoch": 0.06540306792022547,
"grad_norm": 0.3827366232872009,
"learning_rate": 0.0002,
"loss": 0.6133,
"step": 760
},
{
"epoch": 0.0671242012865472,
"grad_norm": 0.44087541103363037,
"learning_rate": 0.0002,
"loss": 0.6782,
"step": 780
},
{
"epoch": 0.06884533465286892,
"grad_norm": 0.3596974313259125,
"learning_rate": 0.0002,
"loss": 0.6294,
"step": 800
},
{
"epoch": 0.07056646801919064,
"grad_norm": 0.42401421070098877,
"learning_rate": 0.0002,
"loss": 0.6761,
"step": 820
},
{
"epoch": 0.07228760138551237,
"grad_norm": 0.34142622351646423,
"learning_rate": 0.0002,
"loss": 0.6274,
"step": 840
},
{
"epoch": 0.07400873475183409,
"grad_norm": 0.3514570891857147,
"learning_rate": 0.0002,
"loss": 0.6053,
"step": 860
},
{
"epoch": 0.07572986811815581,
"grad_norm": 0.3995071053504944,
"learning_rate": 0.0002,
"loss": 0.5953,
"step": 880
},
{
"epoch": 0.07745100148447753,
"grad_norm": 0.3967471420764923,
"learning_rate": 0.0002,
"loss": 0.6375,
"step": 900
},
{
"epoch": 0.07917213485079926,
"grad_norm": 0.434865266084671,
"learning_rate": 0.0002,
"loss": 0.6053,
"step": 920
},
{
"epoch": 0.08089326821712098,
"grad_norm": 0.42927464842796326,
"learning_rate": 0.0002,
"loss": 0.5947,
"step": 940
},
{
"epoch": 0.0826144015834427,
"grad_norm": 0.6075530052185059,
"learning_rate": 0.0002,
"loss": 0.6331,
"step": 960
},
{
"epoch": 0.08433553494976442,
"grad_norm": 0.46515706181526184,
"learning_rate": 0.0002,
"loss": 0.569,
"step": 980
},
{
"epoch": 0.08605666831608615,
"grad_norm": 0.2592894732952118,
"learning_rate": 0.0002,
"loss": 0.5572,
"step": 1000
},
{
"epoch": 0.08777780168240787,
"grad_norm": 0.4780791699886322,
"learning_rate": 0.0002,
"loss": 0.6014,
"step": 1020
},
{
"epoch": 0.08949893504872959,
"grad_norm": 0.37773773074150085,
"learning_rate": 0.0002,
"loss": 0.5783,
"step": 1040
},
{
"epoch": 0.09122006841505131,
"grad_norm": 0.5149890780448914,
"learning_rate": 0.0002,
"loss": 0.5485,
"step": 1060
},
{
"epoch": 0.09294120178137304,
"grad_norm": 0.522530734539032,
"learning_rate": 0.0002,
"loss": 0.5707,
"step": 1080
},
{
"epoch": 0.09466233514769476,
"grad_norm": 0.42418375611305237,
"learning_rate": 0.0002,
"loss": 0.5817,
"step": 1100
},
{
"epoch": 0.09638346851401648,
"grad_norm": 0.3230820596218109,
"learning_rate": 0.0002,
"loss": 0.5426,
"step": 1120
},
{
"epoch": 0.0981046018803382,
"grad_norm": 0.46259307861328125,
"learning_rate": 0.0002,
"loss": 0.5304,
"step": 1140
},
{
"epoch": 0.09982573524665993,
"grad_norm": 0.41511914134025574,
"learning_rate": 0.0002,
"loss": 0.5343,
"step": 1160
},
{
"epoch": 0.10154686861298165,
"grad_norm": 0.5695448517799377,
"learning_rate": 0.0002,
"loss": 0.5773,
"step": 1180
},
{
"epoch": 0.10326800197930337,
"grad_norm": 0.3354031443595886,
"learning_rate": 0.0002,
"loss": 0.5377,
"step": 1200
},
{
"epoch": 0.1049891353456251,
"grad_norm": 0.5198836922645569,
"learning_rate": 0.0002,
"loss": 0.5454,
"step": 1220
},
{
"epoch": 0.10671026871194682,
"grad_norm": 0.3364623188972473,
"learning_rate": 0.0002,
"loss": 0.5057,
"step": 1240
},
{
"epoch": 0.10843140207826854,
"grad_norm": 0.3844848871231079,
"learning_rate": 0.0002,
"loss": 0.571,
"step": 1260
},
{
"epoch": 0.11015253544459026,
"grad_norm": 0.45747458934783936,
"learning_rate": 0.0002,
"loss": 0.5425,
"step": 1280
},
{
"epoch": 0.11187366881091199,
"grad_norm": 0.4278349280357361,
"learning_rate": 0.0002,
"loss": 0.4814,
"step": 1300
},
{
"epoch": 0.11359480217723371,
"grad_norm": 0.4377232491970062,
"learning_rate": 0.0002,
"loss": 0.5083,
"step": 1320
},
{
"epoch": 0.11531593554355543,
"grad_norm": 0.48636892437934875,
"learning_rate": 0.0002,
"loss": 0.5317,
"step": 1340
},
{
"epoch": 0.11703706890987715,
"grad_norm": 0.39063599705696106,
"learning_rate": 0.0002,
"loss": 0.482,
"step": 1360
},
{
"epoch": 0.11875820227619888,
"grad_norm": 0.45347273349761963,
"learning_rate": 0.0002,
"loss": 0.4754,
"step": 1380
},
{
"epoch": 0.1204793356425206,
"grad_norm": 0.44759300351142883,
"learning_rate": 0.0002,
"loss": 0.4773,
"step": 1400
},
{
"epoch": 0.12220046900884232,
"grad_norm": 0.44881579279899597,
"learning_rate": 0.0002,
"loss": 0.5236,
"step": 1420
},
{
"epoch": 0.12392160237516404,
"grad_norm": 0.5386276245117188,
"learning_rate": 0.0002,
"loss": 0.5021,
"step": 1440
},
{
"epoch": 0.12564273574148577,
"grad_norm": 0.48680126667022705,
"learning_rate": 0.0002,
"loss": 0.4784,
"step": 1460
},
{
"epoch": 0.1273638691078075,
"grad_norm": 0.44588467478752136,
"learning_rate": 0.0002,
"loss": 0.4905,
"step": 1480
},
{
"epoch": 0.1290850024741292,
"grad_norm": 0.4805804491043091,
"learning_rate": 0.0002,
"loss": 0.4866,
"step": 1500
},
{
"epoch": 0.13080613584045095,
"grad_norm": 0.44360053539276123,
"learning_rate": 0.0002,
"loss": 0.4637,
"step": 1520
},
{
"epoch": 0.13252726920677266,
"grad_norm": 0.3818538784980774,
"learning_rate": 0.0002,
"loss": 0.4512,
"step": 1540
},
{
"epoch": 0.1342484025730944,
"grad_norm": 0.44495511054992676,
"learning_rate": 0.0002,
"loss": 0.4691,
"step": 1560
},
{
"epoch": 0.1359695359394161,
"grad_norm": 0.4303475618362427,
"learning_rate": 0.0002,
"loss": 0.4551,
"step": 1580
},
{
"epoch": 0.13769066930573784,
"grad_norm": 0.4990542531013489,
"learning_rate": 0.0002,
"loss": 0.4426,
"step": 1600
},
{
"epoch": 0.13941180267205955,
"grad_norm": 0.4822593331336975,
"learning_rate": 0.0002,
"loss": 0.4746,
"step": 1620
},
{
"epoch": 0.14113293603838128,
"grad_norm": 0.39103543758392334,
"learning_rate": 0.0002,
"loss": 0.4503,
"step": 1640
},
{
"epoch": 0.142854069404703,
"grad_norm": 0.3490641117095947,
"learning_rate": 0.0002,
"loss": 0.4869,
"step": 1660
},
{
"epoch": 0.14457520277102473,
"grad_norm": 0.3039151728153229,
"learning_rate": 0.0002,
"loss": 0.4885,
"step": 1680
},
{
"epoch": 0.14629633613734644,
"grad_norm": 0.45525145530700684,
"learning_rate": 0.0002,
"loss": 0.4465,
"step": 1700
},
{
"epoch": 0.14801746950366818,
"grad_norm": 0.3779124915599823,
"learning_rate": 0.0002,
"loss": 0.4077,
"step": 1720
},
{
"epoch": 0.14973860286998988,
"grad_norm": 0.3984091579914093,
"learning_rate": 0.0002,
"loss": 0.4212,
"step": 1740
},
{
"epoch": 0.15145973623631162,
"grad_norm": 0.37952274084091187,
"learning_rate": 0.0002,
"loss": 0.4158,
"step": 1760
},
{
"epoch": 0.15318086960263333,
"grad_norm": 0.34842512011528015,
"learning_rate": 0.0002,
"loss": 0.4188,
"step": 1780
},
{
"epoch": 0.15490200296895507,
"grad_norm": 0.36399173736572266,
"learning_rate": 0.0002,
"loss": 0.4347,
"step": 1800
},
{
"epoch": 0.15662313633527677,
"grad_norm": 0.38256826996803284,
"learning_rate": 0.0002,
"loss": 0.4179,
"step": 1820
},
{
"epoch": 0.1583442697015985,
"grad_norm": 0.3715302050113678,
"learning_rate": 0.0002,
"loss": 0.4222,
"step": 1840
},
{
"epoch": 0.16006540306792022,
"grad_norm": 0.3852146565914154,
"learning_rate": 0.0002,
"loss": 0.4005,
"step": 1860
},
{
"epoch": 0.16178653643424196,
"grad_norm": 0.3589220643043518,
"learning_rate": 0.0002,
"loss": 0.4099,
"step": 1880
},
{
"epoch": 0.16350766980056367,
"grad_norm": 0.5060593485832214,
"learning_rate": 0.0002,
"loss": 0.4158,
"step": 1900
},
{
"epoch": 0.1652288031668854,
"grad_norm": 0.3826269507408142,
"learning_rate": 0.0002,
"loss": 0.3874,
"step": 1920
},
{
"epoch": 0.1669499365332071,
"grad_norm": 0.3576675057411194,
"learning_rate": 0.0002,
"loss": 0.4226,
"step": 1940
},
{
"epoch": 0.16867106989952885,
"grad_norm": 0.37530502676963806,
"learning_rate": 0.0002,
"loss": 0.4014,
"step": 1960
},
{
"epoch": 0.17039220326585056,
"grad_norm": 0.3857693374156952,
"learning_rate": 0.0002,
"loss": 0.413,
"step": 1980
},
{
"epoch": 0.1721133366321723,
"grad_norm": 0.42137113213539124,
"learning_rate": 0.0002,
"loss": 0.4212,
"step": 2000
},
{
"epoch": 0.173834469998494,
"grad_norm": 0.37976202368736267,
"learning_rate": 0.0002,
"loss": 0.3925,
"step": 2020
},
{
"epoch": 0.17555560336481574,
"grad_norm": 0.3828701078891754,
"learning_rate": 0.0002,
"loss": 0.4004,
"step": 2040
},
{
"epoch": 0.17727673673113745,
"grad_norm": 0.5018408894538879,
"learning_rate": 0.0002,
"loss": 0.384,
"step": 2060
},
{
"epoch": 0.17899787009745918,
"grad_norm": 0.4595503509044647,
"learning_rate": 0.0002,
"loss": 0.4179,
"step": 2080
},
{
"epoch": 0.1807190034637809,
"grad_norm": 0.5221086144447327,
"learning_rate": 0.0002,
"loss": 0.3855,
"step": 2100
},
{
"epoch": 0.18244013683010263,
"grad_norm": 0.5442349314689636,
"learning_rate": 0.0002,
"loss": 0.3579,
"step": 2120
},
{
"epoch": 0.18416127019642434,
"grad_norm": 0.5430803298950195,
"learning_rate": 0.0002,
"loss": 0.3871,
"step": 2140
},
{
"epoch": 0.18588240356274607,
"grad_norm": 0.2919999063014984,
"learning_rate": 0.0002,
"loss": 0.3947,
"step": 2160
},
{
"epoch": 0.18760353692906778,
"grad_norm": 0.3997620940208435,
"learning_rate": 0.0002,
"loss": 0.3989,
"step": 2180
},
{
"epoch": 0.18932467029538952,
"grad_norm": 0.38948822021484375,
"learning_rate": 0.0002,
"loss": 0.4292,
"step": 2200
},
{
"epoch": 0.19104580366171123,
"grad_norm": 0.4080924689769745,
"learning_rate": 0.0002,
"loss": 0.398,
"step": 2220
},
{
"epoch": 0.19276693702803296,
"grad_norm": 0.3982754647731781,
"learning_rate": 0.0002,
"loss": 0.4022,
"step": 2240
},
{
"epoch": 0.19448807039435467,
"grad_norm": 0.41448819637298584,
"learning_rate": 0.0002,
"loss": 0.3968,
"step": 2260
},
{
"epoch": 0.1962092037606764,
"grad_norm": 0.42457613348960876,
"learning_rate": 0.0002,
"loss": 0.4024,
"step": 2280
},
{
"epoch": 0.19793033712699812,
"grad_norm": 0.2920919358730316,
"learning_rate": 0.0002,
"loss": 0.387,
"step": 2300
},
{
"epoch": 0.19965147049331985,
"grad_norm": 0.4027709662914276,
"learning_rate": 0.0002,
"loss": 0.3835,
"step": 2320
},
{
"epoch": 0.20137260385964156,
"grad_norm": 0.41888585686683655,
"learning_rate": 0.0002,
"loss": 0.3985,
"step": 2340
},
{
"epoch": 0.2030937372259633,
"grad_norm": 0.4292152225971222,
"learning_rate": 0.0002,
"loss": 0.3823,
"step": 2360
},
{
"epoch": 0.204814870592285,
"grad_norm": 0.4724443554878235,
"learning_rate": 0.0002,
"loss": 0.3589,
"step": 2380
},
{
"epoch": 0.20653600395860675,
"grad_norm": 0.32000842690467834,
"learning_rate": 0.0002,
"loss": 0.383,
"step": 2400
},
{
"epoch": 0.20825713732492845,
"grad_norm": 0.3765513300895691,
"learning_rate": 0.0002,
"loss": 0.371,
"step": 2420
},
{
"epoch": 0.2099782706912502,
"grad_norm": 0.39066755771636963,
"learning_rate": 0.0002,
"loss": 0.3684,
"step": 2440
},
{
"epoch": 0.2116994040575719,
"grad_norm": 0.42424216866493225,
"learning_rate": 0.0002,
"loss": 0.3653,
"step": 2460
},
{
"epoch": 0.21342053742389364,
"grad_norm": 0.5066993832588196,
"learning_rate": 0.0002,
"loss": 0.3566,
"step": 2480
},
{
"epoch": 0.21514167079021534,
"grad_norm": 0.44107547402381897,
"learning_rate": 0.0002,
"loss": 0.3748,
"step": 2500
},
{
"epoch": 0.21686280415653708,
"grad_norm": 0.3890049457550049,
"learning_rate": 0.0002,
"loss": 0.3378,
"step": 2520
},
{
"epoch": 0.2185839375228588,
"grad_norm": 0.3157922327518463,
"learning_rate": 0.0002,
"loss": 0.3462,
"step": 2540
},
{
"epoch": 0.22030507088918053,
"grad_norm": 0.34473904967308044,
"learning_rate": 0.0002,
"loss": 0.3523,
"step": 2560
},
{
"epoch": 0.22202620425550224,
"grad_norm": 0.38636985421180725,
"learning_rate": 0.0002,
"loss": 0.3694,
"step": 2580
},
{
"epoch": 0.22374733762182397,
"grad_norm": 0.4448566436767578,
"learning_rate": 0.0002,
"loss": 0.3752,
"step": 2600
},
{
"epoch": 0.22546847098814568,
"grad_norm": 0.3706173002719879,
"learning_rate": 0.0002,
"loss": 0.3706,
"step": 2620
},
{
"epoch": 0.22718960435446742,
"grad_norm": 0.47221776843070984,
"learning_rate": 0.0002,
"loss": 0.3865,
"step": 2640
},
{
"epoch": 0.22891073772078913,
"grad_norm": 0.39190933108329773,
"learning_rate": 0.0002,
"loss": 0.362,
"step": 2660
},
{
"epoch": 0.23063187108711086,
"grad_norm": 0.36031046509742737,
"learning_rate": 0.0002,
"loss": 0.352,
"step": 2680
},
{
"epoch": 0.2323530044534326,
"grad_norm": 0.4583648145198822,
"learning_rate": 0.0002,
"loss": 0.3343,
"step": 2700
},
{
"epoch": 0.2340741378197543,
"grad_norm": 0.35927554965019226,
"learning_rate": 0.0002,
"loss": 0.3748,
"step": 2720
},
{
"epoch": 0.23579527118607604,
"grad_norm": 0.45593491196632385,
"learning_rate": 0.0002,
"loss": 0.356,
"step": 2740
},
{
"epoch": 0.23751640455239775,
"grad_norm": 0.5094373226165771,
"learning_rate": 0.0002,
"loss": 0.3636,
"step": 2760
},
{
"epoch": 0.2392375379187195,
"grad_norm": 0.47478821873664856,
"learning_rate": 0.0002,
"loss": 0.3537,
"step": 2780
},
{
"epoch": 0.2409586712850412,
"grad_norm": 0.37840044498443604,
"learning_rate": 0.0002,
"loss": 0.3507,
"step": 2800
},
{
"epoch": 0.24267980465136293,
"grad_norm": 0.41650140285491943,
"learning_rate": 0.0002,
"loss": 0.3405,
"step": 2820
},
{
"epoch": 0.24440093801768464,
"grad_norm": 0.47170814871788025,
"learning_rate": 0.0002,
"loss": 0.3564,
"step": 2840
},
{
"epoch": 0.24612207138400638,
"grad_norm": 0.3727279305458069,
"learning_rate": 0.0002,
"loss": 0.3621,
"step": 2860
},
{
"epoch": 0.2478432047503281,
"grad_norm": 0.2921255826950073,
"learning_rate": 0.0002,
"loss": 0.3504,
"step": 2880
},
{
"epoch": 0.24956433811664983,
"grad_norm": 0.30992376804351807,
"learning_rate": 0.0002,
"loss": 0.3403,
"step": 2900
},
{
"epoch": 0.25128547148297153,
"grad_norm": 0.3726598620414734,
"learning_rate": 0.0002,
"loss": 0.337,
"step": 2920
},
{
"epoch": 0.25300660484929327,
"grad_norm": 0.46260905265808105,
"learning_rate": 0.0002,
"loss": 0.3428,
"step": 2940
},
{
"epoch": 0.254727738215615,
"grad_norm": 0.4512200653553009,
"learning_rate": 0.0002,
"loss": 0.3456,
"step": 2960
},
{
"epoch": 0.2564488715819367,
"grad_norm": 0.3799881041049957,
"learning_rate": 0.0002,
"loss": 0.3479,
"step": 2980
},
{
"epoch": 0.2581700049482584,
"grad_norm": 0.36525946855545044,
"learning_rate": 0.0002,
"loss": 0.3324,
"step": 3000
},
{
"epoch": 0.25989113831458016,
"grad_norm": 0.43525341153144836,
"learning_rate": 0.0002,
"loss": 0.3499,
"step": 3020
},
{
"epoch": 0.2616122716809019,
"grad_norm": 0.39706552028656006,
"learning_rate": 0.0002,
"loss": 0.3319,
"step": 3040
},
{
"epoch": 0.2633334050472236,
"grad_norm": 0.36725783348083496,
"learning_rate": 0.0002,
"loss": 0.3581,
"step": 3060
},
{
"epoch": 0.2650545384135453,
"grad_norm": 0.4533792734146118,
"learning_rate": 0.0002,
"loss": 0.3377,
"step": 3080
},
{
"epoch": 0.26677567177986705,
"grad_norm": 0.3752067983150482,
"learning_rate": 0.0002,
"loss": 0.3331,
"step": 3100
},
{
"epoch": 0.2684968051461888,
"grad_norm": 0.31743401288986206,
"learning_rate": 0.0002,
"loss": 0.3118,
"step": 3120
},
{
"epoch": 0.27021793851251047,
"grad_norm": 0.45233970880508423,
"learning_rate": 0.0002,
"loss": 0.334,
"step": 3140
},
{
"epoch": 0.2719390718788322,
"grad_norm": 0.32593509554862976,
"learning_rate": 0.0002,
"loss": 0.3208,
"step": 3160
},
{
"epoch": 0.27366020524515394,
"grad_norm": 0.3870694935321808,
"learning_rate": 0.0002,
"loss": 0.3398,
"step": 3180
},
{
"epoch": 0.2753813386114757,
"grad_norm": 0.39160028100013733,
"learning_rate": 0.0002,
"loss": 0.3209,
"step": 3200
},
{
"epoch": 0.27710247197779736,
"grad_norm": 0.3718438148498535,
"learning_rate": 0.0002,
"loss": 0.2982,
"step": 3220
},
{
"epoch": 0.2788236053441191,
"grad_norm": 0.4262580871582031,
"learning_rate": 0.0002,
"loss": 0.2977,
"step": 3240
},
{
"epoch": 0.28054473871044083,
"grad_norm": 0.33147528767585754,
"learning_rate": 0.0002,
"loss": 0.3345,
"step": 3260
},
{
"epoch": 0.28226587207676257,
"grad_norm": 0.3187743127346039,
"learning_rate": 0.0002,
"loss": 0.3325,
"step": 3280
},
{
"epoch": 0.28398700544308425,
"grad_norm": 0.4593104422092438,
"learning_rate": 0.0002,
"loss": 0.3101,
"step": 3300
},
{
"epoch": 0.285708138809406,
"grad_norm": 0.431805819272995,
"learning_rate": 0.0002,
"loss": 0.2995,
"step": 3320
},
{
"epoch": 0.2874292721757277,
"grad_norm": 0.3650576174259186,
"learning_rate": 0.0002,
"loss": 0.3176,
"step": 3340
},
{
"epoch": 0.28915040554204946,
"grad_norm": 0.3000945746898651,
"learning_rate": 0.0002,
"loss": 0.3364,
"step": 3360
},
{
"epoch": 0.29087153890837114,
"grad_norm": 0.3499468266963959,
"learning_rate": 0.0002,
"loss": 0.2956,
"step": 3380
},
{
"epoch": 0.2925926722746929,
"grad_norm": 0.41105055809020996,
"learning_rate": 0.0002,
"loss": 0.3516,
"step": 3400
},
{
"epoch": 0.2943138056410146,
"grad_norm": 0.4257635772228241,
"learning_rate": 0.0002,
"loss": 0.3098,
"step": 3420
},
{
"epoch": 0.29603493900733635,
"grad_norm": 0.3407900035381317,
"learning_rate": 0.0002,
"loss": 0.302,
"step": 3440
},
{
"epoch": 0.29775607237365803,
"grad_norm": 0.3562329113483429,
"learning_rate": 0.0002,
"loss": 0.3066,
"step": 3460
},
{
"epoch": 0.29947720573997977,
"grad_norm": 0.4639800786972046,
"learning_rate": 0.0002,
"loss": 0.3108,
"step": 3480
},
{
"epoch": 0.3011983391063015,
"grad_norm": 0.47634968161582947,
"learning_rate": 0.0002,
"loss": 0.3185,
"step": 3500
},
{
"epoch": 0.30291947247262324,
"grad_norm": 0.42725369334220886,
"learning_rate": 0.0002,
"loss": 0.3081,
"step": 3520
},
{
"epoch": 0.3046406058389449,
"grad_norm": 0.38221365213394165,
"learning_rate": 0.0002,
"loss": 0.3282,
"step": 3540
},
{
"epoch": 0.30636173920526666,
"grad_norm": 0.3222472369670868,
"learning_rate": 0.0002,
"loss": 0.2975,
"step": 3560
},
{
"epoch": 0.3080828725715884,
"grad_norm": 0.3551533818244934,
"learning_rate": 0.0002,
"loss": 0.308,
"step": 3580
},
{
"epoch": 0.30980400593791013,
"grad_norm": 0.35602936148643494,
"learning_rate": 0.0002,
"loss": 0.3225,
"step": 3600
},
{
"epoch": 0.3115251393042318,
"grad_norm": 0.36850786209106445,
"learning_rate": 0.0002,
"loss": 0.293,
"step": 3620
},
{
"epoch": 0.31324627267055355,
"grad_norm": 0.4266447126865387,
"learning_rate": 0.0002,
"loss": 0.3055,
"step": 3640
},
{
"epoch": 0.3149674060368753,
"grad_norm": 0.3386904001235962,
"learning_rate": 0.0002,
"loss": 0.3046,
"step": 3660
},
{
"epoch": 0.316688539403197,
"grad_norm": 0.39958760142326355,
"learning_rate": 0.0002,
"loss": 0.3177,
"step": 3680
},
{
"epoch": 0.3184096727695187,
"grad_norm": 0.3587161600589752,
"learning_rate": 0.0002,
"loss": 0.3183,
"step": 3700
},
{
"epoch": 0.32013080613584044,
"grad_norm": 0.37195342779159546,
"learning_rate": 0.0002,
"loss": 0.2984,
"step": 3720
},
{
"epoch": 0.3218519395021622,
"grad_norm": 0.42094019055366516,
"learning_rate": 0.0002,
"loss": 0.3019,
"step": 3740
},
{
"epoch": 0.3235730728684839,
"grad_norm": 0.2889210879802704,
"learning_rate": 0.0002,
"loss": 0.3229,
"step": 3760
},
{
"epoch": 0.3252942062348056,
"grad_norm": 0.4362521767616272,
"learning_rate": 0.0002,
"loss": 0.2891,
"step": 3780
},
{
"epoch": 0.32701533960112733,
"grad_norm": 0.3560026288032532,
"learning_rate": 0.0002,
"loss": 0.3042,
"step": 3800
},
{
"epoch": 0.32873647296744907,
"grad_norm": 0.5475091338157654,
"learning_rate": 0.0002,
"loss": 0.2949,
"step": 3820
},
{
"epoch": 0.3304576063337708,
"grad_norm": 0.3368379473686218,
"learning_rate": 0.0002,
"loss": 0.3203,
"step": 3840
},
{
"epoch": 0.3321787397000925,
"grad_norm": 0.45856744050979614,
"learning_rate": 0.0002,
"loss": 0.3107,
"step": 3860
},
{
"epoch": 0.3338998730664142,
"grad_norm": 0.47050952911376953,
"learning_rate": 0.0002,
"loss": 0.2882,
"step": 3880
},
{
"epoch": 0.33562100643273596,
"grad_norm": 0.5037484169006348,
"learning_rate": 0.0002,
"loss": 0.3033,
"step": 3900
},
{
"epoch": 0.3373421397990577,
"grad_norm": 0.34310051798820496,
"learning_rate": 0.0002,
"loss": 0.3099,
"step": 3920
},
{
"epoch": 0.3390632731653794,
"grad_norm": 0.47114118933677673,
"learning_rate": 0.0002,
"loss": 0.2892,
"step": 3940
},
{
"epoch": 0.3407844065317011,
"grad_norm": 0.4255194365978241,
"learning_rate": 0.0002,
"loss": 0.3022,
"step": 3960
},
{
"epoch": 0.34250553989802285,
"grad_norm": 0.5251383185386658,
"learning_rate": 0.0002,
"loss": 0.3114,
"step": 3980
},
{
"epoch": 0.3442266732643446,
"grad_norm": 0.29961732029914856,
"learning_rate": 0.0002,
"loss": 0.3093,
"step": 4000
},
{
"epoch": 0.3459478066306663,
"grad_norm": 0.4707966446876526,
"learning_rate": 0.0002,
"loss": 0.3139,
"step": 4020
},
{
"epoch": 0.347668939996988,
"grad_norm": 0.33052656054496765,
"learning_rate": 0.0002,
"loss": 0.277,
"step": 4040
},
{
"epoch": 0.34939007336330974,
"grad_norm": 0.3629648685455322,
"learning_rate": 0.0002,
"loss": 0.2934,
"step": 4060
},
{
"epoch": 0.3511112067296315,
"grad_norm": 0.3852795362472534,
"learning_rate": 0.0002,
"loss": 0.2885,
"step": 4080
},
{
"epoch": 0.3528323400959532,
"grad_norm": 0.3913141191005707,
"learning_rate": 0.0002,
"loss": 0.3009,
"step": 4100
},
{
"epoch": 0.3545534734622749,
"grad_norm": 0.4364122152328491,
"learning_rate": 0.0002,
"loss": 0.2825,
"step": 4120
},
{
"epoch": 0.35627460682859663,
"grad_norm": 0.3607100248336792,
"learning_rate": 0.0002,
"loss": 0.2969,
"step": 4140
},
{
"epoch": 0.35799574019491837,
"grad_norm": 0.41620948910713196,
"learning_rate": 0.0002,
"loss": 0.2735,
"step": 4160
},
{
"epoch": 0.3597168735612401,
"grad_norm": 0.36478644609451294,
"learning_rate": 0.0002,
"loss": 0.2826,
"step": 4180
},
{
"epoch": 0.3614380069275618,
"grad_norm": 0.39005762338638306,
"learning_rate": 0.0002,
"loss": 0.2945,
"step": 4200
},
{
"epoch": 0.3631591402938835,
"grad_norm": 0.3086032271385193,
"learning_rate": 0.0002,
"loss": 0.3057,
"step": 4220
},
{
"epoch": 0.36488027366020526,
"grad_norm": 0.4346630275249481,
"learning_rate": 0.0002,
"loss": 0.2667,
"step": 4240
},
{
"epoch": 0.366601407026527,
"grad_norm": 0.3394578993320465,
"learning_rate": 0.0002,
"loss": 0.3107,
"step": 4260
},
{
"epoch": 0.3683225403928487,
"grad_norm": 0.44261375069618225,
"learning_rate": 0.0002,
"loss": 0.2968,
"step": 4280
},
{
"epoch": 0.3700436737591704,
"grad_norm": 0.36678528785705566,
"learning_rate": 0.0002,
"loss": 0.3023,
"step": 4300
},
{
"epoch": 0.37176480712549215,
"grad_norm": 0.43799522519111633,
"learning_rate": 0.0002,
"loss": 0.2781,
"step": 4320
},
{
"epoch": 0.3734859404918139,
"grad_norm": 0.3344813883304596,
"learning_rate": 0.0002,
"loss": 0.2818,
"step": 4340
},
{
"epoch": 0.37520707385813556,
"grad_norm": 0.519148588180542,
"learning_rate": 0.0002,
"loss": 0.2897,
"step": 4360
},
{
"epoch": 0.3769282072244573,
"grad_norm": 0.36793655157089233,
"learning_rate": 0.0002,
"loss": 0.3068,
"step": 4380
},
{
"epoch": 0.37864934059077904,
"grad_norm": 0.37348249554634094,
"learning_rate": 0.0002,
"loss": 0.2849,
"step": 4400
},
{
"epoch": 0.3803704739571008,
"grad_norm": 0.44873690605163574,
"learning_rate": 0.0002,
"loss": 0.2902,
"step": 4420
},
{
"epoch": 0.38209160732342246,
"grad_norm": 0.32298997044563293,
"learning_rate": 0.0002,
"loss": 0.2892,
"step": 4440
},
{
"epoch": 0.3838127406897442,
"grad_norm": 0.4323394298553467,
"learning_rate": 0.0002,
"loss": 0.288,
"step": 4460
},
{
"epoch": 0.38553387405606593,
"grad_norm": 0.3986116945743561,
"learning_rate": 0.0002,
"loss": 0.2962,
"step": 4480
},
{
"epoch": 0.38725500742238766,
"grad_norm": 0.4980430603027344,
"learning_rate": 0.0002,
"loss": 0.2846,
"step": 4500
},
{
"epoch": 0.38897614078870935,
"grad_norm": 0.4195205569267273,
"learning_rate": 0.0002,
"loss": 0.2806,
"step": 4520
},
{
"epoch": 0.3906972741550311,
"grad_norm": 0.2984451949596405,
"learning_rate": 0.0002,
"loss": 0.2796,
"step": 4540
},
{
"epoch": 0.3924184075213528,
"grad_norm": 0.3897689878940582,
"learning_rate": 0.0002,
"loss": 0.2842,
"step": 4560
},
{
"epoch": 0.39413954088767456,
"grad_norm": 0.40885892510414124,
"learning_rate": 0.0002,
"loss": 0.2801,
"step": 4580
},
{
"epoch": 0.39586067425399624,
"grad_norm": 0.41099223494529724,
"learning_rate": 0.0002,
"loss": 0.276,
"step": 4600
},
{
"epoch": 0.397581807620318,
"grad_norm": 0.4116320312023163,
"learning_rate": 0.0002,
"loss": 0.2898,
"step": 4620
},
{
"epoch": 0.3993029409866397,
"grad_norm": 0.37863150238990784,
"learning_rate": 0.0002,
"loss": 0.268,
"step": 4640
},
{
"epoch": 0.40102407435296145,
"grad_norm": 0.43875718116760254,
"learning_rate": 0.0002,
"loss": 0.277,
"step": 4660
},
{
"epoch": 0.4027452077192831,
"grad_norm": 0.3862336277961731,
"learning_rate": 0.0002,
"loss": 0.269,
"step": 4680
},
{
"epoch": 0.40446634108560486,
"grad_norm": 0.330501914024353,
"learning_rate": 0.0002,
"loss": 0.2754,
"step": 4700
},
{
"epoch": 0.4061874744519266,
"grad_norm": 0.3258209526538849,
"learning_rate": 0.0002,
"loss": 0.2933,
"step": 4720
},
{
"epoch": 0.40790860781824834,
"grad_norm": 0.4297473430633545,
"learning_rate": 0.0002,
"loss": 0.2726,
"step": 4740
},
{
"epoch": 0.40962974118457,
"grad_norm": 0.36448150873184204,
"learning_rate": 0.0002,
"loss": 0.2645,
"step": 4760
},
{
"epoch": 0.41135087455089175,
"grad_norm": 0.4198833107948303,
"learning_rate": 0.0002,
"loss": 0.3029,
"step": 4780
},
{
"epoch": 0.4130720079172135,
"grad_norm": 0.35666197538375854,
"learning_rate": 0.0002,
"loss": 0.2443,
"step": 4800
},
{
"epoch": 0.4147931412835352,
"grad_norm": 0.38097846508026123,
"learning_rate": 0.0002,
"loss": 0.2706,
"step": 4820
},
{
"epoch": 0.4165142746498569,
"grad_norm": 0.3574432134628296,
"learning_rate": 0.0002,
"loss": 0.2791,
"step": 4840
},
{
"epoch": 0.41823540801617864,
"grad_norm": 0.46495670080184937,
"learning_rate": 0.0002,
"loss": 0.2753,
"step": 4860
},
{
"epoch": 0.4199565413825004,
"grad_norm": 0.4777359664440155,
"learning_rate": 0.0002,
"loss": 0.2717,
"step": 4880
},
{
"epoch": 0.4216776747488221,
"grad_norm": 0.4285059869289398,
"learning_rate": 0.0002,
"loss": 0.271,
"step": 4900
},
{
"epoch": 0.4233988081151438,
"grad_norm": 0.45529553294181824,
"learning_rate": 0.0002,
"loss": 0.268,
"step": 4920
},
{
"epoch": 0.42511994148146554,
"grad_norm": 0.3446490466594696,
"learning_rate": 0.0002,
"loss": 0.2659,
"step": 4940
},
{
"epoch": 0.42684107484778727,
"grad_norm": 0.3138258457183838,
"learning_rate": 0.0002,
"loss": 0.2845,
"step": 4960
},
{
"epoch": 0.428562208214109,
"grad_norm": 0.3556279242038727,
"learning_rate": 0.0002,
"loss": 0.2688,
"step": 4980
},
{
"epoch": 0.4302833415804307,
"grad_norm": 0.4338746964931488,
"learning_rate": 0.0002,
"loss": 0.272,
"step": 5000
},
{
"epoch": 0.4320044749467524,
"grad_norm": 0.3764958083629608,
"learning_rate": 0.0002,
"loss": 0.2618,
"step": 5020
},
{
"epoch": 0.43372560831307416,
"grad_norm": 0.3759072721004486,
"learning_rate": 0.0002,
"loss": 0.2657,
"step": 5040
},
{
"epoch": 0.4354467416793959,
"grad_norm": 0.3582591414451599,
"learning_rate": 0.0002,
"loss": 0.3033,
"step": 5060
},
{
"epoch": 0.4371678750457176,
"grad_norm": 0.3076396584510803,
"learning_rate": 0.0002,
"loss": 0.2719,
"step": 5080
},
{
"epoch": 0.4388890084120393,
"grad_norm": 0.4947834610939026,
"learning_rate": 0.0002,
"loss": 0.2661,
"step": 5100
},
{
"epoch": 0.44061014177836105,
"grad_norm": 0.4582163095474243,
"learning_rate": 0.0002,
"loss": 0.2687,
"step": 5120
},
{
"epoch": 0.4423312751446828,
"grad_norm": 0.3605941832065582,
"learning_rate": 0.0002,
"loss": 0.2692,
"step": 5140
},
{
"epoch": 0.44405240851100447,
"grad_norm": 0.37437519431114197,
"learning_rate": 0.0002,
"loss": 0.264,
"step": 5160
},
{
"epoch": 0.4457735418773262,
"grad_norm": 0.3707126975059509,
"learning_rate": 0.0002,
"loss": 0.2535,
"step": 5180
},
{
"epoch": 0.44749467524364794,
"grad_norm": 0.3294564485549927,
"learning_rate": 0.0002,
"loss": 0.2781,
"step": 5200
},
{
"epoch": 0.4492158086099697,
"grad_norm": 0.3913413882255554,
"learning_rate": 0.0002,
"loss": 0.2532,
"step": 5220
},
{
"epoch": 0.45093694197629136,
"grad_norm": 0.31665486097335815,
"learning_rate": 0.0002,
"loss": 0.2731,
"step": 5240
},
{
"epoch": 0.4526580753426131,
"grad_norm": 0.405577689409256,
"learning_rate": 0.0002,
"loss": 0.2559,
"step": 5260
},
{
"epoch": 0.45437920870893483,
"grad_norm": 0.41928738355636597,
"learning_rate": 0.0002,
"loss": 0.2874,
"step": 5280
},
{
"epoch": 0.45610034207525657,
"grad_norm": 0.4400196671485901,
"learning_rate": 0.0002,
"loss": 0.2797,
"step": 5300
},
{
"epoch": 0.45782147544157825,
"grad_norm": 0.29811346530914307,
"learning_rate": 0.0002,
"loss": 0.2619,
"step": 5320
},
{
"epoch": 0.4595426088079,
"grad_norm": 0.3616848289966583,
"learning_rate": 0.0002,
"loss": 0.2658,
"step": 5340
},
{
"epoch": 0.4612637421742217,
"grad_norm": 0.2878625690937042,
"learning_rate": 0.0002,
"loss": 0.2606,
"step": 5360
},
{
"epoch": 0.46298487554054346,
"grad_norm": 0.40930724143981934,
"learning_rate": 0.0002,
"loss": 0.2653,
"step": 5380
},
{
"epoch": 0.4647060089068652,
"grad_norm": 0.29266178607940674,
"learning_rate": 0.0002,
"loss": 0.2698,
"step": 5400
},
{
"epoch": 0.4664271422731869,
"grad_norm": 0.4216326177120209,
"learning_rate": 0.0002,
"loss": 0.2622,
"step": 5420
},
{
"epoch": 0.4681482756395086,
"grad_norm": 0.4683050215244293,
"learning_rate": 0.0002,
"loss": 0.2618,
"step": 5440
},
{
"epoch": 0.46986940900583035,
"grad_norm": 0.41073721647262573,
"learning_rate": 0.0002,
"loss": 0.2627,
"step": 5460
},
{
"epoch": 0.4715905423721521,
"grad_norm": 0.3913174867630005,
"learning_rate": 0.0002,
"loss": 0.257,
"step": 5480
},
{
"epoch": 0.47331167573847377,
"grad_norm": 0.39269042015075684,
"learning_rate": 0.0002,
"loss": 0.2659,
"step": 5500
},
{
"epoch": 0.4750328091047955,
"grad_norm": 0.4052613079547882,
"learning_rate": 0.0002,
"loss": 0.2523,
"step": 5520
},
{
"epoch": 0.47675394247111724,
"grad_norm": 0.36656561493873596,
"learning_rate": 0.0002,
"loss": 0.2627,
"step": 5540
},
{
"epoch": 0.478475075837439,
"grad_norm": 0.3679077625274658,
"learning_rate": 0.0002,
"loss": 0.264,
"step": 5560
},
{
"epoch": 0.48019620920376066,
"grad_norm": 0.49448278546333313,
"learning_rate": 0.0002,
"loss": 0.2688,
"step": 5580
},
{
"epoch": 0.4819173425700824,
"grad_norm": 0.39515119791030884,
"learning_rate": 0.0002,
"loss": 0.2498,
"step": 5600
},
{
"epoch": 0.48363847593640413,
"grad_norm": 0.3903290033340454,
"learning_rate": 0.0002,
"loss": 0.2744,
"step": 5620
},
{
"epoch": 0.48535960930272587,
"grad_norm": 0.41950464248657227,
"learning_rate": 0.0002,
"loss": 0.2645,
"step": 5640
},
{
"epoch": 0.48708074266904755,
"grad_norm": 0.3693583607673645,
"learning_rate": 0.0002,
"loss": 0.261,
"step": 5660
},
{
"epoch": 0.4888018760353693,
"grad_norm": 0.3864137530326843,
"learning_rate": 0.0002,
"loss": 0.2685,
"step": 5680
},
{
"epoch": 0.490523009401691,
"grad_norm": 0.5029241442680359,
"learning_rate": 0.0002,
"loss": 0.2531,
"step": 5700
},
{
"epoch": 0.49224414276801276,
"grad_norm": 0.37595513463020325,
"learning_rate": 0.0002,
"loss": 0.2775,
"step": 5720
},
{
"epoch": 0.49396527613433444,
"grad_norm": 0.4152761399745941,
"learning_rate": 0.0002,
"loss": 0.2448,
"step": 5740
},
{
"epoch": 0.4956864095006562,
"grad_norm": 0.36774635314941406,
"learning_rate": 0.0002,
"loss": 0.2731,
"step": 5760
},
{
"epoch": 0.4974075428669779,
"grad_norm": 0.347969114780426,
"learning_rate": 0.0002,
"loss": 0.2689,
"step": 5780
},
{
"epoch": 0.49912867623329965,
"grad_norm": 0.37085452675819397,
"learning_rate": 0.0002,
"loss": 0.2545,
"step": 5800
},
{
"epoch": 0.5008498095996213,
"grad_norm": 0.3683365285396576,
"learning_rate": 0.0002,
"loss": 0.2755,
"step": 5820
},
{
"epoch": 0.5025709429659431,
"grad_norm": 0.2994697690010071,
"learning_rate": 0.0002,
"loss": 0.2565,
"step": 5840
},
{
"epoch": 0.5042920763322648,
"grad_norm": 0.3561314046382904,
"learning_rate": 0.0002,
"loss": 0.2697,
"step": 5860
},
{
"epoch": 0.5060132096985865,
"grad_norm": 0.47199949622154236,
"learning_rate": 0.0002,
"loss": 0.2457,
"step": 5880
},
{
"epoch": 0.5077343430649083,
"grad_norm": 0.3466067612171173,
"learning_rate": 0.0002,
"loss": 0.2571,
"step": 5900
},
{
"epoch": 0.50945547643123,
"grad_norm": 0.26111066341400146,
"learning_rate": 0.0002,
"loss": 0.2509,
"step": 5920
},
{
"epoch": 0.5111766097975516,
"grad_norm": 0.32691895961761475,
"learning_rate": 0.0002,
"loss": 0.2575,
"step": 5940
},
{
"epoch": 0.5128977431638734,
"grad_norm": 0.31905749440193176,
"learning_rate": 0.0002,
"loss": 0.2549,
"step": 5960
},
{
"epoch": 0.5146188765301951,
"grad_norm": 0.3610023558139801,
"learning_rate": 0.0002,
"loss": 0.263,
"step": 5980
},
{
"epoch": 0.5163400098965168,
"grad_norm": 0.2800861895084381,
"learning_rate": 0.0002,
"loss": 0.2517,
"step": 6000
},
{
"epoch": 0.5180611432628386,
"grad_norm": 0.33658984303474426,
"learning_rate": 0.0002,
"loss": 0.2633,
"step": 6020
},
{
"epoch": 0.5197822766291603,
"grad_norm": 0.4304841160774231,
"learning_rate": 0.0002,
"loss": 0.2759,
"step": 6040
},
{
"epoch": 0.5215034099954821,
"grad_norm": 0.41126030683517456,
"learning_rate": 0.0002,
"loss": 0.2719,
"step": 6060
},
{
"epoch": 0.5232245433618038,
"grad_norm": 0.5020288825035095,
"learning_rate": 0.0002,
"loss": 0.2532,
"step": 6080
},
{
"epoch": 0.5249456767281254,
"grad_norm": 0.32016468048095703,
"learning_rate": 0.0002,
"loss": 0.2601,
"step": 6100
},
{
"epoch": 0.5266668100944472,
"grad_norm": 0.33081743121147156,
"learning_rate": 0.0002,
"loss": 0.2544,
"step": 6120
},
{
"epoch": 0.5283879434607689,
"grad_norm": 0.3016170561313629,
"learning_rate": 0.0002,
"loss": 0.2454,
"step": 6140
},
{
"epoch": 0.5301090768270906,
"grad_norm": 0.3886088728904724,
"learning_rate": 0.0002,
"loss": 0.2693,
"step": 6160
},
{
"epoch": 0.5318302101934124,
"grad_norm": 0.37182730436325073,
"learning_rate": 0.0002,
"loss": 0.2599,
"step": 6180
},
{
"epoch": 0.5335513435597341,
"grad_norm": 0.2848903238773346,
"learning_rate": 0.0002,
"loss": 0.26,
"step": 6200
},
{
"epoch": 0.5352724769260558,
"grad_norm": 0.45262956619262695,
"learning_rate": 0.0002,
"loss": 0.2587,
"step": 6220
},
{
"epoch": 0.5369936102923776,
"grad_norm": 0.3710488975048065,
"learning_rate": 0.0002,
"loss": 0.2608,
"step": 6240
},
{
"epoch": 0.5387147436586992,
"grad_norm": 0.39627817273139954,
"learning_rate": 0.0002,
"loss": 0.2505,
"step": 6260
},
{
"epoch": 0.5404358770250209,
"grad_norm": 0.35057738423347473,
"learning_rate": 0.0002,
"loss": 0.2619,
"step": 6280
},
{
"epoch": 0.5421570103913427,
"grad_norm": 0.3486057221889496,
"learning_rate": 0.0002,
"loss": 0.2472,
"step": 6300
},
{
"epoch": 0.5438781437576644,
"grad_norm": 0.330469012260437,
"learning_rate": 0.0002,
"loss": 0.2458,
"step": 6320
},
{
"epoch": 0.5455992771239861,
"grad_norm": 0.34958067536354065,
"learning_rate": 0.0002,
"loss": 0.2462,
"step": 6340
},
{
"epoch": 0.5473204104903079,
"grad_norm": 0.3149743676185608,
"learning_rate": 0.0002,
"loss": 0.2465,
"step": 6360
},
{
"epoch": 0.5490415438566296,
"grad_norm": 0.33875513076782227,
"learning_rate": 0.0002,
"loss": 0.2593,
"step": 6380
},
{
"epoch": 0.5507626772229514,
"grad_norm": 0.8838099241256714,
"learning_rate": 0.0002,
"loss": 0.2712,
"step": 6400
},
{
"epoch": 0.552483810589273,
"grad_norm": 0.3521359860897064,
"learning_rate": 0.0002,
"loss": 0.2441,
"step": 6420
},
{
"epoch": 0.5542049439555947,
"grad_norm": 0.4659444987773895,
"learning_rate": 0.0002,
"loss": 0.2526,
"step": 6440
},
{
"epoch": 0.5559260773219165,
"grad_norm": 0.4129142165184021,
"learning_rate": 0.0002,
"loss": 0.2586,
"step": 6460
},
{
"epoch": 0.5576472106882382,
"grad_norm": 0.36697232723236084,
"learning_rate": 0.0002,
"loss": 0.2367,
"step": 6480
},
{
"epoch": 0.5593683440545599,
"grad_norm": 0.37690311670303345,
"learning_rate": 0.0002,
"loss": 0.253,
"step": 6500
},
{
"epoch": 0.5610894774208817,
"grad_norm": 0.2961209714412689,
"learning_rate": 0.0002,
"loss": 0.2441,
"step": 6520
},
{
"epoch": 0.5628106107872034,
"grad_norm": 0.32476598024368286,
"learning_rate": 0.0002,
"loss": 0.2552,
"step": 6540
},
{
"epoch": 0.5645317441535251,
"grad_norm": 0.36377888917922974,
"learning_rate": 0.0002,
"loss": 0.259,
"step": 6560
},
{
"epoch": 0.5662528775198468,
"grad_norm": 0.3714245557785034,
"learning_rate": 0.0002,
"loss": 0.2499,
"step": 6580
},
{
"epoch": 0.5679740108861685,
"grad_norm": 0.3322594463825226,
"learning_rate": 0.0002,
"loss": 0.2437,
"step": 6600
},
{
"epoch": 0.5696951442524902,
"grad_norm": 0.38657745718955994,
"learning_rate": 0.0002,
"loss": 0.2452,
"step": 6620
},
{
"epoch": 0.571416277618812,
"grad_norm": 0.34600910544395447,
"learning_rate": 0.0002,
"loss": 0.2454,
"step": 6640
},
{
"epoch": 0.5731374109851337,
"grad_norm": 0.3906444311141968,
"learning_rate": 0.0002,
"loss": 0.254,
"step": 6660
},
{
"epoch": 0.5748585443514554,
"grad_norm": 0.37352487444877625,
"learning_rate": 0.0002,
"loss": 0.2489,
"step": 6680
},
{
"epoch": 0.5765796777177772,
"grad_norm": 0.3105640411376953,
"learning_rate": 0.0002,
"loss": 0.2523,
"step": 6700
},
{
"epoch": 0.5783008110840989,
"grad_norm": 0.3537706732749939,
"learning_rate": 0.0002,
"loss": 0.2505,
"step": 6720
},
{
"epoch": 0.5800219444504207,
"grad_norm": 0.45570480823516846,
"learning_rate": 0.0002,
"loss": 0.2386,
"step": 6740
},
{
"epoch": 0.5817430778167423,
"grad_norm": 0.36528217792510986,
"learning_rate": 0.0002,
"loss": 0.2382,
"step": 6760
},
{
"epoch": 0.583464211183064,
"grad_norm": 0.3420511782169342,
"learning_rate": 0.0002,
"loss": 0.2456,
"step": 6780
},
{
"epoch": 0.5851853445493858,
"grad_norm": 0.35168027877807617,
"learning_rate": 0.0002,
"loss": 0.2578,
"step": 6800
},
{
"epoch": 0.5869064779157075,
"grad_norm": 0.3705766201019287,
"learning_rate": 0.0002,
"loss": 0.2481,
"step": 6820
},
{
"epoch": 0.5886276112820292,
"grad_norm": 0.4245602786540985,
"learning_rate": 0.0002,
"loss": 0.2603,
"step": 6840
},
{
"epoch": 0.590348744648351,
"grad_norm": 0.42988812923431396,
"learning_rate": 0.0002,
"loss": 0.2308,
"step": 6860
},
{
"epoch": 0.5920698780146727,
"grad_norm": 0.3902256190776825,
"learning_rate": 0.0002,
"loss": 0.2382,
"step": 6880
},
{
"epoch": 0.5937910113809944,
"grad_norm": 0.3394165337085724,
"learning_rate": 0.0002,
"loss": 0.2264,
"step": 6900
},
{
"epoch": 0.5955121447473161,
"grad_norm": 0.40591520071029663,
"learning_rate": 0.0002,
"loss": 0.2487,
"step": 6920
},
{
"epoch": 0.5972332781136378,
"grad_norm": 0.37964189052581787,
"learning_rate": 0.0002,
"loss": 0.2466,
"step": 6940
},
{
"epoch": 0.5989544114799595,
"grad_norm": 0.4443446099758148,
"learning_rate": 0.0002,
"loss": 0.2384,
"step": 6960
},
{
"epoch": 0.6006755448462813,
"grad_norm": 0.40669143199920654,
"learning_rate": 0.0002,
"loss": 0.244,
"step": 6980
},
{
"epoch": 0.602396678212603,
"grad_norm": 0.40179166197776794,
"learning_rate": 0.0002,
"loss": 0.2368,
"step": 7000
},
{
"epoch": 0.6041178115789247,
"grad_norm": 0.4410141408443451,
"learning_rate": 0.0002,
"loss": 0.2388,
"step": 7020
},
{
"epoch": 0.6058389449452465,
"grad_norm": 0.43160775303840637,
"learning_rate": 0.0002,
"loss": 0.2377,
"step": 7040
},
{
"epoch": 0.6075600783115682,
"grad_norm": 0.40169423818588257,
"learning_rate": 0.0002,
"loss": 0.2566,
"step": 7060
},
{
"epoch": 0.6092812116778898,
"grad_norm": 0.43439412117004395,
"learning_rate": 0.0002,
"loss": 0.2559,
"step": 7080
},
{
"epoch": 0.6110023450442116,
"grad_norm": 0.40532273054122925,
"learning_rate": 0.0002,
"loss": 0.242,
"step": 7100
},
{
"epoch": 0.6127234784105333,
"grad_norm": 0.3553959131240845,
"learning_rate": 0.0002,
"loss": 0.2497,
"step": 7120
},
{
"epoch": 0.614444611776855,
"grad_norm": 0.33834314346313477,
"learning_rate": 0.0002,
"loss": 0.2441,
"step": 7140
},
{
"epoch": 0.6161657451431768,
"grad_norm": 0.432019978761673,
"learning_rate": 0.0002,
"loss": 0.2701,
"step": 7160
},
{
"epoch": 0.6178868785094985,
"grad_norm": 0.35808584094047546,
"learning_rate": 0.0002,
"loss": 0.2436,
"step": 7180
},
{
"epoch": 0.6196080118758203,
"grad_norm": 0.5244463682174683,
"learning_rate": 0.0002,
"loss": 0.2356,
"step": 7200
},
{
"epoch": 0.621329145242142,
"grad_norm": 0.3416566848754883,
"learning_rate": 0.0002,
"loss": 0.2498,
"step": 7220
},
{
"epoch": 0.6230502786084636,
"grad_norm": 0.377354234457016,
"learning_rate": 0.0002,
"loss": 0.2361,
"step": 7240
},
{
"epoch": 0.6247714119747854,
"grad_norm": 0.3780601918697357,
"learning_rate": 0.0002,
"loss": 0.2414,
"step": 7260
},
{
"epoch": 0.6264925453411071,
"grad_norm": 0.4537530541419983,
"learning_rate": 0.0002,
"loss": 0.2553,
"step": 7280
},
{
"epoch": 0.6282136787074288,
"grad_norm": 0.34764549136161804,
"learning_rate": 0.0002,
"loss": 0.2294,
"step": 7300
},
{
"epoch": 0.6299348120737506,
"grad_norm": 0.4038061499595642,
"learning_rate": 0.0002,
"loss": 0.2267,
"step": 7320
},
{
"epoch": 0.6316559454400723,
"grad_norm": 0.3201640546321869,
"learning_rate": 0.0002,
"loss": 0.2407,
"step": 7340
},
{
"epoch": 0.633377078806394,
"grad_norm": 0.437897652387619,
"learning_rate": 0.0002,
"loss": 0.238,
"step": 7360
},
{
"epoch": 0.6350982121727158,
"grad_norm": 0.32471516728401184,
"learning_rate": 0.0002,
"loss": 0.2403,
"step": 7380
},
{
"epoch": 0.6368193455390374,
"grad_norm": 0.2739992141723633,
"learning_rate": 0.0002,
"loss": 0.2507,
"step": 7400
},
{
"epoch": 0.6385404789053591,
"grad_norm": 0.33521589636802673,
"learning_rate": 0.0002,
"loss": 0.2462,
"step": 7420
},
{
"epoch": 0.6402616122716809,
"grad_norm": 0.3575187027454376,
"learning_rate": 0.0002,
"loss": 0.2445,
"step": 7440
},
{
"epoch": 0.6419827456380026,
"grad_norm": 0.3016495704650879,
"learning_rate": 0.0002,
"loss": 0.2324,
"step": 7460
},
{
"epoch": 0.6437038790043244,
"grad_norm": 0.4337838888168335,
"learning_rate": 0.0002,
"loss": 0.2508,
"step": 7480
},
{
"epoch": 0.6454250123706461,
"grad_norm": 0.35893139243125916,
"learning_rate": 0.0002,
"loss": 0.2318,
"step": 7500
},
{
"epoch": 0.6471461457369678,
"grad_norm": 0.3144209682941437,
"learning_rate": 0.0002,
"loss": 0.2384,
"step": 7520
},
{
"epoch": 0.6488672791032896,
"grad_norm": 0.30142101645469666,
"learning_rate": 0.0002,
"loss": 0.2439,
"step": 7540
},
{
"epoch": 0.6505884124696112,
"grad_norm": 0.3650546669960022,
"learning_rate": 0.0002,
"loss": 0.2495,
"step": 7560
},
{
"epoch": 0.6523095458359329,
"grad_norm": 0.41195932030677795,
"learning_rate": 0.0002,
"loss": 0.2497,
"step": 7580
},
{
"epoch": 0.6540306792022547,
"grad_norm": 0.34587377309799194,
"learning_rate": 0.0002,
"loss": 0.2372,
"step": 7600
},
{
"epoch": 0.6557518125685764,
"grad_norm": 0.35166653990745544,
"learning_rate": 0.0002,
"loss": 0.2332,
"step": 7620
},
{
"epoch": 0.6574729459348981,
"grad_norm": 0.46442800760269165,
"learning_rate": 0.0002,
"loss": 0.232,
"step": 7640
},
{
"epoch": 0.6591940793012199,
"grad_norm": 0.32510513067245483,
"learning_rate": 0.0002,
"loss": 0.2286,
"step": 7660
},
{
"epoch": 0.6609152126675416,
"grad_norm": 0.3486369848251343,
"learning_rate": 0.0002,
"loss": 0.2311,
"step": 7680
},
{
"epoch": 0.6626363460338633,
"grad_norm": 0.4640563130378723,
"learning_rate": 0.0002,
"loss": 0.2386,
"step": 7700
},
{
"epoch": 0.664357479400185,
"grad_norm": 0.4044354557991028,
"learning_rate": 0.0002,
"loss": 0.2408,
"step": 7720
},
{
"epoch": 0.6660786127665067,
"grad_norm": 0.38926753401756287,
"learning_rate": 0.0002,
"loss": 0.2537,
"step": 7740
},
{
"epoch": 0.6677997461328284,
"grad_norm": 0.428072988986969,
"learning_rate": 0.0002,
"loss": 0.231,
"step": 7760
},
{
"epoch": 0.6695208794991502,
"grad_norm": 0.2939753830432892,
"learning_rate": 0.0002,
"loss": 0.2424,
"step": 7780
},
{
"epoch": 0.6712420128654719,
"grad_norm": 0.41152870655059814,
"learning_rate": 0.0002,
"loss": 0.2422,
"step": 7800
},
{
"epoch": 0.6729631462317937,
"grad_norm": 0.35313352942466736,
"learning_rate": 0.0002,
"loss": 0.2401,
"step": 7820
},
{
"epoch": 0.6746842795981154,
"grad_norm": 0.4280535876750946,
"learning_rate": 0.0002,
"loss": 0.2309,
"step": 7840
},
{
"epoch": 0.6764054129644371,
"grad_norm": 0.36120399832725525,
"learning_rate": 0.0002,
"loss": 0.2339,
"step": 7860
},
{
"epoch": 0.6781265463307588,
"grad_norm": 0.39646589756011963,
"learning_rate": 0.0002,
"loss": 0.2464,
"step": 7880
},
{
"epoch": 0.6798476796970805,
"grad_norm": 0.4060419797897339,
"learning_rate": 0.0002,
"loss": 0.2331,
"step": 7900
},
{
"epoch": 0.6815688130634022,
"grad_norm": 0.4130091369152069,
"learning_rate": 0.0002,
"loss": 0.2331,
"step": 7920
},
{
"epoch": 0.683289946429724,
"grad_norm": 0.31750190258026123,
"learning_rate": 0.0002,
"loss": 0.2371,
"step": 7940
},
{
"epoch": 0.6850110797960457,
"grad_norm": 0.4835965037345886,
"learning_rate": 0.0002,
"loss": 0.2292,
"step": 7960
},
{
"epoch": 0.6867322131623674,
"grad_norm": 0.3932303488254547,
"learning_rate": 0.0002,
"loss": 0.2243,
"step": 7980
},
{
"epoch": 0.6884533465286892,
"grad_norm": 0.3820064067840576,
"learning_rate": 0.0002,
"loss": 0.2288,
"step": 8000
},
{
"epoch": 0.6901744798950109,
"grad_norm": 0.40602800250053406,
"learning_rate": 0.0002,
"loss": 0.2259,
"step": 8020
},
{
"epoch": 0.6918956132613326,
"grad_norm": 0.3742893636226654,
"learning_rate": 0.0002,
"loss": 0.2395,
"step": 8040
},
{
"epoch": 0.6936167466276543,
"grad_norm": 0.39692652225494385,
"learning_rate": 0.0002,
"loss": 0.2296,
"step": 8060
},
{
"epoch": 0.695337879993976,
"grad_norm": 0.31871652603149414,
"learning_rate": 0.0002,
"loss": 0.2444,
"step": 8080
},
{
"epoch": 0.6970590133602977,
"grad_norm": 0.3136846125125885,
"learning_rate": 0.0002,
"loss": 0.2344,
"step": 8100
},
{
"epoch": 0.6987801467266195,
"grad_norm": 0.3090713918209076,
"learning_rate": 0.0002,
"loss": 0.2353,
"step": 8120
},
{
"epoch": 0.7005012800929412,
"grad_norm": 0.33116286993026733,
"learning_rate": 0.0002,
"loss": 0.2205,
"step": 8140
},
{
"epoch": 0.702222413459263,
"grad_norm": 0.2979792058467865,
"learning_rate": 0.0002,
"loss": 0.2371,
"step": 8160
},
{
"epoch": 0.7039435468255847,
"grad_norm": 0.3515206277370453,
"learning_rate": 0.0002,
"loss": 0.2293,
"step": 8180
},
{
"epoch": 0.7056646801919064,
"grad_norm": 0.2888669967651367,
"learning_rate": 0.0002,
"loss": 0.234,
"step": 8200
},
{
"epoch": 0.707385813558228,
"grad_norm": 0.3582359552383423,
"learning_rate": 0.0002,
"loss": 0.2291,
"step": 8220
},
{
"epoch": 0.7091069469245498,
"grad_norm": 0.4168799817562103,
"learning_rate": 0.0002,
"loss": 0.2241,
"step": 8240
},
{
"epoch": 0.7108280802908715,
"grad_norm": 0.3614865243434906,
"learning_rate": 0.0002,
"loss": 0.2323,
"step": 8260
},
{
"epoch": 0.7125492136571933,
"grad_norm": 0.35036012530326843,
"learning_rate": 0.0002,
"loss": 0.2285,
"step": 8280
},
{
"epoch": 0.714270347023515,
"grad_norm": 0.460549533367157,
"learning_rate": 0.0002,
"loss": 0.2303,
"step": 8300
},
{
"epoch": 0.7159914803898367,
"grad_norm": 0.44449788331985474,
"learning_rate": 0.0002,
"loss": 0.2279,
"step": 8320
},
{
"epoch": 0.7177126137561585,
"grad_norm": 0.29927289485931396,
"learning_rate": 0.0002,
"loss": 0.2235,
"step": 8340
},
{
"epoch": 0.7194337471224802,
"grad_norm": 0.3563356101512909,
"learning_rate": 0.0002,
"loss": 0.2307,
"step": 8360
},
{
"epoch": 0.7211548804888018,
"grad_norm": 0.42358216643333435,
"learning_rate": 0.0002,
"loss": 0.227,
"step": 8380
},
{
"epoch": 0.7228760138551236,
"grad_norm": 0.42043471336364746,
"learning_rate": 0.0002,
"loss": 0.2199,
"step": 8400
},
{
"epoch": 0.7245971472214453,
"grad_norm": 0.3534243702888489,
"learning_rate": 0.0002,
"loss": 0.2384,
"step": 8420
},
{
"epoch": 0.726318280587767,
"grad_norm": 0.35375455021858215,
"learning_rate": 0.0002,
"loss": 0.2337,
"step": 8440
},
{
"epoch": 0.7280394139540888,
"grad_norm": 0.31629830598831177,
"learning_rate": 0.0002,
"loss": 0.2326,
"step": 8460
},
{
"epoch": 0.7297605473204105,
"grad_norm": 0.3449802100658417,
"learning_rate": 0.0002,
"loss": 0.221,
"step": 8480
},
{
"epoch": 0.7314816806867322,
"grad_norm": 0.44133901596069336,
"learning_rate": 0.0002,
"loss": 0.2304,
"step": 8500
},
{
"epoch": 0.733202814053054,
"grad_norm": 0.3913390040397644,
"learning_rate": 0.0002,
"loss": 0.2277,
"step": 8520
},
{
"epoch": 0.7349239474193756,
"grad_norm": 0.3438524901866913,
"learning_rate": 0.0002,
"loss": 0.2484,
"step": 8540
},
{
"epoch": 0.7366450807856973,
"grad_norm": 0.34400486946105957,
"learning_rate": 0.0002,
"loss": 0.2257,
"step": 8560
},
{
"epoch": 0.7383662141520191,
"grad_norm": 0.40113121271133423,
"learning_rate": 0.0002,
"loss": 0.2246,
"step": 8580
},
{
"epoch": 0.7400873475183408,
"grad_norm": 0.3908839821815491,
"learning_rate": 0.0002,
"loss": 0.2178,
"step": 8600
},
{
"epoch": 0.7418084808846626,
"grad_norm": 0.3656678795814514,
"learning_rate": 0.0002,
"loss": 0.2345,
"step": 8620
},
{
"epoch": 0.7435296142509843,
"grad_norm": 0.3811242878437042,
"learning_rate": 0.0002,
"loss": 0.2387,
"step": 8640
},
{
"epoch": 0.745250747617306,
"grad_norm": 0.3093118965625763,
"learning_rate": 0.0002,
"loss": 0.2179,
"step": 8660
},
{
"epoch": 0.7469718809836278,
"grad_norm": 0.42958253622055054,
"learning_rate": 0.0002,
"loss": 0.2212,
"step": 8680
},
{
"epoch": 0.7486930143499494,
"grad_norm": 0.3202790319919586,
"learning_rate": 0.0002,
"loss": 0.2394,
"step": 8700
},
{
"epoch": 0.7504141477162711,
"grad_norm": 0.42061832547187805,
"learning_rate": 0.0002,
"loss": 0.2282,
"step": 8720
},
{
"epoch": 0.7521352810825929,
"grad_norm": 0.2962038516998291,
"learning_rate": 0.0002,
"loss": 0.2322,
"step": 8740
},
{
"epoch": 0.7538564144489146,
"grad_norm": 0.3707858920097351,
"learning_rate": 0.0002,
"loss": 0.2251,
"step": 8760
},
{
"epoch": 0.7555775478152363,
"grad_norm": 0.424730509519577,
"learning_rate": 0.0002,
"loss": 0.2345,
"step": 8780
},
{
"epoch": 0.7572986811815581,
"grad_norm": 0.34581223130226135,
"learning_rate": 0.0002,
"loss": 0.2196,
"step": 8800
},
{
"epoch": 0.7590198145478798,
"grad_norm": 0.436617374420166,
"learning_rate": 0.0002,
"loss": 0.2332,
"step": 8820
},
{
"epoch": 0.7607409479142015,
"grad_norm": 0.3432689905166626,
"learning_rate": 0.0002,
"loss": 0.231,
"step": 8840
},
{
"epoch": 0.7624620812805232,
"grad_norm": 0.36506009101867676,
"learning_rate": 0.0002,
"loss": 0.2299,
"step": 8860
},
{
"epoch": 0.7641832146468449,
"grad_norm": 0.29211345314979553,
"learning_rate": 0.0002,
"loss": 0.2169,
"step": 8880
},
{
"epoch": 0.7659043480131666,
"grad_norm": 0.3884661793708801,
"learning_rate": 0.0002,
"loss": 0.2205,
"step": 8900
},
{
"epoch": 0.7676254813794884,
"grad_norm": 0.3466391861438751,
"learning_rate": 0.0002,
"loss": 0.2249,
"step": 8920
},
{
"epoch": 0.7693466147458101,
"grad_norm": 0.3165854811668396,
"learning_rate": 0.0002,
"loss": 0.2436,
"step": 8940
},
{
"epoch": 0.7710677481121319,
"grad_norm": 0.42315399646759033,
"learning_rate": 0.0002,
"loss": 0.2207,
"step": 8960
},
{
"epoch": 0.7727888814784536,
"grad_norm": 0.3629299998283386,
"learning_rate": 0.0002,
"loss": 0.2356,
"step": 8980
},
{
"epoch": 0.7745100148447753,
"grad_norm": 0.4586748480796814,
"learning_rate": 0.0002,
"loss": 0.2152,
"step": 9000
},
{
"epoch": 0.776231148211097,
"grad_norm": 0.27728915214538574,
"learning_rate": 0.0002,
"loss": 0.2284,
"step": 9020
},
{
"epoch": 0.7779522815774187,
"grad_norm": 0.3490050435066223,
"learning_rate": 0.0002,
"loss": 0.2166,
"step": 9040
},
{
"epoch": 0.7796734149437404,
"grad_norm": 0.2839438021183014,
"learning_rate": 0.0002,
"loss": 0.2283,
"step": 9060
},
{
"epoch": 0.7813945483100622,
"grad_norm": 0.35332831740379333,
"learning_rate": 0.0002,
"loss": 0.2336,
"step": 9080
},
{
"epoch": 0.7831156816763839,
"grad_norm": 0.4003342390060425,
"learning_rate": 0.0002,
"loss": 0.2279,
"step": 9100
},
{
"epoch": 0.7848368150427056,
"grad_norm": 0.32392072677612305,
"learning_rate": 0.0002,
"loss": 0.235,
"step": 9120
},
{
"epoch": 0.7865579484090274,
"grad_norm": 0.3040711581707001,
"learning_rate": 0.0002,
"loss": 0.2234,
"step": 9140
},
{
"epoch": 0.7882790817753491,
"grad_norm": 0.35234126448631287,
"learning_rate": 0.0002,
"loss": 0.2215,
"step": 9160
},
{
"epoch": 0.7900002151416707,
"grad_norm": 0.3502811789512634,
"learning_rate": 0.0002,
"loss": 0.225,
"step": 9180
},
{
"epoch": 0.7917213485079925,
"grad_norm": 0.3539245128631592,
"learning_rate": 0.0002,
"loss": 0.2289,
"step": 9200
},
{
"epoch": 0.7934424818743142,
"grad_norm": 0.3154076635837555,
"learning_rate": 0.0002,
"loss": 0.2282,
"step": 9220
},
{
"epoch": 0.795163615240636,
"grad_norm": 0.3497600853443146,
"learning_rate": 0.0002,
"loss": 0.2159,
"step": 9240
},
{
"epoch": 0.7968847486069577,
"grad_norm": 0.3394204080104828,
"learning_rate": 0.0002,
"loss": 0.2175,
"step": 9260
},
{
"epoch": 0.7986058819732794,
"grad_norm": 0.42728209495544434,
"learning_rate": 0.0002,
"loss": 0.2316,
"step": 9280
},
{
"epoch": 0.8003270153396012,
"grad_norm": 0.30218786001205444,
"learning_rate": 0.0002,
"loss": 0.2268,
"step": 9300
},
{
"epoch": 0.8020481487059229,
"grad_norm": 0.3418307602405548,
"learning_rate": 0.0002,
"loss": 0.2315,
"step": 9320
},
{
"epoch": 0.8037692820722446,
"grad_norm": 0.38792407512664795,
"learning_rate": 0.0002,
"loss": 0.2302,
"step": 9340
},
{
"epoch": 0.8054904154385663,
"grad_norm": 0.2837861180305481,
"learning_rate": 0.0002,
"loss": 0.2193,
"step": 9360
},
{
"epoch": 0.807211548804888,
"grad_norm": 0.4480486214160919,
"learning_rate": 0.0002,
"loss": 0.2264,
"step": 9380
},
{
"epoch": 0.8089326821712097,
"grad_norm": 0.25541505217552185,
"learning_rate": 0.0002,
"loss": 0.2302,
"step": 9400
},
{
"epoch": 0.8106538155375315,
"grad_norm": 0.28958380222320557,
"learning_rate": 0.0002,
"loss": 0.2269,
"step": 9420
},
{
"epoch": 0.8123749489038532,
"grad_norm": 0.2757406532764435,
"learning_rate": 0.0002,
"loss": 0.2155,
"step": 9440
},
{
"epoch": 0.8140960822701749,
"grad_norm": 0.3484242558479309,
"learning_rate": 0.0002,
"loss": 0.2346,
"step": 9460
},
{
"epoch": 0.8158172156364967,
"grad_norm": 0.23964551091194153,
"learning_rate": 0.0002,
"loss": 0.2208,
"step": 9480
},
{
"epoch": 0.8175383490028184,
"grad_norm": 0.3568115234375,
"learning_rate": 0.0002,
"loss": 0.2187,
"step": 9500
},
{
"epoch": 0.81925948236914,
"grad_norm": 0.4668004512786865,
"learning_rate": 0.0002,
"loss": 0.2306,
"step": 9520
},
{
"epoch": 0.8209806157354618,
"grad_norm": 0.33082979917526245,
"learning_rate": 0.0002,
"loss": 0.2135,
"step": 9540
},
{
"epoch": 0.8227017491017835,
"grad_norm": 0.3717847764492035,
"learning_rate": 0.0002,
"loss": 0.2203,
"step": 9560
},
{
"epoch": 0.8244228824681052,
"grad_norm": 0.37038078904151917,
"learning_rate": 0.0002,
"loss": 0.2271,
"step": 9580
},
{
"epoch": 0.826144015834427,
"grad_norm": 0.33178070187568665,
"learning_rate": 0.0002,
"loss": 0.2222,
"step": 9600
},
{
"epoch": 0.8278651492007487,
"grad_norm": 0.27431583404541016,
"learning_rate": 0.0002,
"loss": 0.2304,
"step": 9620
},
{
"epoch": 0.8295862825670705,
"grad_norm": 0.33868762850761414,
"learning_rate": 0.0002,
"loss": 0.2197,
"step": 9640
},
{
"epoch": 0.8313074159333922,
"grad_norm": 0.34042665362358093,
"learning_rate": 0.0002,
"loss": 0.2194,
"step": 9660
},
{
"epoch": 0.8330285492997138,
"grad_norm": 0.431411474943161,
"learning_rate": 0.0002,
"loss": 0.2228,
"step": 9680
},
{
"epoch": 0.8347496826660356,
"grad_norm": 0.2958236634731293,
"learning_rate": 0.0002,
"loss": 0.2268,
"step": 9700
},
{
"epoch": 0.8364708160323573,
"grad_norm": 0.31208258867263794,
"learning_rate": 0.0002,
"loss": 0.2371,
"step": 9720
},
{
"epoch": 0.838191949398679,
"grad_norm": 0.3337579667568207,
"learning_rate": 0.0002,
"loss": 0.2312,
"step": 9740
},
{
"epoch": 0.8399130827650008,
"grad_norm": 0.5392299294471741,
"learning_rate": 0.0002,
"loss": 0.2324,
"step": 9760
},
{
"epoch": 0.8416342161313225,
"grad_norm": 0.3481907248497009,
"learning_rate": 0.0002,
"loss": 0.2241,
"step": 9780
},
{
"epoch": 0.8433553494976442,
"grad_norm": 0.29136040806770325,
"learning_rate": 0.0002,
"loss": 0.2147,
"step": 9800
},
{
"epoch": 0.845076482863966,
"grad_norm": 0.2877753973007202,
"learning_rate": 0.0002,
"loss": 0.1988,
"step": 9820
},
{
"epoch": 0.8467976162302876,
"grad_norm": 0.4024602174758911,
"learning_rate": 0.0002,
"loss": 0.2161,
"step": 9840
},
{
"epoch": 0.8485187495966093,
"grad_norm": 0.28684961795806885,
"learning_rate": 0.0002,
"loss": 0.2199,
"step": 9860
},
{
"epoch": 0.8502398829629311,
"grad_norm": 0.31743690371513367,
"learning_rate": 0.0002,
"loss": 0.2196,
"step": 9880
},
{
"epoch": 0.8519610163292528,
"grad_norm": 0.3548375368118286,
"learning_rate": 0.0002,
"loss": 0.2179,
"step": 9900
},
{
"epoch": 0.8536821496955745,
"grad_norm": 0.31174436211586,
"learning_rate": 0.0002,
"loss": 0.2236,
"step": 9920
},
{
"epoch": 0.8554032830618963,
"grad_norm": 0.2791374623775482,
"learning_rate": 0.0002,
"loss": 0.2257,
"step": 9940
},
{
"epoch": 0.857124416428218,
"grad_norm": 0.3355172574520111,
"learning_rate": 0.0002,
"loss": 0.2244,
"step": 9960
},
{
"epoch": 0.8588455497945398,
"grad_norm": 0.3883482813835144,
"learning_rate": 0.0002,
"loss": 0.2178,
"step": 9980
},
{
"epoch": 0.8605666831608614,
"grad_norm": 0.3601789176464081,
"learning_rate": 0.0002,
"loss": 0.227,
"step": 10000
},
{
"epoch": 0.8622878165271831,
"grad_norm": 0.24994397163391113,
"learning_rate": 0.0002,
"loss": 0.2165,
"step": 10020
},
{
"epoch": 0.8640089498935049,
"grad_norm": 0.40082284808158875,
"learning_rate": 0.0002,
"loss": 0.228,
"step": 10040
},
{
"epoch": 0.8657300832598266,
"grad_norm": 0.47781533002853394,
"learning_rate": 0.0002,
"loss": 0.2283,
"step": 10060
},
{
"epoch": 0.8674512166261483,
"grad_norm": 0.3652552366256714,
"learning_rate": 0.0002,
"loss": 0.2207,
"step": 10080
},
{
"epoch": 0.8691723499924701,
"grad_norm": 0.38674041628837585,
"learning_rate": 0.0002,
"loss": 0.2162,
"step": 10100
},
{
"epoch": 0.8708934833587918,
"grad_norm": 0.32635965943336487,
"learning_rate": 0.0002,
"loss": 0.2173,
"step": 10120
},
{
"epoch": 0.8726146167251135,
"grad_norm": 0.28276535868644714,
"learning_rate": 0.0002,
"loss": 0.2203,
"step": 10140
},
{
"epoch": 0.8743357500914352,
"grad_norm": 0.363016813993454,
"learning_rate": 0.0002,
"loss": 0.2355,
"step": 10160
},
{
"epoch": 0.8760568834577569,
"grad_norm": 0.302642822265625,
"learning_rate": 0.0002,
"loss": 0.2099,
"step": 10180
},
{
"epoch": 0.8777780168240786,
"grad_norm": 0.25143080949783325,
"learning_rate": 0.0002,
"loss": 0.2233,
"step": 10200
},
{
"epoch": 0.8794991501904004,
"grad_norm": 0.37836262583732605,
"learning_rate": 0.0002,
"loss": 0.2218,
"step": 10220
},
{
"epoch": 0.8812202835567221,
"grad_norm": 0.3723512887954712,
"learning_rate": 0.0002,
"loss": 0.219,
"step": 10240
},
{
"epoch": 0.8829414169230438,
"grad_norm": 0.33276501297950745,
"learning_rate": 0.0002,
"loss": 0.2153,
"step": 10260
},
{
"epoch": 0.8846625502893656,
"grad_norm": 0.3877188563346863,
"learning_rate": 0.0002,
"loss": 0.2213,
"step": 10280
},
{
"epoch": 0.8863836836556873,
"grad_norm": 0.36920130252838135,
"learning_rate": 0.0002,
"loss": 0.2099,
"step": 10300
},
{
"epoch": 0.8881048170220089,
"grad_norm": 0.2991655170917511,
"learning_rate": 0.0002,
"loss": 0.218,
"step": 10320
},
{
"epoch": 0.8898259503883307,
"grad_norm": 0.4785827100276947,
"learning_rate": 0.0002,
"loss": 0.2226,
"step": 10340
},
{
"epoch": 0.8915470837546524,
"grad_norm": 0.30903199315071106,
"learning_rate": 0.0002,
"loss": 0.2059,
"step": 10360
},
{
"epoch": 0.8932682171209742,
"grad_norm": 0.3614109754562378,
"learning_rate": 0.0002,
"loss": 0.2271,
"step": 10380
},
{
"epoch": 0.8949893504872959,
"grad_norm": 0.3223751485347748,
"learning_rate": 0.0002,
"loss": 0.2223,
"step": 10400
},
{
"epoch": 0.8967104838536176,
"grad_norm": 0.3175138533115387,
"learning_rate": 0.0002,
"loss": 0.2369,
"step": 10420
},
{
"epoch": 0.8984316172199394,
"grad_norm": 0.37820857763290405,
"learning_rate": 0.0002,
"loss": 0.2206,
"step": 10440
},
{
"epoch": 0.9001527505862611,
"grad_norm": 0.4233035445213318,
"learning_rate": 0.0002,
"loss": 0.2169,
"step": 10460
},
{
"epoch": 0.9018738839525827,
"grad_norm": 0.4142135977745056,
"learning_rate": 0.0002,
"loss": 0.21,
"step": 10480
},
{
"epoch": 0.9035950173189045,
"grad_norm": 0.43564194440841675,
"learning_rate": 0.0002,
"loss": 0.2231,
"step": 10500
},
{
"epoch": 0.9053161506852262,
"grad_norm": 0.3781276345252991,
"learning_rate": 0.0002,
"loss": 0.2258,
"step": 10520
},
{
"epoch": 0.9070372840515479,
"grad_norm": 0.4279311001300812,
"learning_rate": 0.0002,
"loss": 0.2287,
"step": 10540
},
{
"epoch": 0.9087584174178697,
"grad_norm": 0.3078250586986542,
"learning_rate": 0.0002,
"loss": 0.212,
"step": 10560
},
{
"epoch": 0.9104795507841914,
"grad_norm": 0.3039957880973816,
"learning_rate": 0.0002,
"loss": 0.2137,
"step": 10580
},
{
"epoch": 0.9122006841505131,
"grad_norm": 0.3359488546848297,
"learning_rate": 0.0002,
"loss": 0.2206,
"step": 10600
},
{
"epoch": 0.9139218175168349,
"grad_norm": 0.30003640055656433,
"learning_rate": 0.0002,
"loss": 0.213,
"step": 10620
},
{
"epoch": 0.9156429508831565,
"grad_norm": 0.3175880014896393,
"learning_rate": 0.0002,
"loss": 0.2099,
"step": 10640
},
{
"epoch": 0.9173640842494782,
"grad_norm": 0.3824600577354431,
"learning_rate": 0.0002,
"loss": 0.2126,
"step": 10660
},
{
"epoch": 0.9190852176158,
"grad_norm": 0.3589145839214325,
"learning_rate": 0.0002,
"loss": 0.2234,
"step": 10680
},
{
"epoch": 0.9208063509821217,
"grad_norm": 0.3821096420288086,
"learning_rate": 0.0002,
"loss": 0.2163,
"step": 10700
},
{
"epoch": 0.9225274843484434,
"grad_norm": 0.41358160972595215,
"learning_rate": 0.0002,
"loss": 0.2207,
"step": 10720
},
{
"epoch": 0.9242486177147652,
"grad_norm": 0.2841893434524536,
"learning_rate": 0.0002,
"loss": 0.2418,
"step": 10740
},
{
"epoch": 0.9259697510810869,
"grad_norm": 0.29627254605293274,
"learning_rate": 0.0002,
"loss": 0.2259,
"step": 10760
},
{
"epoch": 0.9276908844474087,
"grad_norm": 0.3516784906387329,
"learning_rate": 0.0002,
"loss": 0.2117,
"step": 10780
},
{
"epoch": 0.9294120178137304,
"grad_norm": 0.37627148628234863,
"learning_rate": 0.0002,
"loss": 0.2236,
"step": 10800
},
{
"epoch": 0.931133151180052,
"grad_norm": 0.3956553041934967,
"learning_rate": 0.0002,
"loss": 0.2091,
"step": 10820
},
{
"epoch": 0.9328542845463738,
"grad_norm": 0.3255549967288971,
"learning_rate": 0.0002,
"loss": 0.2247,
"step": 10840
},
{
"epoch": 0.9345754179126955,
"grad_norm": 0.3365786075592041,
"learning_rate": 0.0002,
"loss": 0.1955,
"step": 10860
},
{
"epoch": 0.9362965512790172,
"grad_norm": 0.3629949688911438,
"learning_rate": 0.0002,
"loss": 0.2137,
"step": 10880
},
{
"epoch": 0.938017684645339,
"grad_norm": 0.3127789795398712,
"learning_rate": 0.0002,
"loss": 0.2077,
"step": 10900
},
{
"epoch": 0.9397388180116607,
"grad_norm": 0.3616786003112793,
"learning_rate": 0.0002,
"loss": 0.2125,
"step": 10920
},
{
"epoch": 0.9414599513779824,
"grad_norm": 0.33411502838134766,
"learning_rate": 0.0002,
"loss": 0.2137,
"step": 10940
},
{
"epoch": 0.9431810847443042,
"grad_norm": 0.3387938439846039,
"learning_rate": 0.0002,
"loss": 0.2089,
"step": 10960
},
{
"epoch": 0.9449022181106258,
"grad_norm": 0.42915311455726624,
"learning_rate": 0.0002,
"loss": 0.2144,
"step": 10980
},
{
"epoch": 0.9466233514769475,
"grad_norm": 0.4593462646007538,
"learning_rate": 0.0002,
"loss": 0.2136,
"step": 11000
},
{
"epoch": 0.9483444848432693,
"grad_norm": 0.3778610825538635,
"learning_rate": 0.0002,
"loss": 0.2198,
"step": 11020
},
{
"epoch": 0.950065618209591,
"grad_norm": 0.34387239813804626,
"learning_rate": 0.0002,
"loss": 0.2215,
"step": 11040
},
{
"epoch": 0.9517867515759127,
"grad_norm": 0.38071706891059875,
"learning_rate": 0.0002,
"loss": 0.2159,
"step": 11060
},
{
"epoch": 0.9535078849422345,
"grad_norm": 0.3698328137397766,
"learning_rate": 0.0002,
"loss": 0.214,
"step": 11080
},
{
"epoch": 0.9552290183085562,
"grad_norm": 0.43745896220207214,
"learning_rate": 0.0002,
"loss": 0.2252,
"step": 11100
},
{
"epoch": 0.956950151674878,
"grad_norm": 0.2951034903526306,
"learning_rate": 0.0002,
"loss": 0.2099,
"step": 11120
},
{
"epoch": 0.9586712850411996,
"grad_norm": 0.35736939311027527,
"learning_rate": 0.0002,
"loss": 0.222,
"step": 11140
},
{
"epoch": 0.9603924184075213,
"grad_norm": 0.3355426490306854,
"learning_rate": 0.0002,
"loss": 0.2142,
"step": 11160
},
{
"epoch": 0.9621135517738431,
"grad_norm": 0.2925025522708893,
"learning_rate": 0.0002,
"loss": 0.2085,
"step": 11180
},
{
"epoch": 0.9638346851401648,
"grad_norm": 0.36077678203582764,
"learning_rate": 0.0002,
"loss": 0.2097,
"step": 11200
},
{
"epoch": 0.9655558185064865,
"grad_norm": 0.3234957158565521,
"learning_rate": 0.0002,
"loss": 0.2209,
"step": 11220
},
{
"epoch": 0.9672769518728083,
"grad_norm": 0.32046082615852356,
"learning_rate": 0.0002,
"loss": 0.2051,
"step": 11240
},
{
"epoch": 0.96899808523913,
"grad_norm": 0.3421325087547302,
"learning_rate": 0.0002,
"loss": 0.2288,
"step": 11260
},
{
"epoch": 0.9707192186054517,
"grad_norm": 0.35989081859588623,
"learning_rate": 0.0002,
"loss": 0.2061,
"step": 11280
},
{
"epoch": 0.9724403519717734,
"grad_norm": 0.3135656714439392,
"learning_rate": 0.0002,
"loss": 0.2238,
"step": 11300
},
{
"epoch": 0.9741614853380951,
"grad_norm": 0.29025885462760925,
"learning_rate": 0.0002,
"loss": 0.2111,
"step": 11320
},
{
"epoch": 0.9758826187044168,
"grad_norm": 0.4417757987976074,
"learning_rate": 0.0002,
"loss": 0.2,
"step": 11340
},
{
"epoch": 0.9776037520707386,
"grad_norm": 0.3659359812736511,
"learning_rate": 0.0002,
"loss": 0.216,
"step": 11360
},
{
"epoch": 0.9793248854370603,
"grad_norm": 0.3737911283969879,
"learning_rate": 0.0002,
"loss": 0.2139,
"step": 11380
},
{
"epoch": 0.981046018803382,
"grad_norm": 0.38346412777900696,
"learning_rate": 0.0002,
"loss": 0.2072,
"step": 11400
},
{
"epoch": 0.9827671521697038,
"grad_norm": 0.32871776819229126,
"learning_rate": 0.0002,
"loss": 0.2197,
"step": 11420
},
{
"epoch": 0.9844882855360255,
"grad_norm": 0.29058969020843506,
"learning_rate": 0.0002,
"loss": 0.211,
"step": 11440
},
{
"epoch": 0.9862094189023471,
"grad_norm": 0.39237943291664124,
"learning_rate": 0.0002,
"loss": 0.2053,
"step": 11460
},
{
"epoch": 0.9879305522686689,
"grad_norm": 0.3916817307472229,
"learning_rate": 0.0002,
"loss": 0.2177,
"step": 11480
},
{
"epoch": 0.9896516856349906,
"grad_norm": 0.2479276806116104,
"learning_rate": 0.0002,
"loss": 0.2128,
"step": 11500
},
{
"epoch": 0.9913728190013124,
"grad_norm": 0.33634355664253235,
"learning_rate": 0.0002,
"loss": 0.2097,
"step": 11520
},
{
"epoch": 0.9930939523676341,
"grad_norm": 0.336535781621933,
"learning_rate": 0.0002,
"loss": 0.208,
"step": 11540
},
{
"epoch": 0.9948150857339558,
"grad_norm": 0.4115926921367645,
"learning_rate": 0.0002,
"loss": 0.2057,
"step": 11560
},
{
"epoch": 0.9965362191002776,
"grad_norm": 0.3465426564216614,
"learning_rate": 0.0002,
"loss": 0.204,
"step": 11580
},
{
"epoch": 0.9982573524665993,
"grad_norm": 0.32643455266952515,
"learning_rate": 0.0002,
"loss": 0.2068,
"step": 11600
},
{
"epoch": 0.9999784858329209,
"grad_norm": 0.3390646278858185,
"learning_rate": 0.0002,
"loss": 0.2095,
"step": 11620
},
{
"epoch": 1.0,
"eval_loss": 0.2866213619709015,
"eval_runtime": 903.225,
"eval_samples_per_second": 4.146,
"eval_steps_per_second": 0.519,
"step": 11621
}
],
"logging_steps": 20,
"max_steps": 13000,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 77,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 7.484935677507797e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}