{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 100,
"global_step": 784,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002551020408163265,
"grad_norm": 1.0885527168794946,
"learning_rate": 9.999959857256764e-06,
"loss": 0.1967,
"step": 1
},
{
"epoch": 0.00510204081632653,
"grad_norm": 1.3738268211294016,
"learning_rate": 9.999839429671632e-06,
"loss": 0.2404,
"step": 2
},
{
"epoch": 0.007653061224489796,
"grad_norm": 1.2030714088342824,
"learning_rate": 9.99963871917832e-06,
"loss": 0.2047,
"step": 3
},
{
"epoch": 0.01020408163265306,
"grad_norm": 0.9283104826448032,
"learning_rate": 9.999357728999657e-06,
"loss": 0.1697,
"step": 4
},
{
"epoch": 0.012755102040816327,
"grad_norm": 0.8955433527027652,
"learning_rate": 9.99899646364753e-06,
"loss": 0.2107,
"step": 5
},
{
"epoch": 0.015306122448979591,
"grad_norm": 0.839499734603853,
"learning_rate": 9.99855492892281e-06,
"loss": 0.1967,
"step": 6
},
{
"epoch": 0.017857142857142856,
"grad_norm": 0.7667610964068909,
"learning_rate": 9.998033131915266e-06,
"loss": 0.1599,
"step": 7
},
{
"epoch": 0.02040816326530612,
"grad_norm": 0.7717571071599194,
"learning_rate": 9.99743108100344e-06,
"loss": 0.1708,
"step": 8
},
{
"epoch": 0.02295918367346939,
"grad_norm": 0.8407497770261014,
"learning_rate": 9.996748785854524e-06,
"loss": 0.1666,
"step": 9
},
{
"epoch": 0.025510204081632654,
"grad_norm": 0.8443185283822576,
"learning_rate": 9.9959862574242e-06,
"loss": 0.1631,
"step": 10
},
{
"epoch": 0.02806122448979592,
"grad_norm": 0.7685166109923904,
"learning_rate": 9.995143507956456e-06,
"loss": 0.1821,
"step": 11
},
{
"epoch": 0.030612244897959183,
"grad_norm": 0.8594670816954412,
"learning_rate": 9.994220550983404e-06,
"loss": 0.1814,
"step": 12
},
{
"epoch": 0.03316326530612245,
"grad_norm": 0.7096373833961109,
"learning_rate": 9.993217401325057e-06,
"loss": 0.1743,
"step": 13
},
{
"epoch": 0.03571428571428571,
"grad_norm": 0.7098228947181078,
"learning_rate": 9.992134075089085e-06,
"loss": 0.1639,
"step": 14
},
{
"epoch": 0.03826530612244898,
"grad_norm": 0.7563234672158685,
"learning_rate": 9.99097058967056e-06,
"loss": 0.1746,
"step": 15
},
{
"epoch": 0.04081632653061224,
"grad_norm": 0.7772366871916015,
"learning_rate": 9.989726963751683e-06,
"loss": 0.1894,
"step": 16
},
{
"epoch": 0.04336734693877551,
"grad_norm": 0.6199203281059847,
"learning_rate": 9.988403217301477e-06,
"loss": 0.1636,
"step": 17
},
{
"epoch": 0.04591836734693878,
"grad_norm": 0.6852105358993172,
"learning_rate": 9.986999371575465e-06,
"loss": 0.1643,
"step": 18
},
{
"epoch": 0.04846938775510204,
"grad_norm": 0.7736501072385444,
"learning_rate": 9.985515449115338e-06,
"loss": 0.1729,
"step": 19
},
{
"epoch": 0.05102040816326531,
"grad_norm": 0.7089838632803512,
"learning_rate": 9.983951473748579e-06,
"loss": 0.1764,
"step": 20
},
{
"epoch": 0.05357142857142857,
"grad_norm": 0.7063856891605839,
"learning_rate": 9.982307470588097e-06,
"loss": 0.1733,
"step": 21
},
{
"epoch": 0.05612244897959184,
"grad_norm": 0.6555043059144575,
"learning_rate": 9.980583466031808e-06,
"loss": 0.1614,
"step": 22
},
{
"epoch": 0.058673469387755105,
"grad_norm": 0.689488625298009,
"learning_rate": 9.978779487762221e-06,
"loss": 0.1767,
"step": 23
},
{
"epoch": 0.061224489795918366,
"grad_norm": 0.6773533739024657,
"learning_rate": 9.976895564745993e-06,
"loss": 0.154,
"step": 24
},
{
"epoch": 0.06377551020408163,
"grad_norm": 0.7140495522183441,
"learning_rate": 9.974931727233454e-06,
"loss": 0.1815,
"step": 25
},
{
"epoch": 0.0663265306122449,
"grad_norm": 0.6802708469583214,
"learning_rate": 9.97288800675814e-06,
"loss": 0.1734,
"step": 26
},
{
"epoch": 0.06887755102040816,
"grad_norm": 0.6558035514810944,
"learning_rate": 9.970764436136266e-06,
"loss": 0.1607,
"step": 27
},
{
"epoch": 0.07142857142857142,
"grad_norm": 0.6255417776467211,
"learning_rate": 9.968561049466214e-06,
"loss": 0.1573,
"step": 28
},
{
"epoch": 0.07397959183673469,
"grad_norm": 0.6604445618965782,
"learning_rate": 9.966277882127975e-06,
"loss": 0.163,
"step": 29
},
{
"epoch": 0.07653061224489796,
"grad_norm": 0.8657610543676655,
"learning_rate": 9.963914970782594e-06,
"loss": 0.2162,
"step": 30
},
{
"epoch": 0.07908163265306123,
"grad_norm": 0.683836315319945,
"learning_rate": 9.961472353371564e-06,
"loss": 0.186,
"step": 31
},
{
"epoch": 0.08163265306122448,
"grad_norm": 0.6923067383095874,
"learning_rate": 9.95895006911623e-06,
"loss": 0.1994,
"step": 32
},
{
"epoch": 0.08418367346938775,
"grad_norm": 0.6564772259446939,
"learning_rate": 9.956348158517162e-06,
"loss": 0.155,
"step": 33
},
{
"epoch": 0.08673469387755102,
"grad_norm": 0.7093161053652716,
"learning_rate": 9.953666663353485e-06,
"loss": 0.1755,
"step": 34
},
{
"epoch": 0.08928571428571429,
"grad_norm": 0.835449363527211,
"learning_rate": 9.950905626682229e-06,
"loss": 0.2058,
"step": 35
},
{
"epoch": 0.09183673469387756,
"grad_norm": 0.6276558861634733,
"learning_rate": 9.948065092837631e-06,
"loss": 0.1499,
"step": 36
},
{
"epoch": 0.09438775510204081,
"grad_norm": 0.7029471015624225,
"learning_rate": 9.945145107430416e-06,
"loss": 0.1852,
"step": 37
},
{
"epoch": 0.09693877551020408,
"grad_norm": 0.6194669679342504,
"learning_rate": 9.942145717347077e-06,
"loss": 0.1652,
"step": 38
},
{
"epoch": 0.09948979591836735,
"grad_norm": 0.6471970281746303,
"learning_rate": 9.939066970749112e-06,
"loss": 0.1614,
"step": 39
},
{
"epoch": 0.10204081632653061,
"grad_norm": 0.7172164141492537,
"learning_rate": 9.935908917072253e-06,
"loss": 0.1933,
"step": 40
},
{
"epoch": 0.10459183673469388,
"grad_norm": 0.6810647853743311,
"learning_rate": 9.932671607025673e-06,
"loss": 0.1777,
"step": 41
},
{
"epoch": 0.10714285714285714,
"grad_norm": 0.6124857370200141,
"learning_rate": 9.92935509259118e-06,
"loss": 0.1471,
"step": 42
},
{
"epoch": 0.1096938775510204,
"grad_norm": 0.6506073417099824,
"learning_rate": 9.925959427022366e-06,
"loss": 0.1791,
"step": 43
},
{
"epoch": 0.11224489795918367,
"grad_norm": 0.7867658827361405,
"learning_rate": 9.922484664843763e-06,
"loss": 0.1924,
"step": 44
},
{
"epoch": 0.11479591836734694,
"grad_norm": 0.6579496186128595,
"learning_rate": 9.918930861849965e-06,
"loss": 0.1825,
"step": 45
},
{
"epoch": 0.11734693877551021,
"grad_norm": 0.6295603187995811,
"learning_rate": 9.915298075104735e-06,
"loss": 0.1748,
"step": 46
},
{
"epoch": 0.11989795918367346,
"grad_norm": 0.6001949141128546,
"learning_rate": 9.911586362940082e-06,
"loss": 0.1625,
"step": 47
},
{
"epoch": 0.12244897959183673,
"grad_norm": 0.638398314084115,
"learning_rate": 9.907795784955327e-06,
"loss": 0.1517,
"step": 48
},
{
"epoch": 0.125,
"grad_norm": 0.6399210795033582,
"learning_rate": 9.903926402016153e-06,
"loss": 0.153,
"step": 49
},
{
"epoch": 0.12755102040816327,
"grad_norm": 0.7153964945511394,
"learning_rate": 9.899978276253617e-06,
"loss": 0.1715,
"step": 50
},
{
"epoch": 0.13010204081632654,
"grad_norm": 0.6490936443465146,
"learning_rate": 9.895951471063156e-06,
"loss": 0.1783,
"step": 51
},
{
"epoch": 0.1326530612244898,
"grad_norm": 0.6495112832522,
"learning_rate": 9.891846051103578e-06,
"loss": 0.1746,
"step": 52
},
{
"epoch": 0.13520408163265307,
"grad_norm": 0.756147962576476,
"learning_rate": 9.887662082296006e-06,
"loss": 0.1973,
"step": 53
},
{
"epoch": 0.1377551020408163,
"grad_norm": 0.6587202327840592,
"learning_rate": 9.883399631822836e-06,
"loss": 0.1624,
"step": 54
},
{
"epoch": 0.14030612244897958,
"grad_norm": 0.7693655467061048,
"learning_rate": 9.87905876812665e-06,
"loss": 0.2131,
"step": 55
},
{
"epoch": 0.14285714285714285,
"grad_norm": 0.7303583152250236,
"learning_rate": 9.874639560909118e-06,
"loss": 0.1953,
"step": 56
},
{
"epoch": 0.14540816326530612,
"grad_norm": 0.6899593083786243,
"learning_rate": 9.870142081129883e-06,
"loss": 0.1684,
"step": 57
},
{
"epoch": 0.14795918367346939,
"grad_norm": 0.693858594186068,
"learning_rate": 9.86556640100541e-06,
"loss": 0.1923,
"step": 58
},
{
"epoch": 0.15051020408163265,
"grad_norm": 0.665392217093543,
"learning_rate": 9.860912594007847e-06,
"loss": 0.1796,
"step": 59
},
{
"epoch": 0.15306122448979592,
"grad_norm": 0.6903415551256747,
"learning_rate": 9.85618073486382e-06,
"loss": 0.1856,
"step": 60
},
{
"epoch": 0.1556122448979592,
"grad_norm": 0.6652802573305568,
"learning_rate": 9.851370899553254e-06,
"loss": 0.1713,
"step": 61
},
{
"epoch": 0.15816326530612246,
"grad_norm": 0.6650490539593145,
"learning_rate": 9.846483165308142e-06,
"loss": 0.172,
"step": 62
},
{
"epoch": 0.16071428571428573,
"grad_norm": 0.6483494869026171,
"learning_rate": 9.841517610611309e-06,
"loss": 0.1665,
"step": 63
},
{
"epoch": 0.16326530612244897,
"grad_norm": 0.7366504472267554,
"learning_rate": 9.836474315195148e-06,
"loss": 0.2066,
"step": 64
},
{
"epoch": 0.16581632653061223,
"grad_norm": 0.7433091492751206,
"learning_rate": 9.831353360040347e-06,
"loss": 0.1822,
"step": 65
},
{
"epoch": 0.1683673469387755,
"grad_norm": 0.5733277219690146,
"learning_rate": 9.826154827374578e-06,
"loss": 0.1398,
"step": 66
},
{
"epoch": 0.17091836734693877,
"grad_norm": 0.6510749686231305,
"learning_rate": 9.82087880067119e-06,
"loss": 0.1683,
"step": 67
},
{
"epoch": 0.17346938775510204,
"grad_norm": 0.6369611187446915,
"learning_rate": 9.815525364647853e-06,
"loss": 0.1698,
"step": 68
},
{
"epoch": 0.1760204081632653,
"grad_norm": 0.6249985108358178,
"learning_rate": 9.810094605265212e-06,
"loss": 0.1598,
"step": 69
},
{
"epoch": 0.17857142857142858,
"grad_norm": 0.7226036716768888,
"learning_rate": 9.804586609725499e-06,
"loss": 0.191,
"step": 70
},
{
"epoch": 0.18112244897959184,
"grad_norm": 0.6414207048608993,
"learning_rate": 9.799001466471134e-06,
"loss": 0.1874,
"step": 71
},
{
"epoch": 0.1836734693877551,
"grad_norm": 0.7694659731472804,
"learning_rate": 9.793339265183303e-06,
"loss": 0.2056,
"step": 72
},
{
"epoch": 0.18622448979591838,
"grad_norm": 0.6898939009314069,
"learning_rate": 9.787600096780529e-06,
"loss": 0.1793,
"step": 73
},
{
"epoch": 0.18877551020408162,
"grad_norm": 0.6117605046259157,
"learning_rate": 9.781784053417192e-06,
"loss": 0.1452,
"step": 74
},
{
"epoch": 0.1913265306122449,
"grad_norm": 0.7207508689727239,
"learning_rate": 9.775891228482068e-06,
"loss": 0.1862,
"step": 75
},
{
"epoch": 0.19387755102040816,
"grad_norm": 0.6195808967801664,
"learning_rate": 9.76992171659682e-06,
"loss": 0.1538,
"step": 76
},
{
"epoch": 0.19642857142857142,
"grad_norm": 0.6630826534576278,
"learning_rate": 9.763875613614482e-06,
"loss": 0.1648,
"step": 77
},
{
"epoch": 0.1989795918367347,
"grad_norm": 0.702756061791453,
"learning_rate": 9.757753016617917e-06,
"loss": 0.1884,
"step": 78
},
{
"epoch": 0.20153061224489796,
"grad_norm": 0.597140282770509,
"learning_rate": 9.751554023918261e-06,
"loss": 0.1509,
"step": 79
},
{
"epoch": 0.20408163265306123,
"grad_norm": 0.7398956578868144,
"learning_rate": 9.745278735053345e-06,
"loss": 0.1893,
"step": 80
},
{
"epoch": 0.2066326530612245,
"grad_norm": 0.6596462460468865,
"learning_rate": 9.738927250786088e-06,
"loss": 0.1704,
"step": 81
},
{
"epoch": 0.20918367346938777,
"grad_norm": 0.6869452338425555,
"learning_rate": 9.732499673102895e-06,
"loss": 0.1892,
"step": 82
},
{
"epoch": 0.21173469387755103,
"grad_norm": 0.6390745720379519,
"learning_rate": 9.725996105212002e-06,
"loss": 0.1781,
"step": 83
},
{
"epoch": 0.21428571428571427,
"grad_norm": 0.6284608424012191,
"learning_rate": 9.719416651541839e-06,
"loss": 0.1643,
"step": 84
},
{
"epoch": 0.21683673469387754,
"grad_norm": 0.6422070407982153,
"learning_rate": 9.712761417739326e-06,
"loss": 0.1832,
"step": 85
},
{
"epoch": 0.2193877551020408,
"grad_norm": 0.6950581393056733,
"learning_rate": 9.706030510668202e-06,
"loss": 0.1794,
"step": 86
},
{
"epoch": 0.22193877551020408,
"grad_norm": 0.604792640333442,
"learning_rate": 9.6992240384073e-06,
"loss": 0.1716,
"step": 87
},
{
"epoch": 0.22448979591836735,
"grad_norm": 0.7157986039856136,
"learning_rate": 9.692342110248802e-06,
"loss": 0.2274,
"step": 88
},
{
"epoch": 0.22704081632653061,
"grad_norm": 0.6239441335723747,
"learning_rate": 9.685384836696503e-06,
"loss": 0.1747,
"step": 89
},
{
"epoch": 0.22959183673469388,
"grad_norm": 0.6341863503525225,
"learning_rate": 9.678352329464018e-06,
"loss": 0.1771,
"step": 90
},
{
"epoch": 0.23214285714285715,
"grad_norm": 0.6494114016675228,
"learning_rate": 9.671244701472999e-06,
"loss": 0.1769,
"step": 91
},
{
"epoch": 0.23469387755102042,
"grad_norm": 0.6473038344264327,
"learning_rate": 9.664062066851325e-06,
"loss": 0.1714,
"step": 92
},
{
"epoch": 0.2372448979591837,
"grad_norm": 0.6552409387122857,
"learning_rate": 9.656804540931256e-06,
"loss": 0.1684,
"step": 93
},
{
"epoch": 0.23979591836734693,
"grad_norm": 0.7059100317343475,
"learning_rate": 9.649472240247588e-06,
"loss": 0.2126,
"step": 94
},
{
"epoch": 0.2423469387755102,
"grad_norm": 0.7101604206793758,
"learning_rate": 9.642065282535795e-06,
"loss": 0.1786,
"step": 95
},
{
"epoch": 0.24489795918367346,
"grad_norm": 0.6055224147282258,
"learning_rate": 9.63458378673011e-06,
"loss": 0.1487,
"step": 96
},
{
"epoch": 0.24744897959183673,
"grad_norm": 0.6630323155965017,
"learning_rate": 9.627027872961642e-06,
"loss": 0.1802,
"step": 97
},
{
"epoch": 0.25,
"grad_norm": 0.6943625213974632,
"learning_rate": 9.619397662556434e-06,
"loss": 0.1662,
"step": 98
},
{
"epoch": 0.25255102040816324,
"grad_norm": 0.6707618830728986,
"learning_rate": 9.611693278033517e-06,
"loss": 0.1722,
"step": 99
},
{
"epoch": 0.25510204081632654,
"grad_norm": 0.63430666549655,
"learning_rate": 9.603914843102941e-06,
"loss": 0.1755,
"step": 100
},
{
"epoch": 0.25510204081632654,
"eval_loss": 0.17825095355510712,
"eval_runtime": 2.5188,
"eval_samples_per_second": 12.704,
"eval_steps_per_second": 3.176,
"step": 100
},
{
"epoch": 0.2576530612244898,
"grad_norm": 0.6301792150542433,
"learning_rate": 9.596062482663796e-06,
"loss": 0.1647,
"step": 101
},
{
"epoch": 0.2602040816326531,
"grad_norm": 0.7439256267733433,
"learning_rate": 9.588136322802194e-06,
"loss": 0.1843,
"step": 102
},
{
"epoch": 0.2627551020408163,
"grad_norm": 0.6241120007830788,
"learning_rate": 9.580136490789255e-06,
"loss": 0.1774,
"step": 103
},
{
"epoch": 0.2653061224489796,
"grad_norm": 0.6589666329393432,
"learning_rate": 9.572063115079063e-06,
"loss": 0.1685,
"step": 104
},
{
"epoch": 0.26785714285714285,
"grad_norm": 0.628815378883589,
"learning_rate": 9.563916325306595e-06,
"loss": 0.1595,
"step": 105
},
{
"epoch": 0.27040816326530615,
"grad_norm": 0.7047293090329506,
"learning_rate": 9.555696252285648e-06,
"loss": 0.1952,
"step": 106
},
{
"epoch": 0.2729591836734694,
"grad_norm": 0.6557715899844985,
"learning_rate": 9.547403028006734e-06,
"loss": 0.1661,
"step": 107
},
{
"epoch": 0.2755102040816326,
"grad_norm": 0.7357553062387641,
"learning_rate": 9.539036785634961e-06,
"loss": 0.2147,
"step": 108
},
{
"epoch": 0.2780612244897959,
"grad_norm": 0.604934655741894,
"learning_rate": 9.530597659507898e-06,
"loss": 0.1478,
"step": 109
},
{
"epoch": 0.28061224489795916,
"grad_norm": 0.6963912461652633,
"learning_rate": 9.522085785133415e-06,
"loss": 0.1732,
"step": 110
},
{
"epoch": 0.28316326530612246,
"grad_norm": 0.6913597853134738,
"learning_rate": 9.513501299187506e-06,
"loss": 0.1802,
"step": 111
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.6061097854231792,
"learning_rate": 9.504844339512096e-06,
"loss": 0.1459,
"step": 112
},
{
"epoch": 0.288265306122449,
"grad_norm": 0.6784991397247085,
"learning_rate": 9.49611504511283e-06,
"loss": 0.1577,
"step": 113
},
{
"epoch": 0.29081632653061223,
"grad_norm": 0.6820299760172144,
"learning_rate": 9.48731355615684e-06,
"loss": 0.181,
"step": 114
},
{
"epoch": 0.29336734693877553,
"grad_norm": 0.6080242625892597,
"learning_rate": 9.478440013970485e-06,
"loss": 0.1438,
"step": 115
},
{
"epoch": 0.29591836734693877,
"grad_norm": 0.7274055237100493,
"learning_rate": 9.469494561037097e-06,
"loss": 0.1861,
"step": 116
},
{
"epoch": 0.29846938775510207,
"grad_norm": 0.681863192199185,
"learning_rate": 9.46047734099469e-06,
"loss": 0.1714,
"step": 117
},
{
"epoch": 0.3010204081632653,
"grad_norm": 0.6493118569066717,
"learning_rate": 9.451388498633635e-06,
"loss": 0.1673,
"step": 118
},
{
"epoch": 0.30357142857142855,
"grad_norm": 0.6750599274016734,
"learning_rate": 9.442228179894362e-06,
"loss": 0.1819,
"step": 119
},
{
"epoch": 0.30612244897959184,
"grad_norm": 0.6385238805634963,
"learning_rate": 9.432996531865001e-06,
"loss": 0.1657,
"step": 120
},
{
"epoch": 0.3086734693877551,
"grad_norm": 0.6134862321136166,
"learning_rate": 9.423693702779022e-06,
"loss": 0.1626,
"step": 121
},
{
"epoch": 0.3112244897959184,
"grad_norm": 0.6711298307545004,
"learning_rate": 9.414319842012855e-06,
"loss": 0.1804,
"step": 122
},
{
"epoch": 0.3137755102040816,
"grad_norm": 0.6460358850190799,
"learning_rate": 9.404875100083497e-06,
"loss": 0.1799,
"step": 123
},
{
"epoch": 0.3163265306122449,
"grad_norm": 0.6229317835591002,
"learning_rate": 9.395359628646087e-06,
"loss": 0.1726,
"step": 124
},
{
"epoch": 0.31887755102040816,
"grad_norm": 0.7105507007735112,
"learning_rate": 9.385773580491476e-06,
"loss": 0.1773,
"step": 125
},
{
"epoch": 0.32142857142857145,
"grad_norm": 0.652326436562633,
"learning_rate": 9.376117109543769e-06,
"loss": 0.1696,
"step": 126
},
{
"epoch": 0.3239795918367347,
"grad_norm": 0.6846427621008662,
"learning_rate": 9.366390370857863e-06,
"loss": 0.1586,
"step": 127
},
{
"epoch": 0.32653061224489793,
"grad_norm": 0.6988027763071323,
"learning_rate": 9.356593520616948e-06,
"loss": 0.1939,
"step": 128
},
{
"epoch": 0.32908163265306123,
"grad_norm": 0.5989815910486795,
"learning_rate": 9.34672671613e-06,
"loss": 0.1508,
"step": 129
},
{
"epoch": 0.33163265306122447,
"grad_norm": 0.6157873448382567,
"learning_rate": 9.336790115829255e-06,
"loss": 0.1652,
"step": 130
},
{
"epoch": 0.33418367346938777,
"grad_norm": 0.6104056434176193,
"learning_rate": 9.326783879267678e-06,
"loss": 0.1538,
"step": 131
},
{
"epoch": 0.336734693877551,
"grad_norm": 0.7468762943061447,
"learning_rate": 9.316708167116377e-06,
"loss": 0.2006,
"step": 132
},
{
"epoch": 0.3392857142857143,
"grad_norm": 0.6338257023806732,
"learning_rate": 9.306563141162046e-06,
"loss": 0.1759,
"step": 133
},
{
"epoch": 0.34183673469387754,
"grad_norm": 0.623029871752068,
"learning_rate": 9.296348964304351e-06,
"loss": 0.1563,
"step": 134
},
{
"epoch": 0.34438775510204084,
"grad_norm": 0.616534494076029,
"learning_rate": 9.286065800553327e-06,
"loss": 0.1612,
"step": 135
},
{
"epoch": 0.3469387755102041,
"grad_norm": 0.6567894577284148,
"learning_rate": 9.275713815026732e-06,
"loss": 0.1794,
"step": 136
},
{
"epoch": 0.3494897959183674,
"grad_norm": 0.6929529884982644,
"learning_rate": 9.265293173947404e-06,
"loss": 0.183,
"step": 137
},
{
"epoch": 0.3520408163265306,
"grad_norm": 0.5590498656811169,
"learning_rate": 9.254804044640596e-06,
"loss": 0.1383,
"step": 138
},
{
"epoch": 0.35459183673469385,
"grad_norm": 0.6259188867432768,
"learning_rate": 9.244246595531272e-06,
"loss": 0.15,
"step": 139
},
{
"epoch": 0.35714285714285715,
"grad_norm": 0.6374491966087964,
"learning_rate": 9.233620996141421e-06,
"loss": 0.1724,
"step": 140
},
{
"epoch": 0.3596938775510204,
"grad_norm": 0.6775149835839037,
"learning_rate": 9.22292741708733e-06,
"loss": 0.16,
"step": 141
},
{
"epoch": 0.3622448979591837,
"grad_norm": 0.6507427637455202,
"learning_rate": 9.212166030076832e-06,
"loss": 0.1791,
"step": 142
},
{
"epoch": 0.3647959183673469,
"grad_norm": 0.5787991775867446,
"learning_rate": 9.20133700790657e-06,
"loss": 0.1436,
"step": 143
},
{
"epoch": 0.3673469387755102,
"grad_norm": 0.6577565969598809,
"learning_rate": 9.190440524459203e-06,
"loss": 0.1778,
"step": 144
},
{
"epoch": 0.36989795918367346,
"grad_norm": 0.63281714426237,
"learning_rate": 9.179476754700632e-06,
"loss": 0.1579,
"step": 145
},
{
"epoch": 0.37244897959183676,
"grad_norm": 0.620787271558136,
"learning_rate": 9.168445874677168e-06,
"loss": 0.1523,
"step": 146
},
{
"epoch": 0.375,
"grad_norm": 0.604356995426742,
"learning_rate": 9.157348061512728e-06,
"loss": 0.1556,
"step": 147
},
{
"epoch": 0.37755102040816324,
"grad_norm": 0.6941039323941053,
"learning_rate": 9.146183493405976e-06,
"loss": 0.2001,
"step": 148
},
{
"epoch": 0.38010204081632654,
"grad_norm": 0.6710751485897907,
"learning_rate": 9.13495234962747e-06,
"loss": 0.1997,
"step": 149
},
{
"epoch": 0.3826530612244898,
"grad_norm": 0.6080309117589121,
"learning_rate": 9.12365481051678e-06,
"loss": 0.1665,
"step": 150
},
{
"epoch": 0.3852040816326531,
"grad_norm": 0.621319557269394,
"learning_rate": 9.112291057479586e-06,
"loss": 0.1584,
"step": 151
},
{
"epoch": 0.3877551020408163,
"grad_norm": 0.6304671673258588,
"learning_rate": 9.10086127298478e-06,
"loss": 0.1681,
"step": 152
},
{
"epoch": 0.3903061224489796,
"grad_norm": 0.643668035685642,
"learning_rate": 9.089365640561523e-06,
"loss": 0.1652,
"step": 153
},
{
"epoch": 0.39285714285714285,
"grad_norm": 0.6295795105784965,
"learning_rate": 9.077804344796302e-06,
"loss": 0.1632,
"step": 154
},
{
"epoch": 0.39540816326530615,
"grad_norm": 0.6212503718092719,
"learning_rate": 9.066177571329969e-06,
"loss": 0.1555,
"step": 155
},
{
"epoch": 0.3979591836734694,
"grad_norm": 0.5942667214942609,
"learning_rate": 9.054485506854756e-06,
"loss": 0.1642,
"step": 156
},
{
"epoch": 0.4005102040816326,
"grad_norm": 0.6468545759854039,
"learning_rate": 9.04272833911128e-06,
"loss": 0.1654,
"step": 157
},
{
"epoch": 0.4030612244897959,
"grad_norm": 0.6066009285810315,
"learning_rate": 9.030906256885528e-06,
"loss": 0.1448,
"step": 158
},
{
"epoch": 0.40561224489795916,
"grad_norm": 0.6277372050287073,
"learning_rate": 9.019019450005826e-06,
"loss": 0.1627,
"step": 159
},
{
"epoch": 0.40816326530612246,
"grad_norm": 0.6086742021736391,
"learning_rate": 9.007068109339783e-06,
"loss": 0.1574,
"step": 160
},
{
"epoch": 0.4107142857142857,
"grad_norm": 0.671251680756004,
"learning_rate": 8.995052426791247e-06,
"loss": 0.1912,
"step": 161
},
{
"epoch": 0.413265306122449,
"grad_norm": 0.6714069477587861,
"learning_rate": 8.982972595297195e-06,
"loss": 0.186,
"step": 162
},
{
"epoch": 0.41581632653061223,
"grad_norm": 0.642109869112752,
"learning_rate": 8.970828808824659e-06,
"loss": 0.1708,
"step": 163
},
{
"epoch": 0.41836734693877553,
"grad_norm": 0.6168564976222412,
"learning_rate": 8.9586212623676e-06,
"loss": 0.1669,
"step": 164
},
{
"epoch": 0.42091836734693877,
"grad_norm": 0.6695625235286308,
"learning_rate": 8.946350151943778e-06,
"loss": 0.1909,
"step": 165
},
{
"epoch": 0.42346938775510207,
"grad_norm": 0.6036159435553136,
"learning_rate": 8.93401567459161e-06,
"loss": 0.1687,
"step": 166
},
{
"epoch": 0.4260204081632653,
"grad_norm": 0.6616845609083462,
"learning_rate": 8.921618028366997e-06,
"loss": 0.1684,
"step": 167
},
{
"epoch": 0.42857142857142855,
"grad_norm": 0.6019985636648711,
"learning_rate": 8.90915741234015e-06,
"loss": 0.164,
"step": 168
},
{
"epoch": 0.43112244897959184,
"grad_norm": 0.6318313353599098,
"learning_rate": 8.896634026592393e-06,
"loss": 0.1707,
"step": 169
},
{
"epoch": 0.4336734693877551,
"grad_norm": 0.6343404996339642,
"learning_rate": 8.884048072212952e-06,
"loss": 0.1552,
"step": 170
},
{
"epoch": 0.4362244897959184,
"grad_norm": 0.6073430634182778,
"learning_rate": 8.871399751295717e-06,
"loss": 0.1523,
"step": 171
},
{
"epoch": 0.4387755102040816,
"grad_norm": 0.684776429861673,
"learning_rate": 8.85868926693601e-06,
"loss": 0.1624,
"step": 172
},
{
"epoch": 0.4413265306122449,
"grad_norm": 0.6020383554217804,
"learning_rate": 8.845916823227315e-06,
"loss": 0.1625,
"step": 173
},
{
"epoch": 0.44387755102040816,
"grad_norm": 0.6548300342133035,
"learning_rate": 8.833082625258003e-06,
"loss": 0.1562,
"step": 174
},
{
"epoch": 0.44642857142857145,
"grad_norm": 0.6479894794435125,
"learning_rate": 8.820186879108038e-06,
"loss": 0.1649,
"step": 175
},
{
"epoch": 0.4489795918367347,
"grad_norm": 0.6764426315561457,
"learning_rate": 8.807229791845673e-06,
"loss": 0.1633,
"step": 176
},
{
"epoch": 0.45153061224489793,
"grad_norm": 0.6239624169344059,
"learning_rate": 8.794211571524119e-06,
"loss": 0.1557,
"step": 177
},
{
"epoch": 0.45408163265306123,
"grad_norm": 0.6474988134364261,
"learning_rate": 8.781132427178203e-06,
"loss": 0.1818,
"step": 178
},
{
"epoch": 0.45663265306122447,
"grad_norm": 0.6938478733121906,
"learning_rate": 8.767992568821021e-06,
"loss": 0.1924,
"step": 179
},
{
"epoch": 0.45918367346938777,
"grad_norm": 0.6179549390370858,
"learning_rate": 8.754792207440557e-06,
"loss": 0.1544,
"step": 180
},
{
"epoch": 0.461734693877551,
"grad_norm": 0.6256602273247317,
"learning_rate": 8.741531554996298e-06,
"loss": 0.1627,
"step": 181
},
{
"epoch": 0.4642857142857143,
"grad_norm": 0.6269721018978721,
"learning_rate": 8.728210824415829e-06,
"loss": 0.1502,
"step": 182
},
{
"epoch": 0.46683673469387754,
"grad_norm": 0.6746598533142751,
"learning_rate": 8.714830229591418e-06,
"loss": 0.1946,
"step": 183
},
{
"epoch": 0.46938775510204084,
"grad_norm": 0.7113907278859266,
"learning_rate": 8.701389985376578e-06,
"loss": 0.1984,
"step": 184
},
{
"epoch": 0.4719387755102041,
"grad_norm": 0.6364152878079946,
"learning_rate": 8.687890307582618e-06,
"loss": 0.171,
"step": 185
},
{
"epoch": 0.4744897959183674,
"grad_norm": 0.659485503945726,
"learning_rate": 8.674331412975178e-06,
"loss": 0.166,
"step": 186
},
{
"epoch": 0.4770408163265306,
"grad_norm": 0.6273274956447809,
"learning_rate": 8.660713519270749e-06,
"loss": 0.1549,
"step": 187
},
{
"epoch": 0.47959183673469385,
"grad_norm": 0.6683355477651822,
"learning_rate": 8.647036845133171e-06,
"loss": 0.1715,
"step": 188
},
{
"epoch": 0.48214285714285715,
"grad_norm": 0.7035769520046447,
"learning_rate": 8.633301610170136e-06,
"loss": 0.1676,
"step": 189
},
{
"epoch": 0.4846938775510204,
"grad_norm": 0.665388776003314,
"learning_rate": 8.619508034929646e-06,
"loss": 0.1745,
"step": 190
},
{
"epoch": 0.4872448979591837,
"grad_norm": 0.6263473288405155,
"learning_rate": 8.60565634089648e-06,
"loss": 0.168,
"step": 191
},
{
"epoch": 0.4897959183673469,
"grad_norm": 0.6454061053639517,
"learning_rate": 8.591746750488639e-06,
"loss": 0.1793,
"step": 192
},
{
"epoch": 0.4923469387755102,
"grad_norm": 0.595965931581441,
"learning_rate": 8.577779487053767e-06,
"loss": 0.1438,
"step": 193
},
{
"epoch": 0.49489795918367346,
"grad_norm": 0.585416860329295,
"learning_rate": 8.563754774865574e-06,
"loss": 0.152,
"step": 194
},
{
"epoch": 0.49744897959183676,
"grad_norm": 0.65139162533389,
"learning_rate": 8.549672839120227e-06,
"loss": 0.1721,
"step": 195
},
{
"epoch": 0.5,
"grad_norm": 0.6021792322784167,
"learning_rate": 8.535533905932739e-06,
"loss": 0.167,
"step": 196
},
{
"epoch": 0.5025510204081632,
"grad_norm": 0.6974220008021444,
"learning_rate": 8.521338202333335e-06,
"loss": 0.183,
"step": 197
},
{
"epoch": 0.5051020408163265,
"grad_norm": 0.6710913460442544,
"learning_rate": 8.507085956263808e-06,
"loss": 0.19,
"step": 198
},
{
"epoch": 0.5076530612244898,
"grad_norm": 0.6602730300692774,
"learning_rate": 8.492777396573863e-06,
"loss": 0.1822,
"step": 199
},
{
"epoch": 0.5102040816326531,
"grad_norm": 0.6096784561422763,
"learning_rate": 8.478412753017433e-06,
"loss": 0.1688,
"step": 200
},
{
"epoch": 0.5102040816326531,
"eval_loss": 0.17530770599842072,
"eval_runtime": 2.3928,
"eval_samples_per_second": 13.374,
"eval_steps_per_second": 3.343,
"step": 200
},
{
"epoch": 0.5127551020408163,
"grad_norm": 0.5777967437570939,
"learning_rate": 8.463992256248997e-06,
"loss": 0.1515,
"step": 201
},
{
"epoch": 0.5153061224489796,
"grad_norm": 0.6469736667234415,
"learning_rate": 8.449516137819875e-06,
"loss": 0.1716,
"step": 202
},
{
"epoch": 0.5178571428571429,
"grad_norm": 0.6042565412596849,
"learning_rate": 8.43498463017451e-06,
"loss": 0.1498,
"step": 203
},
{
"epoch": 0.5204081632653061,
"grad_norm": 0.6453530466355405,
"learning_rate": 8.420397966646732e-06,
"loss": 0.1716,
"step": 204
},
{
"epoch": 0.5229591836734694,
"grad_norm": 0.6434693539199635,
"learning_rate": 8.405756381456017e-06,
"loss": 0.1812,
"step": 205
},
{
"epoch": 0.5255102040816326,
"grad_norm": 0.7389376263189975,
"learning_rate": 8.391060109703725e-06,
"loss": 0.1781,
"step": 206
},
{
"epoch": 0.5280612244897959,
"grad_norm": 0.6155812259074535,
"learning_rate": 8.376309387369318e-06,
"loss": 0.1649,
"step": 207
},
{
"epoch": 0.5306122448979592,
"grad_norm": 0.650794631172212,
"learning_rate": 8.361504451306585e-06,
"loss": 0.175,
"step": 208
},
{
"epoch": 0.5331632653061225,
"grad_norm": 0.6309566465508609,
"learning_rate": 8.34664553923982e-06,
"loss": 0.1722,
"step": 209
},
{
"epoch": 0.5357142857142857,
"grad_norm": 0.6153722780738814,
"learning_rate": 8.331732889760021e-06,
"loss": 0.151,
"step": 210
},
{
"epoch": 0.5382653061224489,
"grad_norm": 0.6213867045748396,
"learning_rate": 8.316766742321052e-06,
"loss": 0.1709,
"step": 211
},
{
"epoch": 0.5408163265306123,
"grad_norm": 0.6231378594046826,
"learning_rate": 8.301747337235798e-06,
"loss": 0.1726,
"step": 212
},
{
"epoch": 0.5433673469387755,
"grad_norm": 0.6347367176192658,
"learning_rate": 8.286674915672308e-06,
"loss": 0.1675,
"step": 213
},
{
"epoch": 0.5459183673469388,
"grad_norm": 0.6293391795307883,
"learning_rate": 8.271549719649923e-06,
"loss": 0.16,
"step": 214
},
{
"epoch": 0.548469387755102,
"grad_norm": 0.6270775654657718,
"learning_rate": 8.256371992035384e-06,
"loss": 0.1679,
"step": 215
},
{
"epoch": 0.5510204081632653,
"grad_norm": 0.5994977548727174,
"learning_rate": 8.241141976538944e-06,
"loss": 0.1321,
"step": 216
},
{
"epoch": 0.5535714285714286,
"grad_norm": 0.6676987808194949,
"learning_rate": 8.22585991771044e-06,
"loss": 0.1678,
"step": 217
},
{
"epoch": 0.5561224489795918,
"grad_norm": 0.6632408731790466,
"learning_rate": 8.210526060935377e-06,
"loss": 0.1797,
"step": 218
},
{
"epoch": 0.5586734693877551,
"grad_norm": 0.6700416358253908,
"learning_rate": 8.19514065243099e-06,
"loss": 0.1862,
"step": 219
},
{
"epoch": 0.5612244897959183,
"grad_norm": 0.6307204362512712,
"learning_rate": 8.179703939242276e-06,
"loss": 0.1737,
"step": 220
},
{
"epoch": 0.5637755102040817,
"grad_norm": 0.6441334257769658,
"learning_rate": 8.164216169238043e-06,
"loss": 0.1599,
"step": 221
},
{
"epoch": 0.5663265306122449,
"grad_norm": 0.5816475384619316,
"learning_rate": 8.148677591106919e-06,
"loss": 0.1509,
"step": 222
},
{
"epoch": 0.5688775510204082,
"grad_norm": 0.6348006825878267,
"learning_rate": 8.133088454353366e-06,
"loss": 0.1697,
"step": 223
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.6582403387620182,
"learning_rate": 8.117449009293668e-06,
"loss": 0.1797,
"step": 224
},
{
"epoch": 0.5739795918367347,
"grad_norm": 0.6270714690286511,
"learning_rate": 8.101759507051919e-06,
"loss": 0.1625,
"step": 225
},
{
"epoch": 0.576530612244898,
"grad_norm": 0.6652022735952716,
"learning_rate": 8.08602019955598e-06,
"loss": 0.1622,
"step": 226
},
{
"epoch": 0.5790816326530612,
"grad_norm": 0.693450200019601,
"learning_rate": 8.070231339533442e-06,
"loss": 0.188,
"step": 227
},
{
"epoch": 0.5816326530612245,
"grad_norm": 0.611727450765603,
"learning_rate": 8.054393180507572e-06,
"loss": 0.1587,
"step": 228
},
{
"epoch": 0.5841836734693877,
"grad_norm": 0.592663870640422,
"learning_rate": 8.038505976793226e-06,
"loss": 0.1452,
"step": 229
},
{
"epoch": 0.5867346938775511,
"grad_norm": 0.6302845415061488,
"learning_rate": 8.022569983492781e-06,
"loss": 0.1672,
"step": 230
},
{
"epoch": 0.5892857142857143,
"grad_norm": 0.5804051065068835,
"learning_rate": 8.00658545649203e-06,
"loss": 0.1471,
"step": 231
},
{
"epoch": 0.5918367346938775,
"grad_norm": 0.6273355896398363,
"learning_rate": 7.99055265245608e-06,
"loss": 0.1721,
"step": 232
},
{
"epoch": 0.5943877551020408,
"grad_norm": 0.65405300250257,
"learning_rate": 7.974471828825227e-06,
"loss": 0.1698,
"step": 233
},
{
"epoch": 0.5969387755102041,
"grad_norm": 0.611434007618611,
"learning_rate": 7.958343243810818e-06,
"loss": 0.1507,
"step": 234
},
{
"epoch": 0.5994897959183674,
"grad_norm": 0.6897532176595209,
"learning_rate": 7.942167156391111e-06,
"loss": 0.1768,
"step": 235
},
{
"epoch": 0.6020408163265306,
"grad_norm": 0.6147188964397875,
"learning_rate": 7.925943826307119e-06,
"loss": 0.1502,
"step": 236
},
{
"epoch": 0.6045918367346939,
"grad_norm": 0.6057461789141037,
"learning_rate": 7.909673514058428e-06,
"loss": 0.1538,
"step": 237
},
{
"epoch": 0.6071428571428571,
"grad_norm": 0.5842486053218805,
"learning_rate": 7.89335648089903e-06,
"loss": 0.1473,
"step": 238
},
{
"epoch": 0.6096938775510204,
"grad_norm": 0.6124541145552258,
"learning_rate": 7.876992988833107e-06,
"loss": 0.1713,
"step": 239
},
{
"epoch": 0.6122448979591837,
"grad_norm": 0.6918344738184035,
"learning_rate": 7.860583300610849e-06,
"loss": 0.1741,
"step": 240
},
{
"epoch": 0.6147959183673469,
"grad_norm": 0.5869529235754722,
"learning_rate": 7.844127679724213e-06,
"loss": 0.1643,
"step": 241
},
{
"epoch": 0.6173469387755102,
"grad_norm": 0.5917786409568101,
"learning_rate": 7.827626390402707e-06,
"loss": 0.1323,
"step": 242
},
{
"epoch": 0.6198979591836735,
"grad_norm": 0.649081051443825,
"learning_rate": 7.811079697609137e-06,
"loss": 0.1642,
"step": 243
},
{
"epoch": 0.6224489795918368,
"grad_norm": 0.6068854372103691,
"learning_rate": 7.794487867035358e-06,
"loss": 0.1497,
"step": 244
},
{
"epoch": 0.625,
"grad_norm": 0.6921605466960177,
"learning_rate": 7.777851165098012e-06,
"loss": 0.1685,
"step": 245
},
{
"epoch": 0.6275510204081632,
"grad_norm": 0.6336841548041962,
"learning_rate": 7.761169858934238e-06,
"loss": 0.1741,
"step": 246
},
{
"epoch": 0.6301020408163265,
"grad_norm": 0.7562805886149276,
"learning_rate": 7.744444216397393e-06,
"loss": 0.202,
"step": 247
},
{
"epoch": 0.6326530612244898,
"grad_norm": 0.6315316001771077,
"learning_rate": 7.727674506052744e-06,
"loss": 0.1571,
"step": 248
},
{
"epoch": 0.6352040816326531,
"grad_norm": 0.6322335288478561,
"learning_rate": 7.710860997173164e-06,
"loss": 0.1648,
"step": 249
},
{
"epoch": 0.6377551020408163,
"grad_norm": 0.6990439392046193,
"learning_rate": 7.694003959734802e-06,
"loss": 0.1621,
"step": 250
},
{
"epoch": 0.6403061224489796,
"grad_norm": 0.5892549294977432,
"learning_rate": 7.677103664412747e-06,
"loss": 0.1666,
"step": 251
},
{
"epoch": 0.6428571428571429,
"grad_norm": 0.7152881541118469,
"learning_rate": 7.660160382576683e-06,
"loss": 0.1912,
"step": 252
},
{
"epoch": 0.6454081632653061,
"grad_norm": 0.6123697518954341,
"learning_rate": 7.643174386286539e-06,
"loss": 0.1705,
"step": 253
},
{
"epoch": 0.6479591836734694,
"grad_norm": 0.6404458075559971,
"learning_rate": 7.626145948288107e-06,
"loss": 0.1445,
"step": 254
},
{
"epoch": 0.6505102040816326,
"grad_norm": 0.5989704014552829,
"learning_rate": 7.609075342008674e-06,
"loss": 0.1518,
"step": 255
},
{
"epoch": 0.6530612244897959,
"grad_norm": 0.6166584428376881,
"learning_rate": 7.591962841552627e-06,
"loss": 0.1646,
"step": 256
},
{
"epoch": 0.6556122448979592,
"grad_norm": 0.6023893275125043,
"learning_rate": 7.574808721697047e-06,
"loss": 0.1594,
"step": 257
},
{
"epoch": 0.6581632653061225,
"grad_norm": 0.5980117337714704,
"learning_rate": 7.55761325788731e-06,
"loss": 0.1629,
"step": 258
},
{
"epoch": 0.6607142857142857,
"grad_norm": 0.5957147716396232,
"learning_rate": 7.540376726232648e-06,
"loss": 0.1562,
"step": 259
},
{
"epoch": 0.6632653061224489,
"grad_norm": 0.6352476407461065,
"learning_rate": 7.52309940350173e-06,
"loss": 0.1611,
"step": 260
},
{
"epoch": 0.6658163265306123,
"grad_norm": 0.6705656721875611,
"learning_rate": 7.505781567118204e-06,
"loss": 0.1717,
"step": 261
},
{
"epoch": 0.6683673469387755,
"grad_norm": 0.6416780470208198,
"learning_rate": 7.488423495156258e-06,
"loss": 0.1765,
"step": 262
},
{
"epoch": 0.6709183673469388,
"grad_norm": 0.6396852156623875,
"learning_rate": 7.471025466336139e-06,
"loss": 0.1608,
"step": 263
},
{
"epoch": 0.673469387755102,
"grad_norm": 0.6315942638591941,
"learning_rate": 7.453587760019691e-06,
"loss": 0.1608,
"step": 264
},
{
"epoch": 0.6760204081632653,
"grad_norm": 0.6445458308180846,
"learning_rate": 7.436110656205859e-06,
"loss": 0.1623,
"step": 265
},
{
"epoch": 0.6785714285714286,
"grad_norm": 0.6064991852375194,
"learning_rate": 7.4185944355261996e-06,
"loss": 0.1557,
"step": 266
},
{
"epoch": 0.6811224489795918,
"grad_norm": 0.7041372391725068,
"learning_rate": 7.401039379240373e-06,
"loss": 0.1889,
"step": 267
},
{
"epoch": 0.6836734693877551,
"grad_norm": 0.652310000895515,
"learning_rate": 7.383445769231628e-06,
"loss": 0.1842,
"step": 268
},
{
"epoch": 0.6862244897959183,
"grad_norm": 0.5766488734278856,
"learning_rate": 7.365813888002269e-06,
"loss": 0.1515,
"step": 269
},
{
"epoch": 0.6887755102040817,
"grad_norm": 0.6210407194072641,
"learning_rate": 7.348144018669129e-06,
"loss": 0.1566,
"step": 270
},
{
"epoch": 0.6913265306122449,
"grad_norm": 0.5587746621446245,
"learning_rate": 7.3304364449590215e-06,
"loss": 0.14,
"step": 271
},
{
"epoch": 0.6938775510204082,
"grad_norm": 0.6355280238362423,
"learning_rate": 7.312691451204178e-06,
"loss": 0.1726,
"step": 272
},
{
"epoch": 0.6964285714285714,
"grad_norm": 0.642365945745617,
"learning_rate": 7.294909322337689e-06,
"loss": 0.1833,
"step": 273
},
{
"epoch": 0.6989795918367347,
"grad_norm": 0.6283138953382016,
"learning_rate": 7.277090343888931e-06,
"loss": 0.1688,
"step": 274
},
{
"epoch": 0.701530612244898,
"grad_norm": 0.6011352110745912,
"learning_rate": 7.259234801978971e-06,
"loss": 0.1574,
"step": 275
},
{
"epoch": 0.7040816326530612,
"grad_norm": 0.6354286801860528,
"learning_rate": 7.241342983315985e-06,
"loss": 0.1739,
"step": 276
},
{
"epoch": 0.7066326530612245,
"grad_norm": 0.6106322866032395,
"learning_rate": 7.223415175190647e-06,
"loss": 0.1707,
"step": 277
},
{
"epoch": 0.7091836734693877,
"grad_norm": 0.7159138748192706,
"learning_rate": 7.205451665471515e-06,
"loss": 0.2022,
"step": 278
},
{
"epoch": 0.7117346938775511,
"grad_norm": 0.5823787309582513,
"learning_rate": 7.187452742600409e-06,
"loss": 0.1568,
"step": 279
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.6151783594340412,
"learning_rate": 7.169418695587791e-06,
"loss": 0.1635,
"step": 280
},
{
"epoch": 0.7168367346938775,
"grad_norm": 0.6130620738320395,
"learning_rate": 7.151349814008104e-06,
"loss": 0.1327,
"step": 281
},
{
"epoch": 0.7193877551020408,
"grad_norm": 0.6748859373806838,
"learning_rate": 7.1332463879951404e-06,
"loss": 0.1811,
"step": 282
},
{
"epoch": 0.7219387755102041,
"grad_norm": 0.5834476885398581,
"learning_rate": 7.1151087082373705e-06,
"loss": 0.1478,
"step": 283
},
{
"epoch": 0.7244897959183674,
"grad_norm": 0.6169796809060963,
"learning_rate": 7.096937065973285e-06,
"loss": 0.1631,
"step": 284
},
{
"epoch": 0.7270408163265306,
"grad_norm": 0.6959780203274529,
"learning_rate": 7.078731752986709e-06,
"loss": 0.1616,
"step": 285
},
{
"epoch": 0.7295918367346939,
"grad_norm": 0.6979567612851677,
"learning_rate": 7.060493061602128e-06,
"loss": 0.187,
"step": 286
},
{
"epoch": 0.7321428571428571,
"grad_norm": 0.5732766031573223,
"learning_rate": 7.042221284679982e-06,
"loss": 0.1529,
"step": 287
},
{
"epoch": 0.7346938775510204,
"grad_norm": 0.6156530993396518,
"learning_rate": 7.023916715611969e-06,
"loss": 0.1498,
"step": 288
},
{
"epoch": 0.7372448979591837,
"grad_norm": 0.7132345983065509,
"learning_rate": 7.00557964831634e-06,
"loss": 0.1617,
"step": 289
},
{
"epoch": 0.7397959183673469,
"grad_norm": 0.6232249158259106,
"learning_rate": 6.987210377233165e-06,
"loss": 0.149,
"step": 290
},
{
"epoch": 0.7423469387755102,
"grad_norm": 0.7277934128266045,
"learning_rate": 6.968809197319618e-06,
"loss": 0.1864,
"step": 291
},
{
"epoch": 0.7448979591836735,
"grad_norm": 0.7164985831395658,
"learning_rate": 6.950376404045235e-06,
"loss": 0.1664,
"step": 292
},
{
"epoch": 0.7474489795918368,
"grad_norm": 0.6722545299275654,
"learning_rate": 6.931912293387172e-06,
"loss": 0.1808,
"step": 293
},
{
"epoch": 0.75,
"grad_norm": 0.6442507964460605,
"learning_rate": 6.913417161825449e-06,
"loss": 0.1583,
"step": 294
},
{
"epoch": 0.7525510204081632,
"grad_norm": 0.645133746906539,
"learning_rate": 6.894891306338195e-06,
"loss": 0.1696,
"step": 295
},
{
"epoch": 0.7551020408163265,
"grad_norm": 0.5410055981651295,
"learning_rate": 6.876335024396872e-06,
"loss": 0.1293,
"step": 296
},
{
"epoch": 0.7576530612244898,
"grad_norm": 0.688793218054993,
"learning_rate": 6.857748613961503e-06,
"loss": 0.1906,
"step": 297
},
{
"epoch": 0.7602040816326531,
"grad_norm": 0.6133050542728484,
"learning_rate": 6.839132373475894e-06,
"loss": 0.1592,
"step": 298
},
{
"epoch": 0.7627551020408163,
"grad_norm": 0.6599036669431815,
"learning_rate": 6.820486601862826e-06,
"loss": 0.1783,
"step": 299
},
{
"epoch": 0.7653061224489796,
"grad_norm": 0.6391559096522518,
"learning_rate": 6.801811598519268e-06,
"loss": 0.1684,
"step": 300
},
{
"epoch": 0.7653061224489796,
"eval_loss": 0.1719779670238495,
"eval_runtime": 2.1687,
"eval_samples_per_second": 14.755,
"eval_steps_per_second": 3.689,
"step": 300
},
{
"epoch": 0.7678571428571429,
"grad_norm": 0.6092171680985036,
"learning_rate": 6.783107663311566e-06,
"loss": 0.1599,
"step": 301
},
{
"epoch": 0.7704081632653061,
"grad_norm": 0.6151859069141618,
"learning_rate": 6.764375096570628e-06,
"loss": 0.1577,
"step": 302
},
{
"epoch": 0.7729591836734694,
"grad_norm": 0.6557762414524845,
"learning_rate": 6.7456141990871e-06,
"loss": 0.1877,
"step": 303
},
{
"epoch": 0.7755102040816326,
"grad_norm": 0.6277047181654297,
"learning_rate": 6.726825272106539e-06,
"loss": 0.1623,
"step": 304
},
{
"epoch": 0.7780612244897959,
"grad_norm": 0.6430715807058038,
"learning_rate": 6.708008617324573e-06,
"loss": 0.1491,
"step": 305
},
{
"epoch": 0.7806122448979592,
"grad_norm": 0.644153170652738,
"learning_rate": 6.689164536882059e-06,
"loss": 0.1791,
"step": 306
},
{
"epoch": 0.7831632653061225,
"grad_norm": 0.6160033596879607,
"learning_rate": 6.670293333360229e-06,
"loss": 0.1427,
"step": 307
},
{
"epoch": 0.7857142857142857,
"grad_norm": 0.5649228032258986,
"learning_rate": 6.651395309775837e-06,
"loss": 0.1362,
"step": 308
},
{
"epoch": 0.7882653061224489,
"grad_norm": 0.6371148038872716,
"learning_rate": 6.632470769576283e-06,
"loss": 0.171,
"step": 309
},
{
"epoch": 0.7908163265306123,
"grad_norm": 0.5975125963921615,
"learning_rate": 6.6135200166347505e-06,
"loss": 0.149,
"step": 310
},
{
"epoch": 0.7933673469387755,
"grad_norm": 0.5408720399231929,
"learning_rate": 6.594543355245324e-06,
"loss": 0.1216,
"step": 311
},
{
"epoch": 0.7959183673469388,
"grad_norm": 0.7069571267475502,
"learning_rate": 6.575541090118105e-06,
"loss": 0.1837,
"step": 312
},
{
"epoch": 0.798469387755102,
"grad_norm": 0.6603211346802758,
"learning_rate": 6.556513526374307e-06,
"loss": 0.1894,
"step": 313
},
{
"epoch": 0.8010204081632653,
"grad_norm": 0.5956839299454775,
"learning_rate": 6.537460969541378e-06,
"loss": 0.148,
"step": 314
},
{
"epoch": 0.8035714285714286,
"grad_norm": 0.6123273259294997,
"learning_rate": 6.518383725548074e-06,
"loss": 0.1495,
"step": 315
},
{
"epoch": 0.8061224489795918,
"grad_norm": 0.6038963523853219,
"learning_rate": 6.499282100719558e-06,
"loss": 0.1528,
"step": 316
},
{
"epoch": 0.8086734693877551,
"grad_norm": 0.6358364483640969,
"learning_rate": 6.4801564017724804e-06,
"loss": 0.1539,
"step": 317
},
{
"epoch": 0.8112244897959183,
"grad_norm": 0.6181985396343684,
"learning_rate": 6.461006935810048e-06,
"loss": 0.1548,
"step": 318
},
{
"epoch": 0.8137755102040817,
"grad_norm": 0.6209023492344633,
"learning_rate": 6.441834010317097e-06,
"loss": 0.1424,
"step": 319
},
{
"epoch": 0.8163265306122449,
"grad_norm": 0.6560248537666912,
"learning_rate": 6.4226379331551625e-06,
"loss": 0.1605,
"step": 320
},
{
"epoch": 0.8188775510204082,
"grad_norm": 0.7033920181013527,
"learning_rate": 6.40341901255752e-06,
"loss": 0.1863,
"step": 321
},
{
"epoch": 0.8214285714285714,
"grad_norm": 0.6117156078888957,
"learning_rate": 6.384177557124247e-06,
"loss": 0.1604,
"step": 322
},
{
"epoch": 0.8239795918367347,
"grad_norm": 0.6141997763943361,
"learning_rate": 6.364913875817267e-06,
"loss": 0.1475,
"step": 323
},
{
"epoch": 0.826530612244898,
"grad_norm": 0.546093227403362,
"learning_rate": 6.345628277955384e-06,
"loss": 0.1285,
"step": 324
},
{
"epoch": 0.8290816326530612,
"grad_norm": 0.6952190901409022,
"learning_rate": 6.326321073209321e-06,
"loss": 0.1736,
"step": 325
},
{
"epoch": 0.8316326530612245,
"grad_norm": 0.6457827737823263,
"learning_rate": 6.306992571596742e-06,
"loss": 0.1822,
"step": 326
},
{
"epoch": 0.8341836734693877,
"grad_norm": 0.5815248212460418,
"learning_rate": 6.287643083477275e-06,
"loss": 0.1303,
"step": 327
},
{
"epoch": 0.8367346938775511,
"grad_norm": 0.6366648152395168,
"learning_rate": 6.268272919547537e-06,
"loss": 0.1522,
"step": 328
},
{
"epoch": 0.8392857142857143,
"grad_norm": 0.5957035573721868,
"learning_rate": 6.248882390836135e-06,
"loss": 0.1411,
"step": 329
},
{
"epoch": 0.8418367346938775,
"grad_norm": 0.628234311427476,
"learning_rate": 6.229471808698673e-06,
"loss": 0.165,
"step": 330
},
{
"epoch": 0.8443877551020408,
"grad_norm": 0.5801673506686771,
"learning_rate": 6.2100414848127586e-06,
"loss": 0.1496,
"step": 331
},
{
"epoch": 0.8469387755102041,
"grad_norm": 0.6150706660795319,
"learning_rate": 6.1905917311729915e-06,
"loss": 0.1414,
"step": 332
},
{
"epoch": 0.8494897959183674,
"grad_norm": 0.7390081360716291,
"learning_rate": 6.17112286008596e-06,
"loss": 0.1872,
"step": 333
},
{
"epoch": 0.8520408163265306,
"grad_norm": 0.6878106569070934,
"learning_rate": 6.151635184165219e-06,
"loss": 0.1876,
"step": 334
},
{
"epoch": 0.8545918367346939,
"grad_norm": 0.5683915193981929,
"learning_rate": 6.132129016326279e-06,
"loss": 0.1437,
"step": 335
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.6162512785667651,
"learning_rate": 6.112604669781572e-06,
"loss": 0.1583,
"step": 336
},
{
"epoch": 0.8596938775510204,
"grad_norm": 0.6164623578503621,
"learning_rate": 6.093062458035433e-06,
"loss": 0.1521,
"step": 337
},
{
"epoch": 0.8622448979591837,
"grad_norm": 0.6110050489294294,
"learning_rate": 6.073502694879059e-06,
"loss": 0.1462,
"step": 338
},
{
"epoch": 0.8647959183673469,
"grad_norm": 0.5658668675870233,
"learning_rate": 6.053925694385464e-06,
"loss": 0.1415,
"step": 339
},
{
"epoch": 0.8673469387755102,
"grad_norm": 0.6183851263339577,
"learning_rate": 6.034331770904455e-06,
"loss": 0.1678,
"step": 340
},
{
"epoch": 0.8698979591836735,
"grad_norm": 0.6380267035308205,
"learning_rate": 6.014721239057565e-06,
"loss": 0.1647,
"step": 341
},
{
"epoch": 0.8724489795918368,
"grad_norm": 0.6475786946852516,
"learning_rate": 5.9950944137330125e-06,
"loss": 0.1626,
"step": 342
},
{
"epoch": 0.875,
"grad_norm": 0.7178296798128999,
"learning_rate": 5.975451610080643e-06,
"loss": 0.176,
"step": 343
},
{
"epoch": 0.8775510204081632,
"grad_norm": 0.548799383239884,
"learning_rate": 5.955793143506863e-06,
"loss": 0.132,
"step": 344
},
{
"epoch": 0.8801020408163265,
"grad_norm": 0.6140832996638459,
"learning_rate": 5.936119329669584e-06,
"loss": 0.1622,
"step": 345
},
{
"epoch": 0.8826530612244898,
"grad_norm": 0.6117216867875228,
"learning_rate": 5.916430484473149e-06,
"loss": 0.166,
"step": 346
},
{
"epoch": 0.8852040816326531,
"grad_norm": 0.6438861515674844,
"learning_rate": 5.896726924063263e-06,
"loss": 0.1484,
"step": 347
},
{
"epoch": 0.8877551020408163,
"grad_norm": 0.684727641108475,
"learning_rate": 5.877008964821909e-06,
"loss": 0.1713,
"step": 348
},
{
"epoch": 0.8903061224489796,
"grad_norm": 0.6263934617347773,
"learning_rate": 5.8572769233622794e-06,
"loss": 0.1617,
"step": 349
},
{
"epoch": 0.8928571428571429,
"grad_norm": 0.6512663586367785,
"learning_rate": 5.837531116523683e-06,
"loss": 0.1833,
"step": 350
},
{
"epoch": 0.8954081632653061,
"grad_norm": 0.6446733318710774,
"learning_rate": 5.81777186136646e-06,
"loss": 0.1627,
"step": 351
},
{
"epoch": 0.8979591836734694,
"grad_norm": 0.6157294973622298,
"learning_rate": 5.797999475166897e-06,
"loss": 0.1365,
"step": 352
},
{
"epoch": 0.9005102040816326,
"grad_norm": 0.607524650157665,
"learning_rate": 5.778214275412118e-06,
"loss": 0.1541,
"step": 353
},
{
"epoch": 0.9030612244897959,
"grad_norm": 0.5654449477539436,
"learning_rate": 5.7584165797950055e-06,
"loss": 0.1354,
"step": 354
},
{
"epoch": 0.9056122448979592,
"grad_norm": 0.6257348947364124,
"learning_rate": 5.738606706209079e-06,
"loss": 0.1476,
"step": 355
},
{
"epoch": 0.9081632653061225,
"grad_norm": 0.6459117447837404,
"learning_rate": 5.71878497274341e-06,
"loss": 0.155,
"step": 356
},
{
"epoch": 0.9107142857142857,
"grad_norm": 0.5840351587262443,
"learning_rate": 5.698951697677498e-06,
"loss": 0.1407,
"step": 357
},
{
"epoch": 0.9132653061224489,
"grad_norm": 0.6739427176791648,
"learning_rate": 5.679107199476174e-06,
"loss": 0.1667,
"step": 358
},
{
"epoch": 0.9158163265306123,
"grad_norm": 0.6041549005412797,
"learning_rate": 5.659251796784474e-06,
"loss": 0.1697,
"step": 359
},
{
"epoch": 0.9183673469387755,
"grad_norm": 0.6116781652359067,
"learning_rate": 5.6393858084225305e-06,
"loss": 0.1532,
"step": 360
},
{
"epoch": 0.9209183673469388,
"grad_norm": 0.6077294016328789,
"learning_rate": 5.6195095533804546e-06,
"loss": 0.1675,
"step": 361
},
{
"epoch": 0.923469387755102,
"grad_norm": 0.6230191584366697,
"learning_rate": 5.599623350813202e-06,
"loss": 0.1595,
"step": 362
},
{
"epoch": 0.9260204081632653,
"grad_norm": 0.6494202373448055,
"learning_rate": 5.579727520035469e-06,
"loss": 0.1687,
"step": 363
},
{
"epoch": 0.9285714285714286,
"grad_norm": 0.678388379140103,
"learning_rate": 5.559822380516539e-06,
"loss": 0.196,
"step": 364
},
{
"epoch": 0.9311224489795918,
"grad_norm": 0.6795163128070991,
"learning_rate": 5.53990825187518e-06,
"loss": 0.1937,
"step": 365
},
{
"epoch": 0.9336734693877551,
"grad_norm": 0.631118915172661,
"learning_rate": 5.5199854538744905e-06,
"loss": 0.1684,
"step": 366
},
{
"epoch": 0.9362244897959183,
"grad_norm": 0.5111362970956352,
"learning_rate": 5.500054306416776e-06,
"loss": 0.135,
"step": 367
},
{
"epoch": 0.9387755102040817,
"grad_norm": 0.7200957437333302,
"learning_rate": 5.480115129538409e-06,
"loss": 0.197,
"step": 368
},
{
"epoch": 0.9413265306122449,
"grad_norm": 0.5990796224723971,
"learning_rate": 5.460168243404696e-06,
"loss": 0.1404,
"step": 369
},
{
"epoch": 0.9438775510204082,
"grad_norm": 0.6508909764184223,
"learning_rate": 5.440213968304728e-06,
"loss": 0.1653,
"step": 370
},
{
"epoch": 0.9464285714285714,
"grad_norm": 0.7058087802665988,
"learning_rate": 5.420252624646238e-06,
"loss": 0.1889,
"step": 371
},
{
"epoch": 0.9489795918367347,
"grad_norm": 0.62520078208343,
"learning_rate": 5.4002845329504675e-06,
"loss": 0.151,
"step": 372
},
{
"epoch": 0.951530612244898,
"grad_norm": 0.6175896170477174,
"learning_rate": 5.380310013847006e-06,
"loss": 0.1617,
"step": 373
},
{
"epoch": 0.9540816326530612,
"grad_norm": 0.6241321981735418,
"learning_rate": 5.360329388068649e-06,
"loss": 0.1738,
"step": 374
},
{
"epoch": 0.9566326530612245,
"grad_norm": 0.5387011325868518,
"learning_rate": 5.340342976446251e-06,
"loss": 0.1327,
"step": 375
},
{
"epoch": 0.9591836734693877,
"grad_norm": 0.6360363000351817,
"learning_rate": 5.320351099903565e-06,
"loss": 0.1798,
"step": 376
},
{
"epoch": 0.9617346938775511,
"grad_norm": 0.6197073671288826,
"learning_rate": 5.3003540794521005e-06,
"loss": 0.1536,
"step": 377
},
{
"epoch": 0.9642857142857143,
"grad_norm": 0.5982694119598283,
"learning_rate": 5.2803522361859596e-06,
"loss": 0.1427,
"step": 378
},
{
"epoch": 0.9668367346938775,
"grad_norm": 0.5902423939987766,
"learning_rate": 5.260345891276685e-06,
"loss": 0.1399,
"step": 379
},
{
"epoch": 0.9693877551020408,
"grad_norm": 0.6648465322813785,
"learning_rate": 5.240335365968104e-06,
"loss": 0.1691,
"step": 380
},
{
"epoch": 0.9719387755102041,
"grad_norm": 0.6128401036727914,
"learning_rate": 5.220320981571168e-06,
"loss": 0.1575,
"step": 381
},
{
"epoch": 0.9744897959183674,
"grad_norm": 0.6072504634016694,
"learning_rate": 5.2003030594587964e-06,
"loss": 0.1677,
"step": 382
},
{
"epoch": 0.9770408163265306,
"grad_norm": 0.5721919519862779,
"learning_rate": 5.18028192106071e-06,
"loss": 0.1444,
"step": 383
},
{
"epoch": 0.9795918367346939,
"grad_norm": 0.7081920183600142,
"learning_rate": 5.160257887858278e-06,
"loss": 0.2072,
"step": 384
},
{
"epoch": 0.9821428571428571,
"grad_norm": 0.6065745788364745,
"learning_rate": 5.140231281379345e-06,
"loss": 0.1522,
"step": 385
},
{
"epoch": 0.9846938775510204,
"grad_norm": 0.6371181525494265,
"learning_rate": 5.120202423193085e-06,
"loss": 0.1406,
"step": 386
},
{
"epoch": 0.9872448979591837,
"grad_norm": 0.6505788223343205,
"learning_rate": 5.100171634904821e-06,
"loss": 0.1683,
"step": 387
},
{
"epoch": 0.9897959183673469,
"grad_norm": 0.6419015901383764,
"learning_rate": 5.080139238150869e-06,
"loss": 0.1739,
"step": 388
},
{
"epoch": 0.9923469387755102,
"grad_norm": 0.5741359343804769,
"learning_rate": 5.060105554593374e-06,
"loss": 0.1432,
"step": 389
},
{
"epoch": 0.9948979591836735,
"grad_norm": 0.5944648027143441,
"learning_rate": 5.040070905915139e-06,
"loss": 0.1578,
"step": 390
},
{
"epoch": 0.9974489795918368,
"grad_norm": 0.5745910641764089,
"learning_rate": 5.020035613814469e-06,
"loss": 0.1449,
"step": 391
},
{
"epoch": 1.0,
"grad_norm": 0.6947779028476239,
"learning_rate": 5e-06,
"loss": 0.1125,
"step": 392
},
{
"epoch": 1.0025510204081634,
"grad_norm": 0.5323047103078277,
"learning_rate": 4.979964386185531e-06,
"loss": 0.097,
"step": 393
},
{
"epoch": 1.0051020408163265,
"grad_norm": 0.563964010101689,
"learning_rate": 4.959929094084862e-06,
"loss": 0.135,
"step": 394
},
{
"epoch": 1.0076530612244898,
"grad_norm": 0.6137555160835548,
"learning_rate": 4.9398944454066275e-06,
"loss": 0.1438,
"step": 395
},
{
"epoch": 1.010204081632653,
"grad_norm": 0.5584169281749033,
"learning_rate": 4.919860761849132e-06,
"loss": 0.1259,
"step": 396
},
{
"epoch": 1.0127551020408163,
"grad_norm": 0.5655350454746906,
"learning_rate": 4.89982836509518e-06,
"loss": 0.1227,
"step": 397
},
{
"epoch": 1.0153061224489797,
"grad_norm": 0.5932329728601095,
"learning_rate": 4.879797576806915e-06,
"loss": 0.132,
"step": 398
},
{
"epoch": 1.0178571428571428,
"grad_norm": 0.6072074491772425,
"learning_rate": 4.859768718620656e-06,
"loss": 0.1224,
"step": 399
},
{
"epoch": 1.0204081632653061,
"grad_norm": 0.5867196136790125,
"learning_rate": 4.839742112141725e-06,
"loss": 0.1183,
"step": 400
},
{
"epoch": 1.0204081632653061,
"eval_loss": 0.17335140705108643,
"eval_runtime": 2.1717,
"eval_samples_per_second": 14.735,
"eval_steps_per_second": 3.684,
"step": 400
},
{
"epoch": 1.0229591836734695,
"grad_norm": 0.6842931691283886,
"learning_rate": 4.8197180789392904e-06,
"loss": 0.1289,
"step": 401
},
{
"epoch": 1.0255102040816326,
"grad_norm": 0.5843835873726522,
"learning_rate": 4.799696940541204e-06,
"loss": 0.1224,
"step": 402
},
{
"epoch": 1.028061224489796,
"grad_norm": 0.6303376367215173,
"learning_rate": 4.779679018428832e-06,
"loss": 0.1159,
"step": 403
},
{
"epoch": 1.030612244897959,
"grad_norm": 0.6128285299025862,
"learning_rate": 4.759664634031897e-06,
"loss": 0.1109,
"step": 404
},
{
"epoch": 1.0331632653061225,
"grad_norm": 0.6997351244068603,
"learning_rate": 4.739654108723317e-06,
"loss": 0.1211,
"step": 405
},
{
"epoch": 1.0357142857142858,
"grad_norm": 0.6504032679810356,
"learning_rate": 4.719647763814041e-06,
"loss": 0.1299,
"step": 406
},
{
"epoch": 1.038265306122449,
"grad_norm": 0.609002998941228,
"learning_rate": 4.699645920547901e-06,
"loss": 0.1227,
"step": 407
},
{
"epoch": 1.0408163265306123,
"grad_norm": 0.6418661123328896,
"learning_rate": 4.679648900096436e-06,
"loss": 0.1301,
"step": 408
},
{
"epoch": 1.0433673469387754,
"grad_norm": 0.6541209431022946,
"learning_rate": 4.659657023553752e-06,
"loss": 0.1149,
"step": 409
},
{
"epoch": 1.0459183673469388,
"grad_norm": 0.5416993630355218,
"learning_rate": 4.6396706119313526e-06,
"loss": 0.0949,
"step": 410
},
{
"epoch": 1.0484693877551021,
"grad_norm": 0.6686437193372081,
"learning_rate": 4.619689986152996e-06,
"loss": 0.139,
"step": 411
},
{
"epoch": 1.0510204081632653,
"grad_norm": 0.6680097009304201,
"learning_rate": 4.599715467049534e-06,
"loss": 0.1446,
"step": 412
},
{
"epoch": 1.0535714285714286,
"grad_norm": 0.5528151642549715,
"learning_rate": 4.579747375353763e-06,
"loss": 0.1075,
"step": 413
},
{
"epoch": 1.0561224489795917,
"grad_norm": 0.5916503836624794,
"learning_rate": 4.559786031695275e-06,
"loss": 0.1113,
"step": 414
},
{
"epoch": 1.058673469387755,
"grad_norm": 0.5999571410820965,
"learning_rate": 4.5398317565953055e-06,
"loss": 0.1168,
"step": 415
},
{
"epoch": 1.0612244897959184,
"grad_norm": 0.6152629066291727,
"learning_rate": 4.5198848704615915e-06,
"loss": 0.1263,
"step": 416
},
{
"epoch": 1.0637755102040816,
"grad_norm": 0.5434823415058014,
"learning_rate": 4.499945693583227e-06,
"loss": 0.1077,
"step": 417
},
{
"epoch": 1.066326530612245,
"grad_norm": 0.6220819462707029,
"learning_rate": 4.480014546125511e-06,
"loss": 0.1306,
"step": 418
},
{
"epoch": 1.068877551020408,
"grad_norm": 0.5835461892516719,
"learning_rate": 4.4600917481248205e-06,
"loss": 0.1117,
"step": 419
},
{
"epoch": 1.0714285714285714,
"grad_norm": 0.633213180463133,
"learning_rate": 4.4401776194834615e-06,
"loss": 0.1165,
"step": 420
},
{
"epoch": 1.0739795918367347,
"grad_norm": 0.6041200748663155,
"learning_rate": 4.420272479964532e-06,
"loss": 0.1143,
"step": 421
},
{
"epoch": 1.0765306122448979,
"grad_norm": 0.6969958031590414,
"learning_rate": 4.4003766491867984e-06,
"loss": 0.1212,
"step": 422
},
{
"epoch": 1.0790816326530612,
"grad_norm": 0.6445563858005139,
"learning_rate": 4.380490446619547e-06,
"loss": 0.1389,
"step": 423
},
{
"epoch": 1.0816326530612246,
"grad_norm": 0.6125954009368678,
"learning_rate": 4.3606141915774695e-06,
"loss": 0.1092,
"step": 424
},
{
"epoch": 1.0841836734693877,
"grad_norm": 0.8037138319949373,
"learning_rate": 4.340748203215528e-06,
"loss": 0.1489,
"step": 425
},
{
"epoch": 1.086734693877551,
"grad_norm": 0.5725494006977168,
"learning_rate": 4.320892800523827e-06,
"loss": 0.1162,
"step": 426
},
{
"epoch": 1.0892857142857142,
"grad_norm": 0.6096824707475266,
"learning_rate": 4.3010483023225045e-06,
"loss": 0.1123,
"step": 427
},
{
"epoch": 1.0918367346938775,
"grad_norm": 0.648005078531451,
"learning_rate": 4.281215027256592e-06,
"loss": 0.1198,
"step": 428
},
{
"epoch": 1.094387755102041,
"grad_norm": 0.5945617987155178,
"learning_rate": 4.261393293790922e-06,
"loss": 0.1079,
"step": 429
},
{
"epoch": 1.096938775510204,
"grad_norm": 0.5850588276218499,
"learning_rate": 4.241583420204998e-06,
"loss": 0.101,
"step": 430
},
{
"epoch": 1.0994897959183674,
"grad_norm": 0.6791345011904737,
"learning_rate": 4.221785724587883e-06,
"loss": 0.1417,
"step": 431
},
{
"epoch": 1.1020408163265305,
"grad_norm": 0.637553585890781,
"learning_rate": 4.2020005248331056e-06,
"loss": 0.1233,
"step": 432
},
{
"epoch": 1.1045918367346939,
"grad_norm": 0.6509522793471568,
"learning_rate": 4.182228138633541e-06,
"loss": 0.1199,
"step": 433
},
{
"epoch": 1.1071428571428572,
"grad_norm": 0.6443343768927554,
"learning_rate": 4.162468883476319e-06,
"loss": 0.0941,
"step": 434
},
{
"epoch": 1.1096938775510203,
"grad_norm": 0.6027022006132112,
"learning_rate": 4.142723076637723e-06,
"loss": 0.1131,
"step": 435
},
{
"epoch": 1.1122448979591837,
"grad_norm": 0.6397623750787707,
"learning_rate": 4.122991035178093e-06,
"loss": 0.1347,
"step": 436
},
{
"epoch": 1.114795918367347,
"grad_norm": 0.6329399298444501,
"learning_rate": 4.103273075936739e-06,
"loss": 0.1328,
"step": 437
},
{
"epoch": 1.1173469387755102,
"grad_norm": 0.5764546644609387,
"learning_rate": 4.083569515526853e-06,
"loss": 0.1107,
"step": 438
},
{
"epoch": 1.1198979591836735,
"grad_norm": 0.6606104418167631,
"learning_rate": 4.063880670330417e-06,
"loss": 0.128,
"step": 439
},
{
"epoch": 1.1224489795918366,
"grad_norm": 0.6106517768536024,
"learning_rate": 4.04420685649314e-06,
"loss": 0.1152,
"step": 440
},
{
"epoch": 1.125,
"grad_norm": 0.6184011733260752,
"learning_rate": 4.02454838991936e-06,
"loss": 0.131,
"step": 441
},
{
"epoch": 1.1275510204081634,
"grad_norm": 0.6214106226786603,
"learning_rate": 4.004905586266988e-06,
"loss": 0.1218,
"step": 442
},
{
"epoch": 1.1301020408163265,
"grad_norm": 0.5864515890113116,
"learning_rate": 3.985278760942437e-06,
"loss": 0.1131,
"step": 443
},
{
"epoch": 1.1326530612244898,
"grad_norm": 0.6318618768163554,
"learning_rate": 3.965668229095546e-06,
"loss": 0.1052,
"step": 444
},
{
"epoch": 1.135204081632653,
"grad_norm": 0.6627304810877596,
"learning_rate": 3.946074305614537e-06,
"loss": 0.1296,
"step": 445
},
{
"epoch": 1.1377551020408163,
"grad_norm": 0.587382351188069,
"learning_rate": 3.926497305120943e-06,
"loss": 0.1045,
"step": 446
},
{
"epoch": 1.1403061224489797,
"grad_norm": 0.7385589854862445,
"learning_rate": 3.906937541964567e-06,
"loss": 0.1396,
"step": 447
},
{
"epoch": 1.1428571428571428,
"grad_norm": 0.6636112117501785,
"learning_rate": 3.887395330218429e-06,
"loss": 0.1233,
"step": 448
},
{
"epoch": 1.1454081632653061,
"grad_norm": 0.589345932247423,
"learning_rate": 3.8678709836737225e-06,
"loss": 0.1168,
"step": 449
},
{
"epoch": 1.1479591836734695,
"grad_norm": 0.6799593595492233,
"learning_rate": 3.848364815834782e-06,
"loss": 0.1466,
"step": 450
},
{
"epoch": 1.1505102040816326,
"grad_norm": 0.6091206654350524,
"learning_rate": 3.828877139914042e-06,
"loss": 0.1138,
"step": 451
},
{
"epoch": 1.153061224489796,
"grad_norm": 0.5823166449307083,
"learning_rate": 3.809408268827009e-06,
"loss": 0.1189,
"step": 452
},
{
"epoch": 1.155612244897959,
"grad_norm": 0.6079458200187238,
"learning_rate": 3.7899585151872444e-06,
"loss": 0.1302,
"step": 453
},
{
"epoch": 1.1581632653061225,
"grad_norm": 0.6287432593849783,
"learning_rate": 3.7705281913013286e-06,
"loss": 0.1315,
"step": 454
},
{
"epoch": 1.1607142857142858,
"grad_norm": 0.6137365328324991,
"learning_rate": 3.751117609163865e-06,
"loss": 0.126,
"step": 455
},
{
"epoch": 1.163265306122449,
"grad_norm": 0.6115580022695268,
"learning_rate": 3.731727080452464e-06,
"loss": 0.1161,
"step": 456
},
{
"epoch": 1.1658163265306123,
"grad_norm": 0.5932705584378828,
"learning_rate": 3.7123569165227263e-06,
"loss": 0.1059,
"step": 457
},
{
"epoch": 1.1683673469387754,
"grad_norm": 0.7061338946181202,
"learning_rate": 3.6930074284032613e-06,
"loss": 0.1492,
"step": 458
},
{
"epoch": 1.1709183673469388,
"grad_norm": 0.6614069888583459,
"learning_rate": 3.6736789267906803e-06,
"loss": 0.1287,
"step": 459
},
{
"epoch": 1.1734693877551021,
"grad_norm": 0.6099727489081356,
"learning_rate": 3.654371722044616e-06,
"loss": 0.1233,
"step": 460
},
{
"epoch": 1.1760204081632653,
"grad_norm": 0.5734538026453548,
"learning_rate": 3.635086124182734e-06,
"loss": 0.1141,
"step": 461
},
{
"epoch": 1.1785714285714286,
"grad_norm": 0.6570242670654317,
"learning_rate": 3.6158224428757538e-06,
"loss": 0.1339,
"step": 462
},
{
"epoch": 1.181122448979592,
"grad_norm": 0.6261495037300829,
"learning_rate": 3.5965809874424817e-06,
"loss": 0.1189,
"step": 463
},
{
"epoch": 1.183673469387755,
"grad_norm": 0.6587312495933207,
"learning_rate": 3.5773620668448384e-06,
"loss": 0.1341,
"step": 464
},
{
"epoch": 1.1862244897959184,
"grad_norm": 0.564185159561041,
"learning_rate": 3.5581659896829024e-06,
"loss": 0.115,
"step": 465
},
{
"epoch": 1.1887755102040816,
"grad_norm": 0.5477375739868198,
"learning_rate": 3.538993064189954e-06,
"loss": 0.1016,
"step": 466
},
{
"epoch": 1.191326530612245,
"grad_norm": 0.5636030369867936,
"learning_rate": 3.519843598227521e-06,
"loss": 0.1091,
"step": 467
},
{
"epoch": 1.193877551020408,
"grad_norm": 0.6017788620294022,
"learning_rate": 3.500717899280442e-06,
"loss": 0.1091,
"step": 468
},
{
"epoch": 1.1964285714285714,
"grad_norm": 0.5961071194795863,
"learning_rate": 3.4816162744519266e-06,
"loss": 0.1228,
"step": 469
},
{
"epoch": 1.1989795918367347,
"grad_norm": 0.6155967911771543,
"learning_rate": 3.4625390304586224e-06,
"loss": 0.1209,
"step": 470
},
{
"epoch": 1.2015306122448979,
"grad_norm": 0.5994417641613636,
"learning_rate": 3.4434864736256936e-06,
"loss": 0.1126,
"step": 471
},
{
"epoch": 1.2040816326530612,
"grad_norm": 0.5962055760170724,
"learning_rate": 3.424458909881897e-06,
"loss": 0.1238,
"step": 472
},
{
"epoch": 1.2066326530612246,
"grad_norm": 0.6212245734208236,
"learning_rate": 3.4054566447546755e-06,
"loss": 0.1203,
"step": 473
},
{
"epoch": 1.2091836734693877,
"grad_norm": 0.6674392281469284,
"learning_rate": 3.386479983365251e-06,
"loss": 0.1292,
"step": 474
},
{
"epoch": 1.211734693877551,
"grad_norm": 0.619719014082779,
"learning_rate": 3.3675292304237184e-06,
"loss": 0.123,
"step": 475
},
{
"epoch": 1.2142857142857142,
"grad_norm": 0.6787663179071107,
"learning_rate": 3.3486046902241663e-06,
"loss": 0.1297,
"step": 476
},
{
"epoch": 1.2168367346938775,
"grad_norm": 0.615581005728474,
"learning_rate": 3.3297066666397715e-06,
"loss": 0.1261,
"step": 477
},
{
"epoch": 1.219387755102041,
"grad_norm": 0.6772704385194932,
"learning_rate": 3.310835463117942e-06,
"loss": 0.1124,
"step": 478
},
{
"epoch": 1.221938775510204,
"grad_norm": 0.5788924276971349,
"learning_rate": 3.291991382675429e-06,
"loss": 0.1085,
"step": 479
},
{
"epoch": 1.2244897959183674,
"grad_norm": 0.6627991978200464,
"learning_rate": 3.273174727893463e-06,
"loss": 0.1342,
"step": 480
},
{
"epoch": 1.2270408163265305,
"grad_norm": 0.5795534308305166,
"learning_rate": 3.2543858009129028e-06,
"loss": 0.1168,
"step": 481
},
{
"epoch": 1.2295918367346939,
"grad_norm": 0.5802342946393737,
"learning_rate": 3.235624903429374e-06,
"loss": 0.1084,
"step": 482
},
{
"epoch": 1.2321428571428572,
"grad_norm": 0.6081875215495436,
"learning_rate": 3.216892336688435e-06,
"loss": 0.1255,
"step": 483
},
{
"epoch": 1.2346938775510203,
"grad_norm": 0.5606290959505179,
"learning_rate": 3.198188401480734e-06,
"loss": 0.1054,
"step": 484
},
{
"epoch": 1.2372448979591837,
"grad_norm": 0.6254832539000906,
"learning_rate": 3.179513398137176e-06,
"loss": 0.1229,
"step": 485
},
{
"epoch": 1.239795918367347,
"grad_norm": 0.6016469759559874,
"learning_rate": 3.160867626524107e-06,
"loss": 0.1238,
"step": 486
},
{
"epoch": 1.2423469387755102,
"grad_norm": 0.6212628039144623,
"learning_rate": 3.1422513860384972e-06,
"loss": 0.1251,
"step": 487
},
{
"epoch": 1.2448979591836735,
"grad_norm": 0.6001544256782972,
"learning_rate": 3.12366497560313e-06,
"loss": 0.1107,
"step": 488
},
{
"epoch": 1.2474489795918366,
"grad_norm": 0.7212798836278922,
"learning_rate": 3.105108693661807e-06,
"loss": 0.1378,
"step": 489
},
{
"epoch": 1.25,
"grad_norm": 0.5877635178170327,
"learning_rate": 3.0865828381745515e-06,
"loss": 0.1235,
"step": 490
},
{
"epoch": 1.2525510204081631,
"grad_norm": 0.6030584719864545,
"learning_rate": 3.0680877066128287e-06,
"loss": 0.1154,
"step": 491
},
{
"epoch": 1.2551020408163265,
"grad_norm": 0.5769299888240651,
"learning_rate": 3.049623595954766e-06,
"loss": 0.1066,
"step": 492
},
{
"epoch": 1.2576530612244898,
"grad_norm": 0.640929927230285,
"learning_rate": 3.031190802680383e-06,
"loss": 0.1272,
"step": 493
},
{
"epoch": 1.260204081632653,
"grad_norm": 0.6131724938331422,
"learning_rate": 3.0127896227668367e-06,
"loss": 0.136,
"step": 494
},
{
"epoch": 1.2627551020408163,
"grad_norm": 0.6420939632710524,
"learning_rate": 2.9944203516836614e-06,
"loss": 0.1303,
"step": 495
},
{
"epoch": 1.2653061224489797,
"grad_norm": 0.5545302124781333,
"learning_rate": 2.976083284388031e-06,
"loss": 0.1128,
"step": 496
},
{
"epoch": 1.2678571428571428,
"grad_norm": 0.612470227896181,
"learning_rate": 2.95777871532002e-06,
"loss": 0.1118,
"step": 497
},
{
"epoch": 1.2704081632653061,
"grad_norm": 0.6065384958439005,
"learning_rate": 2.9395069383978725e-06,
"loss": 0.125,
"step": 498
},
{
"epoch": 1.2729591836734695,
"grad_norm": 0.667310023102144,
"learning_rate": 2.9212682470132904e-06,
"loss": 0.1334,
"step": 499
},
{
"epoch": 1.2755102040816326,
"grad_norm": 0.5680061294949651,
"learning_rate": 2.9030629340267165e-06,
"loss": 0.1013,
"step": 500
},
{
"epoch": 1.2755102040816326,
"eval_loss": 0.17167198657989502,
"eval_runtime": 2.1684,
"eval_samples_per_second": 14.757,
"eval_steps_per_second": 3.689,
"step": 500
},
{
"epoch": 1.278061224489796,
"grad_norm": 0.5741423355706899,
"learning_rate": 2.8848912917626295e-06,
"loss": 0.1096,
"step": 501
},
{
"epoch": 1.280612244897959,
"grad_norm": 0.5925872438345459,
"learning_rate": 2.8667536120048616e-06,
"loss": 0.1157,
"step": 502
},
{
"epoch": 1.2831632653061225,
"grad_norm": 0.6383357759073394,
"learning_rate": 2.8486501859918967e-06,
"loss": 0.1206,
"step": 503
},
{
"epoch": 1.2857142857142856,
"grad_norm": 0.6120389722512302,
"learning_rate": 2.83058130441221e-06,
"loss": 0.1172,
"step": 504
},
{
"epoch": 1.288265306122449,
"grad_norm": 0.6437124638460001,
"learning_rate": 2.8125472573995903e-06,
"loss": 0.1302,
"step": 505
},
{
"epoch": 1.2908163265306123,
"grad_norm": 0.6071220260605691,
"learning_rate": 2.794548334528486e-06,
"loss": 0.1202,
"step": 506
},
{
"epoch": 1.2933673469387754,
"grad_norm": 0.5954473533080002,
"learning_rate": 2.776584824809355e-06,
"loss": 0.1141,
"step": 507
},
{
"epoch": 1.2959183673469388,
"grad_norm": 0.6173913926365706,
"learning_rate": 2.7586570166840154e-06,
"loss": 0.1091,
"step": 508
},
{
"epoch": 1.2984693877551021,
"grad_norm": 0.5459335138030598,
"learning_rate": 2.74076519802103e-06,
"loss": 0.1035,
"step": 509
},
{
"epoch": 1.3010204081632653,
"grad_norm": 0.6207207997242377,
"learning_rate": 2.7229096561110703e-06,
"loss": 0.1234,
"step": 510
},
{
"epoch": 1.3035714285714286,
"grad_norm": 0.5489479626562238,
"learning_rate": 2.705090677662311e-06,
"loss": 0.1039,
"step": 511
},
{
"epoch": 1.306122448979592,
"grad_norm": 0.5972773116742518,
"learning_rate": 2.687308548795825e-06,
"loss": 0.1102,
"step": 512
},
{
"epoch": 1.308673469387755,
"grad_norm": 0.6008948675073799,
"learning_rate": 2.6695635550409806e-06,
"loss": 0.1103,
"step": 513
},
{
"epoch": 1.3112244897959184,
"grad_norm": 0.6672021707421822,
"learning_rate": 2.651855981330872e-06,
"loss": 0.1274,
"step": 514
},
{
"epoch": 1.3137755102040816,
"grad_norm": 0.6088405581814523,
"learning_rate": 2.634186111997734e-06,
"loss": 0.1102,
"step": 515
},
{
"epoch": 1.316326530612245,
"grad_norm": 0.6482576902690778,
"learning_rate": 2.6165542307683744e-06,
"loss": 0.1376,
"step": 516
},
{
"epoch": 1.318877551020408,
"grad_norm": 0.6204897670785255,
"learning_rate": 2.5989606207596262e-06,
"loss": 0.1148,
"step": 517
},
{
"epoch": 1.3214285714285714,
"grad_norm": 0.573934588312968,
"learning_rate": 2.5814055644738013e-06,
"loss": 0.1062,
"step": 518
},
{
"epoch": 1.3239795918367347,
"grad_norm": 0.65506919752253,
"learning_rate": 2.5638893437941426e-06,
"loss": 0.1274,
"step": 519
},
{
"epoch": 1.3265306122448979,
"grad_norm": 0.5654354860920429,
"learning_rate": 2.5464122399803126e-06,
"loss": 0.0995,
"step": 520
},
{
"epoch": 1.3290816326530612,
"grad_norm": 0.6193610227103749,
"learning_rate": 2.5289745336638634e-06,
"loss": 0.1106,
"step": 521
},
{
"epoch": 1.3316326530612246,
"grad_norm": 0.5918931034127528,
"learning_rate": 2.5115765048437445e-06,
"loss": 0.112,
"step": 522
},
{
"epoch": 1.3341836734693877,
"grad_norm": 0.5669060325477613,
"learning_rate": 2.494218432881797e-06,
"loss": 0.1059,
"step": 523
},
{
"epoch": 1.336734693877551,
"grad_norm": 0.5976675877158825,
"learning_rate": 2.4769005964982718e-06,
"loss": 0.1081,
"step": 524
},
{
"epoch": 1.3392857142857144,
"grad_norm": 0.6899118506088527,
"learning_rate": 2.4596232737673544e-06,
"loss": 0.1313,
"step": 525
},
{
"epoch": 1.3418367346938775,
"grad_norm": 0.6341327579512294,
"learning_rate": 2.4423867421126923e-06,
"loss": 0.1225,
"step": 526
},
{
"epoch": 1.344387755102041,
"grad_norm": 0.5935000252838433,
"learning_rate": 2.425191278302954e-06,
"loss": 0.1218,
"step": 527
},
{
"epoch": 1.346938775510204,
"grad_norm": 0.577233550155536,
"learning_rate": 2.408037158447375e-06,
"loss": 0.1117,
"step": 528
},
{
"epoch": 1.3494897959183674,
"grad_norm": 0.5910734869924921,
"learning_rate": 2.3909246579913265e-06,
"loss": 0.1052,
"step": 529
},
{
"epoch": 1.3520408163265305,
"grad_norm": 0.6022665804010583,
"learning_rate": 2.3738540517118953e-06,
"loss": 0.1092,
"step": 530
},
{
"epoch": 1.3545918367346939,
"grad_norm": 0.6087219965715591,
"learning_rate": 2.356825613713463e-06,
"loss": 0.1262,
"step": 531
},
{
"epoch": 1.3571428571428572,
"grad_norm": 0.5961121795834876,
"learning_rate": 2.339839617423318e-06,
"loss": 0.1148,
"step": 532
},
{
"epoch": 1.3596938775510203,
"grad_norm": 0.5652951454072906,
"learning_rate": 2.322896335587255e-06,
"loss": 0.1009,
"step": 533
},
{
"epoch": 1.3622448979591837,
"grad_norm": 0.6288949087425784,
"learning_rate": 2.305996040265198e-06,
"loss": 0.1168,
"step": 534
},
{
"epoch": 1.364795918367347,
"grad_norm": 0.6079352763616246,
"learning_rate": 2.289139002826835e-06,
"loss": 0.1212,
"step": 535
},
{
"epoch": 1.3673469387755102,
"grad_norm": 0.6069904187749375,
"learning_rate": 2.272325493947257e-06,
"loss": 0.1126,
"step": 536
},
{
"epoch": 1.3698979591836735,
"grad_norm": 0.5730921239886521,
"learning_rate": 2.255555783602609e-06,
"loss": 0.1112,
"step": 537
},
{
"epoch": 1.3724489795918369,
"grad_norm": 0.6388291813483298,
"learning_rate": 2.238830141065765e-06,
"loss": 0.1283,
"step": 538
},
{
"epoch": 1.375,
"grad_norm": 0.5797940980374893,
"learning_rate": 2.2221488349019903e-06,
"loss": 0.1112,
"step": 539
},
{
"epoch": 1.3775510204081631,
"grad_norm": 0.7076481363035177,
"learning_rate": 2.2055121329646416e-06,
"loss": 0.1351,
"step": 540
},
{
"epoch": 1.3801020408163265,
"grad_norm": 0.624306986743108,
"learning_rate": 2.1889203023908655e-06,
"loss": 0.1185,
"step": 541
},
{
"epoch": 1.3826530612244898,
"grad_norm": 0.6470359831455992,
"learning_rate": 2.1723736095972946e-06,
"loss": 0.1351,
"step": 542
},
{
"epoch": 1.385204081632653,
"grad_norm": 0.6481944902344723,
"learning_rate": 2.155872320275789e-06,
"loss": 0.1276,
"step": 543
},
{
"epoch": 1.3877551020408163,
"grad_norm": 0.602073181996085,
"learning_rate": 2.139416699389153e-06,
"loss": 0.1186,
"step": 544
},
{
"epoch": 1.3903061224489797,
"grad_norm": 0.6084186098092307,
"learning_rate": 2.123007011166894e-06,
"loss": 0.1193,
"step": 545
},
{
"epoch": 1.3928571428571428,
"grad_norm": 0.6633754617972489,
"learning_rate": 2.1066435191009717e-06,
"loss": 0.136,
"step": 546
},
{
"epoch": 1.3954081632653061,
"grad_norm": 0.6598906975031021,
"learning_rate": 2.090326485941571e-06,
"loss": 0.1355,
"step": 547
},
{
"epoch": 1.3979591836734695,
"grad_norm": 0.6040777362158246,
"learning_rate": 2.074056173692881e-06,
"loss": 0.1165,
"step": 548
},
{
"epoch": 1.4005102040816326,
"grad_norm": 0.5450778208953521,
"learning_rate": 2.0578328436088903e-06,
"loss": 0.1043,
"step": 549
},
{
"epoch": 1.403061224489796,
"grad_norm": 0.5919992077560257,
"learning_rate": 2.041656756189184e-06,
"loss": 0.1078,
"step": 550
},
{
"epoch": 1.405612244897959,
"grad_norm": 0.5568670148515367,
"learning_rate": 2.025528171174775e-06,
"loss": 0.1043,
"step": 551
},
{
"epoch": 1.4081632653061225,
"grad_norm": 0.6076808020159301,
"learning_rate": 2.00944734754392e-06,
"loss": 0.1255,
"step": 552
},
{
"epoch": 1.4107142857142856,
"grad_norm": 0.589238565781633,
"learning_rate": 1.9934145435079705e-06,
"loss": 0.1199,
"step": 553
},
{
"epoch": 1.413265306122449,
"grad_norm": 0.6444093306560709,
"learning_rate": 1.977430016507222e-06,
"loss": 0.1272,
"step": 554
},
{
"epoch": 1.4158163265306123,
"grad_norm": 0.601713977214125,
"learning_rate": 1.9614940232067758e-06,
"loss": 0.1169,
"step": 555
},
{
"epoch": 1.4183673469387754,
"grad_norm": 1.1093633597999613,
"learning_rate": 1.945606819492429e-06,
"loss": 0.1201,
"step": 556
},
{
"epoch": 1.4209183673469388,
"grad_norm": 0.6666149410482589,
"learning_rate": 1.929768660466557e-06,
"loss": 0.1114,
"step": 557
},
{
"epoch": 1.4234693877551021,
"grad_norm": 0.6082880380250021,
"learning_rate": 1.913979800444021e-06,
"loss": 0.1153,
"step": 558
},
{
"epoch": 1.4260204081632653,
"grad_norm": 0.6431693821610938,
"learning_rate": 1.898240492948083e-06,
"loss": 0.1253,
"step": 559
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.6278068786376103,
"learning_rate": 1.8825509907063328e-06,
"loss": 0.123,
"step": 560
},
{
"epoch": 1.431122448979592,
"grad_norm": 0.6946774708929673,
"learning_rate": 1.866911545646637e-06,
"loss": 0.1289,
"step": 561
},
{
"epoch": 1.433673469387755,
"grad_norm": 0.6384174590542929,
"learning_rate": 1.8513224088930814e-06,
"loss": 0.1227,
"step": 562
},
{
"epoch": 1.4362244897959184,
"grad_norm": 0.5938784568604658,
"learning_rate": 1.8357838307619574e-06,
"loss": 0.1244,
"step": 563
},
{
"epoch": 1.4387755102040816,
"grad_norm": 0.6319464389681908,
"learning_rate": 1.8202960607577246e-06,
"loss": 0.1282,
"step": 564
},
{
"epoch": 1.441326530612245,
"grad_norm": 0.6527056924637811,
"learning_rate": 1.8048593475690112e-06,
"loss": 0.1361,
"step": 565
},
{
"epoch": 1.443877551020408,
"grad_norm": 0.6763003101411658,
"learning_rate": 1.7894739390646227e-06,
"loss": 0.1341,
"step": 566
},
{
"epoch": 1.4464285714285714,
"grad_norm": 0.6141066682445433,
"learning_rate": 1.7741400822895633e-06,
"loss": 0.1157,
"step": 567
},
{
"epoch": 1.4489795918367347,
"grad_norm": 0.6216654523919399,
"learning_rate": 1.7588580234610592e-06,
"loss": 0.1109,
"step": 568
},
{
"epoch": 1.4515306122448979,
"grad_norm": 0.6008945770620517,
"learning_rate": 1.7436280079646172e-06,
"loss": 0.1192,
"step": 569
},
{
"epoch": 1.4540816326530612,
"grad_norm": 0.6118854666306467,
"learning_rate": 1.728450280350079e-06,
"loss": 0.1167,
"step": 570
},
{
"epoch": 1.4566326530612246,
"grad_norm": 0.6283008080395546,
"learning_rate": 1.7133250843276928e-06,
"loss": 0.1277,
"step": 571
},
{
"epoch": 1.4591836734693877,
"grad_norm": 0.6654587725209296,
"learning_rate": 1.6982526627642043e-06,
"loss": 0.1276,
"step": 572
},
{
"epoch": 1.461734693877551,
"grad_norm": 0.7397859180981282,
"learning_rate": 1.6832332576789501e-06,
"loss": 0.131,
"step": 573
},
{
"epoch": 1.4642857142857144,
"grad_norm": 0.6487672371648604,
"learning_rate": 1.6682671102399806e-06,
"loss": 0.1356,
"step": 574
},
{
"epoch": 1.4668367346938775,
"grad_norm": 0.6205070557254059,
"learning_rate": 1.6533544607601815e-06,
"loss": 0.1234,
"step": 575
},
{
"epoch": 1.469387755102041,
"grad_norm": 0.6493137431186743,
"learning_rate": 1.6384955486934157e-06,
"loss": 0.1188,
"step": 576
},
{
"epoch": 1.471938775510204,
"grad_norm": 0.6479947647795395,
"learning_rate": 1.6236906126306824e-06,
"loss": 0.1404,
"step": 577
},
{
"epoch": 1.4744897959183674,
"grad_norm": 0.5133523501462218,
"learning_rate": 1.6089398902962767e-06,
"loss": 0.0822,
"step": 578
},
{
"epoch": 1.4770408163265305,
"grad_norm": 0.6734740471049492,
"learning_rate": 1.5942436185439842e-06,
"loss": 0.0994,
"step": 579
},
{
"epoch": 1.4795918367346939,
"grad_norm": 0.616152641405367,
"learning_rate": 1.5796020333532696e-06,
"loss": 0.1246,
"step": 580
},
{
"epoch": 1.4821428571428572,
"grad_norm": 0.6141253226645143,
"learning_rate": 1.5650153698254916e-06,
"loss": 0.1282,
"step": 581
},
{
"epoch": 1.4846938775510203,
"grad_norm": 0.6364488406850194,
"learning_rate": 1.5504838621801272e-06,
"loss": 0.1233,
"step": 582
},
{
"epoch": 1.4872448979591837,
"grad_norm": 0.622428601485364,
"learning_rate": 1.5360077437510046e-06,
"loss": 0.1147,
"step": 583
},
{
"epoch": 1.489795918367347,
"grad_norm": 0.6015605938585638,
"learning_rate": 1.5215872469825682e-06,
"loss": 0.1085,
"step": 584
},
{
"epoch": 1.4923469387755102,
"grad_norm": 0.6073886538256179,
"learning_rate": 1.5072226034261373e-06,
"loss": 0.1218,
"step": 585
},
{
"epoch": 1.4948979591836735,
"grad_norm": 0.6260867939739607,
"learning_rate": 1.4929140437361916e-06,
"loss": 0.1307,
"step": 586
},
{
"epoch": 1.4974489795918369,
"grad_norm": 0.6184344057757138,
"learning_rate": 1.4786617976666674e-06,
"loss": 0.1249,
"step": 587
},
{
"epoch": 1.5,
"grad_norm": 0.6501373896364471,
"learning_rate": 1.4644660940672628e-06,
"loss": 0.1303,
"step": 588
},
{
"epoch": 1.5025510204081631,
"grad_norm": 0.59898207205143,
"learning_rate": 1.4503271608797741e-06,
"loss": 0.1252,
"step": 589
},
{
"epoch": 1.5051020408163265,
"grad_norm": 0.5658331177409838,
"learning_rate": 1.4362452251344283e-06,
"loss": 0.1066,
"step": 590
},
{
"epoch": 1.5076530612244898,
"grad_norm": 0.5963824135058199,
"learning_rate": 1.4222205129462347e-06,
"loss": 0.11,
"step": 591
},
{
"epoch": 1.510204081632653,
"grad_norm": 0.6320736861925936,
"learning_rate": 1.4082532495113627e-06,
"loss": 0.1187,
"step": 592
},
{
"epoch": 1.5127551020408163,
"grad_norm": 0.608135373271551,
"learning_rate": 1.3943436591035208e-06,
"loss": 0.1191,
"step": 593
},
{
"epoch": 1.5153061224489797,
"grad_norm": 0.5958622972293384,
"learning_rate": 1.3804919650703551e-06,
"loss": 0.1112,
"step": 594
},
{
"epoch": 1.5178571428571428,
"grad_norm": 0.5984589902887925,
"learning_rate": 1.3666983898298659e-06,
"loss": 0.1063,
"step": 595
},
{
"epoch": 1.5204081632653061,
"grad_norm": 0.5813429342860311,
"learning_rate": 1.3529631548668298e-06,
"loss": 0.1092,
"step": 596
},
{
"epoch": 1.5229591836734695,
"grad_norm": 0.5785015266459514,
"learning_rate": 1.3392864807292533e-06,
"loss": 0.1192,
"step": 597
},
{
"epoch": 1.5255102040816326,
"grad_norm": 0.6096834391748477,
"learning_rate": 1.3256685870248227e-06,
"loss": 0.1178,
"step": 598
},
{
"epoch": 1.5280612244897958,
"grad_norm": 0.6385790013683148,
"learning_rate": 1.3121096924173822e-06,
"loss": 0.135,
"step": 599
},
{
"epoch": 1.5306122448979593,
"grad_norm": 0.5985384578843902,
"learning_rate": 1.298610014623423e-06,
"loss": 0.1174,
"step": 600
},
{
"epoch": 1.5306122448979593,
"eval_loss": 0.1697322279214859,
"eval_runtime": 2.1699,
"eval_samples_per_second": 14.747,
"eval_steps_per_second": 3.687,
"step": 600
},
{
"epoch": 1.5331632653061225,
"grad_norm": 0.5750684962923132,
"learning_rate": 1.2851697704085835e-06,
"loss": 0.1163,
"step": 601
},
{
"epoch": 1.5357142857142856,
"grad_norm": 0.5726286154930482,
"learning_rate": 1.2717891755841722e-06,
"loss": 0.1014,
"step": 602
},
{
"epoch": 1.538265306122449,
"grad_norm": 0.5978740659319679,
"learning_rate": 1.2584684450037033e-06,
"loss": 0.1243,
"step": 603
},
{
"epoch": 1.5408163265306123,
"grad_norm": 0.5756266287690696,
"learning_rate": 1.2452077925594435e-06,
"loss": 0.122,
"step": 604
},
{
"epoch": 1.5433673469387754,
"grad_norm": 0.6125015524156492,
"learning_rate": 1.2320074311789804e-06,
"loss": 0.1175,
"step": 605
},
{
"epoch": 1.5459183673469388,
"grad_norm": 0.6519146321418777,
"learning_rate": 1.2188675728217986e-06,
"loss": 0.1239,
"step": 606
},
{
"epoch": 1.5484693877551021,
"grad_norm": 0.5822060099198982,
"learning_rate": 1.2057884284758831e-06,
"loss": 0.1104,
"step": 607
},
{
"epoch": 1.5510204081632653,
"grad_norm": 0.5734092094057847,
"learning_rate": 1.1927702081543279e-06,
"loss": 0.1177,
"step": 608
},
{
"epoch": 1.5535714285714286,
"grad_norm": 0.6496922741781745,
"learning_rate": 1.1798131208919628e-06,
"loss": 0.1256,
"step": 609
},
{
"epoch": 1.556122448979592,
"grad_norm": 0.5759657676358546,
"learning_rate": 1.166917374742e-06,
"loss": 0.1065,
"step": 610
},
{
"epoch": 1.558673469387755,
"grad_norm": 0.5897621812551755,
"learning_rate": 1.1540831767726868e-06,
"loss": 0.1107,
"step": 611
},
{
"epoch": 1.5612244897959182,
"grad_norm": 0.6155690329879021,
"learning_rate": 1.141310733063991e-06,
"loss": 0.1297,
"step": 612
},
{
"epoch": 1.5637755102040818,
"grad_norm": 0.5972777096073331,
"learning_rate": 1.1286002487042858e-06,
"loss": 0.1104,
"step": 613
},
{
"epoch": 1.566326530612245,
"grad_norm": 0.5545756200377255,
"learning_rate": 1.1159519277870507e-06,
"loss": 0.1063,
"step": 614
},
{
"epoch": 1.568877551020408,
"grad_norm": 0.6473303351126259,
"learning_rate": 1.1033659734076069e-06,
"loss": 0.1357,
"step": 615
},
{
"epoch": 1.5714285714285714,
"grad_norm": 0.6289627879632347,
"learning_rate": 1.0908425876598512e-06,
"loss": 0.1223,
"step": 616
},
{
"epoch": 1.5739795918367347,
"grad_norm": 0.6548826721422767,
"learning_rate": 1.078381971633004e-06,
"loss": 0.1141,
"step": 617
},
{
"epoch": 1.5765306122448979,
"grad_norm": 0.6073010852184507,
"learning_rate": 1.0659843254083919e-06,
"loss": 0.1141,
"step": 618
},
{
"epoch": 1.5790816326530612,
"grad_norm": 0.624859565552834,
"learning_rate": 1.0536498480562224e-06,
"loss": 0.1037,
"step": 619
},
{
"epoch": 1.5816326530612246,
"grad_norm": 0.6145027976623533,
"learning_rate": 1.041378737632402e-06,
"loss": 0.1141,
"step": 620
},
{
"epoch": 1.5841836734693877,
"grad_norm": 0.6121850726408201,
"learning_rate": 1.0291711911753426e-06,
"loss": 0.1176,
"step": 621
},
{
"epoch": 1.586734693877551,
"grad_norm": 0.6281282406543977,
"learning_rate": 1.0170274047028068e-06,
"loss": 0.1341,
"step": 622
},
{
"epoch": 1.5892857142857144,
"grad_norm": 0.6207735780436981,
"learning_rate": 1.004947573208756e-06,
"loss": 0.1267,
"step": 623
},
{
"epoch": 1.5918367346938775,
"grad_norm": 0.593259088117168,
"learning_rate": 9.929318906602176e-07,
"loss": 0.1121,
"step": 624
},
{
"epoch": 1.5943877551020407,
"grad_norm": 0.6268891283106383,
"learning_rate": 9.809805499941766e-07,
"loss": 0.1163,
"step": 625
},
{
"epoch": 1.5969387755102042,
"grad_norm": 0.5988029304619094,
"learning_rate": 9.690937431144725e-07,
"loss": 0.1108,
"step": 626
},
{
"epoch": 1.5994897959183674,
"grad_norm": 0.5841571355523898,
"learning_rate": 9.572716608887206e-07,
"loss": 0.12,
"step": 627
},
{
"epoch": 1.6020408163265305,
"grad_norm": 0.6290182145775021,
"learning_rate": 9.455144931452459e-07,
"loss": 0.1154,
"step": 628
},
{
"epoch": 1.6045918367346939,
"grad_norm": 0.6042795874115894,
"learning_rate": 9.338224286700331e-07,
"loss": 0.1031,
"step": 629
},
{
"epoch": 1.6071428571428572,
"grad_norm": 0.6567260772251352,
"learning_rate": 9.221956552036992e-07,
"loss": 0.1366,
"step": 630
},
{
"epoch": 1.6096938775510203,
"grad_norm": 0.6078750345148625,
"learning_rate": 9.106343594384781e-07,
"loss": 0.1097,
"step": 631
},
{
"epoch": 1.6122448979591837,
"grad_norm": 0.6804350031515204,
"learning_rate": 8.991387270152202e-07,
"loss": 0.1274,
"step": 632
},
{
"epoch": 1.614795918367347,
"grad_norm": 0.5317896654689815,
"learning_rate": 8.877089425204139e-07,
"loss": 0.0822,
"step": 633
},
{
"epoch": 1.6173469387755102,
"grad_norm": 0.6041659502330885,
"learning_rate": 8.76345189483222e-07,
"loss": 0.1231,
"step": 634
},
{
"epoch": 1.6198979591836735,
"grad_norm": 0.5840192411770336,
"learning_rate": 8.650476503725302e-07,
"loss": 0.1046,
"step": 635
},
{
"epoch": 1.6224489795918369,
"grad_norm": 0.6028973067898996,
"learning_rate": 8.538165065940263e-07,
"loss": 0.1121,
"step": 636
},
{
"epoch": 1.625,
"grad_norm": 0.5931218547558655,
"learning_rate": 8.426519384872733e-07,
"loss": 0.1192,
"step": 637
},
{
"epoch": 1.6275510204081631,
"grad_norm": 0.5938652446965892,
"learning_rate": 8.315541253228332e-07,
"loss": 0.1237,
"step": 638
},
{
"epoch": 1.6301020408163265,
"grad_norm": 0.5798559230674245,
"learning_rate": 8.205232452993705e-07,
"loss": 0.1023,
"step": 639
},
{
"epoch": 1.6326530612244898,
"grad_norm": 0.5604806286202133,
"learning_rate": 8.095594755407971e-07,
"loss": 0.1075,
"step": 640
},
{
"epoch": 1.635204081632653,
"grad_norm": 0.5927466172637318,
"learning_rate": 7.986629920934325e-07,
"loss": 0.1147,
"step": 641
},
{
"epoch": 1.6377551020408163,
"grad_norm": 0.7286784566346942,
"learning_rate": 7.878339699231702e-07,
"loss": 0.1396,
"step": 642
},
{
"epoch": 1.6403061224489797,
"grad_norm": 0.6117769245456077,
"learning_rate": 7.770725829126729e-07,
"loss": 0.1145,
"step": 643
},
{
"epoch": 1.6428571428571428,
"grad_norm": 0.6722856470291241,
"learning_rate": 7.663790038585794e-07,
"loss": 0.1379,
"step": 644
},
{
"epoch": 1.6454081632653061,
"grad_norm": 0.6754780154914234,
"learning_rate": 7.557534044687293e-07,
"loss": 0.1367,
"step": 645
},
{
"epoch": 1.6479591836734695,
"grad_norm": 0.6734627155633237,
"learning_rate": 7.451959553594051e-07,
"loss": 0.1353,
"step": 646
},
{
"epoch": 1.6505102040816326,
"grad_norm": 0.6588713762790792,
"learning_rate": 7.347068260525963e-07,
"loss": 0.1386,
"step": 647
},
{
"epoch": 1.6530612244897958,
"grad_norm": 0.57992012984708,
"learning_rate": 7.242861849732696e-07,
"loss": 0.1171,
"step": 648
},
{
"epoch": 1.6556122448979593,
"grad_norm": 0.6147459621798507,
"learning_rate": 7.139341994466742e-07,
"loss": 0.1258,
"step": 649
},
{
"epoch": 1.6581632653061225,
"grad_norm": 0.6233471241208175,
"learning_rate": 7.036510356956494e-07,
"loss": 0.1108,
"step": 650
},
{
"epoch": 1.6607142857142856,
"grad_norm": 0.6273261120821847,
"learning_rate": 6.934368588379553e-07,
"loss": 0.1105,
"step": 651
},
{
"epoch": 1.663265306122449,
"grad_norm": 0.5674331409773821,
"learning_rate": 6.832918328836247e-07,
"loss": 0.1008,
"step": 652
},
{
"epoch": 1.6658163265306123,
"grad_norm": 0.6514909353266433,
"learning_rate": 6.73216120732324e-07,
"loss": 0.1369,
"step": 653
},
{
"epoch": 1.6683673469387754,
"grad_norm": 0.5875385008989574,
"learning_rate": 6.632098841707458e-07,
"loss": 0.1062,
"step": 654
},
{
"epoch": 1.6709183673469388,
"grad_norm": 0.6013339921903986,
"learning_rate": 6.532732838700023e-07,
"loss": 0.1053,
"step": 655
},
{
"epoch": 1.6734693877551021,
"grad_norm": 0.6317221666265858,
"learning_rate": 6.43406479383053e-07,
"loss": 0.121,
"step": 656
},
{
"epoch": 1.6760204081632653,
"grad_norm": 0.6159119707750507,
"learning_rate": 6.336096291421379e-07,
"loss": 0.117,
"step": 657
},
{
"epoch": 1.6785714285714286,
"grad_norm": 0.6540774604197073,
"learning_rate": 6.238828904562316e-07,
"loss": 0.1372,
"step": 658
},
{
"epoch": 1.681122448979592,
"grad_norm": 0.5855990129942852,
"learning_rate": 6.142264195085273e-07,
"loss": 0.1124,
"step": 659
},
{
"epoch": 1.683673469387755,
"grad_norm": 0.5905644474117571,
"learning_rate": 6.04640371353914e-07,
"loss": 0.1103,
"step": 660
},
{
"epoch": 1.6862244897959182,
"grad_norm": 0.5828354953054207,
"learning_rate": 5.951248999165032e-07,
"loss": 0.1152,
"step": 661
},
{
"epoch": 1.6887755102040818,
"grad_norm": 0.6090515161337641,
"learning_rate": 5.856801579871457e-07,
"loss": 0.1186,
"step": 662
},
{
"epoch": 1.691326530612245,
"grad_norm": 0.6893535410804377,
"learning_rate": 5.763062972209793e-07,
"loss": 0.1313,
"step": 663
},
{
"epoch": 1.693877551020408,
"grad_norm": 0.6124288616769757,
"learning_rate": 5.670034681349995e-07,
"loss": 0.1233,
"step": 664
},
{
"epoch": 1.6964285714285714,
"grad_norm": 0.6003579181797115,
"learning_rate": 5.577718201056392e-07,
"loss": 0.1138,
"step": 665
},
{
"epoch": 1.6989795918367347,
"grad_norm": 0.6545089312294674,
"learning_rate": 5.486115013663668e-07,
"loss": 0.1265,
"step": 666
},
{
"epoch": 1.7015306122448979,
"grad_norm": 0.6202843082223166,
"learning_rate": 5.395226590053126e-07,
"loss": 0.1187,
"step": 667
},
{
"epoch": 1.7040816326530612,
"grad_norm": 0.6030705142860276,
"learning_rate": 5.305054389629022e-07,
"loss": 0.1031,
"step": 668
},
{
"epoch": 1.7066326530612246,
"grad_norm": 0.5826774737854693,
"learning_rate": 5.215599860295162e-07,
"loss": 0.1133,
"step": 669
},
{
"epoch": 1.7091836734693877,
"grad_norm": 0.6377804413011416,
"learning_rate": 5.126864438431628e-07,
"loss": 0.122,
"step": 670
},
{
"epoch": 1.711734693877551,
"grad_norm": 0.7283982000793318,
"learning_rate": 5.038849548871705e-07,
"loss": 0.1439,
"step": 671
},
{
"epoch": 1.7142857142857144,
"grad_norm": 0.7141564642792316,
"learning_rate": 4.951556604879049e-07,
"loss": 0.1521,
"step": 672
},
{
"epoch": 1.7168367346938775,
"grad_norm": 0.593372869293056,
"learning_rate": 4.864987008124949e-07,
"loss": 0.1152,
"step": 673
},
{
"epoch": 1.7193877551020407,
"grad_norm": 0.5778699052985155,
"learning_rate": 4.779142148665855e-07,
"loss": 0.1167,
"step": 674
},
{
"epoch": 1.7219387755102042,
"grad_norm": 0.6047519754038335,
"learning_rate": 4.6940234049210277e-07,
"loss": 0.1144,
"step": 675
},
{
"epoch": 1.7244897959183674,
"grad_norm": 0.6089775983442018,
"learning_rate": 4.6096321436504e-07,
"loss": 0.1222,
"step": 676
},
{
"epoch": 1.7270408163265305,
"grad_norm": 0.6215389881009823,
"learning_rate": 4.5259697199326714e-07,
"loss": 0.1243,
"step": 677
},
{
"epoch": 1.7295918367346939,
"grad_norm": 0.5968710527528669,
"learning_rate": 4.4430374771435245e-07,
"loss": 0.1182,
"step": 678
},
{
"epoch": 1.7321428571428572,
"grad_norm": 0.5602996301369507,
"learning_rate": 4.3608367469340553e-07,
"loss": 0.1137,
"step": 679
},
{
"epoch": 1.7346938775510203,
"grad_norm": 0.6177976764551923,
"learning_rate": 4.279368849209381e-07,
"loss": 0.1171,
"step": 680
},
{
"epoch": 1.7372448979591837,
"grad_norm": 0.6362139473836395,
"learning_rate": 4.198635092107456e-07,
"loss": 0.1357,
"step": 681
},
{
"epoch": 1.739795918367347,
"grad_norm": 0.616733264645363,
"learning_rate": 4.1186367719780737e-07,
"loss": 0.1216,
"step": 682
},
{
"epoch": 1.7423469387755102,
"grad_norm": 0.6113272277992989,
"learning_rate": 4.039375173362053e-07,
"loss": 0.1247,
"step": 683
},
{
"epoch": 1.7448979591836735,
"grad_norm": 0.5859466621068377,
"learning_rate": 3.960851568970586e-07,
"loss": 0.1156,
"step": 684
},
{
"epoch": 1.7474489795918369,
"grad_norm": 0.5768103705810081,
"learning_rate": 3.883067219664838e-07,
"loss": 0.1146,
"step": 685
},
{
"epoch": 1.75,
"grad_norm": 0.5900740212059127,
"learning_rate": 3.8060233744356634e-07,
"loss": 0.1183,
"step": 686
},
{
"epoch": 1.7525510204081631,
"grad_norm": 0.6071582238903932,
"learning_rate": 3.729721270383585e-07,
"loss": 0.1068,
"step": 687
},
{
"epoch": 1.7551020408163265,
"grad_norm": 0.579043109644617,
"learning_rate": 3.6541621326989183e-07,
"loss": 0.1133,
"step": 688
},
{
"epoch": 1.7576530612244898,
"grad_norm": 0.5317309897500903,
"learning_rate": 3.579347174642073e-07,
"loss": 0.095,
"step": 689
},
{
"epoch": 1.760204081632653,
"grad_norm": 0.5792381004031227,
"learning_rate": 3.5052775975241203e-07,
"loss": 0.119,
"step": 690
},
{
"epoch": 1.7627551020408163,
"grad_norm": 0.5806092547850344,
"learning_rate": 3.431954590687464e-07,
"loss": 0.1027,
"step": 691
},
{
"epoch": 1.7653061224489797,
"grad_norm": 0.5720600883457903,
"learning_rate": 3.359379331486762e-07,
"loss": 0.1076,
"step": 692
},
{
"epoch": 1.7678571428571428,
"grad_norm": 0.6301152495971145,
"learning_rate": 3.287552985270015e-07,
"loss": 0.1226,
"step": 693
},
{
"epoch": 1.7704081632653061,
"grad_norm": 0.6366540857894387,
"learning_rate": 3.216476705359839e-07,
"loss": 0.1302,
"step": 694
},
{
"epoch": 1.7729591836734695,
"grad_norm": 0.5689854208147107,
"learning_rate": 3.146151633034983e-07,
"loss": 0.0909,
"step": 695
},
{
"epoch": 1.7755102040816326,
"grad_norm": 0.624163406400808,
"learning_rate": 3.076578897511978e-07,
"loss": 0.1166,
"step": 696
},
{
"epoch": 1.7780612244897958,
"grad_norm": 0.605898818231271,
"learning_rate": 3.0077596159270086e-07,
"loss": 0.1137,
"step": 697
},
{
"epoch": 1.7806122448979593,
"grad_norm": 0.5928268623308518,
"learning_rate": 2.939694893317979e-07,
"loss": 0.112,
"step": 698
},
{
"epoch": 1.7831632653061225,
"grad_norm": 0.565357840643717,
"learning_rate": 2.8723858226067493e-07,
"loss": 0.0942,
"step": 699
},
{
"epoch": 1.7857142857142856,
"grad_norm": 0.6756531287422569,
"learning_rate": 2.8058334845816214e-07,
"loss": 0.1391,
"step": 700
},
{
"epoch": 1.7857142857142856,
"eval_loss": 0.16841819882392883,
"eval_runtime": 2.4116,
"eval_samples_per_second": 13.269,
"eval_steps_per_second": 3.317,
"step": 700
},
{
"epoch": 1.788265306122449,
"grad_norm": 0.5735443897062006,
"learning_rate": 2.7400389478799696e-07,
"loss": 0.1065,
"step": 701
},
{
"epoch": 1.7908163265306123,
"grad_norm": 0.5810012198351104,
"learning_rate": 2.6750032689710604e-07,
"loss": 0.1082,
"step": 702
},
{
"epoch": 1.7933673469387754,
"grad_norm": 0.5958481412831177,
"learning_rate": 2.610727492139131e-07,
"loss": 0.1119,
"step": 703
},
{
"epoch": 1.7959183673469388,
"grad_norm": 0.5840809015166184,
"learning_rate": 2.547212649466568e-07,
"loss": 0.1036,
"step": 704
},
{
"epoch": 1.7984693877551021,
"grad_norm": 0.5888202219012438,
"learning_rate": 2.4844597608173925e-07,
"loss": 0.114,
"step": 705
},
{
"epoch": 1.8010204081632653,
"grad_norm": 0.6092819061535076,
"learning_rate": 2.4224698338208344e-07,
"loss": 0.1189,
"step": 706
},
{
"epoch": 1.8035714285714286,
"grad_norm": 0.6314320254586682,
"learning_rate": 2.3612438638551837e-07,
"loss": 0.1191,
"step": 707
},
{
"epoch": 1.806122448979592,
"grad_norm": 0.5897736239110092,
"learning_rate": 2.3007828340318117e-07,
"loss": 0.1081,
"step": 708
},
{
"epoch": 1.808673469387755,
"grad_norm": 0.6275390646683443,
"learning_rate": 2.2410877151793354e-07,
"loss": 0.1275,
"step": 709
},
{
"epoch": 1.8112244897959182,
"grad_norm": 0.5797864579749057,
"learning_rate": 2.1821594658280932e-07,
"loss": 0.1167,
"step": 710
},
{
"epoch": 1.8137755102040818,
"grad_norm": 0.5530302763446563,
"learning_rate": 2.123999032194729e-07,
"loss": 0.111,
"step": 711
},
{
"epoch": 1.816326530612245,
"grad_norm": 0.5920894147609517,
"learning_rate": 2.0666073481669714e-07,
"loss": 0.1173,
"step": 712
},
{
"epoch": 1.818877551020408,
"grad_norm": 0.5578466613023159,
"learning_rate": 2.0099853352886721e-07,
"loss": 0.1063,
"step": 713
},
{
"epoch": 1.8214285714285714,
"grad_norm": 0.6118756903721626,
"learning_rate": 1.9541339027450256e-07,
"loss": 0.1142,
"step": 714
},
{
"epoch": 1.8239795918367347,
"grad_norm": 0.5906387766424285,
"learning_rate": 1.8990539473478896e-07,
"loss": 0.1165,
"step": 715
},
{
"epoch": 1.8265306122448979,
"grad_norm": 0.573214883177142,
"learning_rate": 1.8447463535214872e-07,
"loss": 0.1143,
"step": 716
},
{
"epoch": 1.8290816326530612,
"grad_norm": 0.5543354201963542,
"learning_rate": 1.7912119932881167e-07,
"loss": 0.1022,
"step": 717
},
{
"epoch": 1.8316326530612246,
"grad_norm": 0.6540919785157406,
"learning_rate": 1.7384517262542255e-07,
"loss": 0.1284,
"step": 718
},
{
"epoch": 1.8341836734693877,
"grad_norm": 0.5724171162277051,
"learning_rate": 1.686466399596548e-07,
"loss": 0.1097,
"step": 719
},
{
"epoch": 1.836734693877551,
"grad_norm": 0.5988377208759277,
"learning_rate": 1.6352568480485277e-07,
"loss": 0.1109,
"step": 720
},
{
"epoch": 1.8392857142857144,
"grad_norm": 0.5946198760051452,
"learning_rate": 1.5848238938869332e-07,
"loss": 0.1218,
"step": 721
},
{
"epoch": 1.8418367346938775,
"grad_norm": 0.6610304566003866,
"learning_rate": 1.5351683469185973e-07,
"loss": 0.1247,
"step": 722
},
{
"epoch": 1.8443877551020407,
"grad_norm": 0.6351539530682201,
"learning_rate": 1.4862910044674772e-07,
"loss": 0.1292,
"step": 723
},
{
"epoch": 1.8469387755102042,
"grad_norm": 0.6207785896757562,
"learning_rate": 1.4381926513618139e-07,
"loss": 0.1214,
"step": 724
},
{
"epoch": 1.8494897959183674,
"grad_norm": 0.5778796347145898,
"learning_rate": 1.390874059921543e-07,
"loss": 0.1137,
"step": 725
},
{
"epoch": 1.8520408163265305,
"grad_norm": 0.6289894556783412,
"learning_rate": 1.3443359899458997e-07,
"loss": 0.1306,
"step": 726
},
{
"epoch": 1.8545918367346939,
"grad_norm": 0.5920906901307591,
"learning_rate": 1.298579188701188e-07,
"loss": 0.1162,
"step": 727
},
{
"epoch": 1.8571428571428572,
"grad_norm": 0.6488560446877133,
"learning_rate": 1.253604390908819e-07,
"loss": 0.1326,
"step": 728
},
{
"epoch": 1.8596938775510203,
"grad_norm": 0.5940363576120224,
"learning_rate": 1.2094123187335104e-07,
"loss": 0.1111,
"step": 729
},
{
"epoch": 1.8622448979591837,
"grad_norm": 0.6319328716661287,
"learning_rate": 1.1660036817716492e-07,
"loss": 0.1324,
"step": 730
},
{
"epoch": 1.864795918367347,
"grad_norm": 0.6077519215437209,
"learning_rate": 1.1233791770399516e-07,
"loss": 0.1155,
"step": 731
},
{
"epoch": 1.8673469387755102,
"grad_norm": 0.6250261912069963,
"learning_rate": 1.0815394889642339e-07,
"loss": 0.1286,
"step": 732
},
{
"epoch": 1.8698979591836735,
"grad_norm": 0.5491011558690894,
"learning_rate": 1.0404852893684425e-07,
"loss": 0.099,
"step": 733
},
{
"epoch": 1.8724489795918369,
"grad_norm": 0.5744764104571831,
"learning_rate": 1.0002172374638519e-07,
"loss": 0.1079,
"step": 734
},
{
"epoch": 1.875,
"grad_norm": 0.6679548709900088,
"learning_rate": 9.607359798384785e-08,
"loss": 0.1237,
"step": 735
},
{
"epoch": 1.8775510204081631,
"grad_norm": 0.6108632547487393,
"learning_rate": 9.22042150446728e-08,
"loss": 0.1228,
"step": 736
},
{
"epoch": 1.8801020408163265,
"grad_norm": 0.5435663894998473,
"learning_rate": 8.841363705991924e-08,
"loss": 0.0981,
"step": 737
},
{
"epoch": 1.8826530612244898,
"grad_norm": 0.6268626044693186,
"learning_rate": 8.470192489526519e-08,
"loss": 0.1132,
"step": 738
},
{
"epoch": 1.885204081632653,
"grad_norm": 0.6193334720497722,
"learning_rate": 8.106913815003503e-08,
"loss": 0.0943,
"step": 739
},
{
"epoch": 1.8877551020408163,
"grad_norm": 0.6074530776682006,
"learning_rate": 7.7515335156238e-08,
"loss": 0.1194,
"step": 740
},
{
"epoch": 1.8903061224489797,
"grad_norm": 0.5440145745307361,
"learning_rate": 7.4040572977635e-08,
"loss": 0.0938,
"step": 741
},
{
"epoch": 1.8928571428571428,
"grad_norm": 0.6266963631247235,
"learning_rate": 7.064490740882057e-08,
"loss": 0.1254,
"step": 742
},
{
"epoch": 1.8954081632653061,
"grad_norm": 0.585333945184069,
"learning_rate": 6.732839297432736e-08,
"loss": 0.1125,
"step": 743
},
{
"epoch": 1.8979591836734695,
"grad_norm": 0.6102667577399207,
"learning_rate": 6.409108292774912e-08,
"loss": 0.1186,
"step": 744
},
{
"epoch": 1.9005102040816326,
"grad_norm": 0.5757285795331835,
"learning_rate": 6.093302925088974e-08,
"loss": 0.1113,
"step": 745
},
{
"epoch": 1.9030612244897958,
"grad_norm": 0.6411162442844834,
"learning_rate": 5.785428265292381e-08,
"loss": 0.1246,
"step": 746
},
{
"epoch": 1.9056122448979593,
"grad_norm": 0.6282690667914622,
"learning_rate": 5.485489256958465e-08,
"loss": 0.1232,
"step": 747
},
{
"epoch": 1.9081632653061225,
"grad_norm": 0.6100309844718854,
"learning_rate": 5.1934907162370374e-08,
"loss": 0.1287,
"step": 748
},
{
"epoch": 1.9107142857142856,
"grad_norm": 0.6013903017998977,
"learning_rate": 4.909437331777178e-08,
"loss": 0.1108,
"step": 749
},
{
"epoch": 1.913265306122449,
"grad_norm": 0.5687648257186603,
"learning_rate": 4.63333366465174e-08,
"loss": 0.1066,
"step": 750
},
{
"epoch": 1.9158163265306123,
"grad_norm": 0.5666552452982463,
"learning_rate": 4.3651841482840185e-08,
"loss": 0.0968,
"step": 751
},
{
"epoch": 1.9183673469387754,
"grad_norm": 0.6465617962350463,
"learning_rate": 4.104993088376974e-08,
"loss": 0.1218,
"step": 752
},
{
"epoch": 1.9209183673469388,
"grad_norm": 0.5822884224730042,
"learning_rate": 3.8527646628437334e-08,
"loss": 0.1132,
"step": 753
},
{
"epoch": 1.9234693877551021,
"grad_norm": 0.5758652097322462,
"learning_rate": 3.608502921740753e-08,
"loss": 0.1068,
"step": 754
},
{
"epoch": 1.9260204081632653,
"grad_norm": 0.7245821946400494,
"learning_rate": 3.372211787202484e-08,
"loss": 0.1202,
"step": 755
},
{
"epoch": 1.9285714285714286,
"grad_norm": 0.6115149197447761,
"learning_rate": 3.143895053378698e-08,
"loss": 0.1221,
"step": 756
},
{
"epoch": 1.931122448979592,
"grad_norm": 0.6143567966734291,
"learning_rate": 2.9235563863734274e-08,
"loss": 0.1215,
"step": 757
},
{
"epoch": 1.933673469387755,
"grad_norm": 0.6825501189925092,
"learning_rate": 2.7111993241860646e-08,
"loss": 0.1281,
"step": 758
},
{
"epoch": 1.9362244897959182,
"grad_norm": 0.57001567551457,
"learning_rate": 2.5068272766545776e-08,
"loss": 0.1072,
"step": 759
},
{
"epoch": 1.9387755102040818,
"grad_norm": 0.6273070677346755,
"learning_rate": 2.3104435254008852e-08,
"loss": 0.1217,
"step": 760
},
{
"epoch": 1.941326530612245,
"grad_norm": 0.6108930925346002,
"learning_rate": 2.1220512237779544e-08,
"loss": 0.1267,
"step": 761
},
{
"epoch": 1.943877551020408,
"grad_norm": 0.6167546366354988,
"learning_rate": 1.9416533968193428e-08,
"loss": 0.124,
"step": 762
},
{
"epoch": 1.9464285714285714,
"grad_norm": 0.5886499101171274,
"learning_rate": 1.769252941190458e-08,
"loss": 0.11,
"step": 763
},
{
"epoch": 1.9489795918367347,
"grad_norm": 0.6360123535606697,
"learning_rate": 1.6048526251421502e-08,
"loss": 0.1269,
"step": 764
},
{
"epoch": 1.9515306122448979,
"grad_norm": 0.6104208596036047,
"learning_rate": 1.4484550884664162e-08,
"loss": 0.1196,
"step": 765
},
{
"epoch": 1.9540816326530612,
"grad_norm": 0.6460129093291147,
"learning_rate": 1.3000628424535978e-08,
"loss": 0.1265,
"step": 766
},
{
"epoch": 1.9566326530612246,
"grad_norm": 0.5905020194158532,
"learning_rate": 1.1596782698524712e-08,
"loss": 0.1142,
"step": 767
},
{
"epoch": 1.9591836734693877,
"grad_norm": 0.5472224778815454,
"learning_rate": 1.0273036248318325e-08,
"loss": 0.1081,
"step": 768
},
{
"epoch": 1.961734693877551,
"grad_norm": 0.5782839724577236,
"learning_rate": 9.029410329441379e-09,
"loss": 0.1058,
"step": 769
},
{
"epoch": 1.9642857142857144,
"grad_norm": 0.5777960470695286,
"learning_rate": 7.865924910916977e-09,
"loss": 0.1094,
"step": 770
},
{
"epoch": 1.9668367346938775,
"grad_norm": 0.6786006991385576,
"learning_rate": 6.782598674943686e-09,
"loss": 0.1159,
"step": 771
},
{
"epoch": 1.9693877551020407,
"grad_norm": 0.6711169694004527,
"learning_rate": 5.779449016595773e-09,
"loss": 0.1335,
"step": 772
},
{
"epoch": 1.9719387755102042,
"grad_norm": 0.6530310832242592,
"learning_rate": 4.856492043545102e-09,
"loss": 0.1443,
"step": 773
},
{
"epoch": 1.9744897959183674,
"grad_norm": 0.6253745584184208,
"learning_rate": 4.0137425758018935e-09,
"loss": 0.136,
"step": 774
},
{
"epoch": 1.9770408163265305,
"grad_norm": 0.5727994729661025,
"learning_rate": 3.2512141454760227e-09,
"loss": 0.1042,
"step": 775
},
{
"epoch": 1.9795918367346939,
"grad_norm": 0.6480065878060496,
"learning_rate": 2.568918996560532e-09,
"loss": 0.1247,
"step": 776
},
{
"epoch": 1.9821428571428572,
"grad_norm": 0.5819417407468576,
"learning_rate": 1.9668680847356735e-09,
"loss": 0.1029,
"step": 777
},
{
"epoch": 1.9846938775510203,
"grad_norm": 0.6137075701886557,
"learning_rate": 1.4450710771907184e-09,
"loss": 0.1133,
"step": 778
},
{
"epoch": 1.9872448979591837,
"grad_norm": 0.6333191914868241,
"learning_rate": 1.0035363524713015e-09,
"loss": 0.1235,
"step": 779
},
{
"epoch": 1.989795918367347,
"grad_norm": 0.5929959113660918,
"learning_rate": 6.422710003439747e-10,
"loss": 0.1075,
"step": 780
},
{
"epoch": 1.9923469387755102,
"grad_norm": 0.5584038012931548,
"learning_rate": 3.6128082168074286e-10,
"loss": 0.1037,
"step": 781
},
{
"epoch": 1.9948979591836735,
"grad_norm": 0.5336444844217255,
"learning_rate": 1.6057032836913623e-10,
"loss": 0.0918,
"step": 782
},
{
"epoch": 1.9974489795918369,
"grad_norm": 0.557285241591839,
"learning_rate": 4.0142743236160075e-11,
"loss": 0.1073,
"step": 783
},
{
"epoch": 2.0,
"grad_norm": 0.49667553817630916,
"learning_rate": 0.0,
"loss": 0.0785,
"step": 784
},
{
"epoch": 2.0,
"step": 784,
"total_flos": 44668733620224.0,
"train_loss": 0.14301018513815136,
"train_runtime": 1428.0634,
"train_samples_per_second": 4.382,
"train_steps_per_second": 0.549
}
],
"logging_steps": 1,
"max_steps": 784,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 800000000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 44668733620224.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}