{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.7094017094017095,
  "eval_steps": 500,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "clip_ratio": 0.0,
      "completion_length": 503.30001945495604,
      "epoch": 0.042735042735042736,
      "grad_norm": 0.14690014719963074,
      "kl": 0.0005318611507391324,
      "learning_rate": 9.871794871794872e-05,
      "loss": -0.0402,
      "num_tokens": 33288.0,
      "reward": -1.1772359251976012,
      "reward_std": 2.312183880805969,
      "rewards/reward_function": -1.1772359311580658,
      "step": 10
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 509.85001373291016,
      "epoch": 0.08547008547008547,
      "grad_norm": 0.16410136222839355,
      "kl": 0.001959062390960753,
      "learning_rate": 9.72934472934473e-05,
      "loss": -0.008,
      "num_tokens": 67317.0,
      "reward": -1.455158567428589,
      "reward_std": 2.39455783367157,
      "rewards/reward_function": -1.4551584720611572,
      "step": 20
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 382.58334197998045,
      "epoch": 0.1282051282051282,
      "grad_norm": 0.5200854539871216,
      "kl": 0.003280931804329157,
      "learning_rate": 9.586894586894587e-05,
      "loss": 0.0011,
      "num_tokens": 93788.0,
      "reward": 2.4295308887958527,
      "reward_std": 1.948470675945282,
      "rewards/reward_function": 2.4295308709144594,
      "step": 30
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 466.25001068115233,
      "epoch": 0.17094017094017094,
      "grad_norm": 0.2215566486120224,
      "kl": 0.005862738820724189,
      "learning_rate": 9.444444444444444e-05,
      "loss": -0.0111,
      "num_tokens": 125045.0,
      "reward": 1.2611542105674745,
      "reward_std": 2.462652099132538,
      "rewards/reward_function": 1.2611541628837586,
      "step": 40
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 518.6166809082031,
      "epoch": 0.21367521367521367,
      "grad_norm": 0.15176545083522797,
      "kl": 0.008088351227343082,
      "learning_rate": 9.301994301994303e-05,
      "loss": -0.0085,
      "num_tokens": 159318.0,
      "reward": -1.0398478865623475,
      "reward_std": 1.9632926762104035,
      "rewards/reward_function": -1.039847767353058,
      "step": 50
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 436.50001125335694,
      "epoch": 0.2564102564102564,
      "grad_norm": 0.23773637413978577,
      "kl": 0.007962367637082935,
      "learning_rate": 9.15954415954416e-05,
      "loss": 0.0155,
      "num_tokens": 188622.0,
      "reward": -0.9993367567658424,
      "reward_std": 2.5938119292259216,
      "rewards/reward_function": -0.9993367329239845,
      "step": 60
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 465.0166717529297,
      "epoch": 0.29914529914529914,
      "grad_norm": 0.22686824202537537,
      "kl": 0.01036710049957037,
      "learning_rate": 9.017094017094018e-05,
      "loss": 0.0232,
      "num_tokens": 219649.0,
      "reward": -1.0854085847735404,
      "reward_std": 2.3031186699867248,
      "rewards/reward_function": -1.0854085013270378,
      "step": 70
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 594.3666900634765,
      "epoch": 0.3418803418803419,
      "grad_norm": 0.2330074906349182,
      "kl": 0.011198346642777324,
      "learning_rate": 8.874643874643875e-05,
      "loss": 0.0085,
      "num_tokens": 258521.0,
      "reward": 0.44582319259643555,
      "reward_std": 2.180183058977127,
      "rewards/reward_function": 0.44582313299179077,
      "step": 80
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 363.2166831970215,
      "epoch": 0.38461538461538464,
      "grad_norm": 1.2950036525726318,
      "kl": 0.01624974999576807,
      "learning_rate": 8.732193732193732e-05,
      "loss": -0.0279,
      "num_tokens": 283896.0,
      "reward": -0.22942656874656678,
      "reward_std": 2.698866534233093,
      "rewards/reward_function": -0.2294266164302826,
      "step": 90
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 366.3166763305664,
      "epoch": 0.42735042735042733,
      "grad_norm": 0.44916248321533203,
      "kl": 0.0371546683833003,
      "learning_rate": 8.58974358974359e-05,
      "loss": -0.034,
      "num_tokens": 309127.0,
      "reward": 1.030603051185608,
      "reward_std": 2.1768004059791566,
      "rewards/reward_function": 1.03060302734375,
      "step": 100
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 627.7833419799805,
      "epoch": 0.4700854700854701,
      "grad_norm": 0.24189692735671997,
      "kl": 0.02031996361911297,
      "learning_rate": 8.447293447293447e-05,
      "loss": 0.0057,
      "num_tokens": 350124.0,
      "reward": 1.7054275810718535,
      "reward_std": 2.761507248878479,
      "rewards/reward_function": 1.7054275453090668,
      "step": 110
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 466.7833419799805,
      "epoch": 0.5128205128205128,
      "grad_norm": 0.3966105282306671,
      "kl": 0.03020444568246603,
      "learning_rate": 8.304843304843305e-05,
      "loss": 0.0158,
      "num_tokens": 381323.0,
      "reward": 0.4135805606842041,
      "reward_std": 2.793270480632782,
      "rewards/reward_function": 0.41358056366443635,
      "step": 120
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 418.7166778564453,
      "epoch": 0.5555555555555556,
      "grad_norm": 0.8130244016647339,
      "kl": 0.050436797365546224,
      "learning_rate": 8.162393162393163e-05,
      "loss": 0.0109,
      "num_tokens": 409632.0,
      "reward": 3.6511381447315214,
      "reward_std": 2.057768177986145,
      "rewards/reward_function": 3.651138073205948,
      "step": 130
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 401.18334617614744,
      "epoch": 0.5982905982905983,
      "grad_norm": 0.237253338098526,
      "kl": 0.08577278926968575,
      "learning_rate": 8.01994301994302e-05,
      "loss": 0.005,
      "num_tokens": 436613.0,
      "reward": 3.9179525792598726,
      "reward_std": 2.5252403259277343,
      "rewards/reward_function": 3.9179525285959245,
      "step": 140
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 534.9666793823242,
      "epoch": 0.6410256410256411,
      "grad_norm": 0.23871329426765442,
      "kl": 0.0823534980416298,
      "learning_rate": 7.877492877492878e-05,
      "loss": 0.0435,
      "num_tokens": 471891.0,
      "reward": 4.100382626056671,
      "reward_std": 2.5033695101737976,
      "rewards/reward_function": 4.100382459163666,
      "step": 150
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 332.50001373291013,
      "epoch": 0.6837606837606838,
      "grad_norm": 0.8124951720237732,
      "kl": 0.15444251857697963,
      "learning_rate": 7.735042735042735e-05,
      "loss": 0.0189,
      "num_tokens": 494961.0,
      "reward": 5.122303450107575,
      "reward_std": 2.0907008171081545,
      "rewards/reward_function": 5.122303307056427,
      "step": 160
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 405.25001373291013,
      "epoch": 0.7264957264957265,
      "grad_norm": 0.7909404635429382,
      "kl": 0.13212636522948742,
      "learning_rate": 7.592592592592593e-05,
      "loss": -0.0367,
      "num_tokens": 522258.0,
      "reward": 5.544608736038208,
      "reward_std": 1.7265446126461028,
      "rewards/reward_function": 5.544608497619629,
      "step": 170
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 595.9833587646484,
      "epoch": 0.7692307692307693,
      "grad_norm": 0.15963077545166016,
      "kl": 0.09406365267932415,
      "learning_rate": 7.450142450142451e-05,
      "loss": -0.0366,
      "num_tokens": 561197.0,
      "reward": 7.151281929016113,
      "reward_std": 2.8219340562820436,
      "rewards/reward_function": 7.15128173828125,
      "step": 180
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 566.9666809082031,
      "epoch": 0.811965811965812,
      "grad_norm": 0.18807636201381683,
      "kl": 0.06498432569205762,
      "learning_rate": 7.307692307692307e-05,
      "loss": -0.0543,
      "num_tokens": 598575.0,
      "reward": 1.7874940395355225,
      "reward_std": 2.3280829310417177,
      "rewards/reward_function": 1.7874938309192658,
      "step": 190
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 831.9500183105469,
      "epoch": 0.8547008547008547,
      "grad_norm": 0.19820700585842133,
      "kl": 0.03369698449969292,
      "learning_rate": 7.165242165242165e-05,
      "loss": -0.0132,
      "num_tokens": 651618.0,
      "reward": 2.0692047476768494,
      "reward_std": 2.6171678781509398,
      "rewards/reward_function": 2.06920468211174,
      "step": 200
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 390.78334617614746,
      "epoch": 0.8974358974358975,
      "grad_norm": 0.1917090117931366,
      "kl": 0.10797237679362297,
      "learning_rate": 7.022792022792024e-05,
      "loss": -0.011,
      "num_tokens": 677981.0,
      "reward": 5.218200743198395,
      "reward_std": 2.8841384768486025,
      "rewards/reward_function": 5.218200659751892,
      "step": 210
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 485.500016784668,
      "epoch": 0.9401709401709402,
      "grad_norm": 0.18474219739437103,
      "kl": 0.11786541007459164,
      "learning_rate": 6.880341880341881e-05,
      "loss": -0.0031,
      "num_tokens": 710033.0,
      "reward": 5.224206709861756,
      "reward_std": 2.2454194366931914,
      "rewards/reward_function": 5.224206617474556,
      "step": 220
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 458.06668243408205,
      "epoch": 0.9829059829059829,
      "grad_norm": 0.19587470591068268,
      "kl": 0.09677453897893429,
      "learning_rate": 6.737891737891738e-05,
      "loss": -0.0014,
      "num_tokens": 741117.0,
      "reward": 4.320850740373134,
      "reward_std": 2.3484508872032164,
      "rewards/reward_function": 4.320850642025471,
      "step": 230
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 605.0666931152343,
      "epoch": 1.0256410256410255,
      "grad_norm": 0.26696279644966125,
      "kl": 0.10099472776055336,
      "learning_rate": 6.595441595441596e-05,
      "loss": -0.025,
      "num_tokens": 780847.0,
      "reward": 7.07135591506958,
      "reward_std": 2.179239821434021,
      "rewards/reward_function": 7.071355724334717,
      "step": 240
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 570.1833526611329,
      "epoch": 1.0683760683760684,
      "grad_norm": 0.41289836168289185,
      "kl": 0.10571693740785122,
      "learning_rate": 6.452991452991453e-05,
      "loss": 0.0201,
      "num_tokens": 818394.0,
      "reward": 4.596634943783283,
      "reward_std": 2.0250564932823183,
      "rewards/reward_function": 4.5966349326074125,
      "step": 250
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 638.4166839599609,
      "epoch": 1.1111111111111112,
      "grad_norm": 0.23291794955730438,
      "kl": 0.06395059525966644,
      "learning_rate": 6.310541310541312e-05,
      "loss": 0.0023,
      "num_tokens": 859765.0,
      "reward": 4.951379495859146,
      "reward_std": 2.139006716012955,
      "rewards/reward_function": 4.95137942135334,
      "step": 260
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 425.76667404174805,
      "epoch": 1.1538461538461537,
      "grad_norm": 0.2002115249633789,
      "kl": 0.14280213601887226,
      "learning_rate": 6.168091168091168e-05,
      "loss": 0.0113,
      "num_tokens": 888389.0,
      "reward": 6.41916925907135,
      "reward_std": 2.205563151836395,
      "rewards/reward_function": 6.419169163703918,
      "step": 270
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 523.6166763305664,
      "epoch": 1.1965811965811965,
      "grad_norm": 0.18461842834949493,
      "kl": 0.1309021409600973,
      "learning_rate": 6.025641025641026e-05,
      "loss": -0.0021,
      "num_tokens": 923046.0,
      "reward": 5.595602548122406,
      "reward_std": 2.1117491006851195,
      "rewards/reward_function": 5.59560244679451,
      "step": 280
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 483.0666799545288,
      "epoch": 1.2393162393162394,
      "grad_norm": 3.1094970703125,
      "kl": 0.1820569921284914,
      "learning_rate": 5.883190883190883e-05,
      "loss": 0.0173,
      "num_tokens": 955420.0,
      "reward": 5.614298677444458,
      "reward_std": 2.518768322467804,
      "rewards/reward_function": 5.614298534393311,
      "step": 290
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 617.6333404541016,
      "epoch": 1.282051282051282,
      "grad_norm": 0.3713136613368988,
      "kl": 0.11033637076616287,
      "learning_rate": 5.740740740740741e-05,
      "loss": 0.0062,
      "num_tokens": 995922.0,
      "reward": 4.5290528535842896,
      "reward_std": 2.494095575809479,
      "rewards/reward_function": 4.529052710533142,
      "step": 300
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 442.0166839599609,
      "epoch": 1.3247863247863247,
      "grad_norm": 0.21180328726768494,
      "kl": 0.1499824345111847,
      "learning_rate": 5.5982905982905986e-05,
      "loss": -0.0013,
      "num_tokens": 1025455.0,
      "reward": 8.561688470840455,
      "reward_std": 1.9707270503044128,
      "rewards/reward_function": 8.56168818473816,
      "step": 310
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 580.050008392334,
      "epoch": 1.3675213675213675,
      "grad_norm": 0.20131155848503113,
      "kl": 0.11668501645326615,
      "learning_rate": 5.4558404558404567e-05,
      "loss": -0.0146,
      "num_tokens": 1063354.0,
      "reward": 5.419061434268952,
      "reward_std": 2.1403504729270937,
      "rewards/reward_function": 5.419061243534088,
      "step": 320
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 652.3166870117187,
      "epoch": 1.4102564102564101,
      "grad_norm": 0.2526203691959381,
      "kl": 0.09888706281781197,
      "learning_rate": 5.313390313390314e-05,
      "loss": 0.0084,
      "num_tokens": 1105397.0,
      "reward": 7.351496028900146,
      "reward_std": 1.773663091659546,
      "rewards/reward_function": 7.351495909690857,
      "step": 330
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 718.36669921875,
      "epoch": 1.452991452991453,
      "grad_norm": 0.1585923731327057,
      "kl": 0.10820744708180427,
      "learning_rate": 5.1709401709401714e-05,
      "loss": 0.0009,
      "num_tokens": 1151751.0,
      "reward": 6.595072710514069,
      "reward_std": 1.881132996082306,
      "rewards/reward_function": 6.59507247209549,
      "step": 340
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 623.8333557128906,
      "epoch": 1.4957264957264957,
      "grad_norm": 0.18694844841957092,
      "kl": 0.11230501309037208,
      "learning_rate": 5.028490028490028e-05,
      "loss": -0.0362,
      "num_tokens": 1192547.0,
      "reward": 6.264591613411904,
      "reward_std": 1.9230000615119933,
      "rewards/reward_function": 6.26459142267704,
      "step": 350
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 704.0500183105469,
      "epoch": 1.5384615384615383,
      "grad_norm": 0.20967163145542145,
      "kl": 0.08291292320936919,
      "learning_rate": 4.886039886039887e-05,
      "loss": -0.0191,
      "num_tokens": 1237892.0,
      "reward": 5.098879025876522,
      "reward_std": 2.248683771491051,
      "rewards/reward_function": 5.09887896925211,
      "step": 360
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 661.6000152587891,
      "epoch": 1.5811965811965814,
      "grad_norm": 0.19468806684017181,
      "kl": 0.10559967942535878,
      "learning_rate": 4.7435897435897435e-05,
      "loss": -0.0077,
      "num_tokens": 1280828.0,
      "reward": 6.5063467741012575,
      "reward_std": 1.6363932967185975,
      "rewards/reward_function": 6.506346654891968,
      "step": 370
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 426.3000122070313,
      "epoch": 1.623931623931624,
      "grad_norm": 0.40712612867355347,
      "kl": 0.14229083359241484,
      "learning_rate": 4.6011396011396016e-05,
      "loss": -0.0067,
      "num_tokens": 1309910.0,
      "reward": 8.865666103363036,
      "reward_std": 1.6894314289093018,
      "rewards/reward_function": 8.865665578842163,
      "step": 380
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 645.4000122070313,
      "epoch": 1.6666666666666665,
      "grad_norm": 0.1824493706226349,
      "kl": 0.14776081275194883,
      "learning_rate": 4.458689458689459e-05,
      "loss": 0.0223,
      "num_tokens": 1351754.0,
      "reward": 4.024516892433167,
      "reward_std": 2.152446281909943,
      "rewards/reward_function": 4.024516820907593,
      "step": 390
    },
    {
      "clip_ratio": 0.0,
      "completion_length": 494.9333465576172,
      "epoch": 1.7094017094017095,
      "grad_norm": 0.4138166010379791,
      "kl": 0.12820503935217858,
      "learning_rate": 4.316239316239317e-05,
      "loss": -0.0496,
      "num_tokens": 1384870.0,
      "reward": 9.3906578540802,
      "reward_std": 2.297537350654602,
      "rewards/reward_function": 9.390657663345337,
      "step": 400
    }
  ],
  "logging_steps": 10,
  "max_steps": 702,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}