{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9008102822017323,
  "eval_steps": 500,
  "global_step": 1700,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011176306230790724,
      "grad_norm": 10.011957168579102,
      "learning_rate": 0.0,
      "loss": 1.2017,
      "num_tokens": 1730426.0,
      "step": 1
    },
    {
      "epoch": 0.005588153115395362,
      "grad_norm": 10.05500602722168,
      "learning_rate": 4.46927374301676e-07,
      "loss": 1.2038,
      "num_tokens": 8603870.0,
      "step": 5
    },
    {
      "epoch": 0.011176306230790724,
      "grad_norm": 8.985472679138184,
      "learning_rate": 1.005586592178771e-06,
      "loss": 1.1918,
      "num_tokens": 17231775.0,
      "step": 10
    },
    {
      "epoch": 0.016764459346186086,
      "grad_norm": 6.6670756340026855,
      "learning_rate": 1.564245810055866e-06,
      "loss": 1.1476,
      "num_tokens": 25849869.0,
      "step": 15
    },
    {
      "epoch": 0.022352612461581448,
      "grad_norm": 3.7649803161621094,
      "learning_rate": 2.1229050279329612e-06,
      "loss": 1.0329,
      "num_tokens": 34465678.0,
      "step": 20
    },
    {
      "epoch": 0.02794076557697681,
      "grad_norm": 2.7102489471435547,
      "learning_rate": 2.6815642458100562e-06,
      "loss": 0.9115,
      "num_tokens": 43150100.0,
      "step": 25
    },
    {
      "epoch": 0.03352891869237217,
      "grad_norm": 2.4278452396392822,
      "learning_rate": 3.240223463687151e-06,
      "loss": 0.7605,
      "num_tokens": 51786456.0,
      "step": 30
    },
    {
      "epoch": 0.03911707180776753,
      "grad_norm": 1.5626779794692993,
      "learning_rate": 3.798882681564246e-06,
      "loss": 0.6526,
      "num_tokens": 60435486.0,
      "step": 35
    },
    {
      "epoch": 0.044705224923162895,
      "grad_norm": 0.6909887194633484,
      "learning_rate": 4.357541899441341e-06,
      "loss": 0.5659,
      "num_tokens": 69072463.0,
      "step": 40
    },
    {
      "epoch": 0.050293378038558254,
      "grad_norm": 0.5558706521987915,
      "learning_rate": 4.916201117318436e-06,
      "loss": 0.5397,
      "num_tokens": 77692952.0,
      "step": 45
    },
    {
      "epoch": 0.05588153115395362,
      "grad_norm": 0.4786052405834198,
      "learning_rate": 5.474860335195531e-06,
      "loss": 0.5163,
      "num_tokens": 86330255.0,
      "step": 50
    },
    {
      "epoch": 0.06146968426934898,
      "grad_norm": 0.49514034390449524,
      "learning_rate": 6.033519553072626e-06,
      "loss": 0.5015,
      "num_tokens": 94954407.0,
      "step": 55
    },
    {
      "epoch": 0.06705783738474434,
      "grad_norm": 0.8501572012901306,
      "learning_rate": 6.592178770949721e-06,
      "loss": 0.4872,
      "num_tokens": 103559855.0,
      "step": 60
    },
    {
      "epoch": 0.0726459905001397,
      "grad_norm": 0.3786110281944275,
      "learning_rate": 7.150837988826816e-06,
      "loss": 0.4761,
      "num_tokens": 112208289.0,
      "step": 65
    },
    {
      "epoch": 0.07823414361553506,
      "grad_norm": 0.2072855532169342,
      "learning_rate": 7.709497206703911e-06,
      "loss": 0.4718,
      "num_tokens": 120800497.0,
      "step": 70
    },
    {
      "epoch": 0.08382229673093043,
      "grad_norm": 0.2174290269613266,
      "learning_rate": 8.268156424581007e-06,
      "loss": 0.4608,
      "num_tokens": 129451121.0,
      "step": 75
    },
    {
      "epoch": 0.08941044984632579,
      "grad_norm": 0.22045612335205078,
      "learning_rate": 8.826815642458101e-06,
      "loss": 0.453,
      "num_tokens": 138103500.0,
      "step": 80
    },
    {
      "epoch": 0.09499860296172115,
      "grad_norm": 0.25303542613983154,
      "learning_rate": 9.385474860335197e-06,
      "loss": 0.4518,
      "num_tokens": 146767921.0,
      "step": 85
    },
    {
      "epoch": 0.10058675607711651,
      "grad_norm": 0.24008460342884064,
      "learning_rate": 9.944134078212291e-06,
      "loss": 0.4462,
      "num_tokens": 155426403.0,
      "step": 90
    },
    {
      "epoch": 0.10617490919251188,
      "grad_norm": 0.2523616850376129,
      "learning_rate": 1.0502793296089386e-05,
      "loss": 0.4419,
      "num_tokens": 164080193.0,
      "step": 95
    },
    {
      "epoch": 0.11176306230790724,
      "grad_norm": 0.21842315793037415,
      "learning_rate": 1.1061452513966481e-05,
      "loss": 0.4369,
      "num_tokens": 172737694.0,
      "step": 100
    },
    {
      "epoch": 0.1173512154233026,
      "grad_norm": 0.3130860924720764,
      "learning_rate": 1.1620111731843577e-05,
      "loss": 0.4347,
      "num_tokens": 181372620.0,
      "step": 105
    },
    {
      "epoch": 0.12293936853869795,
      "grad_norm": 0.2151191383600235,
      "learning_rate": 1.2178770949720671e-05,
      "loss": 0.4321,
      "num_tokens": 189993336.0,
      "step": 110
    },
    {
      "epoch": 0.1285275216540933,
      "grad_norm": 0.26080241799354553,
      "learning_rate": 1.2737430167597766e-05,
      "loss": 0.4265,
      "num_tokens": 198600121.0,
      "step": 115
    },
    {
      "epoch": 0.13411567476948869,
      "grad_norm": 0.21047507226467133,
      "learning_rate": 1.3296089385474861e-05,
      "loss": 0.427,
      "num_tokens": 207208210.0,
      "step": 120
    },
    {
      "epoch": 0.13970382788488406,
      "grad_norm": 0.49889320135116577,
      "learning_rate": 1.3854748603351957e-05,
      "loss": 0.4213,
      "num_tokens": 215841764.0,
      "step": 125
    },
    {
      "epoch": 0.1452919810002794,
      "grad_norm": 0.48499205708503723,
      "learning_rate": 1.4413407821229052e-05,
      "loss": 0.4198,
      "num_tokens": 224461916.0,
      "step": 130
    },
    {
      "epoch": 0.15088013411567477,
      "grad_norm": 0.28938624262809753,
      "learning_rate": 1.4972067039106146e-05,
      "loss": 0.4172,
      "num_tokens": 233116996.0,
      "step": 135
    },
    {
      "epoch": 0.15646828723107012,
      "grad_norm": 0.41849035024642944,
      "learning_rate": 1.553072625698324e-05,
      "loss": 0.4143,
      "num_tokens": 241767774.0,
      "step": 140
    },
    {
      "epoch": 0.1620564403464655,
      "grad_norm": 0.2590814530849457,
      "learning_rate": 1.6089385474860336e-05,
      "loss": 0.4099,
      "num_tokens": 250387551.0,
      "step": 145
    },
    {
      "epoch": 0.16764459346186086,
      "grad_norm": 0.3640440106391907,
      "learning_rate": 1.664804469273743e-05,
      "loss": 0.4079,
      "num_tokens": 259008641.0,
      "step": 150
    },
    {
      "epoch": 0.1732327465772562,
      "grad_norm": 0.2570585012435913,
      "learning_rate": 1.7206703910614527e-05,
      "loss": 0.4072,
      "num_tokens": 267655165.0,
      "step": 155
    },
    {
      "epoch": 0.17882089969265158,
      "grad_norm": 0.31423676013946533,
      "learning_rate": 1.776536312849162e-05,
      "loss": 0.4035,
      "num_tokens": 276311055.0,
      "step": 160
    },
    {
      "epoch": 0.18440905280804695,
      "grad_norm": 0.3440604507923126,
      "learning_rate": 1.8324022346368716e-05,
      "loss": 0.4049,
      "num_tokens": 284946584.0,
      "step": 165
    },
    {
      "epoch": 0.1899972059234423,
      "grad_norm": 0.21540623903274536,
      "learning_rate": 1.888268156424581e-05,
      "loss": 0.4002,
      "num_tokens": 293559223.0,
      "step": 170
    },
    {
      "epoch": 0.19558535903883767,
      "grad_norm": 0.23785561323165894,
      "learning_rate": 1.9441340782122907e-05,
      "loss": 0.4022,
      "num_tokens": 302183283.0,
      "step": 175
    },
    {
      "epoch": 0.20117351215423301,
      "grad_norm": 0.26867231726646423,
      "learning_rate": 2e-05,
      "loss": 0.3957,
      "num_tokens": 310770434.0,
      "step": 180
    },
    {
      "epoch": 0.2067616652696284,
      "grad_norm": 0.300030380487442,
      "learning_rate": 1.9937849596022375e-05,
      "loss": 0.3966,
      "num_tokens": 319421031.0,
      "step": 185
    },
    {
      "epoch": 0.21234981838502376,
      "grad_norm": 0.3302135467529297,
      "learning_rate": 1.9875699192044752e-05,
      "loss": 0.3951,
      "num_tokens": 328071145.0,
      "step": 190
    },
    {
      "epoch": 0.2179379715004191,
      "grad_norm": 0.29629579186439514,
      "learning_rate": 1.9813548788067125e-05,
      "loss": 0.3937,
      "num_tokens": 336706183.0,
      "step": 195
    },
    {
      "epoch": 0.22352612461581448,
      "grad_norm": 0.28898075222969055,
      "learning_rate": 1.97513983840895e-05,
      "loss": 0.3906,
      "num_tokens": 345411712.0,
      "step": 200
    },
    {
      "epoch": 0.22911427773120985,
      "grad_norm": 0.3078921139240265,
      "learning_rate": 1.9689247980111872e-05,
      "loss": 0.3898,
      "num_tokens": 354033114.0,
      "step": 205
    },
    {
      "epoch": 0.2347024308466052,
      "grad_norm": 0.4353577792644501,
      "learning_rate": 1.9627097576134246e-05,
      "loss": 0.388,
      "num_tokens": 362630287.0,
      "step": 210
    },
    {
      "epoch": 0.24029058396200056,
      "grad_norm": 0.309552937746048,
      "learning_rate": 1.9564947172156622e-05,
      "loss": 0.3909,
      "num_tokens": 371235488.0,
      "step": 215
    },
    {
      "epoch": 0.2458787370773959,
      "grad_norm": 0.2993320822715759,
      "learning_rate": 1.9502796768178996e-05,
      "loss": 0.3849,
      "num_tokens": 379873019.0,
      "step": 220
    },
    {
      "epoch": 0.2514668901927913,
      "grad_norm": 0.30897048115730286,
      "learning_rate": 1.944064636420137e-05,
      "loss": 0.3836,
      "num_tokens": 388483293.0,
      "step": 225
    },
    {
      "epoch": 0.2570550433081866,
      "grad_norm": 0.319886714220047,
      "learning_rate": 1.9378495960223743e-05,
      "loss": 0.3858,
      "num_tokens": 397132035.0,
      "step": 230
    },
    {
      "epoch": 0.262643196423582,
      "grad_norm": 0.3353484272956848,
      "learning_rate": 1.9316345556246116e-05,
      "loss": 0.3815,
      "num_tokens": 405760877.0,
      "step": 235
    },
    {
      "epoch": 0.26823134953897737,
      "grad_norm": 0.3035387396812439,
      "learning_rate": 1.925419515226849e-05,
      "loss": 0.3811,
      "num_tokens": 414412147.0,
      "step": 240
    },
    {
      "epoch": 0.2738195026543727,
      "grad_norm": 0.2977229654788971,
      "learning_rate": 1.9192044748290866e-05,
      "loss": 0.381,
      "num_tokens": 423027670.0,
      "step": 245
    },
    {
      "epoch": 0.2794076557697681,
      "grad_norm": 0.23410959541797638,
      "learning_rate": 1.912989434431324e-05,
      "loss": 0.3784,
      "num_tokens": 431706857.0,
      "step": 250
    },
    {
      "epoch": 0.28499580888516346,
      "grad_norm": 0.2829587161540985,
      "learning_rate": 1.9067743940335613e-05,
      "loss": 0.3798,
      "num_tokens": 440317430.0,
      "step": 255
    },
    {
      "epoch": 0.2905839620005588,
      "grad_norm": 0.26716259121894836,
      "learning_rate": 1.9005593536357987e-05,
      "loss": 0.379,
      "num_tokens": 448976567.0,
      "step": 260
    },
    {
      "epoch": 0.2961721151159542,
      "grad_norm": 0.2489883005619049,
      "learning_rate": 1.894344313238036e-05,
      "loss": 0.3768,
      "num_tokens": 457608742.0,
      "step": 265
    },
    {
      "epoch": 0.30176026823134955,
      "grad_norm": 0.25023195147514343,
      "learning_rate": 1.8881292728402737e-05,
      "loss": 0.3743,
      "num_tokens": 466243999.0,
      "step": 270
    },
    {
      "epoch": 0.3073484213467449,
      "grad_norm": 0.23837079107761383,
      "learning_rate": 1.881914232442511e-05,
      "loss": 0.3733,
      "num_tokens": 474886567.0,
      "step": 275
    },
    {
      "epoch": 0.31293657446214024,
      "grad_norm": 0.2237701565027237,
      "learning_rate": 1.8756991920447484e-05,
      "loss": 0.3741,
      "num_tokens": 483548448.0,
      "step": 280
    },
    {
      "epoch": 0.31852472757753564,
      "grad_norm": 0.2825288474559784,
      "learning_rate": 1.869484151646986e-05,
      "loss": 0.3733,
      "num_tokens": 492194335.0,
      "step": 285
    },
    {
      "epoch": 0.324112880692931,
      "grad_norm": 0.4711460769176483,
      "learning_rate": 1.863269111249223e-05,
      "loss": 0.3706,
      "num_tokens": 500857334.0,
      "step": 290
    },
    {
      "epoch": 0.3297010338083263,
      "grad_norm": 0.3319377601146698,
      "learning_rate": 1.8570540708514607e-05,
      "loss": 0.3684,
      "num_tokens": 509470803.0,
      "step": 295
    },
    {
      "epoch": 0.3352891869237217,
      "grad_norm": 0.25508102774620056,
      "learning_rate": 1.850839030453698e-05,
      "loss": 0.3703,
      "num_tokens": 518116395.0,
      "step": 300
    },
    {
      "epoch": 0.34087734003911707,
      "grad_norm": 0.2849508225917816,
      "learning_rate": 1.8446239900559354e-05,
      "loss": 0.3667,
      "num_tokens": 526715030.0,
      "step": 305
    },
    {
      "epoch": 0.3464654931545124,
      "grad_norm": 0.3298902213573456,
      "learning_rate": 1.838408949658173e-05,
      "loss": 0.3674,
      "num_tokens": 535330384.0,
      "step": 310
    },
    {
      "epoch": 0.3520536462699078,
      "grad_norm": 0.28954699635505676,
      "learning_rate": 1.8321939092604105e-05,
      "loss": 0.3668,
      "num_tokens": 543982535.0,
      "step": 315
    },
    {
      "epoch": 0.35764179938530316,
      "grad_norm": 0.26005271077156067,
      "learning_rate": 1.8259788688626478e-05,
      "loss": 0.3654,
      "num_tokens": 552673445.0,
      "step": 320
    },
    {
      "epoch": 0.3632299525006985,
      "grad_norm": 0.26138827204704285,
      "learning_rate": 1.819763828464885e-05,
      "loss": 0.3657,
      "num_tokens": 561284033.0,
      "step": 325
    },
    {
      "epoch": 0.3688181056160939,
      "grad_norm": 0.2150503545999527,
      "learning_rate": 1.8135487880671225e-05,
      "loss": 0.3617,
      "num_tokens": 569844238.0,
      "step": 330
    },
    {
      "epoch": 0.37440625873148925,
      "grad_norm": 0.25445595383644104,
      "learning_rate": 1.8073337476693598e-05,
      "loss": 0.3633,
      "num_tokens": 578513463.0,
      "step": 335
    },
    {
      "epoch": 0.3799944118468846,
      "grad_norm": 0.25044742226600647,
      "learning_rate": 1.8011187072715975e-05,
      "loss": 0.3623,
      "num_tokens": 587170364.0,
      "step": 340
    },
    {
      "epoch": 0.38558256496227994,
      "grad_norm": 0.2201726734638214,
      "learning_rate": 1.794903666873835e-05,
      "loss": 0.363,
      "num_tokens": 595780927.0,
      "step": 345
    },
    {
      "epoch": 0.39117071807767534,
      "grad_norm": 0.24220991134643555,
      "learning_rate": 1.7886886264760722e-05,
      "loss": 0.3595,
      "num_tokens": 604423673.0,
      "step": 350
    },
    {
      "epoch": 0.3967588711930707,
      "grad_norm": 0.27938011288642883,
      "learning_rate": 1.7824735860783095e-05,
      "loss": 0.3625,
      "num_tokens": 613077425.0,
      "step": 355
    },
    {
      "epoch": 0.40234702430846603,
      "grad_norm": 0.3587215542793274,
      "learning_rate": 1.776258545680547e-05,
      "loss": 0.3603,
      "num_tokens": 621679546.0,
      "step": 360
    },
    {
      "epoch": 0.40793517742386143,
      "grad_norm": 0.24406756460666656,
      "learning_rate": 1.7700435052827846e-05,
      "loss": 0.358,
      "num_tokens": 630345024.0,
      "step": 365
    },
    {
      "epoch": 0.4135233305392568,
      "grad_norm": 0.2351740151643753,
      "learning_rate": 1.763828464885022e-05,
      "loss": 0.357,
      "num_tokens": 638998446.0,
      "step": 370
    },
    {
      "epoch": 0.4191114836546521,
      "grad_norm": 0.25008639693260193,
      "learning_rate": 1.7576134244872592e-05,
      "loss": 0.3568,
      "num_tokens": 647620127.0,
      "step": 375
    },
    {
      "epoch": 0.4246996367700475,
      "grad_norm": 0.28882575035095215,
      "learning_rate": 1.751398384089497e-05,
      "loss": 0.357,
      "num_tokens": 656263086.0,
      "step": 380
    },
    {
      "epoch": 0.43028778988544286,
      "grad_norm": 0.24458937346935272,
      "learning_rate": 1.745183343691734e-05,
      "loss": 0.3565,
      "num_tokens": 664900751.0,
      "step": 385
    },
    {
      "epoch": 0.4358759430008382,
      "grad_norm": 0.2362358570098877,
      "learning_rate": 1.7389683032939716e-05,
      "loss": 0.3553,
      "num_tokens": 673566366.0,
      "step": 390
    },
    {
      "epoch": 0.4414640961162336,
      "grad_norm": 0.3116516172885895,
      "learning_rate": 1.732753262896209e-05,
      "loss": 0.3562,
      "num_tokens": 682217285.0,
      "step": 395
    },
    {
      "epoch": 0.44705224923162895,
      "grad_norm": 0.2760963439941406,
      "learning_rate": 1.7265382224984463e-05,
      "loss": 0.3532,
      "num_tokens": 690882999.0,
      "step": 400
    },
    {
      "epoch": 0.4526404023470243,
      "grad_norm": 0.2616076171398163,
      "learning_rate": 1.720323182100684e-05,
      "loss": 0.3547,
      "num_tokens": 699464323.0,
      "step": 405
    },
    {
      "epoch": 0.4582285554624197,
      "grad_norm": 0.2745835483074188,
      "learning_rate": 1.7141081417029213e-05,
      "loss": 0.3543,
      "num_tokens": 708050644.0,
      "step": 410
    },
    {
      "epoch": 0.46381670857781504,
      "grad_norm": 0.22677594423294067,
      "learning_rate": 1.7078931013051587e-05,
      "loss": 0.3545,
      "num_tokens": 716706828.0,
      "step": 415
    },
    {
      "epoch": 0.4694048616932104,
      "grad_norm": 0.2808341383934021,
      "learning_rate": 1.701678060907396e-05,
      "loss": 0.3548,
      "num_tokens": 725378942.0,
      "step": 420
    },
    {
      "epoch": 0.47499301480860573,
      "grad_norm": 0.23508451879024506,
      "learning_rate": 1.6954630205096334e-05,
      "loss": 0.3509,
      "num_tokens": 734043136.0,
      "step": 425
    },
    {
      "epoch": 0.48058116792400113,
      "grad_norm": 0.254522442817688,
      "learning_rate": 1.6892479801118707e-05,
      "loss": 0.3511,
      "num_tokens": 742633173.0,
      "step": 430
    },
    {
      "epoch": 0.4861693210393965,
      "grad_norm": 0.31279993057250977,
      "learning_rate": 1.6830329397141084e-05,
      "loss": 0.351,
      "num_tokens": 751312188.0,
      "step": 435
    },
    {
      "epoch": 0.4917574741547918,
      "grad_norm": 0.25775179266929626,
      "learning_rate": 1.6768178993163457e-05,
      "loss": 0.3501,
      "num_tokens": 759946023.0,
      "step": 440
    },
    {
      "epoch": 0.4973456272701872,
      "grad_norm": 0.2667147219181061,
      "learning_rate": 1.670602858918583e-05,
      "loss": 0.349,
      "num_tokens": 768604979.0,
      "step": 445
    },
    {
      "epoch": 0.5029337803855826,
      "grad_norm": 0.28277674317359924,
      "learning_rate": 1.6643878185208204e-05,
      "loss": 0.348,
      "num_tokens": 777238572.0,
      "step": 450
    },
    {
      "epoch": 0.508521933500978,
      "grad_norm": 0.24286143481731415,
      "learning_rate": 1.6581727781230577e-05,
      "loss": 0.3481,
      "num_tokens": 785827036.0,
      "step": 455
    },
    {
      "epoch": 0.5141100866163733,
      "grad_norm": 0.23307713866233826,
      "learning_rate": 1.6519577377252954e-05,
      "loss": 0.3478,
      "num_tokens": 794445531.0,
      "step": 460
    },
    {
      "epoch": 0.5196982397317687,
      "grad_norm": 0.23410995304584503,
      "learning_rate": 1.6457426973275328e-05,
      "loss": 0.3455,
      "num_tokens": 803071897.0,
      "step": 465
    },
    {
      "epoch": 0.525286392847164,
      "grad_norm": 0.2380787432193756,
      "learning_rate": 1.63952765692977e-05,
      "loss": 0.3455,
      "num_tokens": 811671356.0,
      "step": 470
    },
    {
      "epoch": 0.5308745459625593,
      "grad_norm": 0.23634526133537292,
      "learning_rate": 1.6333126165320078e-05,
      "loss": 0.3466,
      "num_tokens": 820292054.0,
      "step": 475
    },
    {
      "epoch": 0.5364626990779547,
      "grad_norm": 0.25275230407714844,
      "learning_rate": 1.6270975761342448e-05,
      "loss": 0.3461,
      "num_tokens": 828956490.0,
      "step": 480
    },
    {
      "epoch": 0.5420508521933501,
      "grad_norm": 0.23368898034095764,
      "learning_rate": 1.6208825357364825e-05,
      "loss": 0.3461,
      "num_tokens": 837575876.0,
      "step": 485
    },
    {
      "epoch": 0.5476390053087454,
      "grad_norm": 0.22830277681350708,
      "learning_rate": 1.6146674953387198e-05,
      "loss": 0.3437,
      "num_tokens": 846242404.0,
      "step": 490
    },
    {
      "epoch": 0.5532271584241408,
      "grad_norm": 0.24559421837329865,
      "learning_rate": 1.608452454940957e-05,
      "loss": 0.3439,
      "num_tokens": 854952393.0,
      "step": 495
    },
    {
      "epoch": 0.5588153115395362,
      "grad_norm": 0.4303223192691803,
      "learning_rate": 1.602237414543195e-05,
      "loss": 0.3455,
      "num_tokens": 863588754.0,
      "step": 500
    },
    {
      "epoch": 0.5644034646549315,
      "grad_norm": 0.2650391161441803,
      "learning_rate": 1.5960223741454322e-05,
      "loss": 0.3434,
      "num_tokens": 872242175.0,
      "step": 505
    },
    {
      "epoch": 0.5699916177703269,
      "grad_norm": 0.34314268827438354,
      "learning_rate": 1.5898073337476695e-05,
      "loss": 0.3436,
      "num_tokens": 880858359.0,
      "step": 510
    },
    {
      "epoch": 0.5755797708857223,
      "grad_norm": 0.2963222861289978,
      "learning_rate": 1.583592293349907e-05,
      "loss": 0.3409,
      "num_tokens": 889452124.0,
      "step": 515
    },
    {
      "epoch": 0.5811679240011176,
      "grad_norm": 0.2436487376689911,
      "learning_rate": 1.5773772529521442e-05,
      "loss": 0.3405,
      "num_tokens": 898106005.0,
      "step": 520
    },
    {
      "epoch": 0.586756077116513,
      "grad_norm": 0.26480334997177124,
      "learning_rate": 1.5711622125543816e-05,
      "loss": 0.342,
      "num_tokens": 906759880.0,
      "step": 525
    },
    {
      "epoch": 0.5923442302319084,
      "grad_norm": 0.24269545078277588,
      "learning_rate": 1.5649471721566192e-05,
      "loss": 0.3404,
      "num_tokens": 915407081.0,
      "step": 530
    },
    {
      "epoch": 0.5979323833473037,
      "grad_norm": 0.25484395027160645,
      "learning_rate": 1.5587321317588566e-05,
      "loss": 0.3414,
      "num_tokens": 924034724.0,
      "step": 535
    },
    {
      "epoch": 0.6035205364626991,
      "grad_norm": 0.26847922801971436,
      "learning_rate": 1.552517091361094e-05,
      "loss": 0.3396,
      "num_tokens": 932639422.0,
      "step": 540
    },
    {
      "epoch": 0.6091086895780944,
      "grad_norm": 0.3018784821033478,
      "learning_rate": 1.5463020509633313e-05,
      "loss": 0.3388,
      "num_tokens": 941252618.0,
      "step": 545
    },
    {
      "epoch": 0.6146968426934898,
      "grad_norm": 0.2776123583316803,
      "learning_rate": 1.5400870105655686e-05,
      "loss": 0.3378,
      "num_tokens": 949916767.0,
      "step": 550
    },
    {
      "epoch": 0.6202849958088852,
      "grad_norm": 0.26316747069358826,
      "learning_rate": 1.5338719701678063e-05,
      "loss": 0.3377,
      "num_tokens": 958556988.0,
      "step": 555
    },
    {
      "epoch": 0.6258731489242805,
      "grad_norm": 0.35456159710884094,
      "learning_rate": 1.5276569297700436e-05,
      "loss": 0.336,
      "num_tokens": 967198821.0,
      "step": 560
    },
    {
      "epoch": 0.6314613020396759,
      "grad_norm": 0.2805976867675781,
      "learning_rate": 1.5214418893722812e-05,
      "loss": 0.3371,
      "num_tokens": 975836394.0,
      "step": 565
    },
    {
      "epoch": 0.6370494551550713,
      "grad_norm": 0.3051447868347168,
      "learning_rate": 1.5152268489745185e-05,
      "loss": 0.3355,
      "num_tokens": 984478918.0,
      "step": 570
    },
    {
      "epoch": 0.6426376082704666,
      "grad_norm": 0.29085445404052734,
      "learning_rate": 1.5090118085767558e-05,
      "loss": 0.3349,
      "num_tokens": 993082498.0,
      "step": 575
    },
    {
      "epoch": 0.648225761385862,
      "grad_norm": 0.21163810789585114,
      "learning_rate": 1.5027967681789932e-05,
      "loss": 0.3363,
      "num_tokens": 1001741153.0,
      "step": 580
    },
    {
      "epoch": 0.6538139145012574,
      "grad_norm": 0.221357524394989,
      "learning_rate": 1.4965817277812307e-05,
      "loss": 0.3358,
      "num_tokens": 1010394285.0,
      "step": 585
    },
    {
      "epoch": 0.6594020676166527,
      "grad_norm": 0.2421073317527771,
      "learning_rate": 1.4903666873834682e-05,
      "loss": 0.3345,
      "num_tokens": 1019057041.0,
      "step": 590
    },
    {
      "epoch": 0.664990220732048,
      "grad_norm": 0.32805293798446655,
      "learning_rate": 1.4841516469857056e-05,
      "loss": 0.3335,
      "num_tokens": 1027673717.0,
      "step": 595
    },
    {
      "epoch": 0.6705783738474435,
      "grad_norm": 0.25039365887641907,
      "learning_rate": 1.477936606587943e-05,
      "loss": 0.3346,
      "num_tokens": 1036289387.0,
      "step": 600
    },
    {
      "epoch": 0.6761665269628387,
      "grad_norm": 0.2505406141281128,
      "learning_rate": 1.4717215661901802e-05,
      "loss": 0.3344,
      "num_tokens": 1044956336.0,
      "step": 605
    },
    {
      "epoch": 0.6817546800782341,
      "grad_norm": 0.21238237619400024,
      "learning_rate": 1.4655065257924177e-05,
      "loss": 0.3313,
      "num_tokens": 1053596820.0,
      "step": 610
    },
    {
      "epoch": 0.6873428331936295,
      "grad_norm": 0.2559036910533905,
      "learning_rate": 1.4592914853946551e-05,
      "loss": 0.3318,
      "num_tokens": 1062244415.0,
      "step": 615
    },
    {
      "epoch": 0.6929309863090248,
      "grad_norm": 0.23106950521469116,
      "learning_rate": 1.4530764449968926e-05,
      "loss": 0.3309,
      "num_tokens": 1070875234.0,
      "step": 620
    },
    {
      "epoch": 0.6985191394244202,
      "grad_norm": 0.24224470555782318,
      "learning_rate": 1.4468614045991301e-05,
      "loss": 0.3317,
      "num_tokens": 1079478349.0,
      "step": 625
    },
    {
      "epoch": 0.7041072925398156,
      "grad_norm": 0.2289455533027649,
      "learning_rate": 1.4406463642013675e-05,
      "loss": 0.3325,
      "num_tokens": 1088152645.0,
      "step": 630
    },
    {
      "epoch": 0.7096954456552109,
      "grad_norm": 0.23123560845851898,
      "learning_rate": 1.434431323803605e-05,
      "loss": 0.3315,
      "num_tokens": 1096793450.0,
      "step": 635
    },
    {
      "epoch": 0.7152835987706063,
      "grad_norm": 0.2753809988498688,
      "learning_rate": 1.4282162834058421e-05,
      "loss": 0.3295,
      "num_tokens": 1105421755.0,
      "step": 640
    },
    {
      "epoch": 0.7208717518860017,
      "grad_norm": 0.19695010781288147,
      "learning_rate": 1.4220012430080797e-05,
      "loss": 0.3297,
      "num_tokens": 1114063613.0,
      "step": 645
    },
    {
      "epoch": 0.726459905001397,
      "grad_norm": 0.23159605264663696,
      "learning_rate": 1.415786202610317e-05,
      "loss": 0.3287,
      "num_tokens": 1122701970.0,
      "step": 650
    },
    {
      "epoch": 0.7320480581167924,
      "grad_norm": 0.2736544609069824,
      "learning_rate": 1.4095711622125545e-05,
      "loss": 0.3292,
      "num_tokens": 1131362655.0,
      "step": 655
    },
    {
      "epoch": 0.7376362112321878,
      "grad_norm": 0.21057793498039246,
      "learning_rate": 1.403356121814792e-05,
      "loss": 0.3301,
      "num_tokens": 1139970963.0,
      "step": 660
    },
    {
      "epoch": 0.7432243643475831,
      "grad_norm": 0.24804027378559113,
      "learning_rate": 1.3971410814170294e-05,
      "loss": 0.3292,
      "num_tokens": 1148655402.0,
      "step": 665
    },
    {
      "epoch": 0.7488125174629785,
      "grad_norm": 0.26707762479782104,
      "learning_rate": 1.3909260410192667e-05,
      "loss": 0.3288,
      "num_tokens": 1157277819.0,
      "step": 670
    },
    {
      "epoch": 0.7544006705783739,
      "grad_norm": 0.22491665184497833,
      "learning_rate": 1.384711000621504e-05,
      "loss": 0.3275,
      "num_tokens": 1165874070.0,
      "step": 675
    },
    {
      "epoch": 0.7599888236937692,
      "grad_norm": 0.35867735743522644,
      "learning_rate": 1.3784959602237416e-05,
      "loss": 0.3259,
      "num_tokens": 1174553674.0,
      "step": 680
    },
    {
      "epoch": 0.7655769768091646,
      "grad_norm": 0.2906293272972107,
      "learning_rate": 1.372280919825979e-05,
      "loss": 0.3275,
      "num_tokens": 1183203660.0,
      "step": 685
    },
    {
      "epoch": 0.7711651299245599,
      "grad_norm": 0.2248237431049347,
      "learning_rate": 1.3660658794282164e-05,
      "loss": 0.3257,
      "num_tokens": 1191792299.0,
      "step": 690
    },
    {
      "epoch": 0.7767532830399553,
      "grad_norm": 0.22850936651229858,
      "learning_rate": 1.359850839030454e-05,
      "loss": 0.3241,
      "num_tokens": 1200443497.0,
      "step": 695
    },
    {
      "epoch": 0.7823414361553507,
      "grad_norm": 0.2620466649532318,
      "learning_rate": 1.3536357986326911e-05,
      "loss": 0.3255,
      "num_tokens": 1209088496.0,
      "step": 700
    },
    {
      "epoch": 0.787929589270746,
      "grad_norm": 0.2618216574192047,
      "learning_rate": 1.3474207582349286e-05,
      "loss": 0.3248,
      "num_tokens": 1217733862.0,
      "step": 705
    },
    {
      "epoch": 0.7935177423861414,
      "grad_norm": 0.22120532393455505,
      "learning_rate": 1.341205717837166e-05,
      "loss": 0.3253,
      "num_tokens": 1226435221.0,
      "step": 710
    },
    {
      "epoch": 0.7991058955015368,
      "grad_norm": 0.23882830142974854,
      "learning_rate": 1.3349906774394035e-05,
      "loss": 0.3247,
      "num_tokens": 1235039345.0,
      "step": 715
    },
    {
      "epoch": 0.8046940486169321,
      "grad_norm": 0.2220090627670288,
      "learning_rate": 1.328775637041641e-05,
      "loss": 0.3242,
      "num_tokens": 1243676965.0,
      "step": 720
    },
    {
      "epoch": 0.8102822017323275,
      "grad_norm": 0.2509444057941437,
      "learning_rate": 1.3225605966438783e-05,
      "loss": 0.3246,
      "num_tokens": 1252287521.0,
      "step": 725
    },
    {
      "epoch": 0.8158703548477229,
      "grad_norm": 0.2144639939069748,
      "learning_rate": 1.3163455562461157e-05,
      "loss": 0.3219,
      "num_tokens": 1260906284.0,
      "step": 730
    },
    {
      "epoch": 0.8214585079631181,
      "grad_norm": 0.24812334775924683,
      "learning_rate": 1.310130515848353e-05,
      "loss": 0.3216,
      "num_tokens": 1269550220.0,
      "step": 735
    },
    {
      "epoch": 0.8270466610785135,
      "grad_norm": 0.25637102127075195,
      "learning_rate": 1.3039154754505905e-05,
      "loss": 0.3223,
      "num_tokens": 1278166875.0,
      "step": 740
    },
    {
      "epoch": 0.832634814193909,
      "grad_norm": 0.2210138738155365,
      "learning_rate": 1.2977004350528279e-05,
      "loss": 0.3215,
      "num_tokens": 1286779173.0,
      "step": 745
    },
    {
      "epoch": 0.8382229673093042,
      "grad_norm": 0.24274571239948273,
      "learning_rate": 1.2914853946550654e-05,
      "loss": 0.322,
      "num_tokens": 1295400109.0,
      "step": 750
    },
    {
      "epoch": 0.8438111204246996,
      "grad_norm": 0.22785916924476624,
      "learning_rate": 1.2852703542573029e-05,
      "loss": 0.3212,
      "num_tokens": 1304038572.0,
      "step": 755
    },
    {
      "epoch": 0.849399273540095,
      "grad_norm": 0.24811935424804688,
      "learning_rate": 1.2790553138595402e-05,
      "loss": 0.3208,
      "num_tokens": 1312695953.0,
      "step": 760
    },
    {
      "epoch": 0.8549874266554903,
      "grad_norm": 0.2515169084072113,
      "learning_rate": 1.2728402734617776e-05,
      "loss": 0.3219,
      "num_tokens": 1321320330.0,
      "step": 765
    },
    {
      "epoch": 0.8605755797708857,
      "grad_norm": 0.23009227216243744,
      "learning_rate": 1.266625233064015e-05,
      "loss": 0.32,
      "num_tokens": 1329933663.0,
      "step": 770
    },
    {
      "epoch": 0.8661637328862811,
      "grad_norm": 0.286752313375473,
      "learning_rate": 1.2604101926662524e-05,
      "loss": 0.3191,
      "num_tokens": 1338601890.0,
      "step": 775
    },
    {
      "epoch": 0.8717518860016764,
      "grad_norm": 0.25049400329589844,
      "learning_rate": 1.25419515226849e-05,
      "loss": 0.3193,
      "num_tokens": 1347191005.0,
      "step": 780
    },
    {
      "epoch": 0.8773400391170718,
      "grad_norm": 0.3110709488391876,
      "learning_rate": 1.2479801118707273e-05,
      "loss": 0.3199,
      "num_tokens": 1355865305.0,
      "step": 785
    },
    {
      "epoch": 0.8829281922324672,
      "grad_norm": 0.27292343974113464,
      "learning_rate": 1.2417650714729648e-05,
      "loss": 0.3186,
      "num_tokens": 1364495144.0,
      "step": 790
    },
    {
      "epoch": 0.8885163453478625,
      "grad_norm": 0.24087272584438324,
      "learning_rate": 1.235550031075202e-05,
      "loss": 0.3179,
      "num_tokens": 1373161011.0,
      "step": 795
    },
    {
      "epoch": 0.8941044984632579,
      "grad_norm": 0.22793588042259216,
      "learning_rate": 1.2293349906774395e-05,
      "loss": 0.3175,
      "num_tokens": 1381821262.0,
      "step": 800
    },
    {
      "epoch": 0.8996926515786533,
      "grad_norm": 0.26788079738616943,
      "learning_rate": 1.2231199502796768e-05,
      "loss": 0.3173,
      "num_tokens": 1390460262.0,
      "step": 805
    },
    {
      "epoch": 0.9052808046940486,
      "grad_norm": 0.24791817367076874,
      "learning_rate": 1.2169049098819143e-05,
      "loss": 0.3198,
      "num_tokens": 1399107355.0,
      "step": 810
    },
    {
      "epoch": 0.910868957809444,
      "grad_norm": 0.23244717717170715,
      "learning_rate": 1.2106898694841519e-05,
      "loss": 0.3178,
      "num_tokens": 1407743698.0,
      "step": 815
    },
    {
      "epoch": 0.9164571109248394,
      "grad_norm": 0.22195950150489807,
      "learning_rate": 1.2044748290863892e-05,
      "loss": 0.3174,
      "num_tokens": 1416397325.0,
      "step": 820
    },
    {
      "epoch": 0.9220452640402347,
      "grad_norm": 0.23812797665596008,
      "learning_rate": 1.1982597886886265e-05,
      "loss": 0.3172,
      "num_tokens": 1425035981.0,
      "step": 825
    },
    {
      "epoch": 0.9276334171556301,
      "grad_norm": 0.26328200101852417,
      "learning_rate": 1.1920447482908639e-05,
      "loss": 0.3165,
      "num_tokens": 1433658159.0,
      "step": 830
    },
    {
      "epoch": 0.9332215702710255,
      "grad_norm": 0.29811593890190125,
      "learning_rate": 1.1858297078931014e-05,
      "loss": 0.3174,
      "num_tokens": 1442308046.0,
      "step": 835
    },
    {
      "epoch": 0.9388097233864208,
      "grad_norm": 0.3524268567562103,
      "learning_rate": 1.1796146674953387e-05,
      "loss": 0.3134,
      "num_tokens": 1450897134.0,
      "step": 840
    },
    {
      "epoch": 0.9443978765018162,
      "grad_norm": 0.29091647267341614,
      "learning_rate": 1.1733996270975763e-05,
      "loss": 0.3153,
      "num_tokens": 1459562818.0,
      "step": 845
    },
    {
      "epoch": 0.9499860296172115,
      "grad_norm": 0.24113890528678894,
      "learning_rate": 1.1671845866998138e-05,
      "loss": 0.315,
      "num_tokens": 1468199788.0,
      "step": 850
    },
    {
      "epoch": 0.9555741827326069,
      "grad_norm": 0.24603360891342163,
      "learning_rate": 1.1609695463020511e-05,
      "loss": 0.3146,
      "num_tokens": 1476828577.0,
      "step": 855
    },
    {
      "epoch": 0.9611623358480023,
      "grad_norm": 0.2403990924358368,
      "learning_rate": 1.1547545059042884e-05,
      "loss": 0.3136,
      "num_tokens": 1485446216.0,
      "step": 860
    },
    {
      "epoch": 0.9667504889633975,
      "grad_norm": 0.23709586262702942,
      "learning_rate": 1.1485394655065258e-05,
      "loss": 0.314,
      "num_tokens": 1494105354.0,
      "step": 865
    },
    {
      "epoch": 0.972338642078793,
      "grad_norm": 0.22443188726902008,
      "learning_rate": 1.1423244251087633e-05,
      "loss": 0.3128,
      "num_tokens": 1502756944.0,
      "step": 870
    },
    {
      "epoch": 0.9779267951941883,
      "grad_norm": 0.22289405763149261,
      "learning_rate": 1.1361093847110008e-05,
      "loss": 0.3128,
      "num_tokens": 1511405759.0,
      "step": 875
    },
    {
      "epoch": 0.9835149483095836,
      "grad_norm": 0.21792230010032654,
      "learning_rate": 1.1298943443132382e-05,
      "loss": 0.3128,
      "num_tokens": 1520041593.0,
      "step": 880
    },
    {
      "epoch": 0.989103101424979,
      "grad_norm": 0.23965007066726685,
      "learning_rate": 1.1236793039154757e-05,
      "loss": 0.3105,
      "num_tokens": 1528731440.0,
      "step": 885
    },
    {
      "epoch": 0.9946912545403744,
      "grad_norm": 0.21196743845939636,
      "learning_rate": 1.1174642635177128e-05,
      "loss": 0.313,
      "num_tokens": 1537329601.0,
      "step": 890
    },
    {
      "epoch": 1.0011176306230791,
      "grad_norm": 0.4137466251850128,
      "learning_rate": 1.1112492231199504e-05,
      "loss": 0.3744,
      "num_tokens": 1546855399.0,
      "step": 895
    },
    {
      "epoch": 1.0067057837384745,
      "grad_norm": 0.3543248772621155,
      "learning_rate": 1.1050341827221877e-05,
      "loss": 0.3073,
      "num_tokens": 1555449486.0,
      "step": 900
    },
    {
      "epoch": 1.0122939368538697,
      "grad_norm": 0.2658456563949585,
      "learning_rate": 1.0988191423244252e-05,
      "loss": 0.3094,
      "num_tokens": 1564115495.0,
      "step": 905
    },
    {
      "epoch": 1.017882089969265,
      "grad_norm": 0.23490653932094574,
      "learning_rate": 1.0926041019266627e-05,
      "loss": 0.3073,
      "num_tokens": 1572713661.0,
      "step": 910
    },
    {
      "epoch": 1.0234702430846605,
      "grad_norm": 0.2108413279056549,
      "learning_rate": 1.0863890615289e-05,
      "loss": 0.3078,
      "num_tokens": 1581295552.0,
      "step": 915
    },
    {
      "epoch": 1.029058396200056,
      "grad_norm": 0.21604564785957336,
      "learning_rate": 1.0801740211311374e-05,
      "loss": 0.308,
      "num_tokens": 1589925350.0,
      "step": 920
    },
    {
      "epoch": 1.0346465493154513,
      "grad_norm": 0.24258844554424286,
      "learning_rate": 1.0739589807333748e-05,
      "loss": 0.3071,
      "num_tokens": 1598552250.0,
      "step": 925
    },
    {
      "epoch": 1.0402347024308467,
      "grad_norm": 0.2516770362854004,
      "learning_rate": 1.0677439403356123e-05,
      "loss": 0.3074,
      "num_tokens": 1607226139.0,
      "step": 930
    },
    {
      "epoch": 1.0458228555462419,
      "grad_norm": 0.267008900642395,
      "learning_rate": 1.0615288999378496e-05,
      "loss": 0.3075,
      "num_tokens": 1615893500.0,
      "step": 935
    },
    {
      "epoch": 1.0514110086616373,
      "grad_norm": 0.22778408229351044,
      "learning_rate": 1.0553138595400871e-05,
      "loss": 0.305,
      "num_tokens": 1624544526.0,
      "step": 940
    },
    {
      "epoch": 1.0569991617770327,
      "grad_norm": 0.24864831566810608,
      "learning_rate": 1.0490988191423246e-05,
      "loss": 0.3071,
      "num_tokens": 1633169926.0,
      "step": 945
    },
    {
      "epoch": 1.062587314892428,
      "grad_norm": 0.24507726728916168,
      "learning_rate": 1.0428837787445618e-05,
      "loss": 0.3067,
      "num_tokens": 1641807825.0,
      "step": 950
    },
    {
      "epoch": 1.0681754680078235,
      "grad_norm": 0.24927540123462677,
      "learning_rate": 1.0366687383467993e-05,
      "loss": 0.305,
      "num_tokens": 1650440605.0,
      "step": 955
    },
    {
      "epoch": 1.0737636211232187,
      "grad_norm": 0.23993715643882751,
      "learning_rate": 1.0304536979490367e-05,
      "loss": 0.3062,
      "num_tokens": 1659114119.0,
      "step": 960
    },
    {
      "epoch": 1.079351774238614,
      "grad_norm": 0.23860405385494232,
      "learning_rate": 1.0242386575512742e-05,
      "loss": 0.3051,
      "num_tokens": 1667790637.0,
      "step": 965
    },
    {
      "epoch": 1.0849399273540095,
      "grad_norm": 0.22328642010688782,
      "learning_rate": 1.0180236171535117e-05,
      "loss": 0.3036,
      "num_tokens": 1676410799.0,
      "step": 970
    },
    {
      "epoch": 1.0905280804694049,
      "grad_norm": 0.22773276269435883,
      "learning_rate": 1.011808576755749e-05,
      "loss": 0.3045,
      "num_tokens": 1685038550.0,
      "step": 975
    },
    {
      "epoch": 1.0961162335848003,
      "grad_norm": 0.24300263822078705,
      "learning_rate": 1.0055935363579865e-05,
      "loss": 0.3036,
      "num_tokens": 1693653167.0,
      "step": 980
    },
    {
      "epoch": 1.1017043867001957,
      "grad_norm": 0.2338700294494629,
      "learning_rate": 9.993784959602239e-06,
      "loss": 0.3035,
      "num_tokens": 1702298742.0,
      "step": 985
    },
    {
      "epoch": 1.107292539815591,
      "grad_norm": 0.26763731241226196,
      "learning_rate": 9.931634555624612e-06,
      "loss": 0.3031,
      "num_tokens": 1710896315.0,
      "step": 990
    },
    {
      "epoch": 1.1128806929309862,
      "grad_norm": 0.2267494946718216,
      "learning_rate": 9.869484151646986e-06,
      "loss": 0.3025,
      "num_tokens": 1719508004.0,
      "step": 995
    },
    {
      "epoch": 1.1184688460463816,
      "grad_norm": 0.2253628671169281,
      "learning_rate": 9.80733374766936e-06,
      "loss": 0.302,
      "num_tokens": 1728072354.0,
      "step": 1000
    },
    {
      "epoch": 1.124056999161777,
      "grad_norm": 0.3325177729129791,
      "learning_rate": 9.745183343691734e-06,
      "loss": 0.3023,
      "num_tokens": 1736731519.0,
      "step": 1005
    },
    {
      "epoch": 1.1296451522771724,
      "grad_norm": 0.25409770011901855,
      "learning_rate": 9.68303293971411e-06,
      "loss": 0.3025,
      "num_tokens": 1745381878.0,
      "step": 1010
    },
    {
      "epoch": 1.1352333053925678,
      "grad_norm": 0.2311965525150299,
      "learning_rate": 9.620882535736483e-06,
      "loss": 0.3035,
      "num_tokens": 1753981361.0,
      "step": 1015
    },
    {
      "epoch": 1.140821458507963,
      "grad_norm": 0.215187668800354,
      "learning_rate": 9.558732131758858e-06,
      "loss": 0.3008,
      "num_tokens": 1762589098.0,
      "step": 1020
    },
    {
      "epoch": 1.1464096116233584,
      "grad_norm": 0.26533347368240356,
      "learning_rate": 9.496581727781231e-06,
      "loss": 0.3,
      "num_tokens": 1771273179.0,
      "step": 1025
    },
    {
      "epoch": 1.1519977647387538,
      "grad_norm": 0.25665122270584106,
      "learning_rate": 9.434431323803605e-06,
      "loss": 0.3021,
      "num_tokens": 1779963496.0,
      "step": 1030
    },
    {
      "epoch": 1.1575859178541492,
      "grad_norm": 0.25517359375953674,
      "learning_rate": 9.37228091982598e-06,
      "loss": 0.3016,
      "num_tokens": 1788626566.0,
      "step": 1035
    },
    {
      "epoch": 1.1631740709695446,
      "grad_norm": 0.24597561359405518,
      "learning_rate": 9.310130515848353e-06,
      "loss": 0.2999,
      "num_tokens": 1797251848.0,
      "step": 1040
    },
    {
      "epoch": 1.16876222408494,
      "grad_norm": 0.234393909573555,
      "learning_rate": 9.247980111870728e-06,
      "loss": 0.3003,
      "num_tokens": 1805885491.0,
      "step": 1045
    },
    {
      "epoch": 1.1743503772003352,
      "grad_norm": 0.23870638012886047,
      "learning_rate": 9.185829707893102e-06,
      "loss": 0.299,
      "num_tokens": 1814510483.0,
      "step": 1050
    },
    {
      "epoch": 1.1799385303157306,
      "grad_norm": 0.23010727763175964,
      "learning_rate": 9.123679303915475e-06,
      "loss": 0.2997,
      "num_tokens": 1823182283.0,
      "step": 1055
    },
    {
      "epoch": 1.185526683431126,
      "grad_norm": 0.22427524626255035,
      "learning_rate": 9.06152889993785e-06,
      "loss": 0.2986,
      "num_tokens": 1831855032.0,
      "step": 1060
    },
    {
      "epoch": 1.1911148365465214,
      "grad_norm": 0.24159206449985504,
      "learning_rate": 8.999378495960226e-06,
      "loss": 0.3003,
      "num_tokens": 1840477078.0,
      "step": 1065
    },
    {
      "epoch": 1.1967029896619168,
      "grad_norm": 0.25824683904647827,
      "learning_rate": 8.937228091982599e-06,
      "loss": 0.3012,
      "num_tokens": 1849104626.0,
      "step": 1070
    },
    {
      "epoch": 1.2022911427773122,
      "grad_norm": 0.23380401730537415,
      "learning_rate": 8.875077688004972e-06,
      "loss": 0.2982,
      "num_tokens": 1857782320.0,
      "step": 1075
    },
    {
      "epoch": 1.2078792958927074,
      "grad_norm": 0.2337757796049118,
      "learning_rate": 8.812927284027348e-06,
      "loss": 0.2988,
      "num_tokens": 1866411525.0,
      "step": 1080
    },
    {
      "epoch": 1.2134674490081028,
      "grad_norm": 0.23148195445537567,
      "learning_rate": 8.750776880049721e-06,
      "loss": 0.2994,
      "num_tokens": 1875046108.0,
      "step": 1085
    },
    {
      "epoch": 1.2190556021234982,
      "grad_norm": 0.2387813925743103,
      "learning_rate": 8.688626476072094e-06,
      "loss": 0.298,
      "num_tokens": 1883663395.0,
      "step": 1090
    },
    {
      "epoch": 1.2246437552388936,
      "grad_norm": 0.23834937810897827,
      "learning_rate": 8.62647607209447e-06,
      "loss": 0.2976,
      "num_tokens": 1892282345.0,
      "step": 1095
    },
    {
      "epoch": 1.230231908354289,
      "grad_norm": 0.22994597256183624,
      "learning_rate": 8.564325668116843e-06,
      "loss": 0.2994,
      "num_tokens": 1900904719.0,
      "step": 1100
    },
    {
      "epoch": 1.2358200614696844,
      "grad_norm": 0.2896011769771576,
      "learning_rate": 8.502175264139218e-06,
      "loss": 0.2986,
      "num_tokens": 1909539207.0,
      "step": 1105
    },
    {
      "epoch": 1.2414082145850796,
      "grad_norm": 0.2331543117761612,
      "learning_rate": 8.440024860161591e-06,
      "loss": 0.2987,
      "num_tokens": 1918146801.0,
      "step": 1110
    },
    {
      "epoch": 1.246996367700475,
      "grad_norm": 0.27627456188201904,
      "learning_rate": 8.377874456183965e-06,
      "loss": 0.2973,
      "num_tokens": 1926809633.0,
      "step": 1115
    },
    {
      "epoch": 1.2525845208158704,
      "grad_norm": 0.22385410964488983,
      "learning_rate": 8.31572405220634e-06,
      "loss": 0.2989,
      "num_tokens": 1935446815.0,
      "step": 1120
    },
    {
      "epoch": 1.2581726739312658,
      "grad_norm": 0.2674117982387543,
      "learning_rate": 8.253573648228713e-06,
      "loss": 0.2959,
      "num_tokens": 1944051171.0,
      "step": 1125
    },
    {
      "epoch": 1.2637608270466612,
      "grad_norm": 0.23551645874977112,
      "learning_rate": 8.191423244251089e-06,
      "loss": 0.2966,
      "num_tokens": 1952699890.0,
      "step": 1130
    },
    {
      "epoch": 1.2693489801620563,
      "grad_norm": 0.2742044925689697,
      "learning_rate": 8.129272840273462e-06,
      "loss": 0.2959,
      "num_tokens": 1961336294.0,
      "step": 1135
    },
    {
      "epoch": 1.2749371332774517,
      "grad_norm": 0.300987184047699,
      "learning_rate": 8.067122436295837e-06,
      "loss": 0.297,
      "num_tokens": 1969959503.0,
      "step": 1140
    },
    {
      "epoch": 1.2805252863928471,
      "grad_norm": 0.26457130908966064,
      "learning_rate": 8.00497203231821e-06,
      "loss": 0.2958,
      "num_tokens": 1978592174.0,
      "step": 1145
    },
    {
      "epoch": 1.2861134395082425,
      "grad_norm": 0.25730499625205994,
      "learning_rate": 7.942821628340584e-06,
      "loss": 0.2957,
      "num_tokens": 1987264337.0,
      "step": 1150
    },
    {
      "epoch": 1.291701592623638,
      "grad_norm": 0.23706525564193726,
      "learning_rate": 7.880671224362959e-06,
      "loss": 0.296,
      "num_tokens": 1995921874.0,
      "step": 1155
    },
    {
      "epoch": 1.2972897457390333,
      "grad_norm": 0.2367008924484253,
      "learning_rate": 7.818520820385334e-06,
      "loss": 0.2961,
      "num_tokens": 2004554306.0,
      "step": 1160
    },
    {
      "epoch": 1.3028778988544287,
      "grad_norm": 0.24648545682430267,
      "learning_rate": 7.756370416407708e-06,
      "loss": 0.296,
      "num_tokens": 2013186850.0,
      "step": 1165
    },
    {
      "epoch": 1.308466051969824,
      "grad_norm": 0.23670046031475067,
      "learning_rate": 7.694220012430081e-06,
      "loss": 0.2955,
      "num_tokens": 2021845658.0,
      "step": 1170
    },
    {
      "epoch": 1.3140542050852193,
      "grad_norm": 0.25605788826942444,
      "learning_rate": 7.632069608452456e-06,
      "loss": 0.2954,
      "num_tokens": 2030475717.0,
      "step": 1175
    },
    {
      "epoch": 1.3196423582006147,
      "grad_norm": 0.231962651014328,
      "learning_rate": 7.56991920447483e-06,
      "loss": 0.2933,
      "num_tokens": 2039123741.0,
      "step": 1180
    },
    {
      "epoch": 1.32523051131601,
      "grad_norm": 0.27191412448883057,
      "learning_rate": 7.507768800497204e-06,
      "loss": 0.2956,
      "num_tokens": 2047780009.0,
      "step": 1185
    },
    {
      "epoch": 1.3308186644314053,
      "grad_norm": 0.25106704235076904,
      "learning_rate": 7.445618396519578e-06,
      "loss": 0.2943,
      "num_tokens": 2056375982.0,
      "step": 1190
    },
    {
      "epoch": 1.3364068175468007,
      "grad_norm": 0.24563732743263245,
      "learning_rate": 7.383467992541952e-06,
      "loss": 0.2941,
      "num_tokens": 2064961201.0,
      "step": 1195
    },
    {
      "epoch": 1.341994970662196,
      "grad_norm": 0.22767555713653564,
      "learning_rate": 7.321317588564326e-06,
      "loss": 0.292,
      "num_tokens": 2073631036.0,
      "step": 1200
    },
    {
      "epoch": 1.3475831237775915,
      "grad_norm": 0.2571980655193329,
      "learning_rate": 7.259167184586701e-06,
      "loss": 0.2927,
      "num_tokens": 2082296071.0,
      "step": 1205
    },
    {
      "epoch": 1.3531712768929869,
      "grad_norm": 0.3146756887435913,
      "learning_rate": 7.1970167806090745e-06,
      "loss": 0.2941,
      "num_tokens": 2090940329.0,
      "step": 1210
    },
    {
      "epoch": 1.3587594300083823,
      "grad_norm": 0.26144880056381226,
      "learning_rate": 7.134866376631449e-06,
      "loss": 0.2931,
      "num_tokens": 2099611466.0,
      "step": 1215
    },
    {
      "epoch": 1.3643475831237777,
      "grad_norm": 0.22552551329135895,
      "learning_rate": 7.072715972653823e-06,
      "loss": 0.2936,
      "num_tokens": 2108254412.0,
      "step": 1220
    },
    {
      "epoch": 1.369935736239173,
      "grad_norm": 0.23567764461040497,
      "learning_rate": 7.0105655686761964e-06,
      "loss": 0.2941,
      "num_tokens": 2116883454.0,
      "step": 1225
    },
    {
      "epoch": 1.3755238893545683,
      "grad_norm": 0.26882192492485046,
      "learning_rate": 6.948415164698571e-06,
      "loss": 0.292,
      "num_tokens": 2125531177.0,
      "step": 1230
    },
    {
      "epoch": 1.3811120424699637,
      "grad_norm": 0.24953489005565643,
      "learning_rate": 6.886264760720945e-06,
      "loss": 0.2926,
      "num_tokens": 2134240135.0,
      "step": 1235
    },
    {
      "epoch": 1.386700195585359,
      "grad_norm": 0.21380962431430817,
      "learning_rate": 6.82411435674332e-06,
      "loss": 0.2918,
      "num_tokens": 2142853666.0,
      "step": 1240
    },
    {
      "epoch": 1.3922883487007545,
      "grad_norm": 0.2518530786037445,
      "learning_rate": 6.7619639527656935e-06,
      "loss": 0.2923,
      "num_tokens": 2151466944.0,
      "step": 1245
    },
    {
      "epoch": 1.3978765018161496,
      "grad_norm": 0.2241937220096588,
      "learning_rate": 6.699813548788068e-06,
      "loss": 0.2912,
      "num_tokens": 2160077640.0,
      "step": 1250
    },
    {
      "epoch": 1.403464654931545,
      "grad_norm": 0.21372365951538086,
      "learning_rate": 6.637663144810442e-06,
      "loss": 0.2891,
      "num_tokens": 2168745642.0,
      "step": 1255
    },
    {
      "epoch": 1.4090528080469404,
      "grad_norm": 0.2071543037891388,
      "learning_rate": 6.5755127408328155e-06,
      "loss": 0.2917,
      "num_tokens": 2177378583.0,
      "step": 1260
    },
    {
      "epoch": 1.4146409611623358,
      "grad_norm": 0.21559269726276398,
      "learning_rate": 6.51336233685519e-06,
      "loss": 0.291,
      "num_tokens": 2186021687.0,
      "step": 1265
    },
    {
      "epoch": 1.4202291142777312,
      "grad_norm": 0.2227635681629181,
      "learning_rate": 6.451211932877565e-06,
      "loss": 0.2918,
      "num_tokens": 2194630054.0,
      "step": 1270
    },
    {
      "epoch": 1.4258172673931266,
      "grad_norm": 0.21467848122119904,
      "learning_rate": 6.389061528899938e-06,
      "loss": 0.2905,
      "num_tokens": 2203290213.0,
      "step": 1275
    },
    {
      "epoch": 1.431405420508522,
      "grad_norm": 0.2505319118499756,
      "learning_rate": 6.326911124922313e-06,
      "loss": 0.2896,
      "num_tokens": 2211929982.0,
      "step": 1280
    },
    {
      "epoch": 1.4369935736239172,
      "grad_norm": 0.23950278759002686,
      "learning_rate": 6.264760720944687e-06,
      "loss": 0.2922,
      "num_tokens": 2220561765.0,
      "step": 1285
    },
    {
      "epoch": 1.4425817267393126,
      "grad_norm": 0.2342691272497177,
      "learning_rate": 6.20261031696706e-06,
      "loss": 0.2897,
      "num_tokens": 2229218119.0,
      "step": 1290
    },
    {
      "epoch": 1.448169879854708,
      "grad_norm": 0.2966405749320984,
      "learning_rate": 6.140459912989435e-06,
      "loss": 0.2908,
      "num_tokens": 2237855571.0,
      "step": 1295
    },
    {
      "epoch": 1.4537580329701034,
      "grad_norm": 0.24424313008785248,
      "learning_rate": 6.07830950901181e-06,
      "loss": 0.2907,
      "num_tokens": 2246501573.0,
      "step": 1300
    },
    {
      "epoch": 1.4593461860854988,
      "grad_norm": 0.23377157747745514,
      "learning_rate": 6.016159105034183e-06,
      "loss": 0.2909,
      "num_tokens": 2255130456.0,
      "step": 1305
    },
    {
      "epoch": 1.464934339200894,
      "grad_norm": 0.21223483979701996,
      "learning_rate": 5.9540087010565574e-06,
      "loss": 0.29,
      "num_tokens": 2263746485.0,
      "step": 1310
    },
    {
      "epoch": 1.4705224923162894,
      "grad_norm": 0.23736342787742615,
      "learning_rate": 5.891858297078932e-06,
      "loss": 0.2901,
      "num_tokens": 2272362242.0,
      "step": 1315
    },
    {
      "epoch": 1.4761106454316848,
      "grad_norm": 0.2492651641368866,
      "learning_rate": 5.829707893101305e-06,
      "loss": 0.2895,
      "num_tokens": 2280978917.0,
      "step": 1320
    },
    {
      "epoch": 1.4816987985470802,
      "grad_norm": 0.23298749327659607,
      "learning_rate": 5.767557489123679e-06,
      "loss": 0.2883,
      "num_tokens": 2289618555.0,
      "step": 1325
    },
    {
      "epoch": 1.4872869516624756,
      "grad_norm": 0.23725226521492004,
      "learning_rate": 5.7054070851460545e-06,
      "loss": 0.2875,
      "num_tokens": 2298320508.0,
      "step": 1330
    },
    {
      "epoch": 1.492875104777871,
      "grad_norm": 0.22084194421768188,
      "learning_rate": 5.643256681168427e-06,
      "loss": 0.2892,
      "num_tokens": 2306960721.0,
      "step": 1335
    },
    {
      "epoch": 1.4984632578932664,
      "grad_norm": 0.24300003051757812,
      "learning_rate": 5.581106277190802e-06,
      "loss": 0.289,
      "num_tokens": 2315621645.0,
      "step": 1340
    },
    {
      "epoch": 1.5040514110086618,
      "grad_norm": 0.24459832906723022,
      "learning_rate": 5.5189558732131765e-06,
      "loss": 0.2896,
      "num_tokens": 2324231293.0,
      "step": 1345
    },
    {
      "epoch": 1.509639564124057,
      "grad_norm": 0.20767633616924286,
      "learning_rate": 5.456805469235551e-06,
      "loss": 0.288,
      "num_tokens": 2332877527.0,
      "step": 1350
    },
    {
      "epoch": 1.5152277172394524,
      "grad_norm": 0.2096388041973114,
      "learning_rate": 5.394655065257924e-06,
      "loss": 0.2892,
      "num_tokens": 2341473674.0,
      "step": 1355
    },
    {
      "epoch": 1.5208158703548478,
      "grad_norm": 0.21928216516971588,
      "learning_rate": 5.3325046612802985e-06,
      "loss": 0.288,
      "num_tokens": 2350114516.0,
      "step": 1360
    },
    {
      "epoch": 1.526404023470243,
      "grad_norm": 0.22901827096939087,
      "learning_rate": 5.270354257302674e-06,
      "loss": 0.2878,
      "num_tokens": 2358726348.0,
      "step": 1365
    },
    {
      "epoch": 1.5319921765856384,
      "grad_norm": 0.23737923800945282,
      "learning_rate": 5.208203853325047e-06,
      "loss": 0.2873,
      "num_tokens": 2367370170.0,
      "step": 1370
    },
    {
      "epoch": 1.5375803297010338,
      "grad_norm": 0.2369966357946396,
      "learning_rate": 5.146053449347421e-06,
      "loss": 0.287,
      "num_tokens": 2375996366.0,
      "step": 1375
    },
    {
      "epoch": 1.5431684828164292,
      "grad_norm": 0.23443567752838135,
      "learning_rate": 5.083903045369796e-06,
      "loss": 0.2874,
      "num_tokens": 2384613549.0,
      "step": 1380
    },
    {
      "epoch": 1.5487566359318246,
      "grad_norm": 0.23766714334487915,
      "learning_rate": 5.021752641392169e-06,
      "loss": 0.2878,
      "num_tokens": 2393259198.0,
      "step": 1385
    },
    {
      "epoch": 1.55434478904722,
      "grad_norm": 0.25128668546676636,
      "learning_rate": 4.959602237414543e-06,
      "loss": 0.288,
      "num_tokens": 2401875320.0,
      "step": 1390
    },
    {
      "epoch": 1.5599329421626154,
      "grad_norm": 0.25003600120544434,
      "learning_rate": 4.897451833436918e-06,
      "loss": 0.2872,
      "num_tokens": 2410464461.0,
      "step": 1395
    },
    {
      "epoch": 1.5655210952780108,
      "grad_norm": 0.23323696851730347,
      "learning_rate": 4.835301429459292e-06,
      "loss": 0.2877,
      "num_tokens": 2419104733.0,
      "step": 1400
    },
    {
      "epoch": 1.571109248393406,
      "grad_norm": 0.22660532593727112,
      "learning_rate": 4.773151025481666e-06,
      "loss": 0.2868,
      "num_tokens": 2427723495.0,
      "step": 1405
    },
    {
      "epoch": 1.5766974015088013,
      "grad_norm": 0.23528066277503967,
      "learning_rate": 4.7110006215040396e-06,
      "loss": 0.2858,
      "num_tokens": 2436394221.0,
      "step": 1410
    },
    {
      "epoch": 1.5822855546241967,
      "grad_norm": 0.20659767091274261,
      "learning_rate": 4.648850217526415e-06,
      "loss": 0.2867,
      "num_tokens": 2444987195.0,
      "step": 1415
    },
    {
      "epoch": 1.587873707739592,
      "grad_norm": 0.202503964304924,
| "learning_rate": 4.586699813548788e-06, | |
| "loss": 0.287, | |
| "num_tokens": 2453647252.0, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 1.5934618608549873, | |
| "grad_norm": 0.20872440934181213, | |
| "learning_rate": 4.524549409571163e-06, | |
| "loss": 0.2858, | |
| "num_tokens": 2462329940.0, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 1.5990500139703827, | |
| "grad_norm": 0.2105245143175125, | |
| "learning_rate": 4.462399005593537e-06, | |
| "loss": 0.2873, | |
| "num_tokens": 2471002799.0, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 1.604638167085778, | |
| "grad_norm": 0.26558104157447815, | |
| "learning_rate": 4.400248601615911e-06, | |
| "loss": 0.2861, | |
| "num_tokens": 2479692110.0, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 1.6102263202011735, | |
| "grad_norm": 0.21139390766620636, | |
| "learning_rate": 4.338098197638285e-06, | |
| "loss": 0.2852, | |
| "num_tokens": 2488300993.0, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 1.615814473316569, | |
| "grad_norm": 0.215835839509964, | |
| "learning_rate": 4.2759477936606595e-06, | |
| "loss": 0.2844, | |
| "num_tokens": 2496941072.0, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 1.6214026264319643, | |
| "grad_norm": 0.2279125303030014, | |
| "learning_rate": 4.213797389683033e-06, | |
| "loss": 0.2857, | |
| "num_tokens": 2505590520.0, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 1.6269907795473597, | |
| "grad_norm": 0.2454751431941986, | |
| "learning_rate": 4.151646985705407e-06, | |
| "loss": 0.284, | |
| "num_tokens": 2514264211.0, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 1.6325789326627551, | |
| "grad_norm": 0.22797976434230804, | |
| "learning_rate": 4.0894965817277815e-06, | |
| "loss": 0.2853, | |
| "num_tokens": 2522903645.0, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 1.6381670857781503, | |
| "grad_norm": 0.2286279946565628, | |
| "learning_rate": 4.027346177750156e-06, | |
| "loss": 0.2841, | |
| "num_tokens": 2531516266.0, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 1.6437552388935457, | |
| "grad_norm": 0.19183382391929626, | |
| "learning_rate": 3.96519577377253e-06, | |
| "loss": 0.2834, | |
| "num_tokens": 2540132651.0, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 1.649343392008941, | |
| "grad_norm": 0.20147649943828583, | |
| "learning_rate": 3.903045369794904e-06, | |
| "loss": 0.2844, | |
| "num_tokens": 2548789057.0, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 1.6549315451243363, | |
| "grad_norm": 0.21062539517879486, | |
| "learning_rate": 3.8408949658172786e-06, | |
| "loss": 0.286, | |
| "num_tokens": 2557438457.0, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 1.6605196982397317, | |
| "grad_norm": 0.20658695697784424, | |
| "learning_rate": 3.7787445618396524e-06, | |
| "loss": 0.2844, | |
| "num_tokens": 2566104615.0, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 1.666107851355127, | |
| "grad_norm": 0.21198123693466187, | |
| "learning_rate": 3.7165941578620263e-06, | |
| "loss": 0.2845, | |
| "num_tokens": 2574730671.0, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 1.6716960044705225, | |
| "grad_norm": 0.22207552194595337, | |
| "learning_rate": 3.6544437538844006e-06, | |
| "loss": 0.2841, | |
| "num_tokens": 2583386151.0, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 1.6772841575859179, | |
| "grad_norm": 0.21470749378204346, | |
| "learning_rate": 3.592293349906775e-06, | |
| "loss": 0.2854, | |
| "num_tokens": 2592007075.0, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 1.6828723107013133, | |
| "grad_norm": 0.22267666459083557, | |
| "learning_rate": 3.5301429459291487e-06, | |
| "loss": 0.2843, | |
| "num_tokens": 2600618307.0, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 1.6884604638167087, | |
| "grad_norm": 0.25010621547698975, | |
| "learning_rate": 3.467992541951523e-06, | |
| "loss": 0.2834, | |
| "num_tokens": 2609275015.0, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 1.694048616932104, | |
| "grad_norm": 0.21234430372714996, | |
| "learning_rate": 3.4058421379738972e-06, | |
| "loss": 0.2852, | |
| "num_tokens": 2617927159.0, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 1.6996367700474995, | |
| "grad_norm": 0.212777242064476, | |
| "learning_rate": 3.343691733996271e-06, | |
| "loss": 0.2835, | |
| "num_tokens": 2626598520.0, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 1.7052249231628946, | |
| "grad_norm": 0.2279113233089447, | |
| "learning_rate": 3.2815413300186454e-06, | |
| "loss": 0.2836, | |
| "num_tokens": 2635241607.0, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 1.71081307627829, | |
| "grad_norm": 0.2212340086698532, | |
| "learning_rate": 3.2193909260410196e-06, | |
| "loss": 0.2828, | |
| "num_tokens": 2643858312.0, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 1.7164012293936852, | |
| "grad_norm": 0.20195508003234863, | |
| "learning_rate": 3.157240522063394e-06, | |
| "loss": 0.2835, | |
| "num_tokens": 2652491273.0, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 1.7219893825090806, | |
| "grad_norm": 0.21651341021060944, | |
| "learning_rate": 3.0950901180857678e-06, | |
| "loss": 0.2826, | |
| "num_tokens": 2661125711.0, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 1.727577535624476, | |
| "grad_norm": 0.20481155812740326, | |
| "learning_rate": 3.0329397141081416e-06, | |
| "loss": 0.2849, | |
| "num_tokens": 2669737453.0, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 1.7331656887398714, | |
| "grad_norm": 0.20491968095302582, | |
| "learning_rate": 2.9707893101305163e-06, | |
| "loss": 0.2826, | |
| "num_tokens": 2678374413.0, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 1.7387538418552668, | |
| "grad_norm": 0.2029823362827301, | |
| "learning_rate": 2.90863890615289e-06, | |
| "loss": 0.2838, | |
| "num_tokens": 2686987442.0, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 1.7443419949706622, | |
| "grad_norm": 0.2510753571987152, | |
| "learning_rate": 2.846488502175264e-06, | |
| "loss": 0.2824, | |
| "num_tokens": 2695607973.0, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 1.7499301480860576, | |
| "grad_norm": 0.23176781833171844, | |
| "learning_rate": 2.7843380981976387e-06, | |
| "loss": 0.2829, | |
| "num_tokens": 2704257689.0, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 1.755518301201453, | |
| "grad_norm": 0.22666539251804352, | |
| "learning_rate": 2.7221876942200126e-06, | |
| "loss": 0.282, | |
| "num_tokens": 2712898044.0, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 1.7611064543168484, | |
| "grad_norm": 0.22624371945858002, | |
| "learning_rate": 2.6600372902423864e-06, | |
| "loss": 0.2833, | |
| "num_tokens": 2721529682.0, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 1.7666946074322436, | |
| "grad_norm": 0.24672020971775055, | |
| "learning_rate": 2.597886886264761e-06, | |
| "loss": 0.2837, | |
| "num_tokens": 2730153928.0, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 1.772282760547639, | |
| "grad_norm": 0.22057084739208221, | |
| "learning_rate": 2.535736482287135e-06, | |
| "loss": 0.2825, | |
| "num_tokens": 2738797091.0, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 1.7778709136630344, | |
| "grad_norm": 0.1964276283979416, | |
| "learning_rate": 2.4735860783095093e-06, | |
| "loss": 0.2831, | |
| "num_tokens": 2747438652.0, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 1.7834590667784296, | |
| "grad_norm": 0.18753261864185333, | |
| "learning_rate": 2.4114356743318835e-06, | |
| "loss": 0.2836, | |
| "num_tokens": 2756092674.0, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 1.789047219893825, | |
| "grad_norm": 0.19848628342151642, | |
| "learning_rate": 2.349285270354258e-06, | |
| "loss": 0.2813, | |
| "num_tokens": 2764728890.0, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 1.7946353730092204, | |
| "grad_norm": 0.19272516667842865, | |
| "learning_rate": 2.2871348663766317e-06, | |
| "loss": 0.2829, | |
| "num_tokens": 2773303612.0, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 1.8002235261246158, | |
| "grad_norm": 0.20509815216064453, | |
| "learning_rate": 2.224984462399006e-06, | |
| "loss": 0.2822, | |
| "num_tokens": 2781943420.0, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 1.8058116792400112, | |
| "grad_norm": 0.19751939177513123, | |
| "learning_rate": 2.16283405842138e-06, | |
| "loss": 0.2813, | |
| "num_tokens": 2790544836.0, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 1.8113998323554066, | |
| "grad_norm": 0.2047342211008072, | |
| "learning_rate": 2.100683654443754e-06, | |
| "loss": 0.2814, | |
| "num_tokens": 2799179051.0, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 1.816987985470802, | |
| "grad_norm": 0.21204353868961334, | |
| "learning_rate": 2.0385332504661283e-06, | |
| "loss": 0.2819, | |
| "num_tokens": 2807859657.0, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 1.8225761385861974, | |
| "grad_norm": 0.20583118498325348, | |
| "learning_rate": 1.976382846488502e-06, | |
| "loss": 0.2815, | |
| "num_tokens": 2816535171.0, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 1.8281642917015928, | |
| "grad_norm": 0.21081192791461945, | |
| "learning_rate": 1.9142324425108765e-06, | |
| "loss": 0.2826, | |
| "num_tokens": 2825189719.0, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 1.833752444816988, | |
| "grad_norm": 0.18788641691207886, | |
| "learning_rate": 1.8520820385332505e-06, | |
| "loss": 0.2804, | |
| "num_tokens": 2833874847.0, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 1.8393405979323834, | |
| "grad_norm": 0.20237120985984802, | |
| "learning_rate": 1.7899316345556248e-06, | |
| "loss": 0.2824, | |
| "num_tokens": 2842459461.0, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 1.8449287510477788, | |
| "grad_norm": 0.1973085254430771, | |
| "learning_rate": 1.7277812305779989e-06, | |
| "loss": 0.2815, | |
| "num_tokens": 2851115535.0, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 1.850516904163174, | |
| "grad_norm": 0.1954537183046341, | |
| "learning_rate": 1.6656308266003731e-06, | |
| "loss": 0.2811, | |
| "num_tokens": 2859739184.0, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 1.8561050572785693, | |
| "grad_norm": 0.1938507854938507, | |
| "learning_rate": 1.6034804226227472e-06, | |
| "loss": 0.281, | |
| "num_tokens": 2868403974.0, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 1.8616932103939647, | |
| "grad_norm": 0.20135918259620667, | |
| "learning_rate": 1.5413300186451213e-06, | |
| "loss": 0.2821, | |
| "num_tokens": 2877010029.0, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 1.8672813635093601, | |
| "grad_norm": 0.19300834834575653, | |
| "learning_rate": 1.4791796146674955e-06, | |
| "loss": 0.2818, | |
| "num_tokens": 2885631493.0, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 1.8728695166247555, | |
| "grad_norm": 0.19174011051654816, | |
| "learning_rate": 1.4170292106898694e-06, | |
| "loss": 0.282, | |
| "num_tokens": 2894271003.0, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 1.878457669740151, | |
| "grad_norm": 0.19045238196849823, | |
| "learning_rate": 1.3548788067122437e-06, | |
| "loss": 0.2812, | |
| "num_tokens": 2902878445.0, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 1.8840458228555463, | |
| "grad_norm": 0.18684354424476624, | |
| "learning_rate": 1.292728402734618e-06, | |
| "loss": 0.2824, | |
| "num_tokens": 2911477156.0, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 1.8896339759709417, | |
| "grad_norm": 0.1858016550540924, | |
| "learning_rate": 1.230577998756992e-06, | |
| "loss": 0.2804, | |
| "num_tokens": 2920099568.0, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 1.895222129086337, | |
| "grad_norm": 0.18339896202087402, | |
| "learning_rate": 1.1684275947793663e-06, | |
| "loss": 0.2827, | |
| "num_tokens": 2928733423.0, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 1.9008102822017323, | |
| "grad_norm": 0.1879528909921646, | |
| "learning_rate": 1.1062771908017403e-06, | |
| "loss": 0.2812, | |
| "num_tokens": 2937387124.0, | |
| "step": 1700 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1788, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.0851623166841717e+19, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
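
A minimal sketch of how a file like this is typically consumed (not part of the checkpoint itself): load the JSON, filter `log_history` for entries that carry a training `loss`, and plot loss against optimizer step. The path `trainer_state.json` is the standard filename the Hugging Face Trainer writes into each checkpoint directory; adjust it to wherever this file lives.

```python
import json

import matplotlib.pyplot as plt

# Assumption: this file is saved locally as "trainer_state.json";
# adjust the path to your checkpoint directory as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry is one logging event (every `logging_steps` = 5
# optimizer steps in this run). Keep only entries that report a training
# loss, skipping any eval-only or summary entries.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"epoch {state['epoch']:.2f} of {state['num_train_epochs']}")
plt.show()
```

For this run the curve flattens near 0.28 by step 1700 (epoch ~1.90 of 2, `max_steps` 1788), consistent with the nearly exhausted linear learning-rate schedule in the tail of the log.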