| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.0, |
| "eval_steps": 500, |
| "global_step": 1106, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0018083182640144665, |
| "grad_norm": 124.59085845947266, |
| "learning_rate": 1.7857142857142858e-07, |
| "loss": 1.8767, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.003616636528028933, |
| "grad_norm": 123.01050567626953, |
| "learning_rate": 3.5714285714285716e-07, |
| "loss": 1.8663, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0054249547920434, |
| "grad_norm": 157.79428100585938, |
| "learning_rate": 5.357142857142857e-07, |
| "loss": 1.9201, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.007233273056057866, |
| "grad_norm": 135.23194885253906, |
| "learning_rate": 7.142857142857143e-07, |
| "loss": 1.9675, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.009041591320072333, |
| "grad_norm": 104.50148010253906, |
| "learning_rate": 8.928571428571429e-07, |
| "loss": 2.1019, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0108499095840868, |
| "grad_norm": 121.24005126953125, |
| "learning_rate": 1.0714285714285714e-06, |
| "loss": 2.043, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.012658227848101266, |
| "grad_norm": 78.78042602539062, |
| "learning_rate": 1.25e-06, |
| "loss": 1.9918, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.014466546112115732, |
| "grad_norm": 63.876441955566406, |
| "learning_rate": 1.4285714285714286e-06, |
| "loss": 2.0925, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0162748643761302, |
| "grad_norm": 128.54800415039062, |
| "learning_rate": 1.6071428571428574e-06, |
| "loss": 1.9507, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.018083182640144666, |
| "grad_norm": 110.11956024169922, |
| "learning_rate": 1.7857142857142859e-06, |
| "loss": 1.9933, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.019891500904159132, |
| "grad_norm": 186.6797332763672, |
| "learning_rate": 1.9642857142857144e-06, |
| "loss": 1.7431, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.0216998191681736, |
| "grad_norm": 289.7953796386719, |
| "learning_rate": 2.1428571428571427e-06, |
| "loss": 1.7807, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.023508137432188065, |
| "grad_norm": 275.05596923828125, |
| "learning_rate": 2.321428571428572e-06, |
| "loss": 1.7687, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02531645569620253, |
| "grad_norm": 318.3794250488281, |
| "learning_rate": 2.5e-06, |
| "loss": 1.8591, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.027124773960216998, |
| "grad_norm": 154.50942993164062, |
| "learning_rate": 2.6785714285714285e-06, |
| "loss": 1.7368, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.028933092224231464, |
| "grad_norm": 65.9786148071289, |
| "learning_rate": 2.8571428571428573e-06, |
| "loss": 1.654, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.03074141048824593, |
| "grad_norm": 97.9104995727539, |
| "learning_rate": 3.0357142857142856e-06, |
| "loss": 1.7364, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.0325497287522604, |
| "grad_norm": 86.56370544433594, |
| "learning_rate": 3.2142857142857147e-06, |
| "loss": 1.7569, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.034358047016274866, |
| "grad_norm": 58.56660079956055, |
| "learning_rate": 3.3928571428571435e-06, |
| "loss": 1.6885, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03616636528028933, |
| "grad_norm": 41.65616226196289, |
| "learning_rate": 3.5714285714285718e-06, |
| "loss": 1.5784, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.0379746835443038, |
| "grad_norm": 33.33378601074219, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 1.62, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.039783001808318265, |
| "grad_norm": 29.60235595703125, |
| "learning_rate": 3.928571428571429e-06, |
| "loss": 1.6855, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.04159132007233273, |
| "grad_norm": 26.571788787841797, |
| "learning_rate": 4.107142857142857e-06, |
| "loss": 1.7122, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.0433996383363472, |
| "grad_norm": 19.6992130279541, |
| "learning_rate": 4.2857142857142855e-06, |
| "loss": 1.6152, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.045207956600361664, |
| "grad_norm": 40.05808639526367, |
| "learning_rate": 4.464285714285715e-06, |
| "loss": 1.6236, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04701627486437613, |
| "grad_norm": 11.833436012268066, |
| "learning_rate": 4.642857142857144e-06, |
| "loss": 1.2085, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.048824593128390596, |
| "grad_norm": 19.057842254638672, |
| "learning_rate": 4.821428571428572e-06, |
| "loss": 1.1683, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.05063291139240506, |
| "grad_norm": 21.635419845581055, |
| "learning_rate": 5e-06, |
| "loss": 1.2539, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.05244122965641953, |
| "grad_norm": 19.195417404174805, |
| "learning_rate": 5.1785714285714296e-06, |
| "loss": 1.2789, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.054249547920433995, |
| "grad_norm": 13.066848754882812, |
| "learning_rate": 5.357142857142857e-06, |
| "loss": 1.2662, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.05605786618444846, |
| "grad_norm": 5.356083393096924, |
| "learning_rate": 5.535714285714286e-06, |
| "loss": 1.1504, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.05786618444846293, |
| "grad_norm": 5.366274833679199, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 1.1215, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.059674502712477394, |
| "grad_norm": 10.569491386413574, |
| "learning_rate": 5.892857142857144e-06, |
| "loss": 1.1052, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.06148282097649186, |
| "grad_norm": 3.97001576423645, |
| "learning_rate": 6.071428571428571e-06, |
| "loss": 1.0776, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.06329113924050633, |
| "grad_norm": 6.67834997177124, |
| "learning_rate": 6.25e-06, |
| "loss": 1.1684, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.0650994575045208, |
| "grad_norm": 6.470335483551025, |
| "learning_rate": 6.4285714285714295e-06, |
| "loss": 1.0797, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.06690777576853527, |
| "grad_norm": 4.232357501983643, |
| "learning_rate": 6.607142857142858e-06, |
| "loss": 1.0273, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.06871609403254973, |
| "grad_norm": 4.908628940582275, |
| "learning_rate": 6.785714285714287e-06, |
| "loss": 1.0514, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.0705244122965642, |
| "grad_norm": 3.294133424758911, |
| "learning_rate": 6.964285714285714e-06, |
| "loss": 1.1107, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.07233273056057866, |
| "grad_norm": 3.946671962738037, |
| "learning_rate": 7.1428571428571436e-06, |
| "loss": 1.0426, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.07414104882459313, |
| "grad_norm": 5.3715949058532715, |
| "learning_rate": 7.321428571428572e-06, |
| "loss": 1.0513, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.0759493670886076, |
| "grad_norm": 7.43781852722168, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 1.06, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.07775768535262206, |
| "grad_norm": 4.168594837188721, |
| "learning_rate": 7.67857142857143e-06, |
| "loss": 1.1397, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.07956600361663653, |
| "grad_norm": 38.190696716308594, |
| "learning_rate": 7.857142857142858e-06, |
| "loss": 1.0466, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.081374321880651, |
| "grad_norm": 5.13964319229126, |
| "learning_rate": 8.035714285714286e-06, |
| "loss": 1.1128, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.08318264014466546, |
| "grad_norm": 5.730332851409912, |
| "learning_rate": 8.214285714285714e-06, |
| "loss": 1.1362, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.08499095840867993, |
| "grad_norm": 3.4547622203826904, |
| "learning_rate": 8.392857142857144e-06, |
| "loss": 1.1364, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.0867992766726944, |
| "grad_norm": 3.945357322692871, |
| "learning_rate": 8.571428571428571e-06, |
| "loss": 1.0957, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.08860759493670886, |
| "grad_norm": 14.470746994018555, |
| "learning_rate": 8.750000000000001e-06, |
| "loss": 1.0731, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.09041591320072333, |
| "grad_norm": 4.687364101409912, |
| "learning_rate": 8.92857142857143e-06, |
| "loss": 1.1294, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0922242314647378, |
| "grad_norm": 3.4039835929870605, |
| "learning_rate": 9.107142857142858e-06, |
| "loss": 0.9838, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.09403254972875226, |
| "grad_norm": 17.923242568969727, |
| "learning_rate": 9.285714285714288e-06, |
| "loss": 0.9088, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.09584086799276673, |
| "grad_norm": 3.5276126861572266, |
| "learning_rate": 9.464285714285714e-06, |
| "loss": 0.9651, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.09764918625678119, |
| "grad_norm": 4.005324840545654, |
| "learning_rate": 9.642857142857144e-06, |
| "loss": 1.0155, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.09945750452079566, |
| "grad_norm": 3.9271018505096436, |
| "learning_rate": 9.821428571428573e-06, |
| "loss": 1.0829, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.10126582278481013, |
| "grad_norm": 4.813779354095459, |
| "learning_rate": 1e-05, |
| "loss": 0.9691, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.10307414104882459, |
| "grad_norm": 3.8365554809570312, |
| "learning_rate": 9.999977619961366e-06, |
| "loss": 0.9716, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.10488245931283906, |
| "grad_norm": 4.112904071807861, |
| "learning_rate": 9.999910480045805e-06, |
| "loss": 1.0489, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.10669077757685352, |
| "grad_norm": 4.034896373748779, |
| "learning_rate": 9.999798580854356e-06, |
| "loss": 1.0521, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.10849909584086799, |
| "grad_norm": 4.018319129943848, |
| "learning_rate": 9.999641923388745e-06, |
| "loss": 0.9569, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.11030741410488246, |
| "grad_norm": 4.899073123931885, |
| "learning_rate": 9.999440509051367e-06, |
| "loss": 0.9694, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.11211573236889692, |
| "grad_norm": 9.774127960205078, |
| "learning_rate": 9.999194339645292e-06, |
| "loss": 1.0075, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.11392405063291139, |
| "grad_norm": 5.587623596191406, |
| "learning_rate": 9.998903417374228e-06, |
| "loss": 1.0096, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.11573236889692586, |
| "grad_norm": 4.204263687133789, |
| "learning_rate": 9.998567744842518e-06, |
| "loss": 0.9308, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.11754068716094032, |
| "grad_norm": 5.104825973510742, |
| "learning_rate": 9.998187325055107e-06, |
| "loss": 0.8768, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.11934900542495479, |
| "grad_norm": 5.699406623840332, |
| "learning_rate": 9.997762161417517e-06, |
| "loss": 0.9115, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.12115732368896925, |
| "grad_norm": 4.967496871948242, |
| "learning_rate": 9.997292257735822e-06, |
| "loss": 0.8973, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.12296564195298372, |
| "grad_norm": 5.5881500244140625, |
| "learning_rate": 9.996777618216608e-06, |
| "loss": 0.9434, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.12477396021699819, |
| "grad_norm": 5.467623233795166, |
| "learning_rate": 9.996218247466932e-06, |
| "loss": 0.9518, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.12658227848101267, |
| "grad_norm": 5.854726314544678, |
| "learning_rate": 9.995614150494293e-06, |
| "loss": 0.9395, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.12839059674502712, |
| "grad_norm": 8.993833541870117, |
| "learning_rate": 9.994965332706574e-06, |
| "loss": 0.9938, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.1301989150090416, |
| "grad_norm": 6.169260501861572, |
| "learning_rate": 9.994271799912004e-06, |
| "loss": 0.9776, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.13200723327305605, |
| "grad_norm": 6.879613399505615, |
| "learning_rate": 9.993533558319098e-06, |
| "loss": 0.9327, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.13381555153707053, |
| "grad_norm": 6.696218490600586, |
| "learning_rate": 9.992750614536606e-06, |
| "loss": 0.972, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.13562386980108498, |
| "grad_norm": 6.739992618560791, |
| "learning_rate": 9.991922975573453e-06, |
| "loss": 1.0328, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.13743218806509946, |
| "grad_norm": 5.942216873168945, |
| "learning_rate": 9.991050648838676e-06, |
| "loss": 0.834, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.13924050632911392, |
| "grad_norm": 5.648139953613281, |
| "learning_rate": 9.990133642141359e-06, |
| "loss": 0.7784, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.1410488245931284, |
| "grad_norm": 6.231765270233154, |
| "learning_rate": 9.989171963690556e-06, |
| "loss": 0.7989, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.14285714285714285, |
| "grad_norm": 6.385239124298096, |
| "learning_rate": 9.988165622095233e-06, |
| "loss": 0.8615, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.14466546112115733, |
| "grad_norm": 6.530540943145752, |
| "learning_rate": 9.987114626364172e-06, |
| "loss": 0.8554, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.14647377938517178, |
| "grad_norm": 6.088751792907715, |
| "learning_rate": 9.986018985905901e-06, |
| "loss": 0.8795, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.14828209764918626, |
| "grad_norm": 5.742943286895752, |
| "learning_rate": 9.984878710528615e-06, |
| "loss": 0.7886, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.15009041591320071, |
| "grad_norm": 6.685095310211182, |
| "learning_rate": 9.983693810440073e-06, |
| "loss": 0.7599, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.1518987341772152, |
| "grad_norm": 6.117989540100098, |
| "learning_rate": 9.982464296247523e-06, |
| "loss": 0.8052, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.15370705244122965, |
| "grad_norm": 6.1924004554748535, |
| "learning_rate": 9.98119017895759e-06, |
| "loss": 0.7964, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.15551537070524413, |
| "grad_norm": 5.929360866546631, |
| "learning_rate": 9.979871469976197e-06, |
| "loss": 0.8647, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.15732368896925858, |
| "grad_norm": 5.7412872314453125, |
| "learning_rate": 9.978508181108442e-06, |
| "loss": 0.7466, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.15913200723327306, |
| "grad_norm": 5.8075995445251465, |
| "learning_rate": 9.97710032455851e-06, |
| "loss": 0.6711, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.1609403254972875, |
| "grad_norm": 6.012415409088135, |
| "learning_rate": 9.975647912929558e-06, |
| "loss": 0.6933, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.162748643761302, |
| "grad_norm": 5.956928253173828, |
| "learning_rate": 9.974150959223591e-06, |
| "loss": 0.7058, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.16455696202531644, |
| "grad_norm": 5.904573917388916, |
| "learning_rate": 9.972609476841368e-06, |
| "loss": 0.7432, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.16636528028933092, |
| "grad_norm": 6.147512912750244, |
| "learning_rate": 9.971023479582258e-06, |
| "loss": 0.6556, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.16817359855334538, |
| "grad_norm": 5.897213935852051, |
| "learning_rate": 9.969392981644138e-06, |
| "loss": 0.7354, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.16998191681735986, |
| "grad_norm": 6.633100509643555, |
| "learning_rate": 9.967717997623245e-06, |
| "loss": 0.694, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.1717902350813743, |
| "grad_norm": 6.375577449798584, |
| "learning_rate": 9.965998542514066e-06, |
| "loss": 0.6957, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.1735985533453888, |
| "grad_norm": 6.096574783325195, |
| "learning_rate": 9.964234631709188e-06, |
| "loss": 0.6565, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.17540687160940324, |
| "grad_norm": 6.757542610168457, |
| "learning_rate": 9.962426280999168e-06, |
| "loss": 0.6353, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.17721518987341772, |
| "grad_norm": 6.78255558013916, |
| "learning_rate": 9.960573506572391e-06, |
| "loss": 0.6166, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.17902350813743217, |
| "grad_norm": 5.72428035736084, |
| "learning_rate": 9.95867632501492e-06, |
| "loss": 0.7272, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.18083182640144665, |
| "grad_norm": 6.663970470428467, |
| "learning_rate": 9.956734753310355e-06, |
| "loss": 0.6538, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.18264014466546113, |
| "grad_norm": 5.2808098793029785, |
| "learning_rate": 9.954748808839675e-06, |
| "loss": 0.5545, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.1844484629294756, |
| "grad_norm": 5.146191596984863, |
| "learning_rate": 9.952718509381086e-06, |
| "loss": 0.6049, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.18625678119349007, |
| "grad_norm": 5.410797119140625, |
| "learning_rate": 9.950643873109861e-06, |
| "loss": 0.6559, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.18806509945750452, |
| "grad_norm": 5.563495635986328, |
| "learning_rate": 9.948524918598175e-06, |
| "loss": 0.6172, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.189873417721519, |
| "grad_norm": 4.960428714752197, |
| "learning_rate": 9.946361664814942e-06, |
| "loss": 0.6353, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.19168173598553345, |
| "grad_norm": 5.002950191497803, |
| "learning_rate": 9.944154131125643e-06, |
| "loss": 0.6519, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.19349005424954793, |
| "grad_norm": 5.334290027618408, |
| "learning_rate": 9.941902337292156e-06, |
| "loss": 0.5404, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.19529837251356238, |
| "grad_norm": 5.085877418518066, |
| "learning_rate": 9.93960630347257e-06, |
| "loss": 0.5654, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.19710669077757687, |
| "grad_norm": 4.713104724884033, |
| "learning_rate": 9.937266050221015e-06, |
| "loss": 0.4798, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.19891500904159132, |
| "grad_norm": 4.222567558288574, |
| "learning_rate": 9.934881598487478e-06, |
| "loss": 0.6132, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.2007233273056058, |
| "grad_norm": 4.574428081512451, |
| "learning_rate": 9.932452969617607e-06, |
| "loss": 0.5248, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.20253164556962025, |
| "grad_norm": 4.324956893920898, |
| "learning_rate": 9.929980185352525e-06, |
| "loss": 0.5395, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.20433996383363473, |
| "grad_norm": 4.227971076965332, |
| "learning_rate": 9.927463267828635e-06, |
| "loss": 0.4582, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.20614828209764918, |
| "grad_norm": 4.175861358642578, |
| "learning_rate": 9.924902239577419e-06, |
| "loss": 0.482, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.20795660036166366, |
| "grad_norm": 3.743654251098633, |
| "learning_rate": 9.922297123525244e-06, |
| "loss": 0.5059, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.20976491862567812, |
| "grad_norm": 3.767977714538574, |
| "learning_rate": 9.91964794299315e-06, |
| "loss": 0.4781, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.2115732368896926, |
| "grad_norm": 3.4714298248291016, |
| "learning_rate": 9.91695472169664e-06, |
| "loss": 0.5297, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.21338155515370705, |
| "grad_norm": 3.7578494548797607, |
| "learning_rate": 9.914217483745472e-06, |
| "loss": 0.4538, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.21518987341772153, |
| "grad_norm": 3.590576410293579, |
| "learning_rate": 9.911436253643445e-06, |
| "loss": 0.4805, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.21699819168173598, |
| "grad_norm": 3.2946393489837646, |
| "learning_rate": 9.90861105628817e-06, |
| "loss": 0.4111, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.21880650994575046, |
| "grad_norm": 3.1186115741729736, |
| "learning_rate": 9.905741916970863e-06, |
| "loss": 0.5042, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.2206148282097649, |
| "grad_norm": 3.2415828704833984, |
| "learning_rate": 9.902828861376101e-06, |
| "loss": 0.5124, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.2224231464737794, |
| "grad_norm": 3.1252877712249756, |
| "learning_rate": 9.8998719155816e-06, |
| "loss": 0.4682, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.22423146473779385, |
| "grad_norm": 2.8531622886657715, |
| "learning_rate": 9.896871106057989e-06, |
| "loss": 0.4516, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.22603978300180833, |
| "grad_norm": 2.932840585708618, |
| "learning_rate": 9.89382645966856e-06, |
| "loss": 0.4935, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.22784810126582278, |
| "grad_norm": 2.7010486125946045, |
| "learning_rate": 9.890738003669029e-06, |
| "loss": 0.5096, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.22965641952983726, |
| "grad_norm": 2.4493417739868164, |
| "learning_rate": 9.887605765707309e-06, |
| "loss": 0.4621, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.2314647377938517, |
| "grad_norm": 2.3981196880340576, |
| "learning_rate": 9.884429773823238e-06, |
| "loss": 0.3943, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.2332730560578662, |
| "grad_norm": 2.549567222595215, |
| "learning_rate": 9.88121005644835e-06, |
| "loss": 0.3628, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.23508137432188064, |
| "grad_norm": 2.2552852630615234, |
| "learning_rate": 9.877946642405598e-06, |
| "loss": 0.4017, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.23688969258589512, |
| "grad_norm": 2.451272964477539, |
| "learning_rate": 9.874639560909118e-06, |
| "loss": 0.5817, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.23869801084990958, |
| "grad_norm": 2.375648021697998, |
| "learning_rate": 9.871288841563956e-06, |
| "loss": 0.5272, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.24050632911392406, |
| "grad_norm": 2.253958225250244, |
| "learning_rate": 9.867894514365802e-06, |
| "loss": 0.4738, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.2423146473779385, |
| "grad_norm": 2.3769643306732178, |
| "learning_rate": 9.864456609700726e-06, |
| "loss": 0.5198, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.244122965641953, |
| "grad_norm": 2.0296530723571777, |
| "learning_rate": 9.860975158344902e-06, |
| "loss": 0.421, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.24593128390596744, |
| "grad_norm": 2.2663729190826416, |
| "learning_rate": 9.857450191464337e-06, |
| "loss": 0.3671, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.24773960216998192, |
| "grad_norm": 1.969122052192688, |
| "learning_rate": 9.853881740614591e-06, |
| "loss": 0.3928, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.24954792043399637, |
| "grad_norm": 2.0730412006378174, |
| "learning_rate": 9.85026983774049e-06, |
| "loss": 0.3816, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.2513562386980108, |
| "grad_norm": 2.104492425918579, |
| "learning_rate": 9.846614515175843e-06, |
| "loss": 0.4365, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.25316455696202533, |
| "grad_norm": 2.0811867713928223, |
| "learning_rate": 9.842915805643156e-06, |
| "loss": 0.3995, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.2549728752260398, |
| "grad_norm": 2.356283187866211, |
| "learning_rate": 9.839173742253334e-06, |
| "loss": 0.4335, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.25678119349005424, |
| "grad_norm": 1.9231618642807007, |
| "learning_rate": 9.835388358505383e-06, |
| "loss": 0.4127, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.2585895117540687, |
| "grad_norm": 2.0608198642730713, |
| "learning_rate": 9.83155968828612e-06, |
| "loss": 0.4257, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.2603978300180832, |
| "grad_norm": 2.212334394454956, |
| "learning_rate": 9.827687765869859e-06, |
| "loss": 0.3849, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.26220614828209765, |
| "grad_norm": 2.1471047401428223, |
| "learning_rate": 9.823772625918111e-06, |
| "loss": 0.4147, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.2640144665461121, |
| "grad_norm": 2.3254005908966064, |
| "learning_rate": 9.819814303479268e-06, |
| "loss": 0.4096, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.26582278481012656, |
| "grad_norm": 2.1744768619537354, |
| "learning_rate": 9.815812833988292e-06, |
| "loss": 0.4814, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.26763110307414106, |
| "grad_norm": 2.347172260284424, |
| "learning_rate": 9.811768253266401e-06, |
| "loss": 0.4409, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.2694394213381555, |
| "grad_norm": 2.213552713394165, |
| "learning_rate": 9.807680597520746e-06, |
| "loss": 0.559, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.27124773960216997, |
| "grad_norm": 2.426069498062134, |
| "learning_rate": 9.803549903344081e-06, |
| "loss": 0.4766, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.2730560578661845, |
| "grad_norm": 2.633708953857422, |
| "learning_rate": 9.799376207714446e-06, |
| "loss": 0.4687, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.27486437613019893, |
| "grad_norm": 2.165501117706299, |
| "learning_rate": 9.79515954799483e-06, |
| "loss": 0.3533, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.2766726943942134, |
| "grad_norm": 2.4209182262420654, |
| "learning_rate": 9.790899961932833e-06, |
| "loss": 0.378, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.27848101265822783, |
| "grad_norm": 2.345797538757324, |
| "learning_rate": 9.786597487660336e-06, |
| "loss": 0.5108, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.28028933092224234, |
| "grad_norm": 4.470305442810059, |
| "learning_rate": 9.782252163693159e-06, |
| "loss": 0.4871, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.2820976491862568, |
| "grad_norm": 2.4609861373901367, |
| "learning_rate": 9.777864028930705e-06, |
| "loss": 0.4943, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.28390596745027125, |
| "grad_norm": 1.9806599617004395, |
| "learning_rate": 9.773433122655625e-06, |
| "loss": 0.3348, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.2857142857142857, |
| "grad_norm": 2.1933321952819824, |
| "learning_rate": 9.768959484533461e-06, |
| "loss": 0.4652, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.2875226039783002, |
| "grad_norm": 2.601301670074463, |
| "learning_rate": 9.76444315461229e-06, |
| "loss": 0.4319, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.28933092224231466, |
| "grad_norm": 1.9371708631515503, |
| "learning_rate": 9.75988417332237e-06, |
| "loss": 0.3555, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.2911392405063291, |
| "grad_norm": 2.256004810333252, |
| "learning_rate": 9.755282581475769e-06, |
| "loss": 0.4961, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.29294755877034356, |
| "grad_norm": 3.1606812477111816, |
| "learning_rate": 9.750638420266008e-06, |
| "loss": 0.3908, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.29475587703435807, |
| "grad_norm": 2.105372667312622, |
| "learning_rate": 9.745951731267693e-06, |
| "loss": 0.4479, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.2965641952983725, |
| "grad_norm": 2.355839490890503, |
| "learning_rate": 9.741222556436132e-06, |
| "loss": 0.4198, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.298372513562387, |
| "grad_norm": 2.2613070011138916, |
| "learning_rate": 9.736450938106976e-06, |
| "loss": 0.4441, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.30018083182640143, |
| "grad_norm": 2.1388442516326904, |
| "learning_rate": 9.731636918995821e-06, |
| "loss": 0.3449, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.30198915009041594, |
| "grad_norm": 2.255647659301758, |
| "learning_rate": 9.726780542197845e-06, |
| "loss": 0.4772, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.3037974683544304, |
| "grad_norm": 2.3774516582489014, |
| "learning_rate": 9.721881851187406e-06, |
| "loss": 0.4102, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.30560578661844484, |
| "grad_norm": 2.328238010406494, |
| "learning_rate": 9.716940889817662e-06, |
| "loss": 0.3546, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.3074141048824593, |
| "grad_norm": 2.1654934883117676, |
| "learning_rate": 9.711957702320176e-06, |
| "loss": 0.3346, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.3092224231464738, |
| "grad_norm": 2.319549798965454, |
| "learning_rate": 9.706932333304518e-06, |
| "loss": 0.3936, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.31103074141048825, |
| "grad_norm": 2.4396815299987793, |
| "learning_rate": 9.701864827757868e-06, |
| "loss": 0.4511, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.3128390596745027, |
| "grad_norm": 2.3042893409729004, |
| "learning_rate": 9.696755231044618e-06, |
| "loss": 0.356, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.31464737793851716, |
| "grad_norm": 2.3526265621185303, |
| "learning_rate": 9.691603588905956e-06, |
| "loss": 0.3223, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.31645569620253167, |
| "grad_norm": 2.7514851093292236, |
| "learning_rate": 9.68640994745946e-06, |
| "loss": 0.551, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.3182640144665461, |
| "grad_norm": 2.0451977252960205, |
| "learning_rate": 9.681174353198687e-06, |
| "loss": 0.3433, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.32007233273056057, |
| "grad_norm": 2.3086750507354736, |
| "learning_rate": 9.675896852992762e-06, |
| "loss": 0.4105, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.321880650994575, |
| "grad_norm": 1.9786276817321777, |
| "learning_rate": 9.670577494085945e-06, |
| "loss": 0.3799, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.32368896925858953, |
| "grad_norm": 2.3861396312713623, |
| "learning_rate": 9.665216324097222e-06, |
| "loss": 0.5156, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.325497287522604, |
| "grad_norm": 2.164726734161377, |
| "learning_rate": 9.659813391019867e-06, |
| "loss": 0.4335, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.32730560578661844, |
| "grad_norm": 2.0450663566589355, |
| "learning_rate": 9.654368743221022e-06, |
| "loss": 0.3984, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.3291139240506329, |
| "grad_norm": 2.014610767364502, |
| "learning_rate": 9.648882429441258e-06, |
| "loss": 0.3474, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.3309222423146474, |
| "grad_norm": 1.9683150053024292, |
| "learning_rate": 9.643354498794139e-06, |
| "loss": 0.3614, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.33273056057866185, |
| "grad_norm": 1.8113188743591309, |
| "learning_rate": 9.637785000765789e-06, |
| "loss": 0.3701, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.3345388788426763, |
| "grad_norm": 1.8580265045166016, |
| "learning_rate": 9.632173985214438e-06, |
| "loss": 0.2733, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.33634719710669075, |
| "grad_norm": 1.8365068435668945, |
| "learning_rate": 9.626521502369984e-06, |
| "loss": 0.3113, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.33815551537070526, |
| "grad_norm": 1.8660293817520142, |
| "learning_rate": 9.620827602833542e-06, |
| "loss": 0.3299, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.3399638336347197, |
| "grad_norm": 1.9789987802505493, |
| "learning_rate": 9.615092337576987e-06, |
| "loss": 0.3565, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.34177215189873417, |
| "grad_norm": 1.978827714920044, |
| "learning_rate": 9.609315757942504e-06, |
| "loss": 0.3514, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.3435804701627486, |
| "grad_norm": 2.199197769165039, |
| "learning_rate": 9.603497915642122e-06, |
| "loss": 0.4354, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.3453887884267631, |
| "grad_norm": 1.6824909448623657, |
| "learning_rate": 9.597638862757255e-06, |
| "loss": 0.3105, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.3471971066907776, |
| "grad_norm": 1.8425745964050293, |
| "learning_rate": 9.591738651738235e-06, |
| "loss": 0.3257, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.34900542495479203, |
| "grad_norm": 1.7892580032348633, |
| "learning_rate": 9.585797335403843e-06, |
| "loss": 0.3323, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.3508137432188065, |
| "grad_norm": 1.8971889019012451, |
| "learning_rate": 9.579814966940833e-06, |
| "loss": 0.2936, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.352622061482821, |
| "grad_norm": 2.005496025085449, |
| "learning_rate": 9.573791599903463e-06, |
| "loss": 0.3361, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.35443037974683544, |
| "grad_norm": 1.9333208799362183, |
| "learning_rate": 9.567727288213005e-06, |
| "loss": 0.3354, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.3562386980108499, |
| "grad_norm": 1.880614995956421, |
| "learning_rate": 9.561622086157273e-06, |
| "loss": 0.3193, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.35804701627486435, |
| "grad_norm": 1.9405372142791748, |
| "learning_rate": 9.55547604839013e-06, |
| "loss": 0.3773, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.35985533453887886, |
| "grad_norm": 1.8647263050079346, |
| "learning_rate": 9.549289229930997e-06, |
| "loss": 0.4083, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.3616636528028933, |
| "grad_norm": 2.569082498550415, |
| "learning_rate": 9.543061686164374e-06, |
| "loss": 0.5087, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.36347197106690776, |
| "grad_norm": 1.94440495967865, |
| "learning_rate": 9.536793472839325e-06, |
| "loss": 0.4153, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.36528028933092227, |
| "grad_norm": 2.031101942062378, |
| "learning_rate": 9.530484646068996e-06, |
| "loss": 0.3542, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.3670886075949367, |
| "grad_norm": 2.0991175174713135, |
| "learning_rate": 9.524135262330098e-06, |
| "loss": 0.414, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.3688969258589512, |
| "grad_norm": 2.028451919555664, |
| "learning_rate": 9.517745378462417e-06, |
| "loss": 0.496, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.3707052441229656, |
| "grad_norm": 1.760362982749939, |
| "learning_rate": 9.511315051668287e-06, |
| "loss": 0.3633, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.37251356238698013, |
| "grad_norm": 1.908535361289978, |
| "learning_rate": 9.504844339512096e-06, |
| "loss": 0.3192, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.3743218806509946, |
| "grad_norm": 1.8707324266433716, |
| "learning_rate": 9.498333299919759e-06, |
| "loss": 0.3455, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.37613019891500904, |
| "grad_norm": 1.9523345232009888, |
| "learning_rate": 9.491781991178203e-06, |
| "loss": 0.3584, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.3779385171790235, |
| "grad_norm": 1.8394471406936646, |
| "learning_rate": 9.485190471934845e-06, |
| "loss": 0.3222, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.379746835443038, |
| "grad_norm": 2.543618679046631, |
| "learning_rate": 9.478558801197065e-06, |
| "loss": 0.362, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.38155515370705245, |
| "grad_norm": 1.6976804733276367, |
| "learning_rate": 9.471887038331686e-06, |
| "loss": 0.3006, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.3833634719710669, |
| "grad_norm": 1.6421040296554565, |
| "learning_rate": 9.465175243064428e-06, |
| "loss": 0.3134, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.38517179023508136, |
| "grad_norm": 1.725056529045105, |
| "learning_rate": 9.458423475479387e-06, |
| "loss": 0.375, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.38698010849909587, |
| "grad_norm": 1.7434353828430176, |
| "learning_rate": 9.451631796018495e-06, |
| "loss": 0.355, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.3887884267631103, |
| "grad_norm": 1.7160333395004272, |
| "learning_rate": 9.444800265480968e-06, |
| "loss": 0.2843, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.39059674502712477, |
| "grad_norm": 1.8513472080230713, |
| "learning_rate": 9.437928945022772e-06, |
| "loss": 0.3075, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.3924050632911392, |
| "grad_norm": 1.9343299865722656, |
| "learning_rate": 9.431017896156074e-06, |
| "loss": 0.3384, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.39421338155515373, |
| "grad_norm": 1.7150647640228271, |
| "learning_rate": 9.424067180748692e-06, |
| "loss": 0.3089, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.3960216998191682, |
| "grad_norm": 1.6371943950653076, |
| "learning_rate": 9.417076861023539e-06, |
| "loss": 0.2921, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.39783001808318263, |
| "grad_norm": 1.5865685939788818, |
| "learning_rate": 9.410046999558062e-06, |
| "loss": 0.2884, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.3996383363471971, |
| "grad_norm": 1.7501705884933472, |
| "learning_rate": 9.40297765928369e-06, |
| "loss": 0.3182, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.4014466546112116, |
| "grad_norm": 1.8444981575012207, |
| "learning_rate": 9.395868903485269e-06, |
| "loss": 0.3304, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.40325497287522605, |
| "grad_norm": 1.7558904886245728, |
| "learning_rate": 9.388720795800488e-06, |
| "loss": 0.3376, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.4050632911392405, |
| "grad_norm": 2.039940118789673, |
| "learning_rate": 9.381533400219319e-06, |
| "loss": 0.4301, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.40687160940325495, |
| "grad_norm": 2.1482834815979004, |
| "learning_rate": 9.374306781083437e-06, |
| "loss": 0.4223, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.40867992766726946, |
| "grad_norm": 1.8534361124038696, |
| "learning_rate": 9.36704100308565e-06, |
| "loss": 0.3514, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.4104882459312839, |
| "grad_norm": 1.7480493783950806, |
| "learning_rate": 9.359736131269312e-06, |
| "loss": 0.3, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.41229656419529837, |
| "grad_norm": 1.631418228149414, |
| "learning_rate": 9.352392231027752e-06, |
| "loss": 0.2817, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.4141048824593128, |
| "grad_norm": 1.6843211650848389, |
| "learning_rate": 9.345009368103677e-06, |
| "loss": 0.3025, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.4159132007233273, |
| "grad_norm": 2.5229790210723877, |
| "learning_rate": 9.337587608588588e-06, |
| "loss": 0.3805, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.4177215189873418, |
| "grad_norm": 1.8178796768188477, |
| "learning_rate": 9.330127018922195e-06, |
| "loss": 0.3752, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.41952983725135623, |
| "grad_norm": 1.8291960954666138, |
| "learning_rate": 9.322627665891807e-06, |
| "loss": 0.3489, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.4213381555153707, |
| "grad_norm": 1.6301926374435425, |
| "learning_rate": 9.315089616631752e-06, |
| "loss": 0.4083, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.4231464737793852, |
| "grad_norm": 1.5303218364715576, |
| "learning_rate": 9.307512938622762e-06, |
| "loss": 0.3023, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.42495479204339964, |
| "grad_norm": 1.685544490814209, |
| "learning_rate": 9.299897699691377e-06, |
| "loss": 0.3413, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.4267631103074141, |
| "grad_norm": 1.7197544574737549, |
| "learning_rate": 9.292243968009332e-06, |
| "loss": 0.3755, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.42857142857142855, |
| "grad_norm": 1.3292421102523804, |
| "learning_rate": 9.28455181209295e-06, |
| "loss": 0.2676, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.43037974683544306, |
| "grad_norm": 1.6216092109680176, |
| "learning_rate": 9.276821300802535e-06, |
| "loss": 0.3359, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.4321880650994575, |
| "grad_norm": 1.9262677431106567, |
| "learning_rate": 9.269052503341737e-06, |
| "loss": 0.3757, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.43399638336347196, |
| "grad_norm": 1.5813138484954834, |
| "learning_rate": 9.261245489256956e-06, |
| "loss": 0.3469, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.4358047016274864, |
| "grad_norm": 1.3660542964935303, |
| "learning_rate": 9.253400328436699e-06, |
| "loss": 0.2521, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.4376130198915009, |
| "grad_norm": 1.645929217338562, |
| "learning_rate": 9.24551709111097e-06, |
| "loss": 0.278, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.4394213381555154, |
| "grad_norm": 1.5204540491104126, |
| "learning_rate": 9.237595847850628e-06, |
| "loss": 0.2852, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.4412296564195298, |
| "grad_norm": 1.4475878477096558, |
| "learning_rate": 9.229636669566769e-06, |
| "loss": 0.2785, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.4430379746835443, |
| "grad_norm": 1.6262820959091187, |
| "learning_rate": 9.221639627510076e-06, |
| "loss": 0.2624, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.4448462929475588, |
| "grad_norm": 1.6817715167999268, |
| "learning_rate": 9.213604793270196e-06, |
| "loss": 0.303, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.44665461121157324, |
| "grad_norm": 1.6763249635696411, |
| "learning_rate": 9.205532238775089e-06, |
| "loss": 0.2872, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.4484629294755877, |
| "grad_norm": 1.5903000831604004, |
| "learning_rate": 9.197422036290386e-06, |
| "loss": 0.2982, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.45027124773960214, |
| "grad_norm": 1.6391197443008423, |
| "learning_rate": 9.189274258418748e-06, |
| "loss": 0.3317, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.45207956600361665, |
| "grad_norm": 1.815727949142456, |
| "learning_rate": 9.181088978099203e-06, |
| "loss": 0.3693, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.4538878842676311, |
| "grad_norm": 1.7187553644180298, |
| "learning_rate": 9.172866268606514e-06, |
| "loss": 0.3699, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.45569620253164556, |
| "grad_norm": 1.6256039142608643, |
| "learning_rate": 9.164606203550498e-06, |
| "loss": 0.2668, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.45750452079566006, |
| "grad_norm": 1.582828402519226, |
| "learning_rate": 9.156308856875386e-06, |
| "loss": 0.2835, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.4593128390596745, |
| "grad_norm": 1.8006536960601807, |
| "learning_rate": 9.147974302859158e-06, |
| "loss": 0.3347, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.46112115732368897, |
| "grad_norm": 1.8780992031097412, |
| "learning_rate": 9.139602616112864e-06, |
| "loss": 0.367, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.4629294755877034, |
| "grad_norm": 1.8903968334197998, |
| "learning_rate": 9.131193871579975e-06, |
| "loss": 0.3908, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.46473779385171793, |
| "grad_norm": 1.6569807529449463, |
| "learning_rate": 9.122748144535704e-06, |
| "loss": 0.3989, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.4665461121157324, |
| "grad_norm": 1.561207890510559, |
| "learning_rate": 9.114265510586329e-06, |
| "loss": 0.2794, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.46835443037974683, |
| "grad_norm": 1.6755136251449585, |
| "learning_rate": 9.10574604566852e-06, |
| "loss": 0.2977, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.4701627486437613, |
| "grad_norm": 1.5138905048370361, |
| "learning_rate": 9.09718982604866e-06, |
| "loss": 0.3073, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.4719710669077758, |
| "grad_norm": 1.563460111618042, |
| "learning_rate": 9.088596928322158e-06, |
| "loss": 0.3175, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.47377938517179025, |
| "grad_norm": 1.6276342868804932, |
| "learning_rate": 9.079967429412766e-06, |
| "loss": 0.3257, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.4755877034358047, |
| "grad_norm": 1.558644413948059, |
| "learning_rate": 9.071301406571893e-06, |
| "loss": 0.3221, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.47739602169981915, |
| "grad_norm": 1.5441564321517944, |
| "learning_rate": 9.062598937377911e-06, |
| "loss": 0.3554, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.47920433996383366, |
| "grad_norm": 1.562873125076294, |
| "learning_rate": 9.053860099735455e-06, |
| "loss": 0.2965, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.4810126582278481, |
| "grad_norm": 1.6871848106384277, |
| "learning_rate": 9.045084971874738e-06, |
| "loss": 0.322, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.48282097649186256, |
| "grad_norm": 1.4427196979522705, |
| "learning_rate": 9.036273632350839e-06, |
| "loss": 0.2904, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.484629294755877, |
| "grad_norm": 1.6650079488754272, |
| "learning_rate": 9.027426160043005e-06, |
| "loss": 0.3755, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.4864376130198915, |
| "grad_norm": 1.5298763513565063, |
| "learning_rate": 9.018542634153944e-06, |
| "loss": 0.2784, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.488245931283906, |
| "grad_norm": 1.5398807525634766, |
| "learning_rate": 9.00962313420912e-06, |
| "loss": 0.2627, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.49005424954792043, |
| "grad_norm": 1.4287172555923462, |
| "learning_rate": 9.000667740056033e-06, |
| "loss": 0.2319, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.4918625678119349, |
| "grad_norm": 1.5109469890594482, |
| "learning_rate": 8.991676531863507e-06, |
| "loss": 0.2677, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.4936708860759494, |
| "grad_norm": 1.773703932762146, |
| "learning_rate": 8.982649590120982e-06, |
| "loss": 0.38, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.49547920433996384, |
| "grad_norm": 1.7830049991607666, |
| "learning_rate": 8.973586995637778e-06, |
| "loss": 0.3287, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.4972875226039783, |
| "grad_norm": 1.6185346841812134, |
| "learning_rate": 8.964488829542377e-06, |
| "loss": 0.33, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.49909584086799275, |
| "grad_norm": 1.6162431240081787, |
| "learning_rate": 8.955355173281709e-06, |
| "loss": 0.328, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.5009041591320073, |
| "grad_norm": 1.9096221923828125, |
| "learning_rate": 8.946186108620397e-06, |
| "loss": 0.3238, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.5027124773960217, |
| "grad_norm": 1.4491026401519775, |
| "learning_rate": 8.936981717640061e-06, |
| "loss": 0.3222, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.5045207956600362, |
| "grad_norm": 1.4854817390441895, |
| "learning_rate": 8.927742082738542e-06, |
| "loss": 0.2992, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.5063291139240507, |
| "grad_norm": 1.7579244375228882, |
| "learning_rate": 8.9184672866292e-06, |
| "loss": 0.3724, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.5081374321880651, |
| "grad_norm": 1.8710993528366089, |
| "learning_rate": 8.90915741234015e-06, |
| "loss": 0.3663, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.5099457504520796, |
| "grad_norm": 1.777894377708435, |
| "learning_rate": 8.899812543213532e-06, |
| "loss": 0.4384, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.5117540687160941, |
| "grad_norm": 1.5931551456451416, |
| "learning_rate": 8.890432762904757e-06, |
| "loss": 0.3171, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.5135623869801085, |
| "grad_norm": 1.4698400497436523, |
| "learning_rate": 8.881018155381766e-06, |
| "loss": 0.3111, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.515370705244123, |
| "grad_norm": 1.8043538331985474, |
| "learning_rate": 8.871568804924269e-06, |
| "loss": 0.4004, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.5171790235081374, |
| "grad_norm": 1.707382321357727, |
| "learning_rate": 8.862084796122998e-06, |
| "loss": 0.3684, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.5189873417721519, |
| "grad_norm": 1.702929973602295, |
| "learning_rate": 8.852566213878947e-06, |
| "loss": 0.3919, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.5207956600361664, |
| "grad_norm": 1.3536096811294556, |
| "learning_rate": 8.84301314340261e-06, |
| "loss": 0.2297, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.5226039783001808, |
| "grad_norm": 1.5708818435668945, |
| "learning_rate": 8.833425670213223e-06, |
| "loss": 0.3132, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.5244122965641953, |
| "grad_norm": 1.352744460105896, |
| "learning_rate": 8.823803880137993e-06, |
| "loss": 0.2602, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.5262206148282098, |
| "grad_norm": 1.6007314920425415, |
| "learning_rate": 8.814147859311333e-06, |
| "loss": 0.3156, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.5280289330922242, |
| "grad_norm": 1.6960111856460571, |
| "learning_rate": 8.804457694174093e-06, |
| "loss": 0.4026, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.5298372513562387, |
| "grad_norm": 1.5450199842453003, |
| "learning_rate": 8.794733471472778e-06, |
| "loss": 0.3557, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.5316455696202531, |
| "grad_norm": 1.5667040348052979, |
| "learning_rate": 8.784975278258783e-06, |
| "loss": 0.3074, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.5334538878842676, |
| "grad_norm": 1.5064713954925537, |
| "learning_rate": 8.775183201887603e-06, |
| "loss": 0.3242, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.5352622061482821, |
| "grad_norm": 1.6140296459197998, |
| "learning_rate": 8.765357330018056e-06, |
| "loss": 0.2771, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.5370705244122965, |
| "grad_norm": 1.8816146850585938, |
| "learning_rate": 8.755497750611498e-06, |
| "loss": 0.3906, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.538878842676311, |
| "grad_norm": 1.6953233480453491, |
| "learning_rate": 8.745604551931042e-06, |
| "loss": 0.3338, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.5406871609403255, |
| "grad_norm": 1.7361221313476562, |
| "learning_rate": 8.73567782254075e-06, |
| "loss": 0.3105, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.5424954792043399, |
| "grad_norm": 1.6798964738845825, |
| "learning_rate": 8.725717651304856e-06, |
| "loss": 0.3645, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.5443037974683544, |
| "grad_norm": 1.4772828817367554, |
| "learning_rate": 8.715724127386971e-06, |
| "loss": 0.2823, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.546112115732369, |
| "grad_norm": 1.754899263381958, |
| "learning_rate": 8.705697340249275e-06, |
| "loss": 0.3611, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.5479204339963833, |
| "grad_norm": 1.6467196941375732, |
| "learning_rate": 8.695637379651719e-06, |
| "loss": 0.2694, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.5497287522603979, |
| "grad_norm": 1.663810133934021, |
| "learning_rate": 8.685544335651226e-06, |
| "loss": 0.2803, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.5515370705244123, |
| "grad_norm": 1.6183619499206543, |
| "learning_rate": 8.675418298600884e-06, |
| "loss": 0.3227, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.5533453887884268, |
| "grad_norm": 1.7311010360717773, |
| "learning_rate": 8.665259359149132e-06, |
| "loss": 0.4449, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.5551537070524413, |
| "grad_norm": 2.268200159072876, |
| "learning_rate": 8.655067608238953e-06, |
| "loss": 0.298, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.5569620253164557, |
| "grad_norm": 1.66535222530365, |
| "learning_rate": 8.644843137107058e-06, |
| "loss": 0.3834, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.5587703435804702, |
| "grad_norm": 1.6682361364364624, |
| "learning_rate": 8.634586037283072e-06, |
| "loss": 0.3886, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.5605786618444847, |
| "grad_norm": 1.3878757953643799, |
| "learning_rate": 8.62429640058871e-06, |
| "loss": 0.2728, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.5623869801084991, |
| "grad_norm": 1.6548659801483154, |
| "learning_rate": 8.613974319136959e-06, |
| "loss": 0.3578, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.5641952983725136, |
| "grad_norm": 1.6845568418502808, |
| "learning_rate": 8.603619885331251e-06, |
| "loss": 0.3421, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.566003616636528, |
| "grad_norm": 1.4604007005691528, |
| "learning_rate": 8.593233191864638e-06, |
| "loss": 0.3022, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.5678119349005425, |
| "grad_norm": 1.6256940364837646, |
| "learning_rate": 8.582814331718961e-06, |
| "loss": 0.3088, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.569620253164557, |
| "grad_norm": 1.6187587976455688, |
| "learning_rate": 8.572363398164017e-06, |
| "loss": 0.28, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.5714285714285714, |
| "grad_norm": 10.702666282653809, |
| "learning_rate": 8.561880484756726e-06, |
| "loss": 0.3774, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.5732368896925859, |
| "grad_norm": 1.3536791801452637, |
| "learning_rate": 8.551365685340285e-06, |
| "loss": 0.2602, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.5750452079566004, |
| "grad_norm": 1.6421338319778442, |
| "learning_rate": 8.540819094043349e-06, |
| "loss": 0.3563, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.5768535262206148, |
| "grad_norm": 1.4128696918487549, |
| "learning_rate": 8.530240805279159e-06, |
| "loss": 0.2518, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.5786618444846293, |
| "grad_norm": 1.573652982711792, |
| "learning_rate": 8.519630913744726e-06, |
| "loss": 0.3026, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.5804701627486437, |
| "grad_norm": 1.4500043392181396, |
| "learning_rate": 8.508989514419959e-06, |
| "loss": 0.2553, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.5822784810126582, |
| "grad_norm": 1.375697374343872, |
| "learning_rate": 8.498316702566828e-06, |
| "loss": 0.2347, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.5840867992766727, |
| "grad_norm": 1.601431965827942, |
| "learning_rate": 8.487612573728513e-06, |
| "loss": 0.2794, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.5858951175406871, |
| "grad_norm": 1.7059786319732666, |
| "learning_rate": 8.476877223728539e-06, |
| "loss": 0.358, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.5877034358047016, |
| "grad_norm": 1.8292579650878906, |
| "learning_rate": 8.466110748669926e-06, |
| "loss": 0.4206, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.5895117540687161, |
| "grad_norm": 1.5839455127716064, |
| "learning_rate": 8.455313244934324e-06, |
| "loss": 0.3653, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.5913200723327305, |
| "grad_norm": 1.629156231880188, |
| "learning_rate": 8.444484809181155e-06, |
| "loss": 0.3159, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.593128390596745, |
| "grad_norm": 1.526873230934143, |
| "learning_rate": 8.433625538346742e-06, |
| "loss": 0.3332, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.5949367088607594, |
| "grad_norm": 1.5268447399139404, |
| "learning_rate": 8.422735529643445e-06, |
| "loss": 0.2919, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.596745027124774, |
| "grad_norm": 1.5196579694747925, |
| "learning_rate": 8.41181488055879e-06, |
| "loss": 0.3301, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.5985533453887885, |
| "grad_norm": 1.5749566555023193, |
| "learning_rate": 8.400863688854598e-06, |
| "loss": 0.3386, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.6003616636528029, |
| "grad_norm": 1.5672210454940796, |
| "learning_rate": 8.389882052566106e-06, |
| "loss": 0.3103, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.6021699819168174, |
| "grad_norm": 1.8071767091751099, |
| "learning_rate": 8.37887007000109e-06, |
| "loss": 0.3571, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.6039783001808319, |
| "grad_norm": 1.4983811378479004, |
| "learning_rate": 8.36782783973899e-06, |
| "loss": 0.2772, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.6057866184448463, |
| "grad_norm": 1.5349783897399902, |
| "learning_rate": 8.35675546063002e-06, |
| "loss": 0.3161, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.6075949367088608, |
| "grad_norm": 1.4982757568359375, |
| "learning_rate": 8.345653031794292e-06, |
| "loss": 0.3483, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.6094032549728752, |
| "grad_norm": 1.4851738214492798, |
| "learning_rate": 8.334520652620918e-06, |
| "loss": 0.2911, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.6112115732368897, |
| "grad_norm": 1.4346152544021606, |
| "learning_rate": 8.32335842276713e-06, |
| "loss": 0.2647, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.6130198915009042, |
| "grad_norm": 1.5562641620635986, |
| "learning_rate": 8.31216644215738e-06, |
| "loss": 0.3653, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.6148282097649186, |
| "grad_norm": 1.502779245376587, |
| "learning_rate": 8.300944810982452e-06, |
| "loss": 0.3021, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.6166365280289331, |
| "grad_norm": 1.447108507156372, |
| "learning_rate": 8.289693629698564e-06, |
| "loss": 0.2713, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.6184448462929476, |
| "grad_norm": 1.3462806940078735, |
| "learning_rate": 8.278412999026462e-06, |
| "loss": 0.2314, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.620253164556962, |
| "grad_norm": 1.360318660736084, |
| "learning_rate": 8.267103019950529e-06, |
| "loss": 0.2538, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.6220614828209765, |
| "grad_norm": 1.5165733098983765, |
| "learning_rate": 8.255763793717868e-06, |
| "loss": 0.3008, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.6238698010849909, |
| "grad_norm": 1.3933101892471313, |
| "learning_rate": 8.244395421837412e-06, |
| "loss": 0.2313, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.6256781193490054, |
| "grad_norm": 1.4917360544204712, |
| "learning_rate": 8.232998006078998e-06, |
| "loss": 0.3104, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.6274864376130199, |
| "grad_norm": 1.4749935865402222, |
| "learning_rate": 8.221571648472473e-06, |
| "loss": 0.2528, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.6292947558770343, |
| "grad_norm": 1.531565546989441, |
| "learning_rate": 8.210116451306762e-06, |
| "loss": 0.2578, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.6311030741410488, |
| "grad_norm": 1.7813491821289062, |
| "learning_rate": 8.198632517128968e-06, |
| "loss": 0.358, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.6329113924050633, |
| "grad_norm": 1.770026445388794, |
| "learning_rate": 8.18711994874345e-06, |
| "loss": 0.3252, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.6347197106690777, |
| "grad_norm": 1.589426040649414, |
| "learning_rate": 8.175578849210894e-06, |
| "loss": 0.3258, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.6365280289330922, |
| "grad_norm": 1.7726151943206787, |
| "learning_rate": 8.164009321847405e-06, |
| "loss": 0.3183, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.6383363471971067, |
| "grad_norm": 1.4787203073501587, |
| "learning_rate": 8.15241147022357e-06, |
| "loss": 0.2637, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.6401446654611211, |
| "grad_norm": 1.7074470520019531, |
| "learning_rate": 8.140785398163535e-06, |
| "loss": 0.3754, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.6419529837251357, |
| "grad_norm": 1.7455577850341797, |
| "learning_rate": 8.129131209744075e-06, |
| "loss": 0.3453, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.64376130198915, |
| "grad_norm": 1.5032719373703003, |
| "learning_rate": 8.117449009293668e-06, |
| "loss": 0.301, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.6455696202531646, |
| "grad_norm": 1.5343469381332397, |
| "learning_rate": 8.105738901391553e-06, |
| "loss": 0.3515, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.6473779385171791, |
| "grad_norm": 1.6261870861053467, |
| "learning_rate": 8.094000990866795e-06, |
| "loss": 0.2817, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.6491862567811935, |
| "grad_norm": 1.3367079496383667, |
| "learning_rate": 8.08223538279735e-06, |
| "loss": 0.2207, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.650994575045208, |
| "grad_norm": 1.5360643863677979, |
| "learning_rate": 8.070442182509127e-06, |
| "loss": 0.31, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.6528028933092225, |
| "grad_norm": 1.673229455947876, |
| "learning_rate": 8.058621495575032e-06, |
| "loss": 0.3752, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.6546112115732369, |
| "grad_norm": 1.747746467590332, |
| "learning_rate": 8.046773427814043e-06, |
| "loss": 0.373, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.6564195298372514, |
| "grad_norm": 1.3916759490966797, |
| "learning_rate": 8.034898085290239e-06, |
| "loss": 0.2682, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.6582278481012658, |
| "grad_norm": 1.4220057725906372, |
| "learning_rate": 8.022995574311876e-06, |
| "loss": 0.2696, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.6600361663652803, |
| "grad_norm": 1.483280062675476, |
| "learning_rate": 8.011066001430412e-06, |
| "loss": 0.3125, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.6618444846292948, |
| "grad_norm": 1.436482310295105, |
| "learning_rate": 7.99910947343957e-06, |
| "loss": 0.2781, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.6636528028933092, |
| "grad_norm": 1.4785102605819702, |
| "learning_rate": 7.987126097374372e-06, |
| "loss": 0.2998, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.6654611211573237, |
| "grad_norm": 1.388868808746338, |
| "learning_rate": 7.975115980510187e-06, |
| "loss": 0.2345, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.6672694394213382, |
| "grad_norm": 1.443105936050415, |
| "learning_rate": 7.963079230361765e-06, |
| "loss": 0.2669, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.6690777576853526, |
| "grad_norm": 1.4718849658966064, |
| "learning_rate": 7.951015954682281e-06, |
| "loss": 0.293, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.6708860759493671, |
| "grad_norm": 1.5544575452804565, |
| "learning_rate": 7.938926261462366e-06, |
| "loss": 0.2738, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.6726943942133815, |
| "grad_norm": 1.6500585079193115, |
| "learning_rate": 7.926810258929138e-06, |
| "loss": 0.3018, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.674502712477396, |
| "grad_norm": 1.8269141912460327, |
| "learning_rate": 7.914668055545242e-06, |
| "loss": 0.3061, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.6763110307414105, |
| "grad_norm": 1.6963152885437012, |
| "learning_rate": 7.902499760007867e-06, |
| "loss": 0.3793, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.6781193490054249, |
| "grad_norm": 1.9636969566345215, |
| "learning_rate": 7.890305481247786e-06, |
| "loss": 0.3418, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.6799276672694394, |
| "grad_norm": 1.7126883268356323, |
| "learning_rate": 7.87808532842837e-06, |
| "loss": 0.3795, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.6817359855334539, |
| "grad_norm": 1.4325624704360962, |
| "learning_rate": 7.865839410944613e-06, |
| "loss": 0.2684, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.6835443037974683, |
| "grad_norm": 1.4512115716934204, |
| "learning_rate": 7.85356783842216e-06, |
| "loss": 0.2926, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.6853526220614828, |
| "grad_norm": 1.6614792346954346, |
| "learning_rate": 7.841270720716318e-06, |
| "loss": 0.289, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.6871609403254972, |
| "grad_norm": 1.7151885032653809, |
| "learning_rate": 7.828948167911073e-06, |
| "loss": 0.3802, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.6889692585895117, |
| "grad_norm": 1.5852060317993164, |
| "learning_rate": 7.81660029031811e-06, |
| "loss": 0.3071, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.6907775768535263, |
| "grad_norm": 1.5491100549697876, |
| "learning_rate": 7.804227198475823e-06, |
| "loss": 0.3158, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.6925858951175407, |
| "grad_norm": 1.4715349674224854, |
| "learning_rate": 7.791829003148313e-06, |
| "loss": 0.2502, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.6943942133815552, |
| "grad_norm": 1.4923640489578247, |
| "learning_rate": 7.779405815324424e-06, |
| "loss": 0.2891, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.6962025316455697, |
| "grad_norm": 1.5427296161651611, |
| "learning_rate": 7.76695774621672e-06, |
| "loss": 0.2912, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.6980108499095841, |
| "grad_norm": 1.5590311288833618, |
| "learning_rate": 7.754484907260513e-06, |
| "loss": 0.2663, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.6998191681735986, |
| "grad_norm": 1.5675221681594849, |
| "learning_rate": 7.741987410112847e-06, |
| "loss": 0.267, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.701627486437613, |
| "grad_norm": 1.506213903427124, |
| "learning_rate": 7.72946536665151e-06, |
| "loss": 0.2658, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.7034358047016275, |
| "grad_norm": 1.3946253061294556, |
| "learning_rate": 7.716918888974029e-06, |
| "loss": 0.2686, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.705244122965642, |
| "grad_norm": 1.6965469121932983, |
| "learning_rate": 7.704348089396667e-06, |
| "loss": 0.2232, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.7070524412296564, |
| "grad_norm": 1.5790448188781738, |
| "learning_rate": 7.691753080453413e-06, |
| "loss": 0.2911, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.7088607594936709, |
| "grad_norm": 1.4819265604019165, |
| "learning_rate": 7.679133974894984e-06, |
| "loss": 0.2719, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.7106690777576854, |
| "grad_norm": 1.4775956869125366, |
| "learning_rate": 7.666490885687807e-06, |
| "loss": 0.2602, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.7124773960216998, |
| "grad_norm": 1.3449031114578247, |
| "learning_rate": 7.653823926013016e-06, |
| "loss": 0.2391, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.7142857142857143, |
| "grad_norm": 1.365142583847046, |
| "learning_rate": 7.641133209265423e-06, |
| "loss": 0.2492, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.7160940325497287, |
| "grad_norm": 1.6121798753738403, |
| "learning_rate": 7.628418849052523e-06, |
| "loss": 0.3359, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.7179023508137432, |
| "grad_norm": 1.517443299293518, |
| "learning_rate": 7.615680959193469e-06, |
| "loss": 0.2862, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.7197106690777577, |
| "grad_norm": 1.458894968032837, |
| "learning_rate": 7.602919653718044e-06, |
| "loss": 0.283, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.7215189873417721, |
| "grad_norm": 1.5772570371627808, |
| "learning_rate": 7.590135046865652e-06, |
| "loss": 0.3005, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.7233273056057866, |
| "grad_norm": 1.8053752183914185, |
| "learning_rate": 7.577327253084292e-06, |
| "loss": 0.3303, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.7251356238698011, |
| "grad_norm": 1.523858904838562, |
| "learning_rate": 7.564496387029532e-06, |
| "loss": 0.245, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.7269439421338155, |
| "grad_norm": 1.5387307405471802, |
| "learning_rate": 7.551642563563481e-06, |
| "loss": 0.2924, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.72875226039783, |
| "grad_norm": 1.4865695238113403, |
| "learning_rate": 7.5387658977537695e-06, |
| "loss": 0.2731, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.7305605786618445, |
| "grad_norm": 1.6363080739974976, |
| "learning_rate": 7.5258665048725065e-06, |
| "loss": 0.3144, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.7323688969258589, |
| "grad_norm": 1.5519919395446777, |
| "learning_rate": 7.512944500395255e-06, |
| "loss": 0.3401, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.7341772151898734, |
| "grad_norm": 1.5395323038101196, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.2855, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.7359855334538878, |
| "grad_norm": 1.7250220775604248, |
| "learning_rate": 7.48703311956611e-06, |
| "loss": 0.3112, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.7377938517179023, |
| "grad_norm": 1.4305680990219116, |
| "learning_rate": 7.4740439751732994e-06, |
| "loss": 0.3191, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.7396021699819169, |
| "grad_norm": 1.5562596321105957, |
| "learning_rate": 7.461032683100587e-06, |
| "loss": 0.3067, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.7414104882459313, |
| "grad_norm": 1.390204906463623, |
| "learning_rate": 7.447999359825263e-06, |
| "loss": 0.2646, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.7432188065099458, |
| "grad_norm": 1.5041637420654297, |
| "learning_rate": 7.434944122021837e-06, |
| "loss": 0.2877, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.7450271247739603, |
| "grad_norm": 1.5278867483139038, |
| "learning_rate": 7.421867086561001e-06, |
| "loss": 0.2353, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.7468354430379747, |
| "grad_norm": 1.4603511095046997, |
| "learning_rate": 7.408768370508577e-06, |
| "loss": 0.3148, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.7486437613019892, |
| "grad_norm": 1.4478647708892822, |
| "learning_rate": 7.395648091124476e-06, |
| "loss": 0.2963, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.7504520795660036, |
| "grad_norm": 1.669955849647522, |
| "learning_rate": 7.382506365861639e-06, |
| "loss": 0.2516, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.7522603978300181, |
| "grad_norm": 1.3429478406906128, |
| "learning_rate": 7.369343312364994e-06, |
| "loss": 0.2837, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.7540687160940326, |
| "grad_norm": 1.557304859161377, |
| "learning_rate": 7.356159048470402e-06, |
| "loss": 0.3204, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.755877034358047, |
| "grad_norm": 1.5032353401184082, |
| "learning_rate": 7.342953692203594e-06, |
| "loss": 0.2217, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.7576853526220615, |
| "grad_norm": 1.5901117324829102, |
| "learning_rate": 7.3297273617791246e-06, |
| "loss": 0.2959, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.759493670886076, |
| "grad_norm": 1.5197011232376099, |
| "learning_rate": 7.31648017559931e-06, |
| "loss": 0.2872, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.7613019891500904, |
| "grad_norm": 1.4186420440673828, |
| "learning_rate": 7.303212252253163e-06, |
| "loss": 0.2769, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.7631103074141049, |
| "grad_norm": 1.5729889869689941, |
| "learning_rate": 7.289923710515338e-06, |
| "loss": 0.319, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.7649186256781193, |
| "grad_norm": 1.560356616973877, |
| "learning_rate": 7.276614669345069e-06, |
| "loss": 0.3268, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.7667269439421338, |
| "grad_norm": 1.403883695602417, |
| "learning_rate": 7.263285247885097e-06, |
| "loss": 0.2321, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.7685352622061483, |
| "grad_norm": 1.9532519578933716, |
| "learning_rate": 7.249935565460606e-06, |
| "loss": 0.4592, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.7703435804701627, |
| "grad_norm": 1.6798689365386963, |
| "learning_rate": 7.236565741578163e-06, |
| "loss": 0.3231, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.7721518987341772, |
| "grad_norm": 1.5938643217086792, |
| "learning_rate": 7.223175895924638e-06, |
| "loss": 0.2551, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.7739602169981917, |
| "grad_norm": 1.386902093887329, |
| "learning_rate": 7.2097661483661355e-06, |
| "loss": 0.3029, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.7757685352622061, |
| "grad_norm": 1.6395827531814575, |
| "learning_rate": 7.19633661894692e-06, |
| "loss": 0.3045, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.7775768535262206, |
| "grad_norm": 1.7160226106643677, |
| "learning_rate": 7.182887427888351e-06, |
| "loss": 0.4528, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.779385171790235, |
| "grad_norm": 1.6600942611694336, |
| "learning_rate": 7.169418695587791e-06, |
| "loss": 0.3538, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.7811934900542495, |
| "grad_norm": 1.5458256006240845, |
| "learning_rate": 7.155930542617543e-06, |
| "loss": 0.3465, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.783001808318264, |
| "grad_norm": 1.226886510848999, |
| "learning_rate": 7.142423089723758e-06, |
| "loss": 0.2268, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.7848101265822784, |
| "grad_norm": 1.5652273893356323, |
| "learning_rate": 7.128896457825364e-06, |
| "loss": 0.3008, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.786618444846293, |
| "grad_norm": 1.3933650255203247, |
| "learning_rate": 7.11535076801298e-06, |
| "loss": 0.2755, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.7884267631103075, |
| "grad_norm": 1.3730487823486328, |
| "learning_rate": 7.101786141547829e-06, |
| "loss": 0.2982, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.7902350813743219, |
| "grad_norm": 1.4305229187011719, |
| "learning_rate": 7.088202699860656e-06, |
| "loss": 0.3213, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.7920433996383364, |
| "grad_norm": 1.4953081607818604, |
| "learning_rate": 7.074600564550643e-06, |
| "loss": 0.2981, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.7938517179023508, |
| "grad_norm": 1.3501685857772827, |
| "learning_rate": 7.060979857384316e-06, |
| "loss": 0.2656, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.7956600361663653, |
| "grad_norm": 1.3591465950012207, |
| "learning_rate": 7.047340700294454e-06, |
| "loss": 0.2656, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.7974683544303798, |
| "grad_norm": 1.309065818786621, |
| "learning_rate": 7.033683215379002e-06, |
| "loss": 0.2299, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.7992766726943942, |
| "grad_norm": 1.33230459690094, |
| "learning_rate": 7.020007524899976e-06, |
| "loss": 0.2238, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.8010849909584087, |
| "grad_norm": 1.5571876764297485, |
| "learning_rate": 7.006313751282372e-06, |
| "loss": 0.2758, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.8028933092224232, |
| "grad_norm": 1.3781603574752808, |
| "learning_rate": 6.992602017113058e-06, |
| "loss": 0.2044, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.8047016274864376, |
| "grad_norm": 1.4426606893539429, |
| "learning_rate": 6.978872445139695e-06, |
| "loss": 0.2365, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.8065099457504521, |
| "grad_norm": 1.4027334451675415, |
| "learning_rate": 6.965125158269619e-06, |
| "loss": 0.2407, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.8083182640144665, |
| "grad_norm": 1.5120912790298462, |
| "learning_rate": 6.951360279568758e-06, |
| "loss": 0.2799, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.810126582278481, |
| "grad_norm": 1.738258719444275, |
| "learning_rate": 6.9375779322605154e-06, |
| "loss": 0.3353, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.8119349005424955, |
| "grad_norm": 1.58566415309906, |
| "learning_rate": 6.923778239724681e-06, |
| "loss": 0.2767, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.8137432188065099, |
| "grad_norm": 1.571824073791504, |
| "learning_rate": 6.909961325496312e-06, |
| "loss": 0.3004, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.8155515370705244, |
| "grad_norm": 1.6151618957519531, |
| "learning_rate": 6.896127313264643e-06, |
| "loss": 0.3023, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.8173598553345389, |
| "grad_norm": 1.4722002744674683, |
| "learning_rate": 6.88227632687196e-06, |
| "loss": 0.2301, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.8191681735985533, |
| "grad_norm": 1.4254121780395508, |
| "learning_rate": 6.868408490312511e-06, |
| "loss": 0.2401, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.8209764918625678, |
| "grad_norm": 1.4507607221603394, |
| "learning_rate": 6.854523927731383e-06, |
| "loss": 0.2683, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.8227848101265823, |
| "grad_norm": 1.6184362173080444, |
| "learning_rate": 6.840622763423391e-06, |
| "loss": 0.301, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.8245931283905967, |
| "grad_norm": 1.561452031135559, |
| "learning_rate": 6.8267051218319766e-06, |
| "loss": 0.2869, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.8264014466546112, |
| "grad_norm": 1.7055240869522095, |
| "learning_rate": 6.8127711275480805e-06, |
| "loss": 0.3714, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.8282097649186256, |
| "grad_norm": 1.7333972454071045, |
| "learning_rate": 6.798820905309036e-06, |
| "loss": 0.4099, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.8300180831826401, |
| "grad_norm": 1.6271618604660034, |
| "learning_rate": 6.784854579997446e-06, |
| "loss": 0.3044, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.8318264014466547, |
| "grad_norm": 1.4839351177215576, |
| "learning_rate": 6.7708722766400745e-06, |
| "loss": 0.2909, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.833634719710669, |
| "grad_norm": 1.429251790046692, |
| "learning_rate": 6.7568741204067145e-06, |
| "loss": 0.2359, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.8354430379746836, |
| "grad_norm": 1.4327332973480225, |
| "learning_rate": 6.7428602366090764e-06, |
| "loss": 0.2806, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.8372513562386981, |
| "grad_norm": 1.3575445413589478, |
| "learning_rate": 6.728830750699667e-06, |
| "loss": 0.2825, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.8390596745027125, |
| "grad_norm": 1.378030776977539, |
| "learning_rate": 6.714785788270658e-06, |
| "loss": 0.2553, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.840867992766727, |
| "grad_norm": 1.5035066604614258, |
| "learning_rate": 6.700725475052773e-06, |
| "loss": 0.3266, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.8426763110307414, |
| "grad_norm": 1.504051923751831, |
| "learning_rate": 6.686649936914151e-06, |
| "loss": 0.2739, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.8444846292947559, |
| "grad_norm": 1.616783857345581, |
| "learning_rate": 6.672559299859228e-06, |
| "loss": 0.2851, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.8462929475587704, |
| "grad_norm": 1.441886305809021, |
| "learning_rate": 6.658453690027604e-06, |
| "loss": 0.2652, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.8481012658227848, |
| "grad_norm": 1.53322172164917, |
| "learning_rate": 6.644333233692917e-06, |
| "loss": 0.2995, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.8499095840867993, |
| "grad_norm": 1.403977632522583, |
| "learning_rate": 6.63019805726171e-06, |
| "loss": 0.2575, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.8517179023508138, |
| "grad_norm": 1.598618745803833, |
| "learning_rate": 6.616048287272301e-06, |
| "loss": 0.3201, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.8535262206148282, |
| "grad_norm": 1.41034996509552, |
| "learning_rate": 6.601884050393649e-06, |
| "loss": 0.2529, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.8553345388788427, |
| "grad_norm": 1.719070315361023, |
| "learning_rate": 6.587705473424223e-06, |
| "loss": 0.3794, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.8571428571428571, |
| "grad_norm": 1.633659839630127, |
| "learning_rate": 6.57351268329086e-06, |
| "loss": 0.3025, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.8589511754068716, |
| "grad_norm": 1.910181999206543, |
| "learning_rate": 6.55930580704764e-06, |
| "loss": 0.3444, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.8607594936708861, |
| "grad_norm": 1.4851632118225098, |
| "learning_rate": 6.545084971874738e-06, |
| "loss": 0.2492, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.8625678119349005, |
| "grad_norm": 1.38944673538208, |
| "learning_rate": 6.5308503050772884e-06, |
| "loss": 0.2595, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.864376130198915, |
| "grad_norm": 1.2743439674377441, |
| "learning_rate": 6.51660193408425e-06, |
| "loss": 0.186, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.8661844484629295, |
| "grad_norm": 1.5795000791549683, |
| "learning_rate": 6.50233998644726e-06, |
| "loss": 0.3064, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.8679927667269439, |
| "grad_norm": 1.6655584573745728, |
| "learning_rate": 6.4880645898394935e-06, |
| "loss": 0.2734, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.8698010849909584, |
| "grad_norm": 1.4860823154449463, |
| "learning_rate": 6.473775872054522e-06, |
| "loss": 0.2903, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.8716094032549728, |
| "grad_norm": 1.5311415195465088, |
| "learning_rate": 6.459473961005168e-06, |
| "loss": 0.3421, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.8734177215189873, |
| "grad_norm": 1.5267893075942993, |
| "learning_rate": 6.445158984722358e-06, |
| "loss": 0.3595, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.8752260397830018, |
| "grad_norm": 1.5918548107147217, |
| "learning_rate": 6.4308310713539845e-06, |
| "loss": 0.2756, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.8770343580470162, |
| "grad_norm": 1.3084057569503784, |
| "learning_rate": 6.4164903491637475e-06, |
| "loss": 0.2272, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.8788426763110307, |
| "grad_norm": 1.5899354219436646, |
| "learning_rate": 6.402136946530014e-06, |
| "loss": 0.2936, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.8806509945750453, |
| "grad_norm": 1.3249406814575195, |
| "learning_rate": 6.387770991944667e-06, |
| "loss": 0.2621, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.8824593128390597, |
| "grad_norm": 1.8586225509643555, |
| "learning_rate": 6.373392614011952e-06, |
| "loss": 0.3753, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.8842676311030742, |
| "grad_norm": 1.362688660621643, |
| "learning_rate": 6.359001941447331e-06, |
| "loss": 0.2727, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.8860759493670886, |
| "grad_norm": 1.4210124015808105, |
| "learning_rate": 6.344599103076329e-06, |
| "loss": 0.262, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.8878842676311031, |
| "grad_norm": 1.268723964691162, |
| "learning_rate": 6.330184227833376e-06, |
| "loss": 0.2268, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.8896925858951176, |
| "grad_norm": 1.4708172082901, |
| "learning_rate": 6.315757444760659e-06, |
| "loss": 0.2857, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.891500904159132, |
| "grad_norm": 1.5065549612045288, |
| "learning_rate": 6.301318883006962e-06, |
| "loss": 0.2527, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.8933092224231465, |
| "grad_norm": 1.4784266948699951, |
| "learning_rate": 6.286868671826513e-06, |
| "loss": 0.2291, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.895117540687161, |
| "grad_norm": 1.5903269052505493, |
| "learning_rate": 6.272406940577827e-06, |
| "loss": 0.3356, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.8969258589511754, |
| "grad_norm": 1.5249438285827637, |
| "learning_rate": 6.257933818722544e-06, |
| "loss": 0.2397, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.8987341772151899, |
| "grad_norm": 1.426729679107666, |
| "learning_rate": 6.243449435824276e-06, |
| "loss": 0.236, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.9005424954792043, |
| "grad_norm": 1.7714941501617432, |
| "learning_rate": 6.228953921547441e-06, |
| "loss": 0.3306, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.9023508137432188, |
| "grad_norm": 1.5219871997833252, |
| "learning_rate": 6.2144474056561076e-06, |
| "loss": 0.3083, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.9041591320072333, |
| "grad_norm": 1.5918591022491455, |
| "learning_rate": 6.19993001801283e-06, |
| "loss": 0.351, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.9059674502712477, |
| "grad_norm": 1.496276617050171, |
| "learning_rate": 6.185401888577488e-06, |
| "loss": 0.3046, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.9077757685352622, |
| "grad_norm": 1.41849946975708, |
| "learning_rate": 6.17086314740612e-06, |
| "loss": 0.254, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.9095840867992767, |
| "grad_norm": 1.437718391418457, |
| "learning_rate": 6.1563139246497615e-06, |
| "loss": 0.2972, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.9113924050632911, |
| "grad_norm": 1.3922340869903564, |
| "learning_rate": 6.141754350553279e-06, |
| "loss": 0.2571, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.9132007233273056, |
| "grad_norm": 1.597642183303833, |
| "learning_rate": 6.1271845554542065e-06, |
| "loss": 0.314, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.9150090415913201, |
| "grad_norm": 1.5225824117660522, |
| "learning_rate": 6.112604669781572e-06, |
| "loss": 0.3122, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.9168173598553345, |
| "grad_norm": 1.4691764116287231, |
| "learning_rate": 6.098014824054741e-06, |
| "loss": 0.2968, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.918625678119349, |
| "grad_norm": 1.5843572616577148, |
| "learning_rate": 6.083415148882236e-06, |
| "loss": 0.3542, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.9204339963833634, |
| "grad_norm": 1.489445686340332, |
| "learning_rate": 6.068805774960574e-06, |
| "loss": 0.3083, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.9222423146473779, |
| "grad_norm": 1.3550089597702026, |
| "learning_rate": 6.054186833073096e-06, |
| "loss": 0.2574, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.9240506329113924, |
| "grad_norm": 1.2581572532653809, |
| "learning_rate": 6.039558454088796e-06, |
| "loss": 0.2264, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.9258589511754068, |
| "grad_norm": 1.4372060298919678, |
| "learning_rate": 6.024920768961153e-06, |
| "loss": 0.1999, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.9276672694394213, |
| "grad_norm": 1.2871983051300049, |
| "learning_rate": 6.010273908726944e-06, |
| "loss": 0.2595, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.9294755877034359, |
| "grad_norm": 1.4556411504745483, |
| "learning_rate": 5.995618004505091e-06, |
| "loss": 0.3164, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.9312839059674503, |
| "grad_norm": 1.4673136472702026, |
| "learning_rate": 5.980953187495476e-06, |
| "loss": 0.2846, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.9330922242314648, |
| "grad_norm": 1.3011901378631592, |
| "learning_rate": 5.9662795889777666e-06, |
| "loss": 0.2311, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.9349005424954792, |
| "grad_norm": 1.5057899951934814, |
| "learning_rate": 5.951597340310244e-06, |
| "loss": 0.2718, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.9367088607594937, |
| "grad_norm": 1.4500908851623535, |
| "learning_rate": 5.936906572928625e-06, |
| "loss": 0.2973, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.9385171790235082, |
| "grad_norm": 1.53549063205719, |
| "learning_rate": 5.922207418344884e-06, |
| "loss": 0.3095, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.9403254972875226, |
| "grad_norm": 1.6090471744537354, |
| "learning_rate": 5.907500008146082e-06, |
| "loss": 0.2837, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.9421338155515371, |
| "grad_norm": 1.4988715648651123, |
| "learning_rate": 5.892784473993184e-06, |
| "loss": 0.2474, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.9439421338155516, |
| "grad_norm": 1.4724313020706177, |
| "learning_rate": 5.878060947619877e-06, |
| "loss": 0.2491, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.945750452079566, |
| "grad_norm": 1.5639798641204834, |
| "learning_rate": 5.863329560831397e-06, |
| "loss": 0.2355, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.9475587703435805, |
| "grad_norm": 1.585341215133667, |
| "learning_rate": 5.848590445503345e-06, |
| "loss": 0.2999, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.9493670886075949, |
| "grad_norm": 1.9147390127182007, |
| "learning_rate": 5.8338437335805124e-06, |
| "loss": 0.3447, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.9511754068716094, |
| "grad_norm": 1.3038002252578735, |
| "learning_rate": 5.819089557075689e-06, |
| "loss": 0.2896, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.9529837251356239, |
| "grad_norm": 1.4564861059188843, |
| "learning_rate": 5.8043280480684925e-06, |
| "loss": 0.2513, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.9547920433996383, |
| "grad_norm": 1.4620134830474854, |
| "learning_rate": 5.78955933870418e-06, |
| "loss": 0.2145, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.9566003616636528, |
| "grad_norm": 1.608670949935913, |
| "learning_rate": 5.7747835611924655e-06, |
| "loss": 0.3446, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.9584086799276673, |
| "grad_norm": 1.5054386854171753, |
| "learning_rate": 5.760000847806337e-06, |
| "loss": 0.3055, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.9602169981916817, |
| "grad_norm": 1.5572552680969238, |
| "learning_rate": 5.745211330880872e-06, |
| "loss": 0.3188, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.9620253164556962, |
| "grad_norm": 1.5822899341583252, |
| "learning_rate": 5.730415142812059e-06, |
| "loss": 0.3309, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.9638336347197106, |
| "grad_norm": 1.582491159439087, |
| "learning_rate": 5.7156124160555985e-06, |
| "loss": 0.3307, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.9656419529837251, |
| "grad_norm": 1.3483372926712036, |
| "learning_rate": 5.70080328312573e-06, |
| "loss": 0.2641, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.9674502712477396, |
| "grad_norm": 1.3354854583740234, |
| "learning_rate": 5.68598787659404e-06, |
| "loss": 0.2254, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.969258589511754, |
| "grad_norm": 1.2679874897003174, |
| "learning_rate": 5.671166329088278e-06, |
| "loss": 0.2232, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.9710669077757685, |
| "grad_norm": 1.419390320777893, |
| "learning_rate": 5.656338773291165e-06, |
| "loss": 0.2335, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.972875226039783, |
| "grad_norm": 1.31120765209198, |
| "learning_rate": 5.641505341939212e-06, |
| "loss": 0.2434, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.9746835443037974, |
| "grad_norm": 1.42831289768219, |
| "learning_rate": 5.626666167821522e-06, |
| "loss": 0.2789, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.976491862567812, |
| "grad_norm": 1.4863039255142212, |
| "learning_rate": 5.611821383778614e-06, |
| "loss": 0.2905, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.9783001808318263, |
| "grad_norm": 1.2998374700546265, |
| "learning_rate": 5.596971122701221e-06, |
| "loss": 0.241, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.9801084990958409, |
| "grad_norm": 1.4642691612243652, |
| "learning_rate": 5.582115517529114e-06, |
| "loss": 0.3114, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.9819168173598554, |
| "grad_norm": 1.541292667388916, |
| "learning_rate": 5.5672547012499e-06, |
| "loss": 0.2984, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.9837251356238698, |
| "grad_norm": 1.4272589683532715, |
| "learning_rate": 5.55238880689783e-06, |
| "loss": 0.307, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.9855334538878843, |
| "grad_norm": 1.4981824159622192, |
| "learning_rate": 5.537517967552626e-06, |
| "loss": 0.2646, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.9873417721518988, |
| "grad_norm": 1.4142934083938599, |
| "learning_rate": 5.522642316338268e-06, |
| "loss": 0.2581, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.9891500904159132, |
| "grad_norm": 1.5048015117645264, |
| "learning_rate": 5.507761986421818e-06, |
| "loss": 0.2628, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.9909584086799277, |
| "grad_norm": 1.488046646118164, |
| "learning_rate": 5.4928771110122185e-06, |
| "loss": 0.3166, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.9927667269439421, |
| "grad_norm": 1.4790452718734741, |
| "learning_rate": 5.477987823359104e-06, |
| "loss": 0.2557, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.9945750452079566, |
| "grad_norm": 1.6512081623077393, |
| "learning_rate": 5.463094256751608e-06, |
| "loss": 0.3687, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.9963833634719711, |
| "grad_norm": 1.5028005838394165, |
| "learning_rate": 5.448196544517168e-06, |
| "loss": 0.2967, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.9981916817359855, |
| "grad_norm": 1.5956478118896484, |
| "learning_rate": 5.433294820020335e-06, |
| "loss": 0.3197, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 1.464298129081726, |
| "learning_rate": 5.41838921666158e-06, |
| "loss": 0.263, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.0018083182640145, |
| "grad_norm": 1.3117297887802124, |
| "learning_rate": 5.403479867876087e-06, |
| "loss": 0.2027, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.003616636528029, |
| "grad_norm": 1.3781763315200806, |
| "learning_rate": 5.388566907132583e-06, |
| "loss": 0.1909, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.0054249547920433, |
| "grad_norm": 1.4087004661560059, |
| "learning_rate": 5.373650467932122e-06, |
| "loss": 0.2242, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.0072332730560578, |
| "grad_norm": 1.3254539966583252, |
| "learning_rate": 5.358730683806897e-06, |
| "loss": 0.221, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.0090415913200723, |
| "grad_norm": 1.4249013662338257, |
| "learning_rate": 5.343807688319047e-06, |
| "loss": 0.2352, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.0108499095840868, |
| "grad_norm": 1.4847030639648438, |
| "learning_rate": 5.328881615059458e-06, |
| "loss": 0.2912, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.0126582278481013, |
| "grad_norm": 1.2479192018508911, |
| "learning_rate": 5.3139525976465675e-06, |
| "loss": 0.1962, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.0144665461121158, |
| "grad_norm": 1.4667129516601562, |
| "learning_rate": 5.299020769725172e-06, |
| "loss": 0.2087, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.0162748643761301, |
| "grad_norm": 1.1846363544464111, |
| "learning_rate": 5.284086264965224e-06, |
| "loss": 0.1374, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.0180831826401446, |
| "grad_norm": 1.2141779661178589, |
| "learning_rate": 5.269149217060642e-06, |
| "loss": 0.1698, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.0198915009041591, |
| "grad_norm": 1.181235671043396, |
| "learning_rate": 5.2542097597281095e-06, |
| "loss": 0.1543, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.0216998191681737, |
| "grad_norm": 1.3612666130065918, |
| "learning_rate": 5.239268026705878e-06, |
| "loss": 0.2075, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.0235081374321882, |
| "grad_norm": 1.3640803098678589, |
| "learning_rate": 5.224324151752575e-06, |
| "loss": 0.1809, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.0253164556962024, |
| "grad_norm": 1.2999763488769531, |
| "learning_rate": 5.209378268645998e-06, |
| "loss": 0.1773, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.027124773960217, |
| "grad_norm": 1.4859157800674438, |
| "learning_rate": 5.194430511181925e-06, |
| "loss": 0.1986, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.0289330922242315, |
| "grad_norm": 1.568980097770691, |
| "learning_rate": 5.179481013172912e-06, |
| "loss": 0.2389, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.030741410488246, |
| "grad_norm": 1.532390832901001, |
| "learning_rate": 5.1645299084470936e-06, |
| "loss": 0.1698, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.0325497287522605, |
| "grad_norm": 1.5008491277694702, |
| "learning_rate": 5.1495773308469935e-06, |
| "loss": 0.1706, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.0343580470162748, |
| "grad_norm": 1.5538599491119385, |
| "learning_rate": 5.134623414228315e-06, |
| "loss": 0.2351, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.0361663652802893, |
| "grad_norm": 1.3042097091674805, |
| "learning_rate": 5.119668292458751e-06, |
| "loss": 0.1678, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.0379746835443038, |
| "grad_norm": 1.3612871170043945, |
| "learning_rate": 5.1047120994167855e-06, |
| "loss": 0.1975, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.0397830018083183, |
| "grad_norm": 1.5851926803588867, |
| "learning_rate": 5.0897549689904865e-06, |
| "loss": 0.2288, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.0415913200723328, |
| "grad_norm": 1.3465512990951538, |
| "learning_rate": 5.074797035076319e-06, |
| "loss": 0.1772, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.0433996383363473, |
| "grad_norm": 1.5474019050598145, |
| "learning_rate": 5.059838431577937e-06, |
| "loss": 0.2423, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.0452079566003616, |
| "grad_norm": 1.7244025468826294, |
| "learning_rate": 5.04487929240499e-06, |
| "loss": 0.291, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.047016274864376, |
| "grad_norm": 1.509497046470642, |
| "learning_rate": 5.029919751471925e-06, |
| "loss": 0.2104, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.0488245931283906, |
| "grad_norm": 1.299607515335083, |
| "learning_rate": 5.014959942696782e-06, |
| "loss": 0.1796, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.0506329113924051, |
| "grad_norm": 1.3673267364501953, |
| "learning_rate": 5e-06, |
| "loss": 0.1826, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.0524412296564196, |
| "grad_norm": 1.3643478155136108, |
| "learning_rate": 4.98504005730322e-06, |
| "loss": 0.1796, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.054249547920434, |
| "grad_norm": 1.5472909212112427, |
| "learning_rate": 4.970080248528077e-06, |
| "loss": 0.1736, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.0560578661844484, |
| "grad_norm": 1.5176197290420532, |
| "learning_rate": 4.955120707595011e-06, |
| "loss": 0.2345, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.057866184448463, |
| "grad_norm": 1.6651631593704224, |
| "learning_rate": 4.940161568422065e-06, |
| "loss": 0.2461, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.0596745027124774, |
| "grad_norm": 1.336642861366272, |
| "learning_rate": 4.9252029649236835e-06, |
| "loss": 0.1985, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.061482820976492, |
| "grad_norm": 1.5216953754425049, |
| "learning_rate": 4.910245031009515e-06, |
| "loss": 0.2382, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.0632911392405062, |
| "grad_norm": 1.5934653282165527, |
| "learning_rate": 4.895287900583216e-06, |
| "loss": 0.234, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.0650994575045207, |
| "grad_norm": 1.3796063661575317, |
| "learning_rate": 4.88033170754125e-06, |
| "loss": 0.15, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.0669077757685352, |
| "grad_norm": 1.261986255645752, |
| "learning_rate": 4.865376585771687e-06, |
| "loss": 0.1732, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.0687160940325497, |
| "grad_norm": 1.2686052322387695, |
| "learning_rate": 4.850422669153009e-06, |
| "loss": 0.1862, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.0705244122965643, |
| "grad_norm": 1.3567222356796265, |
| "learning_rate": 4.835470091552906e-06, |
| "loss": 0.2148, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.0723327305605788, |
| "grad_norm": 1.3897775411605835, |
| "learning_rate": 4.8205189868270894e-06, |
| "loss": 0.1955, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.074141048824593, |
| "grad_norm": 1.4903334379196167, |
| "learning_rate": 4.805569488818077e-06, |
| "loss": 0.2222, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.0759493670886076, |
| "grad_norm": 1.172352910041809, |
| "learning_rate": 4.7906217313540035e-06, |
| "loss": 0.1574, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.077757685352622, |
| "grad_norm": 1.4119564294815063, |
| "learning_rate": 4.775675848247427e-06, |
| "loss": 0.1851, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.0795660036166366, |
| "grad_norm": 1.2973700761795044, |
| "learning_rate": 4.760731973294122e-06, |
| "loss": 0.1554, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.081374321880651, |
| "grad_norm": 1.3571135997772217, |
| "learning_rate": 4.745790240271892e-06, |
| "loss": 0.1954, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.0831826401446654, |
| "grad_norm": 1.3942945003509521, |
| "learning_rate": 4.73085078293936e-06, |
| "loss": 0.2016, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.0849909584086799, |
| "grad_norm": 1.5880961418151855, |
| "learning_rate": 4.715913735034779e-06, |
| "loss": 0.1762, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.0867992766726944, |
| "grad_norm": 1.5360944271087646, |
| "learning_rate": 4.700979230274829e-06, |
| "loss": 0.21, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.0886075949367089, |
| "grad_norm": 1.6026498079299927, |
| "learning_rate": 4.686047402353433e-06, |
| "loss": 0.2701, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.0904159132007234, |
| "grad_norm": 1.4952352046966553, |
| "learning_rate": 4.6711183849405435e-06, |
| "loss": 0.2294, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.092224231464738, |
| "grad_norm": 1.3100388050079346, |
| "learning_rate": 4.6561923116809545e-06, |
| "loss": 0.161, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.0940325497287522, |
| "grad_norm": 1.4728491306304932, |
| "learning_rate": 4.641269316193104e-06, |
| "loss": 0.2186, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.0958408679927667, |
| "grad_norm": 1.9371023178100586, |
| "learning_rate": 4.626349532067879e-06, |
| "loss": 0.1844, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.0976491862567812, |
| "grad_norm": 1.62328040599823, |
| "learning_rate": 4.611433092867417e-06, |
| "loss": 0.2775, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.0994575045207957, |
| "grad_norm": 1.4356878995895386, |
| "learning_rate": 4.596520132123915e-06, |
| "loss": 0.2043, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.1012658227848102, |
| "grad_norm": 1.4942317008972168, |
| "learning_rate": 4.581610783338424e-06, |
| "loss": 0.2003, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.1030741410488245, |
| "grad_norm": 1.425881028175354, |
| "learning_rate": 4.566705179979665e-06, |
| "loss": 0.2133, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.104882459312839, |
| "grad_norm": 1.2177973985671997, |
| "learning_rate": 4.551803455482833e-06, |
| "loss": 0.1606, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.1066907775768535, |
| "grad_norm": 1.517218828201294, |
| "learning_rate": 4.536905743248394e-06, |
| "loss": 0.2364, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.108499095840868, |
| "grad_norm": 1.442273736000061, |
| "learning_rate": 4.522012176640898e-06, |
| "loss": 0.2205, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.1103074141048825, |
| "grad_norm": 1.3430852890014648, |
| "learning_rate": 4.507122888987782e-06, |
| "loss": 0.1889, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.1121157323688968, |
| "grad_norm": 1.5666782855987549, |
| "learning_rate": 4.4922380135781835e-06, |
| "loss": 0.2544, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.1139240506329113, |
| "grad_norm": 1.3658379316329956, |
| "learning_rate": 4.477357683661734e-06, |
| "loss": 0.1932, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.1157323688969258, |
| "grad_norm": 1.3235565423965454, |
| "learning_rate": 4.462482032447377e-06, |
| "loss": 0.163, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.1175406871609403, |
| "grad_norm": 1.2962212562561035, |
| "learning_rate": 4.447611193102171e-06, |
| "loss": 0.1839, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.1193490054249549, |
| "grad_norm": 1.3023827075958252, |
| "learning_rate": 4.432745298750102e-06, |
| "loss": 0.1856, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.1211573236889691, |
| "grad_norm": 1.5122865438461304, |
| "learning_rate": 4.417884482470887e-06, |
| "loss": 0.2504, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.1229656419529837, |
| "grad_norm": 1.4467133283615112, |
| "learning_rate": 4.4030288772987795e-06, |
| "loss": 0.2083, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.1247739602169982, |
| "grad_norm": 1.2714449167251587, |
| "learning_rate": 4.388178616221389e-06, |
| "loss": 0.143, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.1265822784810127, |
| "grad_norm": 1.4074504375457764, |
| "learning_rate": 4.373333832178478e-06, |
| "loss": 0.1898, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.1283905967450272, |
| "grad_norm": 1.278922200202942, |
| "learning_rate": 4.35849465806079e-06, |
| "loss": 0.1375, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.1301989150090417, |
| "grad_norm": 1.2364168167114258, |
| "learning_rate": 4.3436612267088355e-06, |
| "loss": 0.1439, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.132007233273056, |
| "grad_norm": 1.4670965671539307, |
| "learning_rate": 4.3288336709117246e-06, |
| "loss": 0.1772, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.1338155515370705, |
| "grad_norm": 1.5321455001831055, |
| "learning_rate": 4.314012123405961e-06, |
| "loss": 0.2492, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.135623869801085, |
| "grad_norm": 1.592399001121521, |
| "learning_rate": 4.299196716874271e-06, |
| "loss": 0.2118, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.1374321880650995, |
| "grad_norm": 1.4796591997146606, |
| "learning_rate": 4.284387583944403e-06, |
| "loss": 0.188, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.139240506329114, |
| "grad_norm": 1.2441701889038086, |
| "learning_rate": 4.269584857187942e-06, |
| "loss": 0.16, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.1410488245931285, |
| "grad_norm": 1.564576506614685, |
| "learning_rate": 4.254788669119127e-06, |
| "loss": 0.2022, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.1428571428571428, |
| "grad_norm": 1.6278743743896484, |
| "learning_rate": 4.239999152193664e-06, |
| "loss": 0.2145, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.1446654611211573, |
| "grad_norm": 1.6288756132125854, |
| "learning_rate": 4.225216438807536e-06, |
| "loss": 0.2328, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.1464737793851718, |
| "grad_norm": 1.6530628204345703, |
| "learning_rate": 4.2104406612958216e-06, |
| "loss": 0.2808, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.1482820976491863, |
| "grad_norm": 1.503882646560669, |
| "learning_rate": 4.195671951931509e-06, |
| "loss": 0.2263, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.1500904159132008, |
| "grad_norm": 1.1974093914031982, |
| "learning_rate": 4.180910442924312e-06, |
| "loss": 0.1463, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.1518987341772151, |
| "grad_norm": 1.47310209274292, |
| "learning_rate": 4.166156266419489e-06, |
| "loss": 0.193, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.1537070524412296, |
| "grad_norm": 1.2455514669418335, |
| "learning_rate": 4.1514095544966556e-06, |
| "loss": 0.1488, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.1555153707052441, |
| "grad_norm": 1.3934590816497803, |
| "learning_rate": 4.136670439168605e-06, |
| "loss": 0.2162, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.1573236889692586, |
| "grad_norm": 1.3384346961975098, |
| "learning_rate": 4.121939052380125e-06, |
| "loss": 0.2076, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.1591320072332731, |
| "grad_norm": 1.489557147026062, |
| "learning_rate": 4.107215526006818e-06, |
| "loss": 0.2155, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.1609403254972874, |
| "grad_norm": 1.3429855108261108, |
| "learning_rate": 4.092499991853919e-06, |
| "loss": 0.1869, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.162748643761302, |
| "grad_norm": 1.3671791553497314, |
| "learning_rate": 4.0777925816551175e-06, |
| "loss": 0.1861, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.1645569620253164, |
| "grad_norm": 1.3407803773880005, |
| "learning_rate": 4.063093427071376e-06, |
| "loss": 0.164, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.166365280289331, |
| "grad_norm": 1.5938869714736938, |
| "learning_rate": 4.048402659689757e-06, |
| "loss": 0.2856, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.1681735985533455, |
| "grad_norm": 1.3737971782684326, |
| "learning_rate": 4.033720411022235e-06, |
| "loss": 0.1738, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.1699819168173597, |
| "grad_norm": 1.4273154735565186, |
| "learning_rate": 4.019046812504526e-06, |
| "loss": 0.1974, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.1717902350813743, |
| "grad_norm": 1.3090167045593262, |
| "learning_rate": 4.0043819954949105e-06, |
| "loss": 0.1572, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.1735985533453888, |
| "grad_norm": 1.5619733333587646, |
| "learning_rate": 3.989726091273056e-06, |
| "loss": 0.2376, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.1754068716094033, |
| "grad_norm": 1.6391355991363525, |
| "learning_rate": 3.975079231038848e-06, |
| "loss": 0.2051, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.1772151898734178, |
| "grad_norm": 1.3856724500656128, |
| "learning_rate": 3.960441545911205e-06, |
| "loss": 0.1677, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.179023508137432, |
| "grad_norm": 1.6323115825653076, |
| "learning_rate": 3.9458131669269066e-06, |
| "loss": 0.2374, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.1808318264014466, |
| "grad_norm": 1.5338612794876099, |
| "learning_rate": 3.931194225039427e-06, |
| "loss": 0.2115, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.182640144665461, |
| "grad_norm": 1.3400039672851562, |
| "learning_rate": 3.916584851117766e-06, |
| "loss": 0.1853, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.1844484629294756, |
| "grad_norm": 1.3426306247711182, |
| "learning_rate": 3.90198517594526e-06, |
| "loss": 0.1883, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.18625678119349, |
| "grad_norm": 1.176185965538025, |
| "learning_rate": 3.887395330218429e-06, |
| "loss": 0.1475, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.1880650994575046, |
| "grad_norm": 1.4213191270828247, |
| "learning_rate": 3.872815444545794e-06, |
| "loss": 0.1868, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.189873417721519, |
| "grad_norm": 1.6255871057510376, |
| "learning_rate": 3.8582456494467214e-06, |
| "loss": 0.2424, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.1916817359855334, |
| "grad_norm": 1.3936301469802856, |
| "learning_rate": 3.843686075350239e-06, |
| "loss": 0.2065, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.193490054249548, |
| "grad_norm": 1.6474289894104004, |
| "learning_rate": 3.829136852593881e-06, |
| "loss": 0.2248, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.1952983725135624, |
| "grad_norm": 1.485768437385559, |
| "learning_rate": 3.8145981114225135e-06, |
| "loss": 0.2007, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.197106690777577, |
| "grad_norm": 1.2493586540222168, |
| "learning_rate": 3.8000699819871704e-06, |
| "loss": 0.1713, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.1989150090415914, |
| "grad_norm": 1.2929006814956665, |
| "learning_rate": 3.785552594343894e-06, |
| "loss": 0.1464, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.2007233273056057, |
| "grad_norm": 1.2843424081802368, |
| "learning_rate": 3.7710460784525617e-06, |
| "loss": 0.1572, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.2025316455696202, |
| "grad_norm": 1.338903546333313, |
| "learning_rate": 3.756550564175727e-06, |
| "loss": 0.1904, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.2043399638336347, |
| "grad_norm": 1.3234328031539917, |
| "learning_rate": 3.7420661812774577e-06, |
| "loss": 0.1794, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.2061482820976492, |
| "grad_norm": 1.2833119630813599, |
| "learning_rate": 3.7275930594221752e-06, |
| "loss": 0.1809, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.2079566003616637, |
| "grad_norm": 1.5440844297409058, |
| "learning_rate": 3.7131313281734895e-06, |
| "loss": 0.202, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.209764918625678, |
| "grad_norm": 1.699369192123413, |
| "learning_rate": 3.69868111699304e-06, |
| "loss": 0.237, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.2115732368896925, |
| "grad_norm": 1.37570321559906, |
| "learning_rate": 3.6842425552393424e-06, |
| "loss": 0.1718, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.213381555153707, |
| "grad_norm": 1.528974175453186, |
| "learning_rate": 3.669815772166625e-06, |
| "loss": 0.1925, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.2151898734177216, |
| "grad_norm": 1.3920633792877197, |
| "learning_rate": 3.655400896923672e-06, |
| "loss": 0.1758, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.216998191681736, |
| "grad_norm": 1.432664394378662, |
| "learning_rate": 3.6409980585526707e-06, |
| "loss": 0.1672, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.2188065099457503, |
| "grad_norm": 1.4724292755126953, |
| "learning_rate": 3.62660738598805e-06, |
| "loss": 0.1828, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.2206148282097649, |
| "grad_norm": 1.3286408185958862, |
| "learning_rate": 3.6122290080553334e-06, |
| "loss": 0.1661, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.2224231464737794, |
| "grad_norm": 1.4410812854766846, |
| "learning_rate": 3.5978630534699873e-06, |
| "loss": 0.1767, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.2242314647377939, |
| "grad_norm": 1.597132921218872, |
| "learning_rate": 3.583509650836254e-06, |
| "loss": 0.2085, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.2260397830018084, |
| "grad_norm": 1.8338674306869507, |
| "learning_rate": 3.5691689286460172e-06, |
| "loss": 0.2821, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.2278481012658227, |
| "grad_norm": 1.3840421438217163, |
| "learning_rate": 3.5548410152776414e-06, |
| "loss": 0.1592, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.2296564195298372, |
| "grad_norm": 1.298305630683899, |
| "learning_rate": 3.540526038994834e-06, |
| "loss": 0.1629, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.2314647377938517, |
| "grad_norm": 1.3622138500213623, |
| "learning_rate": 3.526224127945479e-06, |
| "loss": 0.163, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.2332730560578662, |
| "grad_norm": 1.664790153503418, |
| "learning_rate": 3.5119354101605086e-06, |
| "loss": 0.2225, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.2350813743218807, |
| "grad_norm": 1.718390941619873, |
| "learning_rate": 3.4976600135527403e-06, |
| "loss": 0.2209, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.2368896925858952, |
| "grad_norm": 1.634655237197876, |
| "learning_rate": 3.4833980659157507e-06, |
| "loss": 0.2358, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.2386980108499095, |
| "grad_norm": 1.5706018209457397, |
| "learning_rate": 3.469149694922712e-06, |
| "loss": 0.2219, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.240506329113924, |
| "grad_norm": 1.480089545249939, |
| "learning_rate": 3.4549150281252635e-06, |
| "loss": 0.2048, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.2423146473779385, |
| "grad_norm": 1.5125480890274048, |
| "learning_rate": 3.4406941929523607e-06, |
| "loss": 0.2325, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.244122965641953, |
| "grad_norm": 1.3126697540283203, |
| "learning_rate": 3.4264873167091405e-06, |
| "loss": 0.1528, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.2459312839059675, |
| "grad_norm": 1.4432624578475952, |
| "learning_rate": 3.412294526575779e-06, |
| "loss": 0.2041, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.247739602169982, |
| "grad_norm": 1.4925624132156372, |
| "learning_rate": 3.398115949606352e-06, |
| "loss": 0.22, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.2495479204339963, |
| "grad_norm": 1.4070854187011719, |
| "learning_rate": 3.383951712727701e-06, |
| "loss": 0.2023, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.2513562386980108, |
| "grad_norm": 1.3051189184188843, |
| "learning_rate": 3.3698019427382912e-06, |
| "loss": 0.1776, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.2531645569620253, |
| "grad_norm": 1.3715490102767944, |
| "learning_rate": 3.355666766307084e-06, |
| "loss": 0.1957, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.2549728752260398, |
| "grad_norm": 1.4365156888961792, |
| "learning_rate": 3.341546309972398e-06, |
| "loss": 0.1963, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.2567811934900543, |
| "grad_norm": 1.3215476274490356, |
| "learning_rate": 3.327440700140774e-06, |
| "loss": 0.1727, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.2585895117540686, |
| "grad_norm": 1.251080870628357, |
| "learning_rate": 3.3133500630858507e-06, |
| "loss": 0.1531, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.2603978300180831, |
| "grad_norm": 1.3705614805221558, |
| "learning_rate": 3.299274524947229e-06, |
| "loss": 0.2043, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.2622061482820977, |
| "grad_norm": 1.36355721950531, |
| "learning_rate": 3.2852142117293435e-06, |
| "loss": 0.1954, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.2640144665461122, |
| "grad_norm": 1.3615798950195312, |
| "learning_rate": 3.2711692493003357e-06, |
| "loss": 0.1588, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.2658227848101267, |
| "grad_norm": 1.5433591604232788, |
| "learning_rate": 3.2571397633909252e-06, |
| "loss": 0.2013, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.267631103074141, |
| "grad_norm": 1.4738589525222778, |
| "learning_rate": 3.2431258795932863e-06, |
| "loss": 0.2119, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.2694394213381555, |
| "grad_norm": 1.5291759967803955, |
| "learning_rate": 3.229127723359927e-06, |
| "loss": 0.2092, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.27124773960217, |
| "grad_norm": 1.6856266260147095, |
| "learning_rate": 3.215145420002555e-06, |
| "loss": 0.2396, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.2730560578661845, |
| "grad_norm": 1.3590456247329712, |
| "learning_rate": 3.2011790946909673e-06, |
| "loss": 0.2145, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.274864376130199, |
| "grad_norm": 1.4624722003936768, |
| "learning_rate": 3.1872288724519207e-06, |
| "loss": 0.1623, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.2766726943942133, |
| "grad_norm": 1.468032717704773, |
| "learning_rate": 3.173294878168025e-06, |
| "loss": 0.2106, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.2784810126582278, |
| "grad_norm": 1.3411674499511719, |
| "learning_rate": 3.1593772365766107e-06, |
| "loss": 0.1563, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.2802893309222423, |
| "grad_norm": 1.5718902349472046, |
| "learning_rate": 3.1454760722686206e-06, |
| "loss": 0.2691, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.2820976491862568, |
| "grad_norm": 1.5618810653686523, |
| "learning_rate": 3.1315915096874894e-06, |
| "loss": 0.1944, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.2839059674502713, |
| "grad_norm": 1.3696106672286987, |
| "learning_rate": 3.11772367312804e-06, |
| "loss": 0.2013, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.2857142857142856, |
| "grad_norm": 1.5091700553894043, |
| "learning_rate": 3.1038726867353587e-06, |
| "loss": 0.2409, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.2875226039783003, |
| "grad_norm": 1.5826542377471924, |
| "learning_rate": 3.090038674503688e-06, |
| "loss": 0.2375, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.2893309222423146, |
| "grad_norm": 1.4372962713241577, |
| "learning_rate": 3.076221760275321e-06, |
| "loss": 0.1882, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.2911392405063291, |
| "grad_norm": 1.3111985921859741, |
| "learning_rate": 3.0624220677394854e-06, |
| "loss": 0.1689, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.2929475587703436, |
| "grad_norm": 1.4454139471054077, |
| "learning_rate": 3.048639720431244e-06, |
| "loss": 0.1667, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.2947558770343581, |
| "grad_norm": 1.4132417440414429, |
| "learning_rate": 3.0348748417303826e-06, |
| "loss": 0.1781, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.2965641952983726, |
| "grad_norm": 1.3847126960754395, |
| "learning_rate": 3.0211275548603076e-06, |
| "loss": 0.1729, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.298372513562387, |
| "grad_norm": 1.21110200881958, |
| "learning_rate": 3.007397982886942e-06, |
| "loss": 0.1512, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.3001808318264014, |
| "grad_norm": 1.4430028200149536, |
| "learning_rate": 2.9936862487176295e-06, |
| "loss": 0.1658, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.301989150090416, |
| "grad_norm": 1.3040170669555664, |
| "learning_rate": 2.979992475100024e-06, |
| "loss": 0.1705, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.3037974683544304, |
| "grad_norm": 1.420000672340393, |
| "learning_rate": 2.966316784621e-06, |
| "loss": 0.19, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.305605786618445, |
| "grad_norm": 1.5596671104431152, |
| "learning_rate": 2.9526592997055488e-06, |
| "loss": 0.1929, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.3074141048824592, |
| "grad_norm": 1.2487597465515137, |
| "learning_rate": 2.9390201426156855e-06, |
| "loss": 0.135, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.3092224231464737, |
| "grad_norm": 1.2281571626663208, |
| "learning_rate": 2.9253994354493575e-06, |
| "loss": 0.1226, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.3110307414104883, |
| "grad_norm": 1.6670477390289307, |
| "learning_rate": 2.911797300139345e-06, |
| "loss": 0.2273, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.3128390596745028, |
| "grad_norm": 1.5783288478851318, |
| "learning_rate": 2.8982138584521734e-06, |
| "loss": 0.2369, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.3146473779385173, |
| "grad_norm": 1.5759886503219604, |
| "learning_rate": 2.884649231987021e-06, |
| "loss": 0.1865, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.3164556962025316, |
| "grad_norm": 1.6388565301895142, |
| "learning_rate": 2.871103542174637e-06, |
| "loss": 0.248, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.318264014466546, |
| "grad_norm": 1.4341042041778564, |
| "learning_rate": 2.857576910276243e-06, |
| "loss": 0.209, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.3200723327305606, |
| "grad_norm": 1.2234702110290527, |
| "learning_rate": 2.844069457382459e-06, |
| "loss": 0.1543, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.321880650994575, |
| "grad_norm": 1.287217378616333, |
| "learning_rate": 2.83058130441221e-06, |
| "loss": 0.1763, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.3236889692585896, |
| "grad_norm": 1.4264429807662964, |
| "learning_rate": 2.817112572111651e-06, |
| "loss": 0.1983, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.3254972875226039, |
| "grad_norm": 1.4583526849746704, |
| "learning_rate": 2.803663381053081e-06, |
| "loss": 0.2137, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.3273056057866184, |
| "grad_norm": 1.4866368770599365, |
| "learning_rate": 2.790233851633868e-06, |
| "loss": 0.2016, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.3291139240506329, |
| "grad_norm": 1.5707135200500488, |
| "learning_rate": 2.776824104075364e-06, |
| "loss": 0.1965, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.3309222423146474, |
| "grad_norm": 1.2514961957931519, |
| "learning_rate": 2.7634342584218364e-06, |
| "loss": 0.16, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.332730560578662, |
| "grad_norm": 1.4211190938949585, |
| "learning_rate": 2.7500644345393945e-06, |
| "loss": 0.1857, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.3345388788426762, |
| "grad_norm": 1.4186261892318726, |
| "learning_rate": 2.7367147521149052e-06, |
| "loss": 0.1926, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.3363471971066907, |
| "grad_norm": 1.3444159030914307, |
| "learning_rate": 2.723385330654933e-06, |
| "loss": 0.1747, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.3381555153707052, |
| "grad_norm": 1.4014556407928467, |
| "learning_rate": 2.7100762894846633e-06, |
| "loss": 0.1942, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.3399638336347197, |
| "grad_norm": 1.349727749824524, |
| "learning_rate": 2.6967877477468394e-06, |
| "loss": 0.1751, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.3417721518987342, |
| "grad_norm": 1.6538288593292236, |
| "learning_rate": 2.683519824400693e-06, |
| "loss": 0.229, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.3435804701627485, |
| "grad_norm": 1.4319345951080322, |
| "learning_rate": 2.6702726382208775e-06, |
| "loss": 0.1716, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.3453887884267632, |
| "grad_norm": 1.2382214069366455, |
| "learning_rate": 2.657046307796407e-06, |
| "loss": 0.1349, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.3471971066907775, |
| "grad_norm": 1.239733338356018, |
| "learning_rate": 2.6438409515295997e-06, |
| "loss": 0.1595, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.349005424954792, |
| "grad_norm": 1.3710426092147827, |
| "learning_rate": 2.6306566876350072e-06, |
| "loss": 0.1621, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.3508137432188065, |
| "grad_norm": 1.6545993089675903, |
| "learning_rate": 2.617493634138363e-06, |
| "loss": 0.2161, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.352622061482821, |
| "grad_norm": 1.4005903005599976, |
| "learning_rate": 2.6043519088755263e-06, |
| "loss": 0.1447, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.3544303797468356, |
| "grad_norm": 1.3730400800704956, |
| "learning_rate": 2.5912316294914232e-06, |
| "loss": 0.192, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.3562386980108498, |
| "grad_norm": 1.4984370470046997, |
| "learning_rate": 2.578132913439e-06, |
| "loss": 0.186, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.3580470162748643, |
| "grad_norm": 1.4603348970413208, |
| "learning_rate": 2.5650558779781635e-06, |
| "loss": 0.1537, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.3598553345388789, |
| "grad_norm": 1.3268773555755615, |
| "learning_rate": 2.55200064017474e-06, |
| "loss": 0.1829, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.3616636528028934, |
| "grad_norm": 1.7684950828552246, |
| "learning_rate": 2.538967316899414e-06, |
| "loss": 0.2551, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.3634719710669079, |
| "grad_norm": 1.3952032327651978, |
| "learning_rate": 2.5259560248267022e-06, |
| "loss": 0.1929, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.3652802893309222, |
| "grad_norm": 1.3161765336990356, |
| "learning_rate": 2.512966880433891e-06, |
| "loss": 0.1539, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.3670886075949367, |
| "grad_norm": 1.2540570497512817, |
| "learning_rate": 2.5000000000000015e-06, |
| "loss": 0.1687, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.3688969258589512, |
| "grad_norm": 1.4456546306610107, |
| "learning_rate": 2.4870554996047454e-06, |
| "loss": 0.1795, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.3707052441229657, |
| "grad_norm": 1.5679326057434082, |
| "learning_rate": 2.4741334951274948e-06, |
| "loss": 0.2109, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.3725135623869802, |
| "grad_norm": 1.5739426612854004, |
| "learning_rate": 2.461234102246231e-06, |
| "loss": 0.2011, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.3743218806509945, |
| "grad_norm": 1.5570074319839478, |
| "learning_rate": 2.448357436436519e-06, |
| "loss": 0.2254, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.376130198915009, |
| "grad_norm": 1.3705545663833618, |
| "learning_rate": 2.43550361297047e-06, |
| "loss": 0.1858, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.3779385171790235, |
| "grad_norm": 1.437455177307129, |
| "learning_rate": 2.4226727469157097e-06, |
| "loss": 0.1658, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.379746835443038, |
| "grad_norm": 1.4281333684921265, |
| "learning_rate": 2.40986495313435e-06, |
| "loss": 0.1851, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.3815551537070525, |
| "grad_norm": 1.3015307188034058, |
| "learning_rate": 2.3970803462819586e-06, |
| "loss": 0.1678, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.3833634719710668, |
| "grad_norm": 1.3309510946273804, |
| "learning_rate": 2.384319040806533e-06, |
| "loss": 0.1429, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.3851717902350813, |
| "grad_norm": 1.27116060256958, |
| "learning_rate": 2.371581150947476e-06, |
| "loss": 0.1446, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.3869801084990958, |
| "grad_norm": 1.4609099626541138, |
| "learning_rate": 2.3588667907345787e-06, |
| "loss": 0.2185, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.3887884267631103, |
| "grad_norm": 1.1688401699066162, |
| "learning_rate": 2.3461760739869865e-06, |
| "loss": 0.1317, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.3905967450271248, |
| "grad_norm": 1.430402159690857, |
| "learning_rate": 2.3335091143121935e-06, |
| "loss": 0.1736, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.3924050632911391, |
| "grad_norm": 1.3158042430877686, |
| "learning_rate": 2.320866025105016e-06, |
| "loss": 0.1469, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.3942133815551538, |
| "grad_norm": 1.4294010400772095, |
| "learning_rate": 2.3082469195465893e-06, |
| "loss": 0.1801, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.3960216998191681, |
| "grad_norm": 1.2591050863265991, |
| "learning_rate": 2.2956519106033366e-06, |
| "loss": 0.1469, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.3978300180831826, |
| "grad_norm": 1.422146201133728, |
| "learning_rate": 2.283081111025973e-06, |
| "loss": 0.1506, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.3996383363471971, |
| "grad_norm": 1.3352669477462769, |
| "learning_rate": 2.2705346333484925e-06, |
| "loss": 0.1529, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.4014466546112117, |
| "grad_norm": 1.4001466035842896, |
| "learning_rate": 2.258012589887154e-06, |
| "loss": 0.1719, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.4032549728752262, |
| "grad_norm": 1.6978325843811035, |
| "learning_rate": 2.245515092739488e-06, |
| "loss": 0.2443, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.4050632911392404, |
| "grad_norm": 1.5607898235321045, |
| "learning_rate": 2.23304225378328e-06, |
| "loss": 0.1895, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.406871609403255, |
| "grad_norm": 1.7388428449630737, |
| "learning_rate": 2.2205941846755787e-06, |
| "loss": 0.2613, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.4086799276672695, |
| "grad_norm": 1.5756665468215942, |
| "learning_rate": 2.2081709968516867e-06, |
| "loss": 0.2497, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.410488245931284, |
| "grad_norm": 1.3747678995132446, |
| "learning_rate": 2.1957728015241793e-06, |
| "loss": 0.1611, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.4122965641952985, |
| "grad_norm": 1.6689574718475342, |
| "learning_rate": 2.1833997096818897e-06, |
| "loss": 0.1418, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.4141048824593128, |
| "grad_norm": 1.572873592376709, |
| "learning_rate": 2.171051832088928e-06, |
| "loss": 0.2104, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.4159132007233273, |
| "grad_norm": 1.6118040084838867, |
| "learning_rate": 2.158729279283684e-06, |
| "loss": 0.2243, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.4177215189873418, |
| "grad_norm": 1.4820952415466309, |
| "learning_rate": 2.146432161577842e-06, |
| "loss": 0.1776, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.4195298372513563, |
| "grad_norm": 1.289957880973816, |
| "learning_rate": 2.1341605890553895e-06, |
| "loss": 0.1816, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.4213381555153708, |
| "grad_norm": 1.524835228919983, |
| "learning_rate": 2.1219146715716332e-06, |
| "loss": 0.2344, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.423146473779385, |
| "grad_norm": 1.2140674591064453, |
| "learning_rate": 2.109694518752216e-06, |
| "loss": 0.1522, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.4249547920433996, |
| "grad_norm": 1.5748006105422974, |
| "learning_rate": 2.097500239992132e-06, |
| "loss": 0.2018, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.426763110307414, |
| "grad_norm": 1.516115427017212, |
| "learning_rate": 2.085331944454759e-06, |
| "loss": 0.1788, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.4285714285714286, |
| "grad_norm": 1.3118647336959839, |
| "learning_rate": 2.0731897410708618e-06, |
| "loss": 0.1735, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.4303797468354431, |
| "grad_norm": 1.3635505437850952, |
| "learning_rate": 2.061073738537635e-06, |
| "loss": 0.1881, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.4321880650994574, |
| "grad_norm": 1.2797064781188965, |
| "learning_rate": 2.0489840453177198e-06, |
| "loss": 0.1558, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.433996383363472, |
| "grad_norm": 1.5476677417755127, |
| "learning_rate": 2.0369207696382366e-06, |
| "loss": 0.2168, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.4358047016274864, |
| "grad_norm": 1.2250149250030518, |
| "learning_rate": 2.0248840194898155e-06, |
| "loss": 0.1283, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.437613019891501, |
| "grad_norm": 1.4220918416976929, |
| "learning_rate": 2.0128739026256306e-06, |
| "loss": 0.1769, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.4394213381555154, |
| "grad_norm": 1.3836525678634644, |
| "learning_rate": 2.0008905265604316e-06, |
| "loss": 0.1575, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.4412296564195297, |
| "grad_norm": 1.3742254972457886, |
| "learning_rate": 1.9889339985695894e-06, |
| "loss": 0.1642, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.4430379746835442, |
| "grad_norm": 1.3974021673202515, |
| "learning_rate": 1.977004425688126e-06, |
| "loss": 0.1714, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.4448462929475587, |
| "grad_norm": 1.432606816291809, |
| "learning_rate": 1.9651019147097624e-06, |
| "loss": 0.1734, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.4466546112115732, |
| "grad_norm": 1.3600192070007324, |
| "learning_rate": 1.95322657218596e-06, |
| "loss": 0.1499, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.4484629294755877, |
| "grad_norm": 1.4031591415405273, |
| "learning_rate": 1.941378504424968e-06, |
| "loss": 0.1517, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.450271247739602, |
| "grad_norm": 1.366796851158142, |
| "learning_rate": 1.929557817490874e-06, |
| "loss": 0.1869, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.4520795660036168, |
| "grad_norm": 1.7163602113723755, |
| "learning_rate": 1.9177646172026513e-06, |
| "loss": 0.2516, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.453887884267631, |
| "grad_norm": 1.390589952468872, |
| "learning_rate": 1.9059990091332082e-06, |
| "loss": 0.2248, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.4556962025316456, |
| "grad_norm": 1.2839597463607788, |
| "learning_rate": 1.8942610986084487e-06, |
| "loss": 0.1712, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.45750452079566, |
| "grad_norm": 1.2960193157196045, |
| "learning_rate": 1.8825509907063328e-06, |
| "loss": 0.1505, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.4593128390596746, |
| "grad_norm": 1.5872551202774048, |
| "learning_rate": 1.8708687902559264e-06, |
| "loss": 0.2169, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.461121157323689, |
| "grad_norm": 1.5269652605056763, |
| "learning_rate": 1.8592146018364682e-06, |
| "loss": 0.1994, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.4629294755877034, |
| "grad_norm": 1.3784176111221313, |
| "learning_rate": 1.8475885297764307e-06, |
| "loss": 0.1962, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.4647377938517179, |
| "grad_norm": 1.4608467817306519, |
| "learning_rate": 1.8359906781525955e-06, |
| "loss": 0.2519, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.4665461121157324, |
| "grad_norm": 1.257627010345459, |
| "learning_rate": 1.8244211507891064e-06, |
| "loss": 0.1389, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.4683544303797469, |
| "grad_norm": 1.4731613397598267, |
| "learning_rate": 1.8128800512565514e-06, |
| "loss": 0.2222, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.4701627486437614, |
| "grad_norm": 1.4045157432556152, |
| "learning_rate": 1.8013674828710326e-06, |
| "loss": 0.1505, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.4719710669077757, |
| "grad_norm": 1.346962571144104, |
| "learning_rate": 1.7898835486932398e-06, |
| "loss": 0.173, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.4737793851717902, |
| "grad_norm": 1.5003893375396729, |
| "learning_rate": 1.7784283515275292e-06, |
| "loss": 0.1661, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.4755877034358047, |
| "grad_norm": 1.3266466856002808, |
| "learning_rate": 1.7670019939210025e-06, |
| "loss": 0.1681, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.4773960216998192, |
| "grad_norm": 1.4711284637451172, |
| "learning_rate": 1.7556045781625902e-06, |
| "loss": 0.2244, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.4792043399638337, |
| "grad_norm": 1.3809276819229126, |
| "learning_rate": 1.7442362062821323e-06, |
| "loss": 0.1702, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.481012658227848, |
| "grad_norm": 1.4505352973937988, |
| "learning_rate": 1.7328969800494727e-06, |
| "loss": 0.187, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.4828209764918625, |
| "grad_norm": 1.3470630645751953, |
| "learning_rate": 1.7215870009735386e-06, |
| "loss": 0.1699, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.484629294755877, |
| "grad_norm": 1.3713916540145874, |
| "learning_rate": 1.7103063703014372e-06, |
| "loss": 0.177, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.4864376130198915, |
| "grad_norm": 1.39594566822052, |
| "learning_rate": 1.6990551890175488e-06, |
| "loss": 0.1623, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.488245931283906, |
| "grad_norm": 1.512831211090088, |
| "learning_rate": 1.6878335578426225e-06, |
| "loss": 0.1603, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.4900542495479203, |
| "grad_norm": 1.5878227949142456, |
| "learning_rate": 1.6766415772328732e-06, |
| "loss": 0.1859, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.4918625678119348, |
| "grad_norm": 1.2097469568252563, |
| "learning_rate": 1.6654793473790842e-06, |
| "loss": 0.1448, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.4936708860759493, |
| "grad_norm": 1.4577940702438354, |
| "learning_rate": 1.6543469682057105e-06, |
| "loss": 0.1748, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.4954792043399638, |
| "grad_norm": 1.5824118852615356, |
| "learning_rate": 1.6432445393699803e-06, |
| "loss": 0.2448, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.4972875226039783, |
| "grad_norm": 1.4926061630249023, |
| "learning_rate": 1.632172160261012e-06, |
| "loss": 0.2114, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.4990958408679926, |
| "grad_norm": 1.3919429779052734, |
| "learning_rate": 1.621129929998912e-06, |
| "loss": 0.1974, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.5009041591320074, |
| "grad_norm": 1.515310525894165, |
| "learning_rate": 1.610117947433897e-06, |
| "loss": 0.2138, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.5027124773960217, |
| "grad_norm": 1.385372281074524, |
| "learning_rate": 1.5991363111454023e-06, |
| "loss": 0.1616, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.5045207956600362, |
| "grad_norm": 1.4240636825561523, |
| "learning_rate": 1.5881851194412106e-06, |
| "loss": 0.1761, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.5063291139240507, |
| "grad_norm": 1.4712889194488525, |
| "learning_rate": 1.5772644703565564e-06, |
| "loss": 0.2036, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.508137432188065, |
| "grad_norm": 1.475898265838623, |
| "learning_rate": 1.5663744616532612e-06, |
| "loss": 0.2174, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.5099457504520797, |
| "grad_norm": 1.3733068704605103, |
| "learning_rate": 1.5555151908188465e-06, |
| "loss": 0.183, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.511754068716094, |
| "grad_norm": 1.2728660106658936, |
| "learning_rate": 1.544686755065677e-06, |
| "loss": 0.163, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.5135623869801085, |
| "grad_norm": 1.255317211151123, |
| "learning_rate": 1.5338892513300757e-06, |
| "loss": 0.1694, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.515370705244123, |
| "grad_norm": 1.50929856300354, |
| "learning_rate": 1.523122776271463e-06, |
| "loss": 0.2136, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.5171790235081373, |
| "grad_norm": 1.4814870357513428, |
| "learning_rate": 1.5123874262714893e-06, |
| "loss": 0.2196, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.518987341772152, |
| "grad_norm": 1.267453908920288, |
| "learning_rate": 1.5016832974331725e-06, |
| "loss": 0.1548, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.5207956600361663, |
| "grad_norm": 1.3543283939361572, |
| "learning_rate": 1.4910104855800429e-06, |
| "loss": 0.1848, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.5226039783001808, |
| "grad_norm": 1.2082693576812744, |
| "learning_rate": 1.4803690862552755e-06, |
| "loss": 0.1516, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.5244122965641953, |
| "grad_norm": 1.3485254049301147, |
| "learning_rate": 1.4697591947208412e-06, |
| "loss": 0.1513, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.5262206148282098, |
| "grad_norm": 1.4419184923171997, |
| "learning_rate": 1.459180905956653e-06, |
| "loss": 0.1783, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.5280289330922243, |
| "grad_norm": 1.2863306999206543, |
| "learning_rate": 1.4486343146597154e-06, |
| "loss": 0.1569, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.5298372513562386, |
| "grad_norm": 1.366868257522583, |
| "learning_rate": 1.438119515243277e-06, |
| "loss": 0.1732, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.5316455696202531, |
| "grad_norm": 1.3020880222320557, |
| "learning_rate": 1.4276366018359845e-06, |
| "loss": 0.1486, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.5334538878842676, |
| "grad_norm": 1.4607349634170532, |
| "learning_rate": 1.4171856682810386e-06, |
| "loss": 0.1761, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.5352622061482821, |
| "grad_norm": 1.2814878225326538, |
| "learning_rate": 1.4067668081353625e-06, |
| "loss": 0.1529, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.5370705244122966, |
| "grad_norm": 1.507812738418579, |
| "learning_rate": 1.39638011466875e-06, |
| "loss": 0.1911, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.538878842676311, |
| "grad_norm": 1.6292840242385864, |
| "learning_rate": 1.3860256808630429e-06, |
| "loss": 0.2177, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.5406871609403257, |
| "grad_norm": 1.4293454885482788, |
| "learning_rate": 1.3757035994112915e-06, |
| "loss": 0.1967, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.54249547920434, |
| "grad_norm": 1.595735788345337, |
| "learning_rate": 1.3654139627169282e-06, |
| "loss": 0.2231, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.5443037974683544, |
| "grad_norm": 1.3294328451156616, |
| "learning_rate": 1.3551568628929434e-06, |
| "loss": 0.1933, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.546112115732369, |
| "grad_norm": 1.385629653930664, |
| "learning_rate": 1.344932391761049e-06, |
| "loss": 0.187, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.5479204339963832, |
| "grad_norm": 1.2658005952835083, |
| "learning_rate": 1.3347406408508695e-06, |
| "loss": 0.1646, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.549728752260398, |
| "grad_norm": 1.4734207391738892, |
| "learning_rate": 1.3245817013991164e-06, |
| "loss": 0.1971, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.5515370705244123, |
| "grad_norm": 1.4878017902374268, |
| "learning_rate": 1.3144556643487743e-06, |
| "loss": 0.2085, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.5533453887884268, |
| "grad_norm": 1.627400517463684, |
| "learning_rate": 1.3043626203482823e-06, |
| "loss": 0.232, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.5551537070524413, |
| "grad_norm": 1.4053066968917847, |
| "learning_rate": 1.2943026597507268e-06, |
| "loss": 0.1906, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.5569620253164556, |
| "grad_norm": 1.381201148033142, |
| "learning_rate": 1.2842758726130283e-06, |
| "loss": 0.1822, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.5587703435804703, |
| "grad_norm": 1.4989362955093384, |
| "learning_rate": 1.2742823486951434e-06, |
| "loss": 0.2128, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.5605786618444846, |
| "grad_norm": 1.1638606786727905, |
| "learning_rate": 1.2643221774592517e-06, |
| "loss": 0.1255, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.562386980108499, |
| "grad_norm": 1.4444135427474976, |
| "learning_rate": 1.254395448068959e-06, |
| "loss": 0.1892, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.5641952983725136, |
| "grad_norm": 1.5755318403244019, |
| "learning_rate": 1.2445022493885017e-06, |
| "loss": 0.243, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.5660036166365279, |
| "grad_norm": 1.4121593236923218, |
| "learning_rate": 1.234642669981946e-06, |
| "loss": 0.1952, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.5678119349005426, |
| "grad_norm": 1.2563629150390625, |
| "learning_rate": 1.2248167981123992e-06, |
| "loss": 0.1354, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.5696202531645569, |
| "grad_norm": 1.3229103088378906, |
| "learning_rate": 1.2150247217412186e-06, |
| "loss": 0.1531, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.5714285714285714, |
| "grad_norm": 1.481399655342102, |
| "learning_rate": 1.205266528527223e-06, |
| "loss": 0.1914, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.573236889692586, |
| "grad_norm": 1.36403489112854, |
| "learning_rate": 1.195542305825908e-06, |
| "loss": 0.1684, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.5750452079566004, |
| "grad_norm": 1.4882696866989136, |
| "learning_rate": 1.1858521406886674e-06, |
| "loss": 0.2198, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.576853526220615, |
| "grad_norm": 1.2685580253601074, |
| "learning_rate": 1.1761961198620081e-06, |
| "loss": 0.1541, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.5786618444846292, |
| "grad_norm": 1.526816487312317, |
| "learning_rate": 1.1665743297867781e-06, |
| "loss": 0.2088, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.5804701627486437, |
| "grad_norm": 1.7556816339492798, |
| "learning_rate": 1.1569868565973912e-06, |
| "loss": 0.2518, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.5822784810126582, |
| "grad_norm": 1.544098138809204, |
| "learning_rate": 1.1474337861210543e-06, |
| "loss": 0.1968, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.5840867992766727, |
| "grad_norm": 1.4241015911102295, |
| "learning_rate": 1.137915203877003e-06, |
| "loss": 0.142, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.5858951175406872, |
| "grad_norm": 1.4717199802398682, |
| "learning_rate": 1.1284311950757326e-06, |
| "loss": 0.2006, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.5877034358047015, |
| "grad_norm": 1.4794663190841675, |
| "learning_rate": 1.118981844618236e-06, |
| "loss": 0.1872, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.5895117540687163, |
| "grad_norm": 1.3753726482391357, |
| "learning_rate": 1.1095672370952431e-06, |
| "loss": 0.1922, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.5913200723327305, |
| "grad_norm": 1.4431695938110352, |
| "learning_rate": 1.1001874567864696e-06, |
| "loss": 0.1903, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.593128390596745, |
| "grad_norm": 1.121346354484558, |
| "learning_rate": 1.0908425876598512e-06, |
| "loss": 0.1054, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.5949367088607596, |
| "grad_norm": 1.3335767984390259, |
| "learning_rate": 1.0815327133708015e-06, |
| "loss": 0.1544, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.5967450271247738, |
| "grad_norm": 1.4335230588912964, |
| "learning_rate": 1.0722579172614577e-06, |
| "loss": 0.2006, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.5985533453887886, |
| "grad_norm": 1.5786991119384766, |
| "learning_rate": 1.06301828235994e-06, |
| "loss": 0.2021, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.6003616636528029, |
| "grad_norm": 1.3353679180145264, |
| "learning_rate": 1.0538138913796032e-06, |
| "loss": 0.1874, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.6021699819168174, |
| "grad_norm": 1.3402432203292847, |
| "learning_rate": 1.044644826718295e-06, |
| "loss": 0.1683, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.6039783001808319, |
| "grad_norm": 1.2367477416992188, |
| "learning_rate": 1.0355111704576237e-06, |
| "loss": 0.1509, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.6057866184448462, |
| "grad_norm": 1.4592540264129639, |
| "learning_rate": 1.0264130043622245e-06, |
| "loss": 0.1983, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.6075949367088609, |
| "grad_norm": 1.438301682472229, |
| "learning_rate": 1.0173504098790188e-06, |
| "loss": 0.1741, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.6094032549728752, |
| "grad_norm": 1.4034836292266846, |
| "learning_rate": 1.0083234681364934e-06, |
| "loss": 0.1669, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.6112115732368897, |
| "grad_norm": 1.4361186027526855, |
| "learning_rate": 9.993322599439692e-07, |
| "loss": 0.1862, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.6130198915009042, |
| "grad_norm": 1.259032964706421, |
| "learning_rate": 9.903768657908803e-07, |
| "loss": 0.1595, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.6148282097649185, |
| "grad_norm": 1.3813611268997192, |
| "learning_rate": 9.814573658460564e-07, |
| "loss": 0.1753, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.6166365280289332, |
| "grad_norm": 1.337997317314148, |
| "learning_rate": 9.725738399569968e-07, |
| "loss": 0.1527, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.6184448462929475, |
| "grad_norm": 1.438178539276123, |
| "learning_rate": 9.637263676491627e-07, |
| "loss": 0.2042, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.620253164556962, |
| "grad_norm": 1.3483699560165405, |
| "learning_rate": 9.549150281252633e-07, |
| "loss": 0.1884, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.6220614828209765, |
| "grad_norm": 1.3791651725769043, |
| "learning_rate": 9.46139900264546e-07, |
| "loss": 0.1636, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.6238698010849908, |
| "grad_norm": 1.3592714071273804, |
| "learning_rate": 9.374010626220908e-07, |
| "loss": 0.1724, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.6256781193490055, |
| "grad_norm": 1.2142133712768555, |
| "learning_rate": 9.286985934281079e-07, |
| "loss": 0.1284, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.6274864376130198, |
| "grad_norm": 1.467472791671753, |
| "learning_rate": 9.200325705872342e-07, |
| "loss": 0.2012, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.6292947558770343, |
| "grad_norm": 1.6717779636383057, |
| "learning_rate": 9.114030716778433e-07, |
| "loss": 0.2178, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.6311030741410488, |
| "grad_norm": 1.3586127758026123, |
| "learning_rate": 9.028101739513406e-07, |
| "loss": 0.1548, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.6329113924050633, |
| "grad_norm": 1.5966354608535767, |
| "learning_rate": 8.942539543314799e-07, |
| "loss": 0.242, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.6347197106690778, |
| "grad_norm": 1.280997633934021, |
| "learning_rate": 8.857344894136715e-07, |
| "loss": 0.2019, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.6365280289330921, |
| "grad_norm": 1.2589819431304932, |
| "learning_rate": 8.772518554642973e-07, |
| "loss": 0.1418, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.6383363471971069, |
| "grad_norm": 1.241722822189331, |
| "learning_rate": 8.688061284200266e-07, |
| "loss": 0.1535, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.6401446654611211, |
| "grad_norm": 1.5112919807434082, |
| "learning_rate": 8.603973838871388e-07, |
| "loss": 0.1941, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.6419529837251357, |
| "grad_norm": 1.5479835271835327, |
| "learning_rate": 8.520256971408453e-07, |
| "loss": 0.1946, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.6437613019891502, |
| "grad_norm": 1.4731577634811401, |
| "learning_rate": 8.436911431246136e-07, |
| "loss": 0.1527, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.6455696202531644, |
| "grad_norm": 1.6067286729812622, |
| "learning_rate": 8.353937964495029e-07, |
| "loss": 0.2203, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.6473779385171792, |
| "grad_norm": 1.4132425785064697, |
| "learning_rate": 8.271337313934869e-07, |
| "loss": 0.2019, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.6491862567811935, |
| "grad_norm": 1.3919837474822998, |
| "learning_rate": 8.189110219007967e-07, |
| "loss": 0.1666, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.650994575045208, |
| "grad_norm": 1.260819911956787, |
| "learning_rate": 8.107257415812526e-07, |
| "loss": 0.1517, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.6528028933092225, |
| "grad_norm": 1.4202353954315186, |
| "learning_rate": 8.025779637096138e-07, |
| "loss": 0.2081, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.6546112115732368, |
| "grad_norm": 1.3754041194915771, |
| "learning_rate": 7.944677612249113e-07, |
| "loss": 0.1866, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.6564195298372515, |
| "grad_norm": 1.6516166925430298, |
| "learning_rate": 7.863952067298042e-07, |
| "loss": 0.2307, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.6582278481012658, |
| "grad_norm": 1.5449033975601196, |
| "learning_rate": 7.783603724899258e-07, |
| "loss": 0.174, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.6600361663652803, |
| "grad_norm": 1.790122389793396, |
| "learning_rate": 7.70363330433233e-07, |
| "loss": 0.2007, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.6618444846292948, |
| "grad_norm": 1.3342410326004028, |
| "learning_rate": 7.624041521493735e-07, |
| "loss": 0.1553, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.663652802893309, |
| "grad_norm": 1.3326550722122192, |
| "learning_rate": 7.544829088890326e-07, |
| "loss": 0.1913, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.6654611211573238, |
| "grad_norm": 1.4879108667373657, |
| "learning_rate": 7.465996715633028e-07, |
| "loss": 0.2249, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.667269439421338, |
| "grad_norm": 1.4543988704681396, |
| "learning_rate": 7.387545107430455e-07, |
| "loss": 0.1916, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.6690777576853526, |
| "grad_norm": 1.49525785446167, |
| "learning_rate": 7.309474966582636e-07, |
| "loss": 0.1715, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.6708860759493671, |
| "grad_norm": 1.3753548860549927, |
| "learning_rate": 7.23178699197467e-07, |
| "loss": 0.1475, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.6726943942133814, |
| "grad_norm": 1.4657784700393677, |
| "learning_rate": 7.154481879070502e-07, |
| "loss": 0.1763, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.6745027124773961, |
| "grad_norm": 1.640970230102539, |
| "learning_rate": 7.077560319906696e-07, |
| "loss": 0.2059, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.6763110307414104, |
| "grad_norm": 1.5941320657730103, |
| "learning_rate": 7.001023003086243e-07, |
| "loss": 0.2146, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.678119349005425, |
| "grad_norm": 1.595048189163208, |
| "learning_rate": 6.924870613772388e-07, |
| "loss": 0.2296, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.6799276672694394, |
| "grad_norm": 1.2889961004257202, |
| "learning_rate": 6.849103833682491e-07, |
| "loss": 0.1861, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.681735985533454, |
| "grad_norm": 1.2051788568496704, |
| "learning_rate": 6.773723341081945e-07, |
| "loss": 0.1704, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.6835443037974684, |
| "grad_norm": 1.346713900566101, |
| "learning_rate": 6.698729810778065e-07, |
| "loss": 0.1746, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.6853526220614827, |
| "grad_norm": 1.4051306247711182, |
| "learning_rate": 6.624123914114122e-07, |
| "loss": 0.161, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.6871609403254972, |
| "grad_norm": 1.3659924268722534, |
| "learning_rate": 6.549906318963245e-07, |
| "loss": 0.1757, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.6889692585895117, |
| "grad_norm": 1.442762017250061, |
| "learning_rate": 6.476077689722487e-07, |
| "loss": 0.1918, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.6907775768535263, |
| "grad_norm": 1.488540768623352, |
| "learning_rate": 6.402638687306872e-07, |
| "loss": 0.2175, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.6925858951175408, |
| "grad_norm": 1.3435027599334717, |
| "learning_rate": 6.329589969143518e-07, |
| "loss": 0.1643, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.694394213381555, |
| "grad_norm": 1.1935302019119263, |
| "learning_rate": 6.256932189165644e-07, |
| "loss": 0.1306, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.6962025316455698, |
| "grad_norm": 1.3391468524932861, |
| "learning_rate": 6.184665997806832e-07, |
| "loss": 0.1497, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.698010849909584, |
| "grad_norm": 1.5649363994598389, |
| "learning_rate": 6.112792041995125e-07, |
| "loss": 0.2096, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.6998191681735986, |
| "grad_norm": 1.3080732822418213, |
| "learning_rate": 6.041310965147318e-07, |
| "loss": 0.1562, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.701627486437613, |
| "grad_norm": 1.4739066362380981, |
| "learning_rate": 5.9702234071631e-07, |
| "loss": 0.196, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.7034358047016274, |
| "grad_norm": 1.3502253293991089, |
| "learning_rate": 5.899530004419396e-07, |
| "loss": 0.1557, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.705244122965642, |
| "grad_norm": 1.2946463823318481, |
| "learning_rate": 5.829231389764628e-07, |
| "loss": 0.1789, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.7070524412296564, |
| "grad_norm": 1.3189704418182373, |
| "learning_rate": 5.759328192513075e-07, |
| "loss": 0.1559, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.7088607594936709, |
| "grad_norm": 1.6325633525848389, |
| "learning_rate": 5.689821038439264e-07, |
| "loss": 0.2148, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.7106690777576854, |
| "grad_norm": 1.3703547716140747, |
| "learning_rate": 5.620710549772295e-07, |
| "loss": 0.1663, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.7124773960216997, |
| "grad_norm": 1.351824164390564, |
| "learning_rate": 5.55199734519034e-07, |
| "loss": 0.1754, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.7142857142857144, |
| "grad_norm": 1.2838019132614136, |
| "learning_rate": 5.483682039815059e-07, |
| "loss": 0.1472, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.7160940325497287, |
| "grad_norm": 1.3122061491012573, |
| "learning_rate": 5.415765245206128e-07, |
| "loss": 0.1506, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.7179023508137432, |
| "grad_norm": 1.4882829189300537, |
| "learning_rate": 5.348247569355736e-07, |
| "loss": 0.1914, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.7197106690777577, |
| "grad_norm": 1.3123871088027954, |
| "learning_rate": 5.281129616683167e-07, |
| "loss": 0.1368, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.721518987341772, |
| "grad_norm": 1.3857144117355347, |
| "learning_rate": 5.214411988029355e-07, |
| "loss": 0.1688, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.7233273056057867, |
| "grad_norm": 1.731164574623108, |
| "learning_rate": 5.148095280651566e-07, |
| "loss": 0.2503, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.725135623869801, |
| "grad_norm": 1.3402376174926758, |
| "learning_rate": 5.082180088217981e-07, |
| "loss": 0.1786, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.7269439421338155, |
| "grad_norm": 1.2338224649429321, |
| "learning_rate": 5.016667000802417e-07, |
| "loss": 0.144, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.72875226039783, |
| "grad_norm": 1.353575587272644, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.1458, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.7305605786618445, |
| "grad_norm": 1.6582491397857666, |
| "learning_rate": 4.886849483317141e-07, |
| "loss": 0.1942, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.732368896925859, |
| "grad_norm": 1.782560110092163, |
| "learning_rate": 4.822546215375851e-07, |
| "loss": 0.2759, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.7341772151898733, |
| "grad_norm": 1.6829458475112915, |
| "learning_rate": 4.758647376699033e-07, |
| "loss": 0.2606, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.7359855334538878, |
| "grad_norm": 1.549209713935852, |
| "learning_rate": 4.6951535393100654e-07, |
| "loss": 0.2303, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.7377938517179023, |
| "grad_norm": 1.3191518783569336, |
| "learning_rate": 4.632065271606756e-07, |
| "loss": 0.1533, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.7396021699819169, |
| "grad_norm": 1.565401554107666, |
| "learning_rate": 4.569383138356276e-07, |
| "loss": 0.2127, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.7414104882459314, |
| "grad_norm": 1.5010247230529785, |
| "learning_rate": 4.507107700690044e-07, |
| "loss": 0.2363, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.7432188065099457, |
| "grad_norm": 1.3151311874389648, |
| "learning_rate": 4.4452395160987314e-07, |
| "loss": 0.1479, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.7450271247739604, |
| "grad_norm": 1.4224631786346436, |
| "learning_rate": 4.383779138427274e-07, |
| "loss": 0.1784, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.7468354430379747, |
| "grad_norm": 1.3394750356674194, |
| "learning_rate": 4.322727117869951e-07, |
| "loss": 0.1527, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.7486437613019892, |
| "grad_norm": 1.3689308166503906, |
| "learning_rate": 4.2620840009653827e-07, |
| "loss": 0.1804, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.7504520795660037, |
| "grad_norm": 1.4480156898498535, |
| "learning_rate": 4.201850330591678e-07, |
| "loss": 0.2136, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.752260397830018, |
| "grad_norm": 1.5066252946853638, |
| "learning_rate": 4.1420266459615944e-07, |
| "loss": 0.2062, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.7540687160940327, |
| "grad_norm": 1.3773577213287354, |
| "learning_rate": 4.082613482617664e-07, |
| "loss": 0.1553, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.755877034358047, |
| "grad_norm": 1.287730097770691, |
| "learning_rate": 4.0236113724274716e-07, |
| "loss": 0.1399, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.7576853526220615, |
| "grad_norm": 1.420283555984497, |
| "learning_rate": 3.965020843578804e-07, |
| "loss": 0.1636, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.759493670886076, |
| "grad_norm": 1.3530566692352295, |
| "learning_rate": 3.90684242057498e-07, |
| "loss": 0.1625, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.7613019891500903, |
| "grad_norm": 1.2307391166687012, |
| "learning_rate": 3.8490766242301356e-07, |
| "loss": 0.135, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.763110307414105, |
| "grad_norm": 1.3423430919647217, |
| "learning_rate": 3.7917239716645906e-07, |
| "loss": 0.1531, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.7649186256781193, |
| "grad_norm": 1.2529070377349854, |
| "learning_rate": 3.734784976300165e-07, |
| "loss": 0.1566, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.7667269439421338, |
| "grad_norm": 1.437445044517517, |
| "learning_rate": 3.678260147855628e-07, |
| "loss": 0.1802, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.7685352622061483, |
| "grad_norm": 1.633385181427002, |
| "learning_rate": 3.6221499923421164e-07, |
| "loss": 0.2335, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.7703435804701626, |
| "grad_norm": 1.2325958013534546, |
| "learning_rate": 3.566455012058612e-07, |
| "loss": 0.1435, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.7721518987341773, |
| "grad_norm": 1.4155337810516357, |
| "learning_rate": 3.511175705587433e-07, |
| "loss": 0.1985, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.7739602169981916, |
| "grad_norm": 1.4826092720031738, |
| "learning_rate": 3.4563125677897936e-07, |
| "loss": 0.2048, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.7757685352622061, |
| "grad_norm": 1.4274451732635498, |
| "learning_rate": 3.4018660898013423e-07, |
| "loss": 0.1825, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.7775768535262206, |
| "grad_norm": 1.6296826601028442, |
| "learning_rate": 3.347836759027789e-07, |
| "loss": 0.238, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.779385171790235, |
| "grad_norm": 1.4893583059310913, |
| "learning_rate": 3.2942250591405546e-07, |
| "loss": 0.2111, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.7811934900542497, |
| "grad_norm": 1.403668999671936, |
| "learning_rate": 3.241031470072398e-07, |
| "loss": 0.2029, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.783001808318264, |
| "grad_norm": 1.5171066522598267, |
| "learning_rate": 3.18825646801314e-07, |
| "loss": 0.2289, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.7848101265822784, |
| "grad_norm": 1.2951208353042603, |
| "learning_rate": 3.135900525405428e-07, |
| "loss": 0.1638, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.786618444846293, |
| "grad_norm": 1.4618269205093384, |
| "learning_rate": 3.0839641109404627e-07, |
| "loss": 0.2098, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.7884267631103075, |
| "grad_norm": 1.3721157312393188, |
| "learning_rate": 3.03244768955383e-07, |
| "loss": 0.1716, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.790235081374322, |
| "grad_norm": 1.3801895380020142, |
| "learning_rate": 2.9813517224213274e-07, |
| "loss": 0.1815, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.7920433996383363, |
| "grad_norm": 1.4312127828598022, |
| "learning_rate": 2.930676666954846e-07, |
| "loss": 0.1884, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.7938517179023508, |
| "grad_norm": 1.4009455442428589, |
| "learning_rate": 2.8804229767982637e-07, |
| "loss": 0.1819, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.7956600361663653, |
| "grad_norm": 1.3529410362243652, |
| "learning_rate": 2.8305911018233935e-07, |
| "loss": 0.157, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.7974683544303798, |
| "grad_norm": 1.4529396295547485, |
| "learning_rate": 2.7811814881259503e-07, |
| "loss": 0.1873, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.7992766726943943, |
| "grad_norm": 1.3773640394210815, |
| "learning_rate": 2.7321945780215576e-07, |
| "loss": 0.1396, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.8010849909584086, |
| "grad_norm": 1.3521469831466675, |
| "learning_rate": 2.6836308100417874e-07, |
| "loss": 0.1619, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.8028933092224233, |
| "grad_norm": 1.3587416410446167, |
| "learning_rate": 2.6354906189302534e-07, |
| "loss": 0.14, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.8047016274864376, |
| "grad_norm": 1.3548296689987183, |
| "learning_rate": 2.587774435638679e-07, |
| "loss": 0.1268, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.806509945750452, |
| "grad_norm": 1.5057969093322754, |
| "learning_rate": 2.5404826873230926e-07, |
| "loss": 0.1788, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.8083182640144666, |
| "grad_norm": 1.6407438516616821, |
| "learning_rate": 2.4936157973399266e-07, |
| "loss": 0.2138, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.810126582278481, |
| "grad_norm": 1.5798081159591675, |
| "learning_rate": 2.447174185242324e-07, |
| "loss": 0.1921, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.8119349005424956, |
| "grad_norm": 1.5625442266464233, |
| "learning_rate": 2.40115826677631e-07, |
| "loss": 0.1757, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.81374321880651, |
| "grad_norm": 1.638500452041626, |
| "learning_rate": 2.3555684538770995e-07, |
| "loss": 0.2254, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.8155515370705244, |
| "grad_norm": 1.3669939041137695, |
| "learning_rate": 2.3104051546654016e-07, |
| "loss": 0.1808, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.817359855334539, |
| "grad_norm": 1.4740699529647827, |
| "learning_rate": 2.2656687734437577e-07, |
| "loss": 0.2454, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.8191681735985532, |
| "grad_norm": 1.3462810516357422, |
| "learning_rate": 2.2213597106929608e-07, |
| "loss": 0.1583, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.820976491862568, |
| "grad_norm": 1.6884946823120117, |
| "learning_rate": 2.177478363068425e-07, |
| "loss": 0.2415, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.8227848101265822, |
| "grad_norm": 1.511620044708252, |
| "learning_rate": 2.134025123396638e-07, |
| "loss": 0.2227, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.8245931283905967, |
| "grad_norm": 1.4712649583816528, |
| "learning_rate": 2.0910003806716817e-07, |
| "loss": 0.1888, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.8264014466546112, |
| "grad_norm": 1.5498689413070679, |
| "learning_rate": 2.0484045200517222e-07, |
| "loss": 0.2506, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.8282097649186255, |
| "grad_norm": 1.3635046482086182, |
| "learning_rate": 2.006237922855553e-07, |
| "loss": 0.1767, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.8300180831826403, |
| "grad_norm": 1.5200732946395874, |
| "learning_rate": 1.9645009665592073e-07, |
| "loss": 0.1996, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.8318264014466545, |
| "grad_norm": 1.483102798461914, |
| "learning_rate": 1.9231940247925573e-07, |
| "loss": 0.1994, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.833634719710669, |
| "grad_norm": 1.6003260612487793, |
| "learning_rate": 1.882317467335998e-07, |
| "loss": 0.2617, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.8354430379746836, |
| "grad_norm": 1.3227473497390747, |
| "learning_rate": 1.841871660117095e-07, |
| "loss": 0.1602, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.837251356238698, |
| "grad_norm": 1.5393905639648438, |
| "learning_rate": 1.801856965207338e-07, |
| "loss": 0.229, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.8390596745027126, |
| "grad_norm": 1.2200417518615723, |
| "learning_rate": 1.7622737408188984e-07, |
| "loss": 0.1448, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.8408679927667269, |
| "grad_norm": 1.4272844791412354, |
| "learning_rate": 1.7231223413014086e-07, |
| "loss": 0.2081, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.8426763110307414, |
| "grad_norm": 1.3564151525497437, |
| "learning_rate": 1.6844031171388054e-07, |
| "loss": 0.1992, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.8444846292947559, |
| "grad_norm": 1.2938915491104126, |
| "learning_rate": 1.6461164149461805e-07, |
| "loss": 0.163, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.8462929475587704, |
| "grad_norm": 1.307634711265564, |
| "learning_rate": 1.6082625774666793e-07, |
| "loss": 0.1393, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.8481012658227849, |
| "grad_norm": 1.332777738571167, |
| "learning_rate": 1.5708419435684463e-07, |
| "loss": 0.1685, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.8499095840867992, |
| "grad_norm": 1.3043736219406128, |
| "learning_rate": 1.5338548482415726e-07, |
| "loss": 0.1499, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.851717902350814, |
| "grad_norm": 1.3902978897094727, |
| "learning_rate": 1.4973016225951097e-07, |
| "loss": 0.175, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.8535262206148282, |
| "grad_norm": 1.3513139486312866, |
| "learning_rate": 1.4611825938540936e-07, |
| "loss": 0.1508, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.8553345388788427, |
| "grad_norm": 1.4719350337982178, |
| "learning_rate": 1.4254980853566248e-07, |
| "loss": 0.1741, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.8571428571428572, |
| "grad_norm": 1.6112737655639648, |
| "learning_rate": 1.3902484165509877e-07, |
| "loss": 0.2054, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.8589511754068715, |
| "grad_norm": 1.7364201545715332, |
| "learning_rate": 1.3554339029927532e-07, |
| "loss": 0.2887, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.8607594936708862, |
| "grad_norm": 1.2992795705795288, |
| "learning_rate": 1.3210548563419857e-07, |
| "loss": 0.1532, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.8625678119349005, |
| "grad_norm": 1.2579344511032104, |
| "learning_rate": 1.2871115843604508e-07, |
| "loss": 0.1324, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.864376130198915, |
| "grad_norm": 1.2674459218978882, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.1439, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.8661844484629295, |
| "grad_norm": 1.2449123859405518, |
| "learning_rate": 1.220533575944033e-07, |
| "loss": 0.1407, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.8679927667269438, |
| "grad_norm": 1.49626886844635, |
| "learning_rate": 1.1878994355165207e-07, |
| "loss": 0.2121, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.8698010849909585, |
| "grad_norm": 1.4758660793304443, |
| "learning_rate": 1.1557022617676217e-07, |
| "loss": 0.1855, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.8716094032549728, |
| "grad_norm": 1.4279465675354004, |
| "learning_rate": 1.1239423429269203e-07, |
| "loss": 0.1983, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.8734177215189873, |
| "grad_norm": 1.5149667263031006, |
| "learning_rate": 1.0926199633097156e-07, |
| "loss": 0.2079, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.8752260397830018, |
| "grad_norm": 1.47335946559906, |
| "learning_rate": 1.0617354033144289e-07, |
| "loss": 0.215, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.8770343580470161, |
| "grad_norm": 1.147404432296753, |
| "learning_rate": 1.031288939420122e-07, |
| "loss": 0.1251, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.8788426763110309, |
| "grad_norm": 1.3628894090652466, |
| "learning_rate": 1.0012808441839994e-07, |
| "loss": 0.1638, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.8806509945750451, |
| "grad_norm": 1.299794316291809, |
| "learning_rate": 9.717113862389993e-08, |
| "loss": 0.1765, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.8824593128390597, |
| "grad_norm": 1.4835070371627808, |
| "learning_rate": 9.42580830291373e-08, |
| "loss": 0.2342, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.8842676311030742, |
| "grad_norm": 1.4349442720413208, |
| "learning_rate": 9.138894371182983e-08, |
| "loss": 0.2125, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.8860759493670884, |
| "grad_norm": 1.4553990364074707, |
| "learning_rate": 8.856374635655696e-08, |
| "loss": 0.1792, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.8878842676311032, |
| "grad_norm": 1.5098941326141357, |
| "learning_rate": 8.57825162545295e-08, |
| "loss": 0.1675, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.8896925858951175, |
| "grad_norm": 1.490625262260437, |
| "learning_rate": 8.304527830336251e-08, |
| "loss": 0.1983, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.891500904159132, |
| "grad_norm": 1.379252552986145, |
| "learning_rate": 8.035205700685167e-08, |
| "loss": 0.1606, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.8933092224231465, |
| "grad_norm": 1.3355510234832764, |
| "learning_rate": 7.770287647475672e-08, |
| "loss": 0.1503, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.895117540687161, |
| "grad_norm": 1.3615237474441528, |
| "learning_rate": 7.509776042258166e-08, |
| "loss": 0.1521, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.8969258589511755, |
| "grad_norm": 1.5093348026275635, |
| "learning_rate": 7.253673217136659e-08, |
| "loss": 0.1781, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.8987341772151898, |
| "grad_norm": 1.4346864223480225, |
| "learning_rate": 7.001981464747565e-08, |
| "loss": 0.152, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.9005424954792043, |
| "grad_norm": 1.5285435914993286, |
| "learning_rate": 6.75470303823933e-08, |
| "loss": 0.1815, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.9023508137432188, |
| "grad_norm": 1.6307448148727417, |
| "learning_rate": 6.511840151252169e-08, |
| "loss": 0.2082, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.9041591320072333, |
| "grad_norm": 1.6566197872161865, |
| "learning_rate": 6.273394977898528e-08, |
| "loss": 0.2519, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.9059674502712478, |
| "grad_norm": 1.3278251886367798, |
| "learning_rate": 6.039369652743266e-08, |
| "loss": 0.1927, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.907775768535262, |
| "grad_norm": 1.1688714027404785, |
| "learning_rate": 5.809766270784667e-08, |
| "loss": 0.1433, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.9095840867992768, |
| "grad_norm": 1.3987767696380615, |
| "learning_rate": 5.584586887435739e-08, |
| "loss": 0.1864, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.9113924050632911, |
| "grad_norm": 1.4437639713287354, |
| "learning_rate": 5.363833518505834e-08, |
| "loss": 0.196, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.9132007233273056, |
| "grad_norm": 1.4916951656341553, |
| "learning_rate": 5.1475081401825553e-08, |
| "loss": 0.1743, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.9150090415913201, |
| "grad_norm": 1.4764553308486938, |
| "learning_rate": 4.9356126890139356e-08, |
| "loss": 0.2376, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.9168173598553344, |
| "grad_norm": 1.5004583597183228, |
| "learning_rate": 4.7281490618914516e-08, |
| "loss": 0.2351, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.9186256781193491, |
| "grad_norm": 1.1736469268798828, |
| "learning_rate": 4.52511911603265e-08, |
| "loss": 0.1474, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.9204339963833634, |
| "grad_norm": 1.4233418703079224, |
| "learning_rate": 4.32652466896466e-08, |
| "loss": 0.1801, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.922242314647378, |
| "grad_norm": 1.3795275688171387, |
| "learning_rate": 4.13236749850815e-08, |
| "loss": 0.1719, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.9240506329113924, |
| "grad_norm": 1.4570521116256714, |
| "learning_rate": 3.9426493427611177e-08, |
| "loss": 0.1823, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.9258589511754067, |
| "grad_norm": 1.4998348951339722, |
| "learning_rate": 3.7573719000832954e-08, |
| "loss": 0.2146, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.9276672694394215, |
| "grad_norm": 1.483872652053833, |
| "learning_rate": 3.576536829081323e-08, |
| "loss": 0.1949, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.9294755877034357, |
| "grad_norm": 1.3987336158752441, |
| "learning_rate": 3.400145748593542e-08, |
| "loss": 0.1459, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.9312839059674503, |
| "grad_norm": 1.3769676685333252, |
| "learning_rate": 3.2282002376756163e-08, |
| "loss": 0.151, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.9330922242314648, |
| "grad_norm": 1.3427252769470215, |
| "learning_rate": 3.0607018355864326e-08, |
| "loss": 0.1649, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.934900542495479, |
| "grad_norm": 1.5869094133377075, |
| "learning_rate": 2.8976520417742794e-08, |
| "loss": 0.1515, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.9367088607594938, |
| "grad_norm": 1.3570787906646729, |
| "learning_rate": 2.7390523158633552e-08, |
| "loss": 0.1441, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.938517179023508, |
| "grad_norm": 1.270442008972168, |
| "learning_rate": 2.584904077640893e-08, |
| "loss": 0.1294, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.9403254972875226, |
| "grad_norm": 1.5576837062835693, |
| "learning_rate": 2.43520870704439e-08, |
| "loss": 0.1987, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.942133815551537, |
| "grad_norm": 1.3428254127502441, |
| "learning_rate": 2.2899675441490078e-08, |
| "loss": 0.1622, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.9439421338155516, |
| "grad_norm": 1.6970995664596558, |
| "learning_rate": 2.1491818891559156e-08, |
| "loss": 0.242, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.945750452079566, |
| "grad_norm": 1.391972541809082, |
| "learning_rate": 2.012853002380466e-08, |
| "loss": 0.1615, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.9475587703435804, |
| "grad_norm": 1.4631078243255615, |
| "learning_rate": 1.8809821042410358e-08, |
| "loss": 0.1719, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.9493670886075949, |
| "grad_norm": 1.5060595273971558, |
| "learning_rate": 1.753570375247815e-08, |
| "loss": 0.2042, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.9511754068716094, |
| "grad_norm": 1.3056021928787231, |
| "learning_rate": 1.630618955992702e-08, |
| "loss": 0.1851, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.952983725135624, |
| "grad_norm": 1.3215388059616089, |
| "learning_rate": 1.5121289471385915e-08, |
| "loss": 0.1705, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.9547920433996384, |
| "grad_norm": 1.4620890617370605, |
| "learning_rate": 1.3981014094099354e-08, |
| "loss": 0.1952, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.9566003616636527, |
| "grad_norm": 1.6308116912841797, |
| "learning_rate": 1.2885373635829756e-08, |
| "loss": 0.2325, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.9584086799276674, |
| "grad_norm": 1.5802830457687378, |
| "learning_rate": 1.1834377904768046e-08, |
| "loss": 0.264, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.9602169981916817, |
| "grad_norm": 1.3415133953094482, |
| "learning_rate": 1.0828036309443735e-08, |
| "loss": 0.1476, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.9620253164556962, |
| "grad_norm": 1.47640061378479, |
| "learning_rate": 9.866357858642206e-09, |
| "loss": 0.1971, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.9638336347197107, |
| "grad_norm": 1.5643129348754883, |
| "learning_rate": 8.949351161324227e-09, |
| "loss": 0.2245, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.965641952983725, |
| "grad_norm": 1.3774830102920532, |
| "learning_rate": 8.077024426547675e-09, |
| "loss": 0.1857, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.9674502712477397, |
| "grad_norm": 1.5181736946105957, |
| "learning_rate": 7.249385463395375e-09, |
| "loss": 0.1833, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.969258589511754, |
| "grad_norm": 1.389775276184082, |
| "learning_rate": 6.46644168090349e-09, |
| "loss": 0.1817, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.9710669077757685, |
| "grad_norm": 1.3835821151733398, |
| "learning_rate": 5.728200087997126e-09, |
| "loss": 0.1965, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.972875226039783, |
| "grad_norm": 1.4697110652923584, |
| "learning_rate": 5.034667293427053e-09, |
| "loss": 0.2245, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.9746835443037973, |
| "grad_norm": 1.4795719385147095, |
| "learning_rate": 4.385849505708084e-09, |
| "loss": 0.2116, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.976491862567812, |
| "grad_norm": 1.6337482929229736, |
| "learning_rate": 3.781752533068561e-09, |
| "loss": 0.2097, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.9783001808318263, |
| "grad_norm": 1.4029808044433594, |
| "learning_rate": 3.2223817833931803e-09, |
| "loss": 0.165, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.9801084990958409, |
| "grad_norm": 1.5177561044692993, |
| "learning_rate": 2.707742264178026e-09, |
| "loss": 0.1931, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.9819168173598554, |
| "grad_norm": 1.3459399938583374, |
| "learning_rate": 2.237838582483387e-09, |
| "loss": 0.1714, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.9837251356238697, |
| "grad_norm": 1.4402574300765991, |
| "learning_rate": 1.8126749448943437e-09, |
| "loss": 0.1958, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.9855334538878844, |
| "grad_norm": 1.4353395700454712, |
| "learning_rate": 1.4322551574830202e-09, |
| "loss": 0.153, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.9873417721518987, |
| "grad_norm": 1.5452730655670166, |
| "learning_rate": 1.096582625772502e-09, |
| "loss": 0.2276, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.9891500904159132, |
| "grad_norm": 1.3681756258010864, |
| "learning_rate": 8.056603547090813e-10, |
| "loss": 0.1353, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.9909584086799277, |
| "grad_norm": 1.120079755783081, |
| "learning_rate": 5.594909486328348e-10, |
| "loss": 0.1033, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.992766726943942, |
| "grad_norm": 1.3574172258377075, |
| "learning_rate": 3.580766112565304e-10, |
| "loss": 0.1766, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.9945750452079567, |
| "grad_norm": 1.516481876373291, |
| "learning_rate": 2.0141914564453247e-10, |
| "loss": 0.2305, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.996383363471971, |
| "grad_norm": 1.2223807573318481, |
| "learning_rate": 8.951995419614889e-11, |
| "loss": 0.1527, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.9981916817359855, |
| "grad_norm": 1.495068907737732, |
| "learning_rate": 2.2380038635638758e-11, |
| "loss": 0.1962, |
| "step": 1105 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.2624982595443726, |
| "learning_rate": 0.0, |
| "loss": 0.1585, |
| "step": 1106 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 1106, |
| "total_flos": 329840792174592.0, |
| "train_loss": 0.33624019250138976, |
| "train_runtime": 8393.9432, |
| "train_samples_per_second": 2.106, |
| "train_steps_per_second": 0.132 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 1106, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 329840792174592.0, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |