{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 200.0,
"global_step": 5205,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00019212295869356388,
"grad_norm": 27.250099182128906,
"learning_rate": 1.1494252873563217e-06,
"loss": 12.760068893432617,
"step": 1
},
{
"epoch": 0.0009606147934678194,
"grad_norm": 27.193613052368164,
"learning_rate": 5.747126436781608e-06,
"loss": 12.752052307128906,
"step": 5
},
{
"epoch": 0.0019212295869356388,
"grad_norm": 23.87885093688965,
"learning_rate": 1.1494252873563217e-05,
"loss": 12.383438110351562,
"step": 10
},
{
"epoch": 0.002881844380403458,
"grad_norm": 15.234784126281738,
"learning_rate": 1.7241379310344825e-05,
"loss": 11.199150085449219,
"step": 15
},
{
"epoch": 0.0038424591738712775,
"grad_norm": 15.339820861816406,
"learning_rate": 2.2988505747126433e-05,
"loss": 9.962273406982423,
"step": 20
},
{
"epoch": 0.004803073967339097,
"grad_norm": 7.6476054191589355,
"learning_rate": 2.8735632183908045e-05,
"loss": 8.961764526367187,
"step": 25
},
{
"epoch": 0.005763688760806916,
"grad_norm": 5.635213851928711,
"learning_rate": 3.448275862068965e-05,
"loss": 8.127005004882813,
"step": 30
},
{
"epoch": 0.0067243035542747355,
"grad_norm": 4.58963680267334,
"learning_rate": 4.022988505747126e-05,
"loss": 7.473291015625,
"step": 35
},
{
"epoch": 0.007684918347742555,
"grad_norm": 2.4760098457336426,
"learning_rate": 4.5977011494252866e-05,
"loss": 6.995676422119141,
"step": 40
},
{
"epoch": 0.008645533141210375,
"grad_norm": 1.5408024787902832,
"learning_rate": 5.172413793103448e-05,
"loss": 6.636921691894531,
"step": 45
},
{
"epoch": 0.009606147934678195,
"grad_norm": 1.112014651298523,
"learning_rate": 5.747126436781609e-05,
"loss": 6.349236297607422,
"step": 50
},
{
"epoch": 0.010566762728146013,
"grad_norm": 0.9658083319664001,
"learning_rate": 6.32183908045977e-05,
"loss": 6.117427825927734,
"step": 55
},
{
"epoch": 0.011527377521613832,
"grad_norm": 1.8757588863372803,
"learning_rate": 6.89655172413793e-05,
"loss": 5.927629852294922,
"step": 60
},
{
"epoch": 0.012487992315081652,
"grad_norm": 1.7964160442352295,
"learning_rate": 7.471264367816091e-05,
"loss": 5.770274353027344,
"step": 65
},
{
"epoch": 0.013448607108549471,
"grad_norm": 2.1291167736053467,
"learning_rate": 8.045977011494252e-05,
"loss": 5.6375282287597654,
"step": 70
},
{
"epoch": 0.01440922190201729,
"grad_norm": 3.0542609691619873,
"learning_rate": 8.620689655172413e-05,
"loss": 5.525964736938477,
"step": 75
},
{
"epoch": 0.01536983669548511,
"grad_norm": 3.1441893577575684,
"learning_rate": 9.195402298850573e-05,
"loss": 5.427687072753907,
"step": 80
},
{
"epoch": 0.01633045148895293,
"grad_norm": 1.3293317556381226,
"learning_rate": 9.770114942528733e-05,
"loss": 5.335831832885742,
"step": 85
},
{
"epoch": 0.01729106628242075,
"grad_norm": 2.560171127319336,
"learning_rate": 0.00010344827586206896,
"loss": 5.258418273925781,
"step": 90
},
{
"epoch": 0.01825168107588857,
"grad_norm": 2.012646436691284,
"learning_rate": 0.00010919540229885056,
"loss": 5.183747863769531,
"step": 95
},
{
"epoch": 0.01921229586935639,
"grad_norm": 2.6587507724761963,
"learning_rate": 0.00011494252873563218,
"loss": 5.119210052490234,
"step": 100
},
{
"epoch": 0.020172910662824207,
"grad_norm": 2.3047592639923096,
"learning_rate": 0.00012068965517241378,
"loss": 5.057864761352539,
"step": 105
},
{
"epoch": 0.021133525456292025,
"grad_norm": 2.4375789165496826,
"learning_rate": 0.0001264367816091954,
"loss": 5.000635147094727,
"step": 110
},
{
"epoch": 0.022094140249759846,
"grad_norm": 2.1402318477630615,
"learning_rate": 0.000132183908045977,
"loss": 4.947456359863281,
"step": 115
},
{
"epoch": 0.023054755043227664,
"grad_norm": 2.815868377685547,
"learning_rate": 0.0001379310344827586,
"loss": 4.898051071166992,
"step": 120
},
{
"epoch": 0.024015369836695485,
"grad_norm": 2.3648743629455566,
"learning_rate": 0.0001436781609195402,
"loss": 4.855741119384765,
"step": 125
},
{
"epoch": 0.024975984630163303,
"grad_norm": 2.230957508087158,
"learning_rate": 0.00014942528735632183,
"loss": 4.808645629882813,
"step": 130
},
{
"epoch": 0.025936599423631124,
"grad_norm": 1.3839224576950073,
"learning_rate": 0.00015517241379310346,
"loss": 4.757843780517578,
"step": 135
},
{
"epoch": 0.026897214217098942,
"grad_norm": 1.4279674291610718,
"learning_rate": 0.00016091954022988503,
"loss": 4.719438171386718,
"step": 140
},
{
"epoch": 0.027857829010566763,
"grad_norm": 1.4759711027145386,
"learning_rate": 0.00016666666666666666,
"loss": 4.6750732421875,
"step": 145
},
{
"epoch": 0.02881844380403458,
"grad_norm": 1.3809517621994019,
"learning_rate": 0.00017241379310344826,
"loss": 4.63501091003418,
"step": 150
},
{
"epoch": 0.029779058597502402,
"grad_norm": 1.6614536046981812,
"learning_rate": 0.00017816091954022986,
"loss": 4.603597640991211,
"step": 155
},
{
"epoch": 0.03073967339097022,
"grad_norm": 2.904686689376831,
"learning_rate": 0.00018390804597701147,
"loss": 4.574514770507813,
"step": 160
},
{
"epoch": 0.03170028818443804,
"grad_norm": 1.8000333309173584,
"learning_rate": 0.0001896551724137931,
"loss": 4.542916107177734,
"step": 165
},
{
"epoch": 0.03266090297790586,
"grad_norm": 2.0026073455810547,
"learning_rate": 0.00019540229885057467,
"loss": 4.509420013427734,
"step": 170
},
{
"epoch": 0.03362151777137368,
"grad_norm": 1.6088820695877075,
"learning_rate": 0.0002011494252873563,
"loss": 4.4883583068847654,
"step": 175
},
{
"epoch": 0.0345821325648415,
"grad_norm": 3.137084722518921,
"learning_rate": 0.00020689655172413793,
"loss": 4.462603759765625,
"step": 180
},
{
"epoch": 0.03554274735830932,
"grad_norm": 1.9390242099761963,
"learning_rate": 0.00021264367816091953,
"loss": 4.441267395019532,
"step": 185
},
{
"epoch": 0.03650336215177714,
"grad_norm": 2.0831611156463623,
"learning_rate": 0.00021839080459770113,
"loss": 4.420868682861328,
"step": 190
},
{
"epoch": 0.037463976945244955,
"grad_norm": 1.4936137199401855,
"learning_rate": 0.00022413793103448273,
"loss": 4.390471649169922,
"step": 195
},
{
"epoch": 0.03842459173871278,
"grad_norm": 2.1609437465667725,
"learning_rate": 0.00022988505747126436,
"loss": 4.372611999511719,
"step": 200
},
{
"epoch": 0.0393852065321806,
"grad_norm": 1.5894837379455566,
"learning_rate": 0.00023563218390804593,
"loss": 4.3513038635253904,
"step": 205
},
{
"epoch": 0.040345821325648415,
"grad_norm": 2.7555136680603027,
"learning_rate": 0.00024137931034482756,
"loss": 4.324166870117187,
"step": 210
},
{
"epoch": 0.04130643611911623,
"grad_norm": 1.7152142524719238,
"learning_rate": 0.00024712643678160916,
"loss": 4.298264694213867,
"step": 215
},
{
"epoch": 0.04226705091258405,
"grad_norm": 2.7435386180877686,
"learning_rate": 0.0002528735632183908,
"loss": 4.284121322631836,
"step": 220
},
{
"epoch": 0.043227665706051875,
"grad_norm": 1.9910717010498047,
"learning_rate": 0.00025862068965517237,
"loss": 4.270275497436524,
"step": 225
},
{
"epoch": 0.04418828049951969,
"grad_norm": 1.8952627182006836,
"learning_rate": 0.000264367816091954,
"loss": 4.251060867309571,
"step": 230
},
{
"epoch": 0.04514889529298751,
"grad_norm": 1.4182472229003906,
"learning_rate": 0.0002701149425287356,
"loss": 4.223833465576172,
"step": 235
},
{
"epoch": 0.04610951008645533,
"grad_norm": 2.826347589492798,
"learning_rate": 0.0002758620689655172,
"loss": 4.208561706542969,
"step": 240
},
{
"epoch": 0.04707012487992315,
"grad_norm": 2.5493690967559814,
"learning_rate": 0.00028160919540229883,
"loss": 4.194873428344726,
"step": 245
},
{
"epoch": 0.04803073967339097,
"grad_norm": 2.2824618816375732,
"learning_rate": 0.0002873563218390804,
"loss": 4.167812347412109,
"step": 250
},
{
"epoch": 0.04899135446685879,
"grad_norm": 1.4718077182769775,
"learning_rate": 0.00029310344827586203,
"loss": 4.150156402587891,
"step": 255
},
{
"epoch": 0.049951969260326606,
"grad_norm": 2.2785093784332275,
"learning_rate": 0.00029885057471264366,
"loss": 4.126807403564453,
"step": 260
},
{
"epoch": 0.05091258405379443,
"grad_norm": 1.5848008394241333,
"learning_rate": 0.00029999951546647263,
"loss": 4.111884689331054,
"step": 265
},
{
"epoch": 0.05187319884726225,
"grad_norm": 1.6441746950149536,
"learning_rate": 0.0002999975470543828,
"loss": 4.089701843261719,
"step": 270
},
{
"epoch": 0.052833813640730067,
"grad_norm": 1.9900462627410889,
"learning_rate": 0.00029999406450023966,
"loss": 4.0772346496582035,
"step": 275
},
{
"epoch": 0.053794428434197884,
"grad_norm": 1.938680648803711,
"learning_rate": 0.0002999890678391978,
"loss": 4.0556285858154295,
"step": 280
},
{
"epoch": 0.05475504322766571,
"grad_norm": 1.6333754062652588,
"learning_rate": 0.00029998255712169563,
"loss": 4.044793701171875,
"step": 285
},
{
"epoch": 0.05571565802113353,
"grad_norm": 1.8412508964538574,
"learning_rate": 0.00029997453241345533,
"loss": 4.03118896484375,
"step": 290
},
{
"epoch": 0.056676272814601344,
"grad_norm": 1.4252992868423462,
"learning_rate": 0.0002999649937954818,
"loss": 4.01298713684082,
"step": 295
},
{
"epoch": 0.05763688760806916,
"grad_norm": 2.278615951538086,
"learning_rate": 0.0002999539413640621,
"loss": 3.9946434020996096,
"step": 300
},
{
"epoch": 0.05859750240153699,
"grad_norm": 1.5454790592193604,
"learning_rate": 0.0002999413752307644,
"loss": 3.9831748962402345,
"step": 305
},
{
"epoch": 0.059558117195004805,
"grad_norm": 1.334790587425232,
"learning_rate": 0.0002999272955224369,
"loss": 3.9761894226074217,
"step": 310
},
{
"epoch": 0.06051873198847262,
"grad_norm": 1.9382845163345337,
"learning_rate": 0.0002999117023812064,
"loss": 3.9631324768066407,
"step": 315
},
{
"epoch": 0.06147934678194044,
"grad_norm": 2.01582932472229,
"learning_rate": 0.000299894595964477,
"loss": 3.9521259307861327,
"step": 320
},
{
"epoch": 0.06243996157540826,
"grad_norm": 1.8427826166152954,
"learning_rate": 0.0002998759764449286,
"loss": 3.9423927307128905,
"step": 325
},
{
"epoch": 0.06340057636887608,
"grad_norm": 1.6123930215835571,
"learning_rate": 0.0002998558440105148,
"loss": 3.935696029663086,
"step": 330
},
{
"epoch": 0.0643611911623439,
"grad_norm": 1.9502496719360352,
"learning_rate": 0.0002998341988644614,
"loss": 3.923968505859375,
"step": 335
},
{
"epoch": 0.06532180595581172,
"grad_norm": 1.2648119926452637,
"learning_rate": 0.0002998110412252641,
"loss": 3.9131423950195314,
"step": 340
},
{
"epoch": 0.06628242074927954,
"grad_norm": 1.5318907499313354,
"learning_rate": 0.0002997863713266866,
"loss": 3.9074745178222656,
"step": 345
},
{
"epoch": 0.06724303554274735,
"grad_norm": 1.3065940141677856,
"learning_rate": 0.0002997601894177576,
"loss": 3.8965118408203123,
"step": 350
},
{
"epoch": 0.06820365033621517,
"grad_norm": 1.2381218671798706,
"learning_rate": 0.00029973249576276914,
"loss": 3.890979766845703,
"step": 355
},
{
"epoch": 0.069164265129683,
"grad_norm": 1.5115997791290283,
"learning_rate": 0.0002997032906412732,
"loss": 3.8882400512695314,
"step": 360
},
{
"epoch": 0.07012487992315082,
"grad_norm": 1.6171698570251465,
"learning_rate": 0.0002996725743480793,
"loss": 3.876806640625,
"step": 365
},
{
"epoch": 0.07108549471661864,
"grad_norm": 1.0753288269042969,
"learning_rate": 0.00029964034719325147,
"loss": 3.860057067871094,
"step": 370
},
{
"epoch": 0.07204610951008646,
"grad_norm": 1.4012713432312012,
"learning_rate": 0.0002996066095021048,
"loss": 3.857683563232422,
"step": 375
},
{
"epoch": 0.07300672430355427,
"grad_norm": 1.765068769454956,
"learning_rate": 0.0002995713616152028,
"loss": 3.8544296264648437,
"step": 380
},
{
"epoch": 0.07396733909702209,
"grad_norm": 1.5474210977554321,
"learning_rate": 0.0002995346038883532,
"loss": 3.849739837646484,
"step": 385
},
{
"epoch": 0.07492795389048991,
"grad_norm": 1.443384051322937,
"learning_rate": 0.0002994963366926048,
"loss": 3.845722961425781,
"step": 390
},
{
"epoch": 0.07588856868395773,
"grad_norm": 1.4993858337402344,
"learning_rate": 0.0002994565604142439,
"loss": 3.8389007568359377,
"step": 395
},
{
"epoch": 0.07684918347742556,
"grad_norm": 1.4684888124465942,
"learning_rate": 0.00029941527545478976,
"loss": 3.8267383575439453,
"step": 400
},
{
"epoch": 0.07780979827089338,
"grad_norm": 1.2325655221939087,
"learning_rate": 0.00029937248223099136,
"loss": 3.818804168701172,
"step": 405
},
{
"epoch": 0.0787704130643612,
"grad_norm": 1.0254677534103394,
"learning_rate": 0.00029932818117482245,
"loss": 3.812041473388672,
"step": 410
},
{
"epoch": 0.07973102785782901,
"grad_norm": 1.3646955490112305,
"learning_rate": 0.0002992823727334776,
"loss": 3.810979461669922,
"step": 415
},
{
"epoch": 0.08069164265129683,
"grad_norm": 1.6398770809173584,
"learning_rate": 0.00029923505736936774,
"loss": 3.8059127807617186,
"step": 420
},
{
"epoch": 0.08165225744476465,
"grad_norm": 1.2734476327896118,
"learning_rate": 0.0002991862355601151,
"loss": 3.798663330078125,
"step": 425
},
{
"epoch": 0.08261287223823247,
"grad_norm": 1.4652912616729736,
"learning_rate": 0.00029913590779854886,
"loss": 3.7902076721191404,
"step": 430
},
{
"epoch": 0.08357348703170028,
"grad_norm": 1.0962094068527222,
"learning_rate": 0.00029908407459269977,
"loss": 3.786347198486328,
"step": 435
},
{
"epoch": 0.0845341018251681,
"grad_norm": 1.054427981376648,
"learning_rate": 0.0002990307364657954,
"loss": 3.780842590332031,
"step": 440
},
{
"epoch": 0.08549471661863593,
"grad_norm": 1.2044267654418945,
"learning_rate": 0.0002989758939562545,
"loss": 3.7756649017333985,
"step": 445
},
{
"epoch": 0.08645533141210375,
"grad_norm": 1.3676034212112427,
"learning_rate": 0.0002989195476176818,
"loss": 3.7702369689941406,
"step": 450
},
{
"epoch": 0.08741594620557157,
"grad_norm": 1.455597996711731,
"learning_rate": 0.00029886169801886237,
"loss": 3.76458740234375,
"step": 455
},
{
"epoch": 0.08837656099903939,
"grad_norm": 1.4684407711029053,
"learning_rate": 0.00029880234574375576,
"loss": 3.7579875946044923,
"step": 460
},
{
"epoch": 0.0893371757925072,
"grad_norm": 1.4192808866500854,
"learning_rate": 0.00029874149139149037,
"loss": 3.757953643798828,
"step": 465
},
{
"epoch": 0.09029779058597502,
"grad_norm": 0.8747047781944275,
"learning_rate": 0.00029867913557635704,
"loss": 3.7510101318359377,
"step": 470
},
{
"epoch": 0.09125840537944284,
"grad_norm": 1.4938651323318481,
"learning_rate": 0.0002986152789278031,
"loss": 3.746034622192383,
"step": 475
},
{
"epoch": 0.09221902017291066,
"grad_norm": 1.2561233043670654,
"learning_rate": 0.00029854992209042626,
"loss": 3.743701171875,
"step": 480
},
{
"epoch": 0.09317963496637849,
"grad_norm": 1.431540846824646,
"learning_rate": 0.0002984830657239673,
"loss": 3.736903762817383,
"step": 485
},
{
"epoch": 0.0941402497598463,
"grad_norm": 1.0810083150863647,
"learning_rate": 0.00029841471050330424,
"loss": 3.732843017578125,
"step": 490
},
{
"epoch": 0.09510086455331412,
"grad_norm": 0.9057182669639587,
"learning_rate": 0.00029834485711844515,
"loss": 3.7288368225097654,
"step": 495
},
{
"epoch": 0.09606147934678194,
"grad_norm": 1.559844970703125,
"learning_rate": 0.00029827350627452116,
"loss": 3.72186393737793,
"step": 500
},
{
"epoch": 0.09702209414024976,
"grad_norm": 1.3021472692489624,
"learning_rate": 0.00029820065869177954,
"loss": 3.7243560791015624,
"step": 505
},
{
"epoch": 0.09798270893371758,
"grad_norm": 0.9313806891441345,
"learning_rate": 0.0002981263151055762,
"loss": 3.7176589965820312,
"step": 510
},
{
"epoch": 0.0989433237271854,
"grad_norm": 1.2589117288589478,
"learning_rate": 0.0002980504762663683,
"loss": 3.712178039550781,
"step": 515
},
{
"epoch": 0.09990393852065321,
"grad_norm": 1.3793448209762573,
"learning_rate": 0.0002979731429397071,
"loss": 3.707489013671875,
"step": 520
},
{
"epoch": 0.10086455331412104,
"grad_norm": 1.6294807195663452,
"learning_rate": 0.0002978943159062295,
"loss": 3.7033920288085938,
"step": 525
},
{
"epoch": 0.10182516810758886,
"grad_norm": 1.3319975137710571,
"learning_rate": 0.0002978139959616507,
"loss": 3.7044357299804687,
"step": 530
},
{
"epoch": 0.10278578290105668,
"grad_norm": 1.1249921321868896,
"learning_rate": 0.00029773218391675594,
"loss": 3.6977405548095703,
"step": 535
},
{
"epoch": 0.1037463976945245,
"grad_norm": 1.7312142848968506,
"learning_rate": 0.00029764888059739255,
"loss": 3.698072814941406,
"step": 540
},
{
"epoch": 0.10470701248799232,
"grad_norm": 1.3000468015670776,
"learning_rate": 0.00029756408684446136,
"loss": 3.7032306671142576,
"step": 545
},
{
"epoch": 0.10566762728146013,
"grad_norm": 1.2404688596725464,
"learning_rate": 0.0002974778035139081,
"loss": 3.693822479248047,
"step": 550
},
{
"epoch": 0.10662824207492795,
"grad_norm": 0.9569908380508423,
"learning_rate": 0.00029739003147671536,
"loss": 3.6903202056884767,
"step": 555
},
{
"epoch": 0.10758885686839577,
"grad_norm": 1.3814141750335693,
"learning_rate": 0.00029730077161889304,
"loss": 3.6874603271484374,
"step": 560
},
{
"epoch": 0.10854947166186359,
"grad_norm": 1.3034733533859253,
"learning_rate": 0.00029721002484147,
"loss": 3.6825042724609376,
"step": 565
},
{
"epoch": 0.10951008645533142,
"grad_norm": 1.6188613176345825,
"learning_rate": 0.00029711779206048454,
"loss": 3.6817798614501953,
"step": 570
},
{
"epoch": 0.11047070124879924,
"grad_norm": 1.411993384361267,
"learning_rate": 0.0002970240742069755,
"loss": 3.677989959716797,
"step": 575
},
{
"epoch": 0.11143131604226705,
"grad_norm": 1.6159188747406006,
"learning_rate": 0.0002969288722269726,
"loss": 3.673622894287109,
"step": 580
},
{
"epoch": 0.11239193083573487,
"grad_norm": 1.2981064319610596,
"learning_rate": 0.000296832187081487,
"loss": 3.674951934814453,
"step": 585
},
{
"epoch": 0.11335254562920269,
"grad_norm": 1.0490260124206543,
"learning_rate": 0.0002967340197465017,
"loss": 3.665152740478516,
"step": 590
},
{
"epoch": 0.1143131604226705,
"grad_norm": 1.5031790733337402,
"learning_rate": 0.00029663437121296146,
"loss": 3.6685272216796876,
"step": 595
},
{
"epoch": 0.11527377521613832,
"grad_norm": 1.6380650997161865,
"learning_rate": 0.000296533242486763,
"loss": 3.6649364471435546,
"step": 600
},
{
"epoch": 0.11623439000960614,
"grad_norm": 1.1084094047546387,
"learning_rate": 0.0002964306345887447,
"loss": 3.658610153198242,
"step": 605
},
{
"epoch": 0.11719500480307397,
"grad_norm": 1.454255223274231,
"learning_rate": 0.0002963265485546764,
"loss": 3.655583953857422,
"step": 610
},
{
"epoch": 0.11815561959654179,
"grad_norm": 1.5477410554885864,
"learning_rate": 0.00029622098543524884,
"loss": 3.6575542449951173,
"step": 615
},
{
"epoch": 0.11911623439000961,
"grad_norm": 1.0675349235534668,
"learning_rate": 0.00029611394629606324,
"loss": 3.6511856079101563,
"step": 620
},
{
"epoch": 0.12007684918347743,
"grad_norm": 1.7654082775115967,
"learning_rate": 0.0002960054322176204,
"loss": 3.6485317230224608,
"step": 625
},
{
"epoch": 0.12103746397694524,
"grad_norm": 1.3815727233886719,
"learning_rate": 0.0002958954442953096,
"loss": 3.644425964355469,
"step": 630
},
{
"epoch": 0.12199807877041306,
"grad_norm": 1.0989855527877808,
"learning_rate": 0.000295783983639398,
"loss": 3.6440811157226562,
"step": 635
},
{
"epoch": 0.12295869356388088,
"grad_norm": 1.3829193115234375,
"learning_rate": 0.00029567105137501916,
"loss": 3.640919876098633,
"step": 640
},
{
"epoch": 0.1239193083573487,
"grad_norm": 1.3325438499450684,
"learning_rate": 0.00029555664864216156,
"loss": 3.6379947662353516,
"step": 645
},
{
"epoch": 0.12487992315081652,
"grad_norm": 1.112707257270813,
"learning_rate": 0.00029544077659565747,
"loss": 3.636188507080078,
"step": 650
},
{
"epoch": 0.12584053794428435,
"grad_norm": 1.1048657894134521,
"learning_rate": 0.0002953234364051708,
"loss": 3.637079620361328,
"step": 655
},
{
"epoch": 0.12680115273775217,
"grad_norm": 1.0728777647018433,
"learning_rate": 0.00029520462925518575,
"loss": 3.6303642272949217,
"step": 660
},
{
"epoch": 0.12776176753121998,
"grad_norm": 0.9134336113929749,
"learning_rate": 0.00029508435634499467,
"loss": 3.625618743896484,
"step": 665
},
{
"epoch": 0.1287223823246878,
"grad_norm": 1.0948387384414673,
"learning_rate": 0.00029496261888868586,
"loss": 3.6273883819580077,
"step": 670
},
{
"epoch": 0.12968299711815562,
"grad_norm": 1.1168876886367798,
"learning_rate": 0.0002948394181151314,
"loss": 3.6221595764160157,
"step": 675
},
{
"epoch": 0.13064361191162344,
"grad_norm": 1.1925864219665527,
"learning_rate": 0.0002947147552679748,
"loss": 3.6182037353515626,
"step": 680
},
{
"epoch": 0.13160422670509125,
"grad_norm": 1.2935006618499756,
"learning_rate": 0.00029458863160561837,
"loss": 3.6180152893066406,
"step": 685
},
{
"epoch": 0.13256484149855907,
"grad_norm": 1.0551207065582275,
"learning_rate": 0.0002944610484012105,
"loss": 3.614363098144531,
"step": 690
},
{
"epoch": 0.1335254562920269,
"grad_norm": 1.2916655540466309,
"learning_rate": 0.0002943320069426329,
"loss": 3.611812210083008,
"step": 695
},
{
"epoch": 0.1344860710854947,
"grad_norm": 1.084269404411316,
"learning_rate": 0.00029420150853248756,
"loss": 3.609844970703125,
"step": 700
},
{
"epoch": 0.13544668587896252,
"grad_norm": 1.6168688535690308,
"learning_rate": 0.0002940695544880836,
"loss": 3.6106407165527346,
"step": 705
},
{
"epoch": 0.13640730067243034,
"grad_norm": 1.294932246208191,
"learning_rate": 0.0002939361461414238,
"loss": 3.607910919189453,
"step": 710
},
{
"epoch": 0.1373679154658982,
"grad_norm": 0.9968737363815308,
"learning_rate": 0.0002938012848391915,
"loss": 3.6028270721435547,
"step": 715
},
{
"epoch": 0.138328530259366,
"grad_norm": 1.143962025642395,
"learning_rate": 0.0002936649719427367,
"loss": 3.6049777984619142,
"step": 720
},
{
"epoch": 0.13928914505283382,
"grad_norm": 1.4859670400619507,
"learning_rate": 0.00029352720882806267,
"loss": 3.6023681640625,
"step": 725
},
{
"epoch": 0.14024975984630164,
"grad_norm": 1.383272647857666,
"learning_rate": 0.00029338799688581146,
"loss": 3.5986564636230467,
"step": 730
},
{
"epoch": 0.14121037463976946,
"grad_norm": 1.3845287561416626,
"learning_rate": 0.00029324733752125054,
"loss": 3.597808074951172,
"step": 735
},
{
"epoch": 0.14217098943323728,
"grad_norm": 1.5982863903045654,
"learning_rate": 0.0002931052321542581,
"loss": 3.5963520050048827,
"step": 740
},
{
"epoch": 0.1431316042267051,
"grad_norm": 1.0727800130844116,
"learning_rate": 0.00029296168221930904,
"loss": 3.592487335205078,
"step": 745
},
{
"epoch": 0.1440922190201729,
"grad_norm": 1.4507306814193726,
"learning_rate": 0.0002928166891654604,
"loss": 3.595014190673828,
"step": 750
},
{
"epoch": 0.14505283381364073,
"grad_norm": 1.4457939863204956,
"learning_rate": 0.00029267025445633667,
"loss": 3.590290069580078,
"step": 755
},
{
"epoch": 0.14601344860710855,
"grad_norm": 1.1515284776687622,
"learning_rate": 0.0002925223795701149,
"loss": 3.5872840881347656,
"step": 760
},
{
"epoch": 0.14697406340057637,
"grad_norm": 1.0049299001693726,
"learning_rate": 0.00029237306599951007,
"loss": 3.5840923309326174,
"step": 765
},
{
"epoch": 0.14793467819404418,
"grad_norm": 1.628124475479126,
"learning_rate": 0.00029222231525176005,
"loss": 3.5856204986572267,
"step": 770
},
{
"epoch": 0.148895292987512,
"grad_norm": 1.5615901947021484,
"learning_rate": 0.0002920701288486099,
"loss": 3.5824256896972657,
"step": 775
},
{
"epoch": 0.14985590778097982,
"grad_norm": 1.0828890800476074,
"learning_rate": 0.00029191650832629694,
"loss": 3.5833717346191407,
"step": 780
},
{
"epoch": 0.15081652257444764,
"grad_norm": 0.8812053799629211,
"learning_rate": 0.00029176145523553517,
"loss": 3.581065368652344,
"step": 785
},
{
"epoch": 0.15177713736791545,
"grad_norm": 1.3092104196548462,
"learning_rate": 0.0002916049711414996,
"loss": 3.5770198822021486,
"step": 790
},
{
"epoch": 0.15273775216138327,
"grad_norm": 1.4702849388122559,
"learning_rate": 0.00029144705762381036,
"loss": 3.5817501068115236,
"step": 795
},
{
"epoch": 0.15369836695485112,
"grad_norm": 1.110189437866211,
"learning_rate": 0.0002912877162765169,
"loss": 3.5730361938476562,
"step": 800
},
{
"epoch": 0.15465898174831894,
"grad_norm": 0.9270662665367126,
"learning_rate": 0.00029112694870808155,
"loss": 3.5725852966308596,
"step": 805
},
{
"epoch": 0.15561959654178675,
"grad_norm": 1.3707542419433594,
"learning_rate": 0.00029096475654136395,
"loss": 3.5693943023681642,
"step": 810
},
{
"epoch": 0.15658021133525457,
"grad_norm": 1.4934515953063965,
"learning_rate": 0.000290801141413604,
"loss": 3.565943145751953,
"step": 815
},
{
"epoch": 0.1575408261287224,
"grad_norm": 1.0972487926483154,
"learning_rate": 0.00029063610497640576,
"loss": 3.5643775939941404,
"step": 820
},
{
"epoch": 0.1585014409221902,
"grad_norm": 1.2283886671066284,
"learning_rate": 0.0002904696488957204,
"loss": 3.5635589599609374,
"step": 825
},
{
"epoch": 0.15946205571565802,
"grad_norm": 1.164687156677246,
"learning_rate": 0.0002903017748518298,
"loss": 3.555510711669922,
"step": 830
},
{
"epoch": 0.16042267050912584,
"grad_norm": 0.9848024845123291,
"learning_rate": 0.0002901324845393294,
"loss": 3.5610126495361327,
"step": 835
},
{
"epoch": 0.16138328530259366,
"grad_norm": 1.4761382341384888,
"learning_rate": 0.00028996177966711097,
"loss": 3.5633079528808596,
"step": 840
},
{
"epoch": 0.16234390009606148,
"grad_norm": 1.265584945678711,
"learning_rate": 0.0002897896619583455,
"loss": 3.5593791961669923,
"step": 845
},
{
"epoch": 0.1633045148895293,
"grad_norm": 1.2358146905899048,
"learning_rate": 0.0002896161331504659,
"loss": 3.5586807250976564,
"step": 850
},
{
"epoch": 0.1642651296829971,
"grad_norm": 1.2075212001800537,
"learning_rate": 0.00028944119499514913,
"loss": 3.5556678771972656,
"step": 855
},
{
"epoch": 0.16522574447646493,
"grad_norm": 1.3434778451919556,
"learning_rate": 0.0002892648492582989,
"loss": 3.548540496826172,
"step": 860
},
{
"epoch": 0.16618635926993275,
"grad_norm": 0.7819045186042786,
"learning_rate": 0.00028908709772002765,
"loss": 3.5495044708251955,
"step": 865
},
{
"epoch": 0.16714697406340057,
"grad_norm": 1.3979746103286743,
"learning_rate": 0.00028890794217463863,
"loss": 3.5515655517578124,
"step": 870
},
{
"epoch": 0.16810758885686838,
"grad_norm": 1.0144678354263306,
"learning_rate": 0.0002887273844306076,
"loss": 3.547974395751953,
"step": 875
},
{
"epoch": 0.1690682036503362,
"grad_norm": 0.8473827242851257,
"learning_rate": 0.00028854542631056494,
"loss": 3.543077087402344,
"step": 880
},
{
"epoch": 0.17002881844380405,
"grad_norm": 0.9643390774726868,
"learning_rate": 0.0002883620696512769,
"loss": 3.546183776855469,
"step": 885
},
{
"epoch": 0.17098943323727187,
"grad_norm": 1.3916168212890625,
"learning_rate": 0.0002881773163036273,
"loss": 3.5453559875488283,
"step": 890
},
{
"epoch": 0.17195004803073968,
"grad_norm": 0.9916486740112305,
"learning_rate": 0.00028799116813259875,
"loss": 3.543218994140625,
"step": 895
},
{
"epoch": 0.1729106628242075,
"grad_norm": 0.9322313666343689,
"learning_rate": 0.0002878036270172538,
"loss": 3.5352169036865235,
"step": 900
},
{
"epoch": 0.17387127761767532,
"grad_norm": 1.0106613636016846,
"learning_rate": 0.000287614694850716,
"loss": 3.539842224121094,
"step": 905
},
{
"epoch": 0.17483189241114314,
"grad_norm": 0.9181408286094666,
"learning_rate": 0.00028742437354015073,
"loss": 3.53677978515625,
"step": 910
},
{
"epoch": 0.17579250720461095,
"grad_norm": 1.0180490016937256,
"learning_rate": 0.0002872326650067462,
"loss": 3.536122131347656,
"step": 915
},
{
"epoch": 0.17675312199807877,
"grad_norm": 1.140866994857788,
"learning_rate": 0.00028703957118569363,
"loss": 3.5390419006347655,
"step": 920
},
{
"epoch": 0.1777137367915466,
"grad_norm": 1.3825160264968872,
"learning_rate": 0.000286845094026168,
"loss": 3.534088897705078,
"step": 925
},
{
"epoch": 0.1786743515850144,
"grad_norm": 0.9901686310768127,
"learning_rate": 0.0002866492354913086,
"loss": 3.5334190368652343,
"step": 930
},
{
"epoch": 0.17963496637848222,
"grad_norm": 1.3177112340927124,
"learning_rate": 0.0002864519975581986,
"loss": 3.531676483154297,
"step": 935
},
{
"epoch": 0.18059558117195004,
"grad_norm": 1.5046802759170532,
"learning_rate": 0.0002862533822178456,
"loss": 3.5300270080566407,
"step": 940
},
{
"epoch": 0.18155619596541786,
"grad_norm": 0.8187685012817383,
"learning_rate": 0.00028605339147516113,
"loss": 3.527945709228516,
"step": 945
},
{
"epoch": 0.18251681075888568,
"grad_norm": 1.0654352903366089,
"learning_rate": 0.00028585202734894105,
"loss": 3.5284492492675783,
"step": 950
},
{
"epoch": 0.1834774255523535,
"grad_norm": 1.0617858171463013,
"learning_rate": 0.00028564929187184447,
"loss": 3.526523208618164,
"step": 955
},
{
"epoch": 0.1844380403458213,
"grad_norm": 0.9183857440948486,
"learning_rate": 0.00028544518709037363,
"loss": 3.5269630432128904,
"step": 960
},
{
"epoch": 0.18539865513928913,
"grad_norm": 1.3353052139282227,
"learning_rate": 0.000285239715064853,
"loss": 3.5191162109375,
"step": 965
},
{
"epoch": 0.18635926993275698,
"grad_norm": 1.1203348636627197,
"learning_rate": 0.0002850328778694088,
"loss": 3.5222145080566407,
"step": 970
},
{
"epoch": 0.1873198847262248,
"grad_norm": 1.1545228958129883,
"learning_rate": 0.0002848246775919478,
"loss": 3.518872833251953,
"step": 975
},
{
"epoch": 0.1882804995196926,
"grad_norm": 0.9494602084159851,
"learning_rate": 0.0002846151163341364,
"loss": 3.519129180908203,
"step": 980
},
{
"epoch": 0.18924111431316043,
"grad_norm": 0.8861366510391235,
"learning_rate": 0.0002844041962113792,
"loss": 3.517262268066406,
"step": 985
},
{
"epoch": 0.19020172910662825,
"grad_norm": 1.097005844116211,
"learning_rate": 0.00028419191935279793,
"loss": 3.5180564880371095,
"step": 990
},
{
"epoch": 0.19116234390009607,
"grad_norm": 0.9788710474967957,
"learning_rate": 0.00028397828790120965,
"loss": 3.5166107177734376,
"step": 995
},
{
"epoch": 0.19212295869356388,
"grad_norm": 1.3244202136993408,
"learning_rate": 0.0002837633040131055,
"loss": 3.5119117736816405,
"step": 1000
},
{
"epoch": 0.1930835734870317,
"grad_norm": 1.2634823322296143,
"learning_rate": 0.00028354696985862865,
"loss": 3.5120399475097654,
"step": 1005
},
{
"epoch": 0.19404418828049952,
"grad_norm": 1.3096829652786255,
"learning_rate": 0.00028332928762155225,
"loss": 3.5108917236328123,
"step": 1010
},
{
"epoch": 0.19500480307396734,
"grad_norm": 1.0718635320663452,
"learning_rate": 0.0002831102594992579,
"loss": 3.5096744537353515,
"step": 1015
},
{
"epoch": 0.19596541786743515,
"grad_norm": 1.0316826105117798,
"learning_rate": 0.00028288988770271297,
"loss": 3.5067817687988283,
"step": 1020
},
{
"epoch": 0.19692603266090297,
"grad_norm": 1.7893810272216797,
"learning_rate": 0.00028266817445644855,
"loss": 3.5084625244140626,
"step": 1025
},
{
"epoch": 0.1978866474543708,
"grad_norm": 1.3311545848846436,
"learning_rate": 0.0002824451219985369,
"loss": 3.5091777801513673,
"step": 1030
},
{
"epoch": 0.1988472622478386,
"grad_norm": 1.0418428182601929,
"learning_rate": 0.0002822207325805688,
"loss": 3.5157501220703127,
"step": 1035
},
{
"epoch": 0.19980787704130643,
"grad_norm": 1.1201485395431519,
"learning_rate": 0.00028199500846763116,
"loss": 3.5064178466796876,
"step": 1040
},
{
"epoch": 0.20076849183477424,
"grad_norm": 1.0694836378097534,
"learning_rate": 0.0002817679519382836,
"loss": 3.507163238525391,
"step": 1045
},
{
"epoch": 0.2017291066282421,
"grad_norm": 1.2998982667922974,
"learning_rate": 0.0002815395652845359,
"loss": 3.5081203460693358,
"step": 1050
},
{
"epoch": 0.2026897214217099,
"grad_norm": 1.3336325883865356,
"learning_rate": 0.0002813098508118247,
"loss": 3.499872589111328,
"step": 1055
},
{
"epoch": 0.20365033621517772,
"grad_norm": 0.7667920589447021,
"learning_rate": 0.0002810788108389901,
"loss": 3.499583435058594,
"step": 1060
},
{
"epoch": 0.20461095100864554,
"grad_norm": 1.072025179862976,
"learning_rate": 0.0002808464476982526,
"loss": 3.495106506347656,
"step": 1065
},
{
"epoch": 0.20557156580211336,
"grad_norm": 0.964080274105072,
"learning_rate": 0.0002806127637351892,
"loss": 3.4985504150390625,
"step": 1070
},
{
"epoch": 0.20653218059558118,
"grad_norm": 1.0775015354156494,
"learning_rate": 0.00028037776130871,
"loss": 3.4976402282714845,
"step": 1075
},
{
"epoch": 0.207492795389049,
"grad_norm": 1.0506839752197266,
"learning_rate": 0.00028014144279103406,
"loss": 3.4984210968017577,
"step": 1080
},
{
"epoch": 0.2084534101825168,
"grad_norm": 0.8290379643440247,
"learning_rate": 0.0002799038105676658,
"loss": 3.494451141357422,
"step": 1085
},
{
"epoch": 0.20941402497598463,
"grad_norm": 1.1757789850234985,
"learning_rate": 0.00027966486703737066,
"loss": 3.4927711486816406,
"step": 1090
},
{
"epoch": 0.21037463976945245,
"grad_norm": 1.0375887155532837,
"learning_rate": 0.0002794246146121512,
"loss": 3.4952877044677733,
"step": 1095
},
{
"epoch": 0.21133525456292027,
"grad_norm": 0.9264093637466431,
"learning_rate": 0.0002791830557172224,
"loss": 3.49503173828125,
"step": 1100
},
{
"epoch": 0.21229586935638808,
"grad_norm": 0.9268721342086792,
"learning_rate": 0.00027894019279098726,
"loss": 3.4857433319091795,
"step": 1105
},
{
"epoch": 0.2132564841498559,
"grad_norm": 1.1226038932800293,
"learning_rate": 0.00027869602828501234,
"loss": 3.488873291015625,
"step": 1110
},
{
"epoch": 0.21421709894332372,
"grad_norm": 1.1734631061553955,
"learning_rate": 0.00027845056466400297,
"loss": 3.487217330932617,
"step": 1115
},
{
"epoch": 0.21517771373679154,
"grad_norm": 0.9604414701461792,
"learning_rate": 0.0002782038044057783,
"loss": 3.487082672119141,
"step": 1120
},
{
"epoch": 0.21613832853025935,
"grad_norm": 0.9780157208442688,
"learning_rate": 0.0002779557500012462,
"loss": 3.48626708984375,
"step": 1125
},
{
"epoch": 0.21709894332372717,
"grad_norm": 1.1477540731430054,
"learning_rate": 0.0002777064039543784,
"loss": 3.4851287841796874,
"step": 1130
},
{
"epoch": 0.21805955811719502,
"grad_norm": 1.1044639348983765,
"learning_rate": 0.00027745576878218496,
"loss": 3.483640670776367,
"step": 1135
},
{
"epoch": 0.21902017291066284,
"grad_norm": 0.8308424949645996,
"learning_rate": 0.0002772038470146888,
"loss": 3.4800315856933595,
"step": 1140
},
{
"epoch": 0.21998078770413065,
"grad_norm": 1.204115390777588,
"learning_rate": 0.0002769506411949007,
"loss": 3.481386184692383,
"step": 1145
},
{
"epoch": 0.22094140249759847,
"grad_norm": 1.0885634422302246,
"learning_rate": 0.00027669615387879284,
"loss": 3.4802738189697267,
"step": 1150
},
{
"epoch": 0.2219020172910663,
"grad_norm": 1.1829088926315308,
"learning_rate": 0.0002764403876352736,
"loss": 3.4813682556152346,
"step": 1155
},
{
"epoch": 0.2228626320845341,
"grad_norm": 1.4106764793395996,
"learning_rate": 0.0002761833450461613,
"loss": 3.4792640686035154,
"step": 1160
},
{
"epoch": 0.22382324687800192,
"grad_norm": 1.074035406112671,
"learning_rate": 0.0002759250287061583,
"loss": 3.478520965576172,
"step": 1165
},
{
"epoch": 0.22478386167146974,
"grad_norm": 1.211889624595642,
"learning_rate": 0.00027566544122282496,
"loss": 3.478863525390625,
"step": 1170
},
{
"epoch": 0.22574447646493756,
"grad_norm": 0.9881852865219116,
"learning_rate": 0.0002754045852165529,
"loss": 3.4755462646484374,
"step": 1175
},
{
"epoch": 0.22670509125840538,
"grad_norm": 1.0194916725158691,
"learning_rate": 0.00027514246332053876,
"loss": 3.476274108886719,
"step": 1180
},
{
"epoch": 0.2276657060518732,
"grad_norm": 1.0759530067443848,
"learning_rate": 0.0002748790781807577,
"loss": 3.4741195678710937,
"step": 1185
},
{
"epoch": 0.228626320845341,
"grad_norm": 1.181774377822876,
"learning_rate": 0.0002746144324559368,
"loss": 3.4723861694335936,
"step": 1190
},
{
"epoch": 0.22958693563880883,
"grad_norm": 0.9129538536071777,
"learning_rate": 0.00027434852881752774,
"loss": 3.472820281982422,
"step": 1195
},
{
"epoch": 0.23054755043227665,
"grad_norm": 1.01265287399292,
"learning_rate": 0.0002740813699496804,
"loss": 3.472024917602539,
"step": 1200
},
{
"epoch": 0.23150816522574447,
"grad_norm": 1.2006109952926636,
"learning_rate": 0.0002738129585492153,
"loss": 3.4669479370117187,
"step": 1205
},
{
"epoch": 0.23246878001921228,
"grad_norm": 0.8117277026176453,
"learning_rate": 0.0002735432973255967,
"loss": 3.46884765625,
"step": 1210
},
{
"epoch": 0.2334293948126801,
"grad_norm": 0.9207207560539246,
"learning_rate": 0.0002732723890009051,
"loss": 3.4688953399658202,
"step": 1215
},
{
"epoch": 0.23439000960614795,
"grad_norm": 1.1179691553115845,
"learning_rate": 0.00027300023630980985,
"loss": 3.472461700439453,
"step": 1220
},
{
"epoch": 0.23535062439961577,
"grad_norm": 0.971973180770874,
"learning_rate": 0.00027272684199954137,
"loss": 3.467519760131836,
"step": 1225
},
{
"epoch": 0.23631123919308358,
"grad_norm": 0.8591349720954895,
"learning_rate": 0.0002724522088298637,
"loss": 3.4663330078125,
"step": 1230
},
{
"epoch": 0.2372718539865514,
"grad_norm": 1.1340241432189941,
"learning_rate": 0.0002721763395730462,
"loss": 3.466988372802734,
"step": 1235
},
{
"epoch": 0.23823246878001922,
"grad_norm": 0.7911289930343628,
"learning_rate": 0.00027189923701383627,
"loss": 3.463814544677734,
"step": 1240
},
{
"epoch": 0.23919308357348704,
"grad_norm": 1.2324531078338623,
"learning_rate": 0.0002716209039494304,
"loss": 3.46383056640625,
"step": 1245
},
{
"epoch": 0.24015369836695485,
"grad_norm": 1.0037308931350708,
"learning_rate": 0.0002713413431894466,
"loss": 3.4620521545410154,
"step": 1250
},
{
"epoch": 0.24111431316042267,
"grad_norm": 1.3981319665908813,
"learning_rate": 0.00027106055755589566,
"loss": 3.4639919281005858,
"step": 1255
},
{
"epoch": 0.2420749279538905,
"grad_norm": 1.0288619995117188,
"learning_rate": 0.00027077854988315285,
"loss": 3.4653533935546874,
"step": 1260
},
{
"epoch": 0.2430355427473583,
"grad_norm": 1.0437750816345215,
"learning_rate": 0.00027049532301792924,
"loss": 3.4617347717285156,
"step": 1265
},
{
"epoch": 0.24399615754082613,
"grad_norm": 1.018276572227478,
"learning_rate": 0.00027021087981924296,
"loss": 3.4626544952392577,
"step": 1270
},
{
"epoch": 0.24495677233429394,
"grad_norm": 1.2246990203857422,
"learning_rate": 0.0002699252231583904,
"loss": 3.458060073852539,
"step": 1275
},
{
"epoch": 0.24591738712776176,
"grad_norm": 1.440726637840271,
"learning_rate": 0.000269638355918917,
"loss": 3.4551132202148436,
"step": 1280
},
{
"epoch": 0.24687800192122958,
"grad_norm": 1.0400745868682861,
"learning_rate": 0.00026935028099658864,
"loss": 3.455486297607422,
"step": 1285
},
{
"epoch": 0.2478386167146974,
"grad_norm": 1.271278738975525,
"learning_rate": 0.00026906100129936173,
"loss": 3.4623786926269533,
"step": 1290
},
{
"epoch": 0.24879923150816521,
"grad_norm": 1.0657492876052856,
"learning_rate": 0.0002687705197473545,
"loss": 3.455769348144531,
"step": 1295
},
{
"epoch": 0.24975984630163303,
"grad_norm": 0.913031816482544,
"learning_rate": 0.00026847883927281715,
"loss": 3.4589412689208983,
"step": 1300
},
{
"epoch": 0.2507204610951009,
"grad_norm": 0.8772344589233398,
"learning_rate": 0.00026818596282010223,
"loss": 3.4558467864990234,
"step": 1305
},
{
"epoch": 0.2516810758885687,
"grad_norm": 1.1004761457443237,
"learning_rate": 0.00026789189334563507,
"loss": 3.455731964111328,
"step": 1310
},
{
"epoch": 0.2526416906820365,
"grad_norm": 1.0385078191757202,
"learning_rate": 0.00026759663381788407,
"loss": 3.4509082794189454,
"step": 1315
},
{
"epoch": 0.25360230547550433,
"grad_norm": 0.8879292011260986,
"learning_rate": 0.00026730018721733034,
"loss": 3.447312927246094,
"step": 1320
},
{
"epoch": 0.25456292026897215,
"grad_norm": 1.32045316696167,
"learning_rate": 0.0002670025565364379,
"loss": 3.4483909606933594,
"step": 1325
},
{
"epoch": 0.25552353506243997,
"grad_norm": 1.0469266176223755,
"learning_rate": 0.0002667037447796234,
"loss": 3.44598388671875,
"step": 1330
},
{
"epoch": 0.2564841498559078,
"grad_norm": 0.9312946796417236,
"learning_rate": 0.0002664037549632259,
"loss": 3.449735641479492,
"step": 1335
},
{
"epoch": 0.2574447646493756,
"grad_norm": 1.0092568397521973,
"learning_rate": 0.00026610259011547617,
"loss": 3.4483173370361326,
"step": 1340
},
{
"epoch": 0.2584053794428434,
"grad_norm": 0.905603289604187,
"learning_rate": 0.0002658002532764663,
"loss": 3.4480667114257812,
"step": 1345
},
{
"epoch": 0.25936599423631124,
"grad_norm": 1.152783989906311,
"learning_rate": 0.00026549674749811917,
"loss": 3.4437828063964844,
"step": 1350
},
{
"epoch": 0.26032660902977905,
"grad_norm": 0.9953619241714478,
"learning_rate": 0.00026519207584415705,
"loss": 3.444900131225586,
"step": 1355
},
{
"epoch": 0.2612872238232469,
"grad_norm": 0.8666988611221313,
"learning_rate": 0.00026488624139007154,
"loss": 3.4405364990234375,
"step": 1360
},
{
"epoch": 0.2622478386167147,
"grad_norm": 1.0244816541671753,
"learning_rate": 0.0002645792472230917,
"loss": 3.4408805847167967,
"step": 1365
},
{
"epoch": 0.2632084534101825,
"grad_norm": 1.211923599243164,
"learning_rate": 0.0002642710964421535,
"loss": 3.441836929321289,
"step": 1370
},
{
"epoch": 0.2641690682036503,
"grad_norm": 0.9250677824020386,
"learning_rate": 0.0002639617921578681,
"loss": 3.4430908203125,
"step": 1375
},
{
"epoch": 0.26512968299711814,
"grad_norm": 0.9860612154006958,
"learning_rate": 0.0002636513374924908,
"loss": 3.4425369262695313,
"step": 1380
},
{
"epoch": 0.26609029779058596,
"grad_norm": 1.1464399099349976,
"learning_rate": 0.00026333973557988923,
"loss": 3.4433387756347655,
"step": 1385
},
{
"epoch": 0.2670509125840538,
"grad_norm": 0.9755972027778625,
"learning_rate": 0.0002630269895655119,
"loss": 3.4360820770263674,
"step": 1390
},
{
"epoch": 0.2680115273775216,
"grad_norm": 0.7360960245132446,
"learning_rate": 0.00026271310260635633,
"loss": 3.4424560546875,
"step": 1395
},
{
"epoch": 0.2689721421709894,
"grad_norm": 1.3588593006134033,
"learning_rate": 0.0002623980778709374,
"loss": 3.4407089233398436,
"step": 1400
},
{
"epoch": 0.26993275696445723,
"grad_norm": 1.060451626777649,
"learning_rate": 0.0002620819185392551,
"loss": 3.4387184143066407,
"step": 1405
},
{
"epoch": 0.27089337175792505,
"grad_norm": 1.108783483505249,
"learning_rate": 0.00026176462780276246,
"loss": 3.440538787841797,
"step": 1410
},
{
"epoch": 0.27185398655139287,
"grad_norm": 1.033456802368164,
"learning_rate": 0.0002614462088643336,
"loss": 3.438457489013672,
"step": 1415
},
{
"epoch": 0.2728146013448607,
"grad_norm": 1.1797153949737549,
"learning_rate": 0.00026112666493823103,
"loss": 3.4355682373046874,
"step": 1420
},
{
"epoch": 0.2737752161383285,
"grad_norm": 0.8781239986419678,
"learning_rate": 0.00026080599925007355,
"loss": 3.437305450439453,
"step": 1425
},
{
"epoch": 0.2747358309317964,
"grad_norm": 0.812835156917572,
"learning_rate": 0.00026048421503680337,
"loss": 3.4358146667480467,
"step": 1430
},
{
"epoch": 0.2756964457252642,
"grad_norm": 1.6010043621063232,
"learning_rate": 0.00026016131554665377,
"loss": 3.4342422485351562,
"step": 1435
},
{
"epoch": 0.276657060518732,
"grad_norm": 1.0615707635879517,
"learning_rate": 0.000259837304039116,
"loss": 3.434612274169922,
"step": 1440
},
{
"epoch": 0.27761767531219983,
"grad_norm": 0.9948373436927795,
"learning_rate": 0.0002595121837849065,
"loss": 3.434070587158203,
"step": 1445
},
{
"epoch": 0.27857829010566765,
"grad_norm": 1.335551142692566,
"learning_rate": 0.000259185958065934,
"loss": 3.4336421966552733,
"step": 1450
},
{
"epoch": 0.27953890489913547,
"grad_norm": 1.2016087770462036,
"learning_rate": 0.00025885863017526613,
"loss": 3.432381439208984,
"step": 1455
},
{
"epoch": 0.2804995196926033,
"grad_norm": 1.0571645498275757,
"learning_rate": 0.00025853020341709646,
"loss": 3.4373619079589846,
"step": 1460
},
{
"epoch": 0.2814601344860711,
"grad_norm": 0.9469745755195618,
"learning_rate": 0.000258200681106711,
"loss": 3.433266448974609,
"step": 1465
},
{
"epoch": 0.2824207492795389,
"grad_norm": 1.0642951726913452,
"learning_rate": 0.00025787006657045477,
"loss": 3.4274715423583983,
"step": 1470
},
{
"epoch": 0.28338136407300674,
"grad_norm": 1.117781162261963,
"learning_rate": 0.000257538363145698,
"loss": 3.430649185180664,
"step": 1475
},
{
"epoch": 0.28434197886647455,
"grad_norm": 1.1161621809005737,
"learning_rate": 0.00025720557418080304,
"loss": 3.43245849609375,
"step": 1480
},
{
"epoch": 0.28530259365994237,
"grad_norm": 1.0228129625320435,
"learning_rate": 0.00025687170303508977,
"loss": 3.4293182373046873,
"step": 1485
},
{
"epoch": 0.2862632084534102,
"grad_norm": 0.8712406754493713,
"learning_rate": 0.00025653675307880225,
"loss": 3.424097442626953,
"step": 1490
},
{
"epoch": 0.287223823246878,
"grad_norm": 0.8777533173561096,
"learning_rate": 0.00025620072769307463,
"loss": 3.424530029296875,
"step": 1495
},
{
"epoch": 0.2881844380403458,
"grad_norm": 0.9582040309906006,
"learning_rate": 0.00025586363026989677,
"loss": 3.4286567687988283,
"step": 1500
},
{
"epoch": 0.28914505283381364,
"grad_norm": 1.0138081312179565,
"learning_rate": 0.0002555254642120802,
"loss": 3.4281455993652346,
"step": 1505
},
{
"epoch": 0.29010566762728146,
"grad_norm": 1.2888671159744263,
"learning_rate": 0.0002551862329332238,
"loss": 3.425878143310547,
"step": 1510
},
{
"epoch": 0.2910662824207493,
"grad_norm": 0.9225014448165894,
"learning_rate": 0.0002548459398576791,
"loss": 3.426416015625,
"step": 1515
},
{
"epoch": 0.2920268972142171,
"grad_norm": 0.7408014535903931,
"learning_rate": 0.00025450458842051616,
"loss": 3.4226699829101563,
"step": 1520
},
{
"epoch": 0.2929875120076849,
"grad_norm": 1.099754810333252,
"learning_rate": 0.0002541621820674882,
"loss": 3.42437858581543,
"step": 1525
},
{
"epoch": 0.29394812680115273,
"grad_norm": 1.210573434829712,
"learning_rate": 0.0002538187242549976,
"loss": 3.422506332397461,
"step": 1530
},
{
"epoch": 0.29490874159462055,
"grad_norm": 1.1748319864273071,
"learning_rate": 0.00025347421845006056,
"loss": 3.4207489013671877,
"step": 1535
},
{
"epoch": 0.29586935638808837,
"grad_norm": 1.2270821332931519,
"learning_rate": 0.00025312866813027195,
"loss": 3.4193565368652346,
"step": 1540
},
{
"epoch": 0.2968299711815562,
"grad_norm": 1.1345160007476807,
"learning_rate": 0.0002527820767837708,
"loss": 3.4215885162353517,
"step": 1545
},
{
"epoch": 0.297790585975024,
"grad_norm": 1.4022908210754395,
"learning_rate": 0.00025243444790920447,
"loss": 3.4179046630859373,
"step": 1550
},
{
"epoch": 0.2987512007684918,
"grad_norm": 0.9985158443450928,
"learning_rate": 0.0002520857850156936,
"loss": 3.4201751708984376,
"step": 1555
},
{
"epoch": 0.29971181556195964,
"grad_norm": 1.081823468208313,
"learning_rate": 0.0002517360916227968,
"loss": 3.416116714477539,
"step": 1560
},
{
"epoch": 0.30067243035542746,
"grad_norm": 1.2369301319122314,
"learning_rate": 0.000251385371260475,
"loss": 3.4199737548828124,
"step": 1565
},
{
"epoch": 0.3016330451488953,
"grad_norm": 0.8743671178817749,
"learning_rate": 0.0002510336274690557,
"loss": 3.4136940002441407,
"step": 1570
},
{
"epoch": 0.3025936599423631,
"grad_norm": 1.0848971605300903,
"learning_rate": 0.0002506808637991974,
"loss": 3.417308044433594,
"step": 1575
},
{
"epoch": 0.3035542747358309,
"grad_norm": 1.450103521347046,
"learning_rate": 0.0002503270838118537,
"loss": 3.41820068359375,
"step": 1580
},
{
"epoch": 0.3045148895292987,
"grad_norm": 0.9271462559700012,
"learning_rate": 0.0002499722910782374,
"loss": 3.4144283294677735,
"step": 1585
},
{
"epoch": 0.30547550432276654,
"grad_norm": 0.9813090562820435,
"learning_rate": 0.0002496164891797844,
"loss": 3.41821403503418,
"step": 1590
},
{
"epoch": 0.30643611911623436,
"grad_norm": 0.9819661378860474,
"learning_rate": 0.0002492596817081175,
"loss": 3.4166328430175783,
"step": 1595
},
{
"epoch": 0.30739673390970224,
"grad_norm": 1.1119061708450317,
"learning_rate": 0.0002489018722650103,
"loss": 3.4129749298095704,
"step": 1600
},
{
"epoch": 0.30835734870317005,
"grad_norm": 1.0902990102767944,
"learning_rate": 0.0002485430644623507,
"loss": 3.415445327758789,
"step": 1605
},
{
"epoch": 0.30931796349663787,
"grad_norm": 1.1950386762619019,
"learning_rate": 0.00024818326192210447,
"loss": 3.411751937866211,
"step": 1610
},
{
"epoch": 0.3102785782901057,
"grad_norm": 0.8721151351928711,
"learning_rate": 0.0002478224682762787,
"loss": 3.409122085571289,
"step": 1615
},
{
"epoch": 0.3112391930835735,
"grad_norm": 1.1784799098968506,
"learning_rate": 0.0002474606871668852,
"loss": 3.411494827270508,
"step": 1620
},
{
"epoch": 0.3121998078770413,
"grad_norm": 1.107187271118164,
"learning_rate": 0.00024709792224590356,
"loss": 3.4105316162109376,
"step": 1625
},
{
"epoch": 0.31316042267050914,
"grad_norm": 0.8363384008407593,
"learning_rate": 0.0002467341771752446,
"loss": 3.406443786621094,
"step": 1630
},
{
"epoch": 0.31412103746397696,
"grad_norm": 0.8663597702980042,
"learning_rate": 0.000246369455626713,
"loss": 3.409493637084961,
"step": 1635
},
{
"epoch": 0.3150816522574448,
"grad_norm": 0.7546355724334717,
"learning_rate": 0.00024600376128197047,
"loss": 3.403882598876953,
"step": 1640
},
{
"epoch": 0.3160422670509126,
"grad_norm": 1.0707752704620361,
"learning_rate": 0.00024563709783249877,
"loss": 3.405009460449219,
"step": 1645
},
{
"epoch": 0.3170028818443804,
"grad_norm": 1.0536484718322754,
"learning_rate": 0.00024526946897956194,
"loss": 3.4075836181640624,
"step": 1650
},
{
"epoch": 0.31796349663784823,
"grad_norm": 1.0901403427124023,
"learning_rate": 0.00024490087843416947,
"loss": 3.406299591064453,
"step": 1655
},
{
"epoch": 0.31892411143131605,
"grad_norm": 1.224731683731079,
"learning_rate": 0.00024453132991703844,
"loss": 3.408635711669922,
"step": 1660
},
{
"epoch": 0.31988472622478387,
"grad_norm": 0.9345903396606445,
"learning_rate": 0.00024416082715855627,
"loss": 3.4082077026367186,
"step": 1665
},
{
"epoch": 0.3208453410182517,
"grad_norm": 1.0254333019256592,
"learning_rate": 0.00024378937389874276,
"loss": 3.409267807006836,
"step": 1670
},
{
"epoch": 0.3218059558117195,
"grad_norm": 1.0035061836242676,
"learning_rate": 0.0002434169738872126,
"loss": 3.4060768127441405,
"step": 1675
},
{
"epoch": 0.3227665706051873,
"grad_norm": 0.8024983406066895,
"learning_rate": 0.0002430436308831374,
"loss": 3.4000919342041014,
"step": 1680
},
{
"epoch": 0.32372718539865514,
"grad_norm": 0.8286018371582031,
"learning_rate": 0.00024266934865520767,
"loss": 3.405823516845703,
"step": 1685
},
{
"epoch": 0.32468780019212296,
"grad_norm": 0.9187076091766357,
"learning_rate": 0.00024229413098159506,
"loss": 3.402732086181641,
"step": 1690
},
{
"epoch": 0.3256484149855908,
"grad_norm": 0.9258525967597961,
"learning_rate": 0.00024191798164991378,
"loss": 3.4013343811035157,
"step": 1695
},
{
"epoch": 0.3266090297790586,
"grad_norm": 1.202602744102478,
"learning_rate": 0.0002415409044571828,
"loss": 3.4034278869628904,
"step": 1700
},
{
"epoch": 0.3275696445725264,
"grad_norm": 1.0954785346984863,
"learning_rate": 0.00024116290320978724,
"loss": 3.4010189056396483,
"step": 1705
},
{
"epoch": 0.3285302593659942,
"grad_norm": 0.8299508690834045,
"learning_rate": 0.00024078398172344006,
"loss": 3.402983856201172,
"step": 1710
},
{
"epoch": 0.32949087415946204,
"grad_norm": 1.2064135074615479,
"learning_rate": 0.00024040414382314358,
"loss": 3.397635650634766,
"step": 1715
},
{
"epoch": 0.33045148895292986,
"grad_norm": 1.0353888273239136,
"learning_rate": 0.00024002339334315066,
"loss": 3.4012256622314454,
"step": 1720
},
{
"epoch": 0.3314121037463977,
"grad_norm": 0.9317356944084167,
"learning_rate": 0.00023964173412692631,
"loss": 3.3994606018066404,
"step": 1725
},
{
"epoch": 0.3323727185398655,
"grad_norm": 1.0982980728149414,
"learning_rate": 0.00023925917002710865,
"loss": 3.398478698730469,
"step": 1730
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.8598034977912903,
"learning_rate": 0.0002388757049054701,
"loss": 3.400804138183594,
"step": 1735
},
{
"epoch": 0.33429394812680113,
"grad_norm": 0.9004125595092773,
"learning_rate": 0.00023849134263287836,
"loss": 3.3936607360839846,
"step": 1740
},
{
"epoch": 0.33525456292026895,
"grad_norm": 1.0058287382125854,
"learning_rate": 0.00023810608708925755,
"loss": 3.397069549560547,
"step": 1745
},
{
"epoch": 0.33621517771373677,
"grad_norm": 1.0947821140289307,
"learning_rate": 0.00023771994216354857,
"loss": 3.398267364501953,
"step": 1750
},
{
"epoch": 0.3371757925072046,
"grad_norm": 1.1534372568130493,
"learning_rate": 0.00023733291175367046,
"loss": 3.3978240966796873,
"step": 1755
},
{
"epoch": 0.3381364073006724,
"grad_norm": 0.9231524467468262,
"learning_rate": 0.00023694499976648043,
"loss": 3.4001846313476562,
"step": 1760
},
{
"epoch": 0.3390970220941403,
"grad_norm": 1.1671085357666016,
"learning_rate": 0.0002365562101177349,
"loss": 3.3949043273925783,
"step": 1765
},
{
"epoch": 0.3400576368876081,
"grad_norm": 1.1204719543457031,
"learning_rate": 0.00023616654673204983,
"loss": 3.3966522216796875,
"step": 1770
},
{
"epoch": 0.3410182516810759,
"grad_norm": 0.9162036776542664,
"learning_rate": 0.00023577601354286094,
"loss": 3.3971607208251955,
"step": 1775
},
{
"epoch": 0.34197886647454373,
"grad_norm": 0.8886377811431885,
"learning_rate": 0.00023538461449238422,
"loss": 3.3945545196533202,
"step": 1780
},
{
"epoch": 0.34293948126801155,
"grad_norm": 0.8314016461372375,
"learning_rate": 0.00023499235353157603,
"loss": 3.397966766357422,
"step": 1785
},
{
"epoch": 0.34390009606147937,
"grad_norm": 0.9351712465286255,
"learning_rate": 0.0002345992346200932,
"loss": 3.3912559509277345,
"step": 1790
},
{
"epoch": 0.3448607108549472,
"grad_norm": 1.2932080030441284,
"learning_rate": 0.00023420526172625316,
"loss": 3.399237060546875,
"step": 1795
},
{
"epoch": 0.345821325648415,
"grad_norm": 1.092822551727295,
"learning_rate": 0.00023381043882699377,
"loss": 3.3947265625,
"step": 1800
},
{
"epoch": 0.3467819404418828,
"grad_norm": 1.2571241855621338,
"learning_rate": 0.0002334147699078333,
"loss": 3.3920978546142577,
"step": 1805
},
{
"epoch": 0.34774255523535064,
"grad_norm": 0.7598462104797363,
"learning_rate": 0.00023301825896282992,
"loss": 3.3873401641845704,
"step": 1810
},
{
"epoch": 0.34870317002881845,
"grad_norm": 0.9965523481369019,
"learning_rate": 0.00023262090999454194,
"loss": 3.390736389160156,
"step": 1815
},
{
"epoch": 0.34966378482228627,
"grad_norm": 0.9940029978752136,
"learning_rate": 0.00023222272701398664,
"loss": 3.387689208984375,
"step": 1820
},
{
"epoch": 0.3506243996157541,
"grad_norm": 1.051283597946167,
"learning_rate": 0.00023182371404060047,
"loss": 3.3928714752197267,
"step": 1825
},
{
"epoch": 0.3515850144092219,
"grad_norm": 0.9655299186706543,
"learning_rate": 0.00023142387510219814,
"loss": 3.3853427886962892,
"step": 1830
},
{
"epoch": 0.3525456292026897,
"grad_norm": 0.912288248538971,
"learning_rate": 0.00023102321423493192,
"loss": 3.3889575958251954,
"step": 1835
},
{
"epoch": 0.35350624399615754,
"grad_norm": 0.890324592590332,
"learning_rate": 0.00023062173548325112,
"loss": 3.388275146484375,
"step": 1840
},
{
"epoch": 0.35446685878962536,
"grad_norm": 0.819926381111145,
"learning_rate": 0.0002302194428998611,
"loss": 3.3917514801025392,
"step": 1845
},
{
"epoch": 0.3554274735830932,
"grad_norm": 1.0578113794326782,
"learning_rate": 0.0002298163405456824,
"loss": 3.384230041503906,
"step": 1850
},
{
"epoch": 0.356388088376561,
"grad_norm": 1.0350604057312012,
"learning_rate": 0.00022941243248980966,
"loss": 3.388728713989258,
"step": 1855
},
{
"epoch": 0.3573487031700288,
"grad_norm": 1.0110005140304565,
"learning_rate": 0.0002290077228094708,
"loss": 3.386165237426758,
"step": 1860
},
{
"epoch": 0.35830931796349663,
"grad_norm": 0.7671937346458435,
"learning_rate": 0.00022860221558998554,
"loss": 3.386688995361328,
"step": 1865
},
{
"epoch": 0.35926993275696445,
"grad_norm": 1.032853603363037,
"learning_rate": 0.00022819591492472438,
"loss": 3.385240173339844,
"step": 1870
},
{
"epoch": 0.36023054755043227,
"grad_norm": 0.8370668888092041,
"learning_rate": 0.00022778882491506725,
"loss": 3.3829574584960938,
"step": 1875
},
{
"epoch": 0.3611911623439001,
"grad_norm": 1.0862385034561157,
"learning_rate": 0.00022738094967036208,
"loss": 3.3890167236328126,
"step": 1880
},
{
"epoch": 0.3621517771373679,
"grad_norm": 1.2748796939849854,
"learning_rate": 0.00022697229330788312,
"loss": 3.384657287597656,
"step": 1885
},
{
"epoch": 0.3631123919308357,
"grad_norm": 1.163509726524353,
"learning_rate": 0.00022656285995278984,
"loss": 3.3868003845214845,
"step": 1890
},
{
"epoch": 0.36407300672430354,
"grad_norm": 0.832695484161377,
"learning_rate": 0.00022615265373808488,
"loss": 3.38619384765625,
"step": 1895
},
{
"epoch": 0.36503362151777136,
"grad_norm": 1.1879743337631226,
"learning_rate": 0.00022574167880457245,
"loss": 3.384031295776367,
"step": 1900
},
{
"epoch": 0.3659942363112392,
"grad_norm": 0.9494090676307678,
"learning_rate": 0.00022532993930081668,
"loss": 3.379372406005859,
"step": 1905
},
{
"epoch": 0.366954851104707,
"grad_norm": 0.7896311283111572,
"learning_rate": 0.00022491743938309936,
"loss": 3.3832141876220705,
"step": 1910
},
{
"epoch": 0.3679154658981748,
"grad_norm": 0.911125898361206,
"learning_rate": 0.0002245041832153786,
"loss": 3.3824745178222657,
"step": 1915
},
{
"epoch": 0.3688760806916426,
"grad_norm": 1.241249680519104,
"learning_rate": 0.0002240901749692461,
"loss": 3.3809112548828124,
"step": 1920
},
{
"epoch": 0.36983669548511044,
"grad_norm": 0.8434759974479675,
"learning_rate": 0.00022367541882388554,
"loss": 3.3806549072265626,
"step": 1925
},
{
"epoch": 0.37079731027857826,
"grad_norm": 1.0567084550857544,
"learning_rate": 0.00022325991896603018,
"loss": 3.380388641357422,
"step": 1930
},
{
"epoch": 0.37175792507204614,
"grad_norm": 1.1255742311477661,
"learning_rate": 0.00022284367958992065,
"loss": 3.3810966491699217,
"step": 1935
},
{
"epoch": 0.37271853986551395,
"grad_norm": 0.8697860240936279,
"learning_rate": 0.0002224267048972627,
"loss": 3.381666564941406,
"step": 1940
},
{
"epoch": 0.37367915465898177,
"grad_norm": 1.1864638328552246,
"learning_rate": 0.00022200899909718456,
"loss": 3.3785327911376952,
"step": 1945
},
{
"epoch": 0.3746397694524496,
"grad_norm": 0.8901607394218445,
"learning_rate": 0.00022159056640619457,
"loss": 3.3746604919433594,
"step": 1950
},
{
"epoch": 0.3756003842459174,
"grad_norm": 0.940137505531311,
"learning_rate": 0.00022117141104813876,
"loss": 3.3786911010742187,
"step": 1955
},
{
"epoch": 0.3765609990393852,
"grad_norm": 0.9261166453361511,
"learning_rate": 0.000220751537254158,
"loss": 3.377775192260742,
"step": 1960
},
{
"epoch": 0.37752161383285304,
"grad_norm": 0.9173313975334167,
"learning_rate": 0.00022033094926264548,
"loss": 3.375761795043945,
"step": 1965
},
{
"epoch": 0.37848222862632086,
"grad_norm": 1.0608283281326294,
"learning_rate": 0.00021990965131920358,
"loss": 3.3748458862304687,
"step": 1970
},
{
"epoch": 0.3794428434197887,
"grad_norm": 0.9284070134162903,
"learning_rate": 0.0002194876476766015,
"loss": 3.374094772338867,
"step": 1975
},
{
"epoch": 0.3804034582132565,
"grad_norm": 0.8284513354301453,
"learning_rate": 0.00021906494259473196,
"loss": 3.3742515563964846,
"step": 1980
},
{
"epoch": 0.3813640730067243,
"grad_norm": 1.1894856691360474,
"learning_rate": 0.00021864154034056832,
"loss": 3.3718952178955077,
"step": 1985
},
{
"epoch": 0.38232468780019213,
"grad_norm": 0.9039756655693054,
"learning_rate": 0.00021821744518812154,
"loss": 3.3747280120849608,
"step": 1990
},
{
"epoch": 0.38328530259365995,
"grad_norm": 0.8699926137924194,
"learning_rate": 0.00021779266141839699,
"loss": 3.372933197021484,
"step": 1995
},
{
"epoch": 0.38424591738712777,
"grad_norm": 0.9204120635986328,
"learning_rate": 0.00021736719331935127,
"loss": 3.370801544189453,
"step": 2000
},
{
"epoch": 0.3852065321805956,
"grad_norm": 1.0923502445220947,
"learning_rate": 0.00021694104518584886,
"loss": 3.374231719970703,
"step": 2005
},
{
"epoch": 0.3861671469740634,
"grad_norm": 0.8937914967536926,
"learning_rate": 0.00021651422131961884,
"loss": 3.374364471435547,
"step": 2010
},
{
"epoch": 0.3871277617675312,
"grad_norm": 0.8756441473960876,
"learning_rate": 0.0002160867260292115,
"loss": 3.3731971740722657,
"step": 2015
},
{
"epoch": 0.38808837656099904,
"grad_norm": 0.7807921171188354,
"learning_rate": 0.00021565856362995464,
"loss": 3.3712364196777345,
"step": 2020
},
{
"epoch": 0.38904899135446686,
"grad_norm": 1.0138747692108154,
"learning_rate": 0.00021522973844391024,
"loss": 3.370909881591797,
"step": 2025
},
{
"epoch": 0.3900096061479347,
"grad_norm": 0.9295784831047058,
"learning_rate": 0.00021480025479983077,
"loss": 3.369549560546875,
"step": 2030
},
{
"epoch": 0.3909702209414025,
"grad_norm": 0.7283740043640137,
"learning_rate": 0.00021437011703311545,
"loss": 3.3707386016845704,
"step": 2035
},
{
"epoch": 0.3919308357348703,
"grad_norm": 0.9124945402145386,
"learning_rate": 0.0002139393294857665,
"loss": 3.3750667572021484,
"step": 2040
},
{
"epoch": 0.3928914505283381,
"grad_norm": 1.0313608646392822,
"learning_rate": 0.00021350789650634525,
"loss": 3.369192123413086,
"step": 2045
},
{
"epoch": 0.39385206532180594,
"grad_norm": 0.904303252696991,
"learning_rate": 0.00021307582244992838,
"loss": 3.3705421447753907,
"step": 2050
},
{
"epoch": 0.39481268011527376,
"grad_norm": 0.9359715580940247,
"learning_rate": 0.0002126431116780639,
"loss": 3.3716297149658203,
"step": 2055
},
{
"epoch": 0.3957732949087416,
"grad_norm": 0.9251255989074707,
"learning_rate": 0.00021220976855872712,
"loss": 3.3701812744140627,
"step": 2060
},
{
"epoch": 0.3967339097022094,
"grad_norm": 0.8808803558349609,
"learning_rate": 0.00021177579746627643,
"loss": 3.369044876098633,
"step": 2065
},
{
"epoch": 0.3976945244956772,
"grad_norm": 0.8854672908782959,
"learning_rate": 0.0002113412027814094,
"loss": 3.3659637451171873,
"step": 2070
},
{
"epoch": 0.39865513928914503,
"grad_norm": 0.8999817371368408,
"learning_rate": 0.0002109059888911183,
"loss": 3.365467071533203,
"step": 2075
},
{
"epoch": 0.39961575408261285,
"grad_norm": 0.9394131898880005,
"learning_rate": 0.00021047016018864602,
"loss": 3.372660827636719,
"step": 2080
},
{
"epoch": 0.40057636887608067,
"grad_norm": 0.7608519792556763,
"learning_rate": 0.00021003372107344167,
"loss": 3.367539978027344,
"step": 2085
},
{
"epoch": 0.4015369836695485,
"grad_norm": 1.1321345567703247,
"learning_rate": 0.00020959667595111603,
"loss": 3.3716583251953125,
"step": 2090
},
{
"epoch": 0.4024975984630163,
"grad_norm": 0.9030200242996216,
"learning_rate": 0.00020915902923339722,
"loss": 3.3653472900390624,
"step": 2095
},
{
"epoch": 0.4034582132564842,
"grad_norm": 0.9246105551719666,
"learning_rate": 0.0002087207853380862,
"loss": 3.3666152954101562,
"step": 2100
},
{
"epoch": 0.404418828049952,
"grad_norm": 0.9399805665016174,
"learning_rate": 0.00020828194868901205,
"loss": 3.3659893035888673,
"step": 2105
},
{
"epoch": 0.4053794428434198,
"grad_norm": 0.8061392307281494,
"learning_rate": 0.00020784252371598732,
"loss": 3.366693115234375,
"step": 2110
},
{
"epoch": 0.40634005763688763,
"grad_norm": 1.1672389507293701,
"learning_rate": 0.00020740251485476345,
"loss": 3.3655033111572266,
"step": 2115
},
{
"epoch": 0.40730067243035545,
"grad_norm": 0.7972300052642822,
"learning_rate": 0.00020696192654698592,
"loss": 3.3643836975097656,
"step": 2120
},
{
"epoch": 0.40826128722382327,
"grad_norm": 0.6781191825866699,
"learning_rate": 0.00020652076324014927,
"loss": 3.363838958740234,
"step": 2125
},
{
"epoch": 0.4092219020172911,
"grad_norm": 1.1469253301620483,
"learning_rate": 0.00020607902938755252,
"loss": 3.3665115356445314,
"step": 2130
},
{
"epoch": 0.4101825168107589,
"grad_norm": 0.9588373303413391,
"learning_rate": 0.00020563672944825392,
"loss": 3.3640487670898436,
"step": 2135
},
{
"epoch": 0.4111431316042267,
"grad_norm": 0.8890582323074341,
"learning_rate": 0.00020519386788702602,
"loss": 3.361592102050781,
"step": 2140
},
{
"epoch": 0.41210374639769454,
"grad_norm": 0.9977627396583557,
"learning_rate": 0.0002047504491743107,
"loss": 3.3624588012695313,
"step": 2145
},
{
"epoch": 0.41306436119116235,
"grad_norm": 1.0691829919815063,
"learning_rate": 0.000204306477786174,
"loss": 3.3603302001953126,
"step": 2150
},
{
"epoch": 0.4140249759846302,
"grad_norm": 1.0000925064086914,
"learning_rate": 0.00020386195820426082,
"loss": 3.364809036254883,
"step": 2155
},
{
"epoch": 0.414985590778098,
"grad_norm": 1.147975206375122,
"learning_rate": 0.00020341689491574984,
"loss": 3.3625713348388673,
"step": 2160
},
{
"epoch": 0.4159462055715658,
"grad_norm": 0.9693566560745239,
"learning_rate": 0.00020297129241330817,
"loss": 3.3619319915771486,
"step": 2165
},
{
"epoch": 0.4169068203650336,
"grad_norm": 0.8027826547622681,
"learning_rate": 0.00020252515519504592,
"loss": 3.3606258392333985,
"step": 2170
},
{
"epoch": 0.41786743515850144,
"grad_norm": 0.7421839833259583,
"learning_rate": 0.0002020784877644709,
"loss": 3.3613494873046874,
"step": 2175
},
{
"epoch": 0.41882804995196926,
"grad_norm": 0.7940289378166199,
"learning_rate": 0.00020163129463044308,
"loss": 3.360422897338867,
"step": 2180
},
{
"epoch": 0.4197886647454371,
"grad_norm": 1.0238333940505981,
"learning_rate": 0.0002011835803071292,
"loss": 3.3607128143310545,
"step": 2185
},
{
"epoch": 0.4207492795389049,
"grad_norm": 1.0974775552749634,
"learning_rate": 0.00020073534931395697,
"loss": 3.3562847137451173,
"step": 2190
},
{
"epoch": 0.4217098943323727,
"grad_norm": 0.9029604196548462,
"learning_rate": 0.00020028660617556985,
"loss": 3.3622852325439454,
"step": 2195
},
{
"epoch": 0.42267050912584053,
"grad_norm": 1.0699923038482666,
"learning_rate": 0.00019983735542178086,
"loss": 3.359086608886719,
"step": 2200
},
{
"epoch": 0.42363112391930835,
"grad_norm": 0.9210760593414307,
"learning_rate": 0.00019938760158752725,
"loss": 3.360548400878906,
"step": 2205
},
{
"epoch": 0.42459173871277617,
"grad_norm": 1.0080957412719727,
"learning_rate": 0.00019893734921282448,
"loss": 3.3639129638671874,
"step": 2210
},
{
"epoch": 0.425552353506244,
"grad_norm": 0.668106734752655,
"learning_rate": 0.0001984866028427207,
"loss": 3.3550628662109374,
"step": 2215
},
{
"epoch": 0.4265129682997118,
"grad_norm": 0.9900498390197754,
"learning_rate": 0.00019803536702725044,
"loss": 3.3611160278320313,
"step": 2220
},
{
"epoch": 0.4274735830931796,
"grad_norm": 0.6974493861198425,
"learning_rate": 0.00019758364632138908,
"loss": 3.354916000366211,
"step": 2225
},
{
"epoch": 0.42843419788664744,
"grad_norm": 0.8031367063522339,
"learning_rate": 0.0001971314452850066,
"loss": 3.360482406616211,
"step": 2230
},
{
"epoch": 0.42939481268011526,
"grad_norm": 0.9039629697799683,
"learning_rate": 0.00019667876848282167,
"loss": 3.3570487976074217,
"step": 2235
},
{
"epoch": 0.4303554274735831,
"grad_norm": 0.7340952157974243,
"learning_rate": 0.0001962256204843556,
"loss": 3.3560821533203127,
"step": 2240
},
{
"epoch": 0.4313160422670509,
"grad_norm": 1.069008469581604,
"learning_rate": 0.00019577200586388618,
"loss": 3.358184814453125,
"step": 2245
},
{
"epoch": 0.4322766570605187,
"grad_norm": 0.8336465358734131,
"learning_rate": 0.00019531792920040133,
"loss": 3.359252166748047,
"step": 2250
},
{
"epoch": 0.4332372718539865,
"grad_norm": 1.0504704713821411,
"learning_rate": 0.0001948633950775532,
"loss": 3.3570930480957033,
"step": 2255
},
{
"epoch": 0.43419788664745435,
"grad_norm": 0.879917562007904,
"learning_rate": 0.00019440840808361174,
"loss": 3.3541915893554686,
"step": 2260
},
{
"epoch": 0.43515850144092216,
"grad_norm": 0.9227856397628784,
"learning_rate": 0.00019395297281141828,
"loss": 3.3502120971679688,
"step": 2265
},
{
"epoch": 0.43611911623439004,
"grad_norm": 0.7166210412979126,
"learning_rate": 0.0001934970938583393,
"loss": 3.3565288543701173,
"step": 2270
},
{
"epoch": 0.43707973102785785,
"grad_norm": 0.8305450677871704,
"learning_rate": 0.00019304077582622003,
"loss": 3.352977752685547,
"step": 2275
},
{
"epoch": 0.43804034582132567,
"grad_norm": 0.9930862188339233,
"learning_rate": 0.00019258402332133798,
"loss": 3.360272979736328,
"step": 2280
},
{
"epoch": 0.4390009606147935,
"grad_norm": 1.1689637899398804,
"learning_rate": 0.00019212684095435637,
"loss": 3.3537288665771485,
"step": 2285
},
{
"epoch": 0.4399615754082613,
"grad_norm": 0.7753242254257202,
"learning_rate": 0.00019166923334027765,
"loss": 3.356067657470703,
"step": 2290
},
{
"epoch": 0.4409221902017291,
"grad_norm": 0.8336865901947021,
"learning_rate": 0.00019121120509839692,
"loss": 3.3564964294433595,
"step": 2295
},
{
"epoch": 0.44188280499519694,
"grad_norm": 0.8085048198699951,
"learning_rate": 0.0001907527608522552,
"loss": 3.3494583129882813,
"step": 2300
},
{
"epoch": 0.44284341978866476,
"grad_norm": 1.051607370376587,
"learning_rate": 0.000190293905229593,
"loss": 3.351207733154297,
"step": 2305
},
{
"epoch": 0.4438040345821326,
"grad_norm": 0.7590753436088562,
"learning_rate": 0.00018983464286230327,
"loss": 3.3514862060546875,
"step": 2310
},
{
"epoch": 0.4447646493756004,
"grad_norm": 0.9674370288848877,
"learning_rate": 0.00018937497838638509,
"loss": 3.3499351501464845,
"step": 2315
},
{
"epoch": 0.4457252641690682,
"grad_norm": 0.8794429898262024,
"learning_rate": 0.0001889149164418963,
"loss": 3.3523765563964845,
"step": 2320
},
{
"epoch": 0.44668587896253603,
"grad_norm": 0.7959157228469849,
"learning_rate": 0.00018845446167290705,
"loss": 3.350267791748047,
"step": 2325
},
{
"epoch": 0.44764649375600385,
"grad_norm": 0.625482976436615,
"learning_rate": 0.0001879936187274529,
"loss": 3.348467254638672,
"step": 2330
},
{
"epoch": 0.44860710854947167,
"grad_norm": 1.1344565153121948,
"learning_rate": 0.00018753239225748796,
"loss": 3.35140380859375,
"step": 2335
},
{
"epoch": 0.4495677233429395,
"grad_norm": 1.0100090503692627,
"learning_rate": 0.0001870707869188375,
"loss": 3.3482070922851563,
"step": 2340
},
{
"epoch": 0.4505283381364073,
"grad_norm": 0.9724920988082886,
"learning_rate": 0.00018660880737115146,
"loss": 3.3504077911376955,
"step": 2345
},
{
"epoch": 0.4514889529298751,
"grad_norm": 0.8876535296440125,
"learning_rate": 0.0001861464582778572,
"loss": 3.352552032470703,
"step": 2350
},
{
"epoch": 0.45244956772334294,
"grad_norm": 1.3819876909255981,
"learning_rate": 0.00018568374430611242,
"loss": 3.3520408630371095,
"step": 2355
},
{
"epoch": 0.45341018251681076,
"grad_norm": 1.1260014772415161,
"learning_rate": 0.00018522067012675798,
"loss": 3.353636932373047,
"step": 2360
},
{
"epoch": 0.4543707973102786,
"grad_norm": 0.8487617373466492,
"learning_rate": 0.00018475724041427106,
"loss": 3.3487457275390624,
"step": 2365
},
{
"epoch": 0.4553314121037464,
"grad_norm": 1.024625539779663,
"learning_rate": 0.00018429345984671743,
"loss": 3.3499549865722655,
"step": 2370
},
{
"epoch": 0.4562920268972142,
"grad_norm": 0.6985915303230286,
"learning_rate": 0.0001838293331057048,
"loss": 3.349627685546875,
"step": 2375
},
{
"epoch": 0.457252641690682,
"grad_norm": 0.6404640674591064,
"learning_rate": 0.00018336486487633528,
"loss": 3.347215270996094,
"step": 2380
},
{
"epoch": 0.45821325648414984,
"grad_norm": 1.0900517702102661,
"learning_rate": 0.000182900059847158,
"loss": 3.346749114990234,
"step": 2385
},
{
"epoch": 0.45917387127761766,
"grad_norm": 0.9426947832107544,
"learning_rate": 0.00018243492271012202,
"loss": 3.3453174591064454,
"step": 2390
},
{
"epoch": 0.4601344860710855,
"grad_norm": 0.8978161215782166,
"learning_rate": 0.00018196945816052867,
"loss": 3.3470741271972657,
"step": 2395
},
{
"epoch": 0.4610951008645533,
"grad_norm": 1.0698800086975098,
"learning_rate": 0.00018150367089698452,
"loss": 3.3448417663574217,
"step": 2400
},
{
"epoch": 0.4620557156580211,
"grad_norm": 0.9989749193191528,
"learning_rate": 0.00018103756562135373,
"loss": 3.3498584747314455,
"step": 2405
},
{
"epoch": 0.46301633045148893,
"grad_norm": 1.1512004137039185,
"learning_rate": 0.0001805711470387105,
"loss": 3.344708251953125,
"step": 2410
},
{
"epoch": 0.46397694524495675,
"grad_norm": 0.7708789706230164,
"learning_rate": 0.00018010441985729183,
"loss": 3.341356658935547,
"step": 2415
},
{
"epoch": 0.46493756003842457,
"grad_norm": 0.8291172385215759,
"learning_rate": 0.00017963738878844966,
"loss": 3.3480514526367187,
"step": 2420
},
{
"epoch": 0.4658981748318924,
"grad_norm": 0.8072155714035034,
"learning_rate": 0.00017917005854660374,
"loss": 3.3407440185546875,
"step": 2425
},
{
"epoch": 0.4668587896253602,
"grad_norm": 1.1140457391738892,
"learning_rate": 0.00017870243384919364,
"loss": 3.3480880737304686,
"step": 2430
},
{
"epoch": 0.4678194044188281,
"grad_norm": 1.1302070617675781,
"learning_rate": 0.0001782345194166314,
"loss": 3.3469482421875,
"step": 2435
},
{
"epoch": 0.4687800192122959,
"grad_norm": 0.835529625415802,
"learning_rate": 0.00017776631997225365,
"loss": 3.341139221191406,
"step": 2440
},
{
"epoch": 0.4697406340057637,
"grad_norm": 0.8538889288902283,
"learning_rate": 0.0001772978402422742,
"loss": 3.342560958862305,
"step": 2445
},
{
"epoch": 0.47070124879923153,
"grad_norm": 0.904460608959198,
"learning_rate": 0.0001768290849557361,
"loss": 3.3424705505371093,
"step": 2450
},
{
"epoch": 0.47166186359269935,
"grad_norm": 0.8424951434135437,
"learning_rate": 0.00017636005884446397,
"loss": 3.3413028717041016,
"step": 2455
},
{
"epoch": 0.47262247838616717,
"grad_norm": 0.924486517906189,
"learning_rate": 0.00017589076664301637,
"loss": 3.3444580078125,
"step": 2460
},
{
"epoch": 0.473583093179635,
"grad_norm": 0.9826391935348511,
"learning_rate": 0.00017542121308863776,
"loss": 3.3431236267089846,
"step": 2465
},
{
"epoch": 0.4745437079731028,
"grad_norm": 1.2090203762054443,
"learning_rate": 0.00017495140292121084,
"loss": 3.344011688232422,
"step": 2470
},
{
"epoch": 0.4755043227665706,
"grad_norm": 0.7930108904838562,
"learning_rate": 0.0001744813408832088,
"loss": 3.3414871215820314,
"step": 2475
},
{
"epoch": 0.47646493756003844,
"grad_norm": 0.7430881857872009,
"learning_rate": 0.0001740110317196472,
"loss": 3.3419719696044923,
"step": 2480
},
{
"epoch": 0.47742555235350626,
"grad_norm": 1.0060484409332275,
"learning_rate": 0.0001735404801780362,
"loss": 3.3399085998535156,
"step": 2485
},
{
"epoch": 0.4783861671469741,
"grad_norm": 1.0594747066497803,
"learning_rate": 0.0001730696910083326,
"loss": 3.3429637908935548,
"step": 2490
},
{
"epoch": 0.4793467819404419,
"grad_norm": 0.9936187863349915,
"learning_rate": 0.0001725986689628921,
"loss": 3.3392757415771483,
"step": 2495
},
{
"epoch": 0.4803073967339097,
"grad_norm": 0.6973989009857178,
"learning_rate": 0.00017212741879642096,
"loss": 3.3416038513183595,
"step": 2500
},
{
"epoch": 0.4812680115273775,
"grad_norm": 0.5994371771812439,
"learning_rate": 0.00017165594526592833,
"loss": 3.3429832458496094,
"step": 2505
},
{
"epoch": 0.48222862632084534,
"grad_norm": 0.8716941475868225,
"learning_rate": 0.000171184253130678,
"loss": 3.339080810546875,
"step": 2510
},
{
"epoch": 0.48318924111431316,
"grad_norm": 0.7032945156097412,
"learning_rate": 0.00017071234715214045,
"loss": 3.340536880493164,
"step": 2515
},
{
"epoch": 0.484149855907781,
"grad_norm": 0.7946033477783203,
"learning_rate": 0.0001702402320939449,
"loss": 3.3366943359375,
"step": 2520
},
{
"epoch": 0.4851104707012488,
"grad_norm": 0.8639918565750122,
"learning_rate": 0.00016976791272183098,
"loss": 3.3365756988525392,
"step": 2525
},
{
"epoch": 0.4860710854947166,
"grad_norm": 0.7089393734931946,
"learning_rate": 0.0001692953938036008,
"loss": 3.3373767852783205,
"step": 2530
},
{
"epoch": 0.48703170028818443,
"grad_norm": 0.745219349861145,
"learning_rate": 0.00016882268010907087,
"loss": 3.339314270019531,
"step": 2535
},
{
"epoch": 0.48799231508165225,
"grad_norm": 0.733579695224762,
"learning_rate": 0.00016834977641002377,
"loss": 3.3369155883789063,
"step": 2540
},
{
"epoch": 0.48895292987512007,
"grad_norm": 0.808840274810791,
"learning_rate": 0.00016787668748016008,
"loss": 3.336173248291016,
"step": 2545
},
{
"epoch": 0.4899135446685879,
"grad_norm": 0.885305643081665,
"learning_rate": 0.00016740341809505017,
"loss": 3.34091796875,
"step": 2550
},
{
"epoch": 0.4908741594620557,
"grad_norm": 0.8833165764808655,
"learning_rate": 0.000166929973032086,
"loss": 3.339965057373047,
"step": 2555
},
{
"epoch": 0.4918347742555235,
"grad_norm": 0.948832094669342,
"learning_rate": 0.0001664563570704329,
"loss": 3.341967010498047,
"step": 2560
},
{
"epoch": 0.49279538904899134,
"grad_norm": 0.7376810908317566,
"learning_rate": 0.0001659825749909814,
"loss": 3.339089584350586,
"step": 2565
},
{
"epoch": 0.49375600384245916,
"grad_norm": 0.6202611327171326,
"learning_rate": 0.00016550863157629888,
"loss": 3.337108612060547,
"step": 2570
},
{
"epoch": 0.494716618635927,
"grad_norm": 1.3193303346633911,
"learning_rate": 0.00016503453161058123,
"loss": 3.337067413330078,
"step": 2575
},
{
"epoch": 0.4956772334293948,
"grad_norm": 0.9450001120567322,
"learning_rate": 0.00016456027987960466,
"loss": 3.3382850646972657,
"step": 2580
},
{
"epoch": 0.4966378482228626,
"grad_norm": 0.9356205463409424,
"learning_rate": 0.00016408588117067743,
"loss": 3.334228515625,
"step": 2585
},
{
"epoch": 0.49759846301633043,
"grad_norm": 0.9595420360565186,
"learning_rate": 0.00016361134027259136,
"loss": 3.3412109375,
"step": 2590
},
{
"epoch": 0.49855907780979825,
"grad_norm": 0.9596777558326721,
"learning_rate": 0.00016313666197557373,
"loss": 3.337894058227539,
"step": 2595
},
{
"epoch": 0.49951969260326606,
"grad_norm": 0.6967231631278992,
"learning_rate": 0.00016266185107123864,
"loss": 3.337984085083008,
"step": 2600
},
{
"epoch": 0.5004803073967339,
"grad_norm": 0.638558030128479,
"learning_rate": 0.00016218691235253893,
"loss": 3.3327606201171873,
"step": 2605
},
{
"epoch": 0.5014409221902018,
"grad_norm": 1.0894724130630493,
"learning_rate": 0.0001617118506137175,
"loss": 3.3385719299316405,
"step": 2610
},
{
"epoch": 0.5024015369836695,
"grad_norm": 0.9641280174255371,
"learning_rate": 0.00016123667065025914,
"loss": 3.3361286163330077,
"step": 2615
},
{
"epoch": 0.5033621517771374,
"grad_norm": 0.8381847143173218,
"learning_rate": 0.00016076137725884218,
"loss": 3.334253692626953,
"step": 2620
},
{
"epoch": 0.5043227665706052,
"grad_norm": 0.8258698582649231,
"learning_rate": 0.0001602859752372897,
"loss": 3.3326160430908205,
"step": 2625
},
{
"epoch": 0.505283381364073,
"grad_norm": 0.6313220262527466,
"learning_rate": 0.00015981046938452146,
"loss": 3.3321769714355467,
"step": 2630
},
{
"epoch": 0.5062439961575408,
"grad_norm": 0.6548064947128296,
"learning_rate": 0.0001593348645005054,
"loss": 3.3319812774658204,
"step": 2635
},
{
"epoch": 0.5072046109510087,
"grad_norm": 0.9846540093421936,
"learning_rate": 0.00015885916538620906,
"loss": 3.3337692260742187,
"step": 2640
},
{
"epoch": 0.5081652257444764,
"grad_norm": 0.9483662843704224,
"learning_rate": 0.00015838337684355121,
"loss": 3.335358810424805,
"step": 2645
},
{
"epoch": 0.5091258405379443,
"grad_norm": 0.7134802341461182,
"learning_rate": 0.0001579075036753533,
"loss": 3.333547592163086,
"step": 2650
},
{
"epoch": 0.5100864553314121,
"grad_norm": 0.6113592386245728,
"learning_rate": 0.00015743155068529102,
"loss": 3.3339920043945312,
"step": 2655
},
{
"epoch": 0.5110470701248799,
"grad_norm": 0.7905352115631104,
"learning_rate": 0.0001569555226778459,
"loss": 3.3328788757324217,
"step": 2660
},
{
"epoch": 0.5120076849183477,
"grad_norm": 0.7155654430389404,
"learning_rate": 0.0001564794244582567,
"loss": 3.3339805603027344,
"step": 2665
},
{
"epoch": 0.5129682997118156,
"grad_norm": 0.6985665559768677,
"learning_rate": 0.0001560032608324709,
"loss": 3.332271194458008,
"step": 2670
},
{
"epoch": 0.5139289145052833,
"grad_norm": 0.9106906056404114,
"learning_rate": 0.00015552703660709618,
"loss": 3.3307167053222657,
"step": 2675
},
{
"epoch": 0.5148895292987512,
"grad_norm": 1.00930917263031,
"learning_rate": 0.00015505075658935207,
"loss": 3.3333648681640624,
"step": 2680
},
{
"epoch": 0.515850144092219,
"grad_norm": 0.740381121635437,
"learning_rate": 0.0001545744255870212,
"loss": 3.335179901123047,
"step": 2685
},
{
"epoch": 0.5168107588856868,
"grad_norm": 0.5899009704589844,
"learning_rate": 0.00015409804840840088,
"loss": 3.3299629211425783,
"step": 2690
},
{
"epoch": 0.5177713736791547,
"grad_norm": 1.0662180185317993,
"learning_rate": 0.0001536216298622545,
"loss": 3.332952880859375,
"step": 2695
},
{
"epoch": 0.5187319884726225,
"grad_norm": 0.8961836099624634,
"learning_rate": 0.00015314517475776318,
"loss": 3.331147003173828,
"step": 2700
},
{
"epoch": 0.5196926032660903,
"grad_norm": 0.8946348428726196,
"learning_rate": 0.00015266868790447685,
"loss": 3.3304420471191407,
"step": 2705
},
{
"epoch": 0.5206532180595581,
"grad_norm": 1.0596193075180054,
"learning_rate": 0.0001521921741122661,
"loss": 3.3243175506591798,
"step": 2710
},
{
"epoch": 0.521613832853026,
"grad_norm": 1.1533540487289429,
"learning_rate": 0.00015171563819127342,
"loss": 3.329667663574219,
"step": 2715
},
{
"epoch": 0.5225744476464937,
"grad_norm": 0.8710746765136719,
"learning_rate": 0.00015123908495186464,
"loss": 3.3310035705566405,
"step": 2720
},
{
"epoch": 0.5235350624399616,
"grad_norm": 0.7002533078193665,
"learning_rate": 0.0001507625192045804,
"loss": 3.3324230194091795,
"step": 2725
},
{
"epoch": 0.5244956772334294,
"grad_norm": 1.0313575267791748,
"learning_rate": 0.00015028594576008773,
"loss": 3.3279163360595705,
"step": 2730
},
{
"epoch": 0.5254562920268973,
"grad_norm": 0.832625687122345,
"learning_rate": 0.00014980936942913113,
"loss": 3.331475830078125,
"step": 2735
},
{
"epoch": 0.526416906820365,
"grad_norm": 1.0033438205718994,
"learning_rate": 0.00014933279502248444,
"loss": 3.326156234741211,
"step": 2740
},
{
"epoch": 0.5273775216138329,
"grad_norm": 0.5835141539573669,
"learning_rate": 0.000148856227350902,
"loss": 3.325116729736328,
"step": 2745
},
{
"epoch": 0.5283381364073007,
"grad_norm": 0.5704639554023743,
"learning_rate": 0.00014837967122507015,
"loss": 3.3299407958984375,
"step": 2750
},
{
"epoch": 0.5292987512007685,
"grad_norm": 1.1036860942840576,
"learning_rate": 0.00014790313145555863,
"loss": 3.327469253540039,
"step": 2755
},
{
"epoch": 0.5302593659942363,
"grad_norm": 0.8866048455238342,
"learning_rate": 0.00014742661285277228,
"loss": 3.3271484375,
"step": 2760
},
{
"epoch": 0.5312199807877042,
"grad_norm": 0.8478221297264099,
"learning_rate": 0.00014695012022690205,
"loss": 3.3288875579833985,
"step": 2765
},
{
"epoch": 0.5321805955811719,
"grad_norm": 0.6032402515411377,
"learning_rate": 0.00014647365838787673,
"loss": 3.3231765747070314,
"step": 2770
},
{
"epoch": 0.5331412103746398,
"grad_norm": 1.1227121353149414,
"learning_rate": 0.00014599723214531434,
"loss": 3.326404571533203,
"step": 2775
},
{
"epoch": 0.5341018251681076,
"grad_norm": 0.9632681012153625,
"learning_rate": 0.0001455208463084737,
"loss": 3.32501106262207,
"step": 2780
},
{
"epoch": 0.5350624399615754,
"grad_norm": 0.6381242871284485,
"learning_rate": 0.00014504450568620557,
"loss": 3.328451919555664,
"step": 2785
},
{
"epoch": 0.5360230547550432,
"grad_norm": 0.818413496017456,
"learning_rate": 0.00014456821508690432,
"loss": 3.325485610961914,
"step": 2790
},
{
"epoch": 0.5369836695485111,
"grad_norm": 0.9857754707336426,
"learning_rate": 0.0001440919793184595,
"loss": 3.323963928222656,
"step": 2795
},
{
"epoch": 0.5379442843419788,
"grad_norm": 0.7225484848022461,
"learning_rate": 0.00014361580318820696,
"loss": 3.3251392364501955,
"step": 2800
},
{
"epoch": 0.5389048991354467,
"grad_norm": 0.8891339898109436,
"learning_rate": 0.00014313969150288083,
"loss": 3.3246475219726563,
"step": 2805
},
{
"epoch": 0.5398655139289145,
"grad_norm": 1.0610110759735107,
"learning_rate": 0.00014266364906856442,
"loss": 3.3243381500244142,
"step": 2810
},
{
"epoch": 0.5408261287223823,
"grad_norm": 0.7058207392692566,
"learning_rate": 0.00014218768069064214,
"loss": 3.32562141418457,
"step": 2815
},
{
"epoch": 0.5417867435158501,
"grad_norm": 1.1421717405319214,
"learning_rate": 0.00014171179117375082,
"loss": 3.3252777099609374,
"step": 2820
},
{
"epoch": 0.542747358309318,
"grad_norm": 0.7852960228919983,
"learning_rate": 0.00014123598532173113,
"loss": 3.3269309997558594,
"step": 2825
},
{
"epoch": 0.5437079731027857,
"grad_norm": 0.9386373162269592,
"learning_rate": 0.00014076026793757943,
"loss": 3.325482940673828,
"step": 2830
},
{
"epoch": 0.5446685878962536,
"grad_norm": 0.9038012027740479,
"learning_rate": 0.00014028464382339877,
"loss": 3.3256744384765624,
"step": 2835
},
{
"epoch": 0.5456292026897214,
"grad_norm": 0.6614556312561035,
"learning_rate": 0.0001398091177803509,
"loss": 3.3238311767578126,
"step": 2840
},
{
"epoch": 0.5465898174831892,
"grad_norm": 0.7346850037574768,
"learning_rate": 0.00013933369460860748,
"loss": 3.3215705871582033,
"step": 2845
},
{
"epoch": 0.547550432276657,
"grad_norm": 0.9192922115325928,
"learning_rate": 0.00013885837910730168,
"loss": 3.3206497192382813,
"step": 2850
},
{
"epoch": 0.5485110470701249,
"grad_norm": 0.8936217427253723,
"learning_rate": 0.00013838317607448004,
"loss": 3.325290298461914,
"step": 2855
},
{
"epoch": 0.5494716618635928,
"grad_norm": 0.9450289607048035,
"learning_rate": 0.00013790809030705354,
"loss": 3.324379730224609,
"step": 2860
},
{
"epoch": 0.5504322766570605,
"grad_norm": 0.868273913860321,
"learning_rate": 0.00013743312660074962,
"loss": 3.3241962432861327,
"step": 2865
},
{
"epoch": 0.5513928914505284,
"grad_norm": 0.6488667130470276,
"learning_rate": 0.00013695828975006336,
"loss": 3.3240901947021486,
"step": 2870
},
{
"epoch": 0.5523535062439962,
"grad_norm": 0.4751059114933014,
"learning_rate": 0.00013648358454820957,
"loss": 3.3235042572021483,
"step": 2875
},
{
"epoch": 0.553314121037464,
"grad_norm": 1.1512478590011597,
"learning_rate": 0.00013600901578707402,
"loss": 3.3230491638183595,
"step": 2880
},
{
"epoch": 0.5542747358309318,
"grad_norm": 0.8698762059211731,
"learning_rate": 0.00013553458825716502,
"loss": 3.324365997314453,
"step": 2885
},
{
"epoch": 0.5552353506243997,
"grad_norm": 0.8694410920143127,
"learning_rate": 0.0001350603067475655,
"loss": 3.324441146850586,
"step": 2890
},
{
"epoch": 0.5561959654178674,
"grad_norm": 0.8007134199142456,
"learning_rate": 0.00013458617604588418,
"loss": 3.3229400634765627,
"step": 2895
},
{
"epoch": 0.5571565802113353,
"grad_norm": 0.765385091304779,
"learning_rate": 0.00013411220093820773,
"loss": 3.323167419433594,
"step": 2900
},
{
"epoch": 0.5581171950048031,
"grad_norm": 0.4617004096508026,
"learning_rate": 0.00013363838620905188,
"loss": 3.320812225341797,
"step": 2905
},
{
"epoch": 0.5590778097982709,
"grad_norm": 0.7083408832550049,
"learning_rate": 0.00013316473664131347,
"loss": 3.324010467529297,
"step": 2910
},
{
"epoch": 0.5600384245917387,
"grad_norm": 0.8876819014549255,
"learning_rate": 0.00013269125701622243,
"loss": 3.3241298675537108,
"step": 2915
},
{
"epoch": 0.5609990393852066,
"grad_norm": 0.806339681148529,
"learning_rate": 0.00013221795211329281,
"loss": 3.319907379150391,
"step": 2920
},
{
"epoch": 0.5619596541786743,
"grad_norm": 0.6743384599685669,
"learning_rate": 0.00013174482671027526,
"loss": 3.319793701171875,
"step": 2925
},
{
"epoch": 0.5629202689721422,
"grad_norm": 0.6159988641738892,
"learning_rate": 0.00013127188558310823,
"loss": 3.323670196533203,
"step": 2930
},
{
"epoch": 0.56388088376561,
"grad_norm": 0.9810566902160645,
"learning_rate": 0.0001307991335058702,
"loss": 3.3209304809570312,
"step": 2935
},
{
"epoch": 0.5648414985590778,
"grad_norm": 0.913245677947998,
"learning_rate": 0.00013032657525073122,
"loss": 3.320903778076172,
"step": 2940
},
{
"epoch": 0.5658021133525456,
"grad_norm": 0.6053956151008606,
"learning_rate": 0.00012985421558790473,
"loss": 3.3186370849609377,
"step": 2945
},
{
"epoch": 0.5667627281460135,
"grad_norm": 0.6618540287017822,
"learning_rate": 0.00012938205928559964,
"loss": 3.3157257080078124,
"step": 2950
},
{
"epoch": 0.5677233429394812,
"grad_norm": 0.5338152647018433,
"learning_rate": 0.00012891011110997187,
"loss": 3.3171661376953123,
"step": 2955
},
{
"epoch": 0.5686839577329491,
"grad_norm": 0.7981716394424438,
"learning_rate": 0.0001284383758250767,
"loss": 3.3200511932373047,
"step": 2960
},
{
"epoch": 0.5696445725264169,
"grad_norm": 0.8445151448249817,
"learning_rate": 0.00012796685819282009,
"loss": 3.319135284423828,
"step": 2965
},
{
"epoch": 0.5706051873198847,
"grad_norm": 0.6141214370727539,
"learning_rate": 0.0001274955629729111,
"loss": 3.3177207946777343,
"step": 2970
},
{
"epoch": 0.5715658021133525,
"grad_norm": 1.2424954175949097,
"learning_rate": 0.0001270244949228136,
"loss": 3.322146987915039,
"step": 2975
},
{
"epoch": 0.5725264169068204,
"grad_norm": 0.7265079021453857,
"learning_rate": 0.00012655365879769826,
"loss": 3.3195030212402346,
"step": 2980
},
{
"epoch": 0.5734870317002881,
"grad_norm": 0.5697556734085083,
"learning_rate": 0.00012608305935039475,
"loss": 3.319179153442383,
"step": 2985
},
{
"epoch": 0.574447646493756,
"grad_norm": 0.6604142189025879,
"learning_rate": 0.00012561270133134344,
"loss": 3.317556381225586,
"step": 2990
},
{
"epoch": 0.5754082612872238,
"grad_norm": 0.6255987882614136,
"learning_rate": 0.00012514258948854773,
"loss": 3.3171707153320313,
"step": 2995
},
{
"epoch": 0.5763688760806917,
"grad_norm": 0.6028192639350891,
"learning_rate": 0.00012467272856752593,
"loss": 3.317168426513672,
"step": 3000
},
{
"epoch": 0.5773294908741594,
"grad_norm": 0.8794112801551819,
"learning_rate": 0.0001242031233112634,
"loss": 3.3190616607666015,
"step": 3005
},
{
"epoch": 0.5782901056676273,
"grad_norm": 0.6320680975914001,
"learning_rate": 0.00012373377846016493,
"loss": 3.315277099609375,
"step": 3010
},
{
"epoch": 0.579250720461095,
"grad_norm": 0.5714786052703857,
"learning_rate": 0.0001232646987520064,
"loss": 3.3192188262939455,
"step": 3015
},
{
"epoch": 0.5802113352545629,
"grad_norm": 0.7069401144981384,
"learning_rate": 0.00012279588892188736,
"loss": 3.317308807373047,
"step": 3020
},
{
"epoch": 0.5811719500480308,
"grad_norm": 0.7392922639846802,
"learning_rate": 0.000122327353702183,
"loss": 3.317064666748047,
"step": 3025
},
{
"epoch": 0.5821325648414986,
"grad_norm": 1.0391535758972168,
"learning_rate": 0.0001218590978224966,
"loss": 3.313740539550781,
"step": 3030
},
{
"epoch": 0.5830931796349664,
"grad_norm": 0.7290921211242676,
"learning_rate": 0.00012139112600961159,
"loss": 3.317361831665039,
"step": 3035
},
{
"epoch": 0.5840537944284342,
"grad_norm": 0.5767372250556946,
"learning_rate": 0.00012092344298744383,
"loss": 3.3169986724853517,
"step": 3040
},
{
"epoch": 0.5850144092219021,
"grad_norm": 1.0307767391204834,
"learning_rate": 0.00012045605347699411,
"loss": 3.3194618225097656,
"step": 3045
},
{
"epoch": 0.5859750240153698,
"grad_norm": 0.622552752494812,
"learning_rate": 0.00011998896219630029,
"loss": 3.316600799560547,
"step": 3050
},
{
"epoch": 0.5869356388088377,
"grad_norm": 0.5956411957740784,
"learning_rate": 0.0001195221738603899,
"loss": 3.312654495239258,
"step": 3055
},
{
"epoch": 0.5878962536023055,
"grad_norm": 0.7186912298202515,
"learning_rate": 0.00011905569318123223,
"loss": 3.316633605957031,
"step": 3060
},
{
"epoch": 0.5888568683957733,
"grad_norm": 0.6377474069595337,
"learning_rate": 0.00011858952486769114,
"loss": 3.315812683105469,
"step": 3065
},
{
"epoch": 0.5898174831892411,
"grad_norm": 0.6208492517471313,
"learning_rate": 0.00011812367362547716,
"loss": 3.3148754119873045,
"step": 3070
},
{
"epoch": 0.590778097982709,
"grad_norm": 0.600408673286438,
"learning_rate": 0.0001176581441571002,
"loss": 3.3176692962646483,
"step": 3075
},
{
"epoch": 0.5917387127761767,
"grad_norm": 0.8082125782966614,
"learning_rate": 0.00011719294116182217,
"loss": 3.314889144897461,
"step": 3080
},
{
"epoch": 0.5926993275696446,
"grad_norm": 0.6132190823554993,
"learning_rate": 0.00011672806933560925,
"loss": 3.3137298583984376,
"step": 3085
},
{
"epoch": 0.5936599423631124,
"grad_norm": 0.6647592782974243,
"learning_rate": 0.00011626353337108476,
"loss": 3.3115211486816407,
"step": 3090
},
{
"epoch": 0.5946205571565802,
"grad_norm": 0.5742537379264832,
"learning_rate": 0.00011579933795748164,
"loss": 3.3127456665039063,
"step": 3095
},
{
"epoch": 0.595581171950048,
"grad_norm": 0.6123734712600708,
"learning_rate": 0.00011533548778059508,
"loss": 3.315221405029297,
"step": 3100
},
{
"epoch": 0.5965417867435159,
"grad_norm": 0.8112967014312744,
"learning_rate": 0.00011487198752273552,
"loss": 3.315155792236328,
"step": 3105
},
{
"epoch": 0.5975024015369836,
"grad_norm": 1.0377261638641357,
"learning_rate": 0.0001144088418626809,
"loss": 3.31671142578125,
"step": 3110
},
{
"epoch": 0.5984630163304515,
"grad_norm": 0.6357848644256592,
"learning_rate": 0.00011394605547562989,
"loss": 3.314447784423828,
"step": 3115
},
{
"epoch": 0.5994236311239193,
"grad_norm": 0.5305024981498718,
"learning_rate": 0.00011348363303315434,
"loss": 3.313646697998047,
"step": 3120
},
{
"epoch": 0.6003842459173871,
"grad_norm": 0.6368167400360107,
"learning_rate": 0.00011302157920315244,
"loss": 3.3107112884521483,
"step": 3125
},
{
"epoch": 0.6013448607108549,
"grad_norm": 0.6398528218269348,
"learning_rate": 0.00011255989864980133,
"loss": 3.3126060485839846,
"step": 3130
},
{
"epoch": 0.6023054755043228,
"grad_norm": 0.7452788352966309,
"learning_rate": 0.00011209859603351015,
"loss": 3.3152156829833985,
"step": 3135
},
{
"epoch": 0.6032660902977905,
"grad_norm": 0.6287251710891724,
"learning_rate": 0.00011163767601087301,
"loss": 3.3126220703125,
"step": 3140
},
{
"epoch": 0.6042267050912584,
"grad_norm": 0.6889411807060242,
"learning_rate": 0.00011117714323462186,
"loss": 3.3144298553466798,
"step": 3145
},
{
"epoch": 0.6051873198847262,
"grad_norm": 0.6560638546943665,
"learning_rate": 0.00011071700235357979,
"loss": 3.311948776245117,
"step": 3150
},
{
"epoch": 0.6061479346781941,
"grad_norm": 0.4844714403152466,
"learning_rate": 0.00011025725801261373,
"loss": 3.3093055725097655,
"step": 3155
},
{
"epoch": 0.6071085494716618,
"grad_norm": 0.8295413255691528,
"learning_rate": 0.00010979791485258788,
"loss": 3.3112449645996094,
"step": 3160
},
{
"epoch": 0.6080691642651297,
"grad_norm": 0.8754414916038513,
"learning_rate": 0.00010933897751031671,
"loss": 3.311637115478516,
"step": 3165
},
{
"epoch": 0.6090297790585975,
"grad_norm": 0.5046402812004089,
"learning_rate": 0.00010888045061851807,
"loss": 3.309844207763672,
"step": 3170
},
{
"epoch": 0.6099903938520653,
"grad_norm": 0.6311172246932983,
"learning_rate": 0.00010842233880576681,
"loss": 3.3105926513671875,
"step": 3175
},
{
"epoch": 0.6109510086455331,
"grad_norm": 0.6714319586753845,
"learning_rate": 0.0001079646466964475,
"loss": 3.308180236816406,
"step": 3180
},
{
"epoch": 0.611911623439001,
"grad_norm": 0.6159961223602295,
"learning_rate": 0.00010750737891070824,
"loss": 3.3096988677978514,
"step": 3185
},
{
"epoch": 0.6128722382324687,
"grad_norm": 0.5705515742301941,
"learning_rate": 0.00010705054006441371,
"loss": 3.3115421295166017,
"step": 3190
},
{
"epoch": 0.6138328530259366,
"grad_norm": 0.49575576186180115,
"learning_rate": 0.00010659413476909865,
"loss": 3.3115379333496096,
"step": 3195
},
{
"epoch": 0.6147934678194045,
"grad_norm": 1.0461304187774658,
"learning_rate": 0.00010613816763192152,
"loss": 3.3100265502929687,
"step": 3200
},
{
"epoch": 0.6157540826128722,
"grad_norm": 0.6978023648262024,
"learning_rate": 0.00010568264325561763,
"loss": 3.308924102783203,
"step": 3205
},
{
"epoch": 0.6167146974063401,
"grad_norm": 0.7631133198738098,
"learning_rate": 0.000105227566238453,
"loss": 3.3145515441894533,
"step": 3210
},
{
"epoch": 0.6176753121998079,
"grad_norm": 0.8663102388381958,
"learning_rate": 0.00010477294117417762,
"loss": 3.3145401000976564,
"step": 3215
},
{
"epoch": 0.6186359269932757,
"grad_norm": 0.4759737551212311,
"learning_rate": 0.00010431877265197955,
"loss": 3.311273193359375,
"step": 3220
},
{
"epoch": 0.6195965417867435,
"grad_norm": 0.47145599126815796,
"learning_rate": 0.00010386506525643808,
"loss": 3.306778335571289,
"step": 3225
},
{
"epoch": 0.6205571565802114,
"grad_norm": 0.6582973599433899,
"learning_rate": 0.00010341182356747771,
"loss": 3.3100128173828125,
"step": 3230
},
{
"epoch": 0.6215177713736791,
"grad_norm": 0.6123160123825073,
"learning_rate": 0.00010295905216032203,
"loss": 3.3093284606933593,
"step": 3235
},
{
"epoch": 0.622478386167147,
"grad_norm": 0.5266756415367126,
"learning_rate": 0.00010250675560544717,
"loss": 3.3095718383789063,
"step": 3240
},
{
"epoch": 0.6234390009606148,
"grad_norm": 0.7521538138389587,
"learning_rate": 0.00010205493846853618,
"loss": 3.307404327392578,
"step": 3245
},
{
"epoch": 0.6243996157540826,
"grad_norm": 0.5699673891067505,
"learning_rate": 0.00010160360531043239,
"loss": 3.3089645385742186,
"step": 3250
},
{
"epoch": 0.6253602305475504,
"grad_norm": 0.7614957690238953,
"learning_rate": 0.00010115276068709377,
"loss": 3.3093006134033205,
"step": 3255
},
{
"epoch": 0.6263208453410183,
"grad_norm": 0.652992844581604,
"learning_rate": 0.00010070240914954676,
"loss": 3.306406784057617,
"step": 3260
},
{
"epoch": 0.627281460134486,
"grad_norm": 0.5600461363792419,
"learning_rate": 0.00010025255524384033,
"loss": 3.3075355529785155,
"step": 3265
},
{
"epoch": 0.6282420749279539,
"grad_norm": 0.7442777156829834,
"learning_rate": 9.980320351100028e-05,
"loss": 3.308829498291016,
"step": 3270
},
{
"epoch": 0.6292026897214217,
"grad_norm": 0.8243304491043091,
"learning_rate": 9.935435848698307e-05,
"loss": 3.307141876220703,
"step": 3275
},
{
"epoch": 0.6301633045148896,
"grad_norm": 0.6337246298789978,
"learning_rate": 9.890602470263037e-05,
"loss": 3.311864471435547,
"step": 3280
},
{
"epoch": 0.6311239193083573,
"grad_norm": 0.6521264910697937,
"learning_rate": 9.845820668362308e-05,
"loss": 3.308788299560547,
"step": 3285
},
{
"epoch": 0.6320845341018252,
"grad_norm": 0.6866946220397949,
"learning_rate": 9.801090895043566e-05,
"loss": 3.307585906982422,
"step": 3290
},
{
"epoch": 0.633045148895293,
"grad_norm": 0.6082757711410522,
"learning_rate": 9.756413601829083e-05,
"loss": 3.307001495361328,
"step": 3295
},
{
"epoch": 0.6340057636887608,
"grad_norm": 0.515135645866394,
"learning_rate": 9.711789239711344e-05,
"loss": 3.308676528930664,
"step": 3300
},
{
"epoch": 0.6349663784822286,
"grad_norm": 0.503957211971283,
"learning_rate": 9.667218259148547e-05,
"loss": 3.3088623046875,
"step": 3305
},
{
"epoch": 0.6359269932756965,
"grad_norm": 0.663313627243042,
"learning_rate": 9.62270111006001e-05,
"loss": 3.305764007568359,
"step": 3310
},
{
"epoch": 0.6368876080691642,
"grad_norm": 0.7052382826805115,
"learning_rate": 9.57823824182168e-05,
"loss": 3.305963134765625,
"step": 3315
},
{
"epoch": 0.6378482228626321,
"grad_norm": 0.5659816861152649,
"learning_rate": 9.53383010326155e-05,
"loss": 3.306070327758789,
"step": 3320
},
{
"epoch": 0.6388088376560999,
"grad_norm": 0.46071678400039673,
"learning_rate": 9.489477142655147e-05,
"loss": 3.3049732208251954,
"step": 3325
},
{
"epoch": 0.6397694524495677,
"grad_norm": 0.9557482004165649,
"learning_rate": 9.445179807721012e-05,
"loss": 3.3062675476074217,
"step": 3330
},
{
"epoch": 0.6407300672430355,
"grad_norm": 0.8419716954231262,
"learning_rate": 9.400938545616173e-05,
"loss": 3.308432769775391,
"step": 3335
},
{
"epoch": 0.6416906820365034,
"grad_norm": 0.5912371873855591,
"learning_rate": 9.356753802931646e-05,
"loss": 3.3038196563720703,
"step": 3340
},
{
"epoch": 0.6426512968299711,
"grad_norm": 0.4956425726413727,
"learning_rate": 9.312626025687897e-05,
"loss": 3.3066734313964843,
"step": 3345
},
{
"epoch": 0.643611911623439,
"grad_norm": 0.5250487923622131,
"learning_rate": 9.268555659330364e-05,
"loss": 3.307097625732422,
"step": 3350
},
{
"epoch": 0.6445725264169068,
"grad_norm": 0.5282003283500671,
"learning_rate": 9.22454314872496e-05,
"loss": 3.305289459228516,
"step": 3355
},
{
"epoch": 0.6455331412103746,
"grad_norm": 0.4647374749183655,
"learning_rate": 9.180588938153566e-05,
"loss": 3.3030609130859374,
"step": 3360
},
{
"epoch": 0.6464937560038425,
"grad_norm": 1.0786409378051758,
"learning_rate": 9.136693471309568e-05,
"loss": 3.311345672607422,
"step": 3365
},
{
"epoch": 0.6474543707973103,
"grad_norm": 0.6998493075370789,
"learning_rate": 9.092857191293356e-05,
"loss": 3.3091506958007812,
"step": 3370
},
{
"epoch": 0.6484149855907781,
"grad_norm": 0.446397066116333,
"learning_rate": 9.049080540607875e-05,
"loss": 3.3034263610839845,
"step": 3375
},
{
"epoch": 0.6493756003842459,
"grad_norm": 0.6110076308250427,
"learning_rate": 9.005363961154126e-05,
"loss": 3.3031074523925783,
"step": 3380
},
{
"epoch": 0.6503362151777138,
"grad_norm": 0.4611775279045105,
"learning_rate": 8.961707894226735e-05,
"loss": 3.3057632446289062,
"step": 3385
},
{
"epoch": 0.6512968299711815,
"grad_norm": 0.8725507855415344,
"learning_rate": 8.918112780509494e-05,
"loss": 3.3007755279541016,
"step": 3390
},
{
"epoch": 0.6522574447646494,
"grad_norm": 0.8378633856773376,
"learning_rate": 8.874579060070894e-05,
"loss": 3.3028465270996095,
"step": 3395
},
{
"epoch": 0.6532180595581172,
"grad_norm": 0.6364403367042542,
"learning_rate": 8.831107172359707e-05,
"loss": 3.3054805755615235,
"step": 3400
},
{
"epoch": 0.654178674351585,
"grad_norm": 0.46224355697631836,
"learning_rate": 8.787697556200519e-05,
"loss": 3.304944610595703,
"step": 3405
},
{
"epoch": 0.6551392891450528,
"grad_norm": 0.5552151799201965,
"learning_rate": 8.744350649789347e-05,
"loss": 3.301805114746094,
"step": 3410
},
{
"epoch": 0.6560999039385207,
"grad_norm": 0.6075546741485596,
"learning_rate": 8.701066890689166e-05,
"loss": 3.300829315185547,
"step": 3415
},
{
"epoch": 0.6570605187319885,
"grad_norm": 0.46151185035705566,
"learning_rate": 8.657846715825508e-05,
"loss": 3.30389289855957,
"step": 3420
},
{
"epoch": 0.6580211335254563,
"grad_norm": 0.5615620017051697,
"learning_rate": 8.61469056148209e-05,
"loss": 3.302476501464844,
"step": 3425
},
{
"epoch": 0.6589817483189241,
"grad_norm": 0.47145524621009827,
"learning_rate": 8.571598863296342e-05,
"loss": 3.303362274169922,
"step": 3430
},
{
"epoch": 0.659942363112392,
"grad_norm": 0.4290817081928253,
"learning_rate": 8.528572056255065e-05,
"loss": 3.3021663665771483,
"step": 3435
},
{
"epoch": 0.6609029779058597,
"grad_norm": 0.5213423371315002,
"learning_rate": 8.485610574690021e-05,
"loss": 3.3011238098144533,
"step": 3440
},
{
"epoch": 0.6618635926993276,
"grad_norm": 0.661228358745575,
"learning_rate": 8.442714852273523e-05,
"loss": 3.3015186309814455,
"step": 3445
},
{
"epoch": 0.6628242074927954,
"grad_norm": 0.9645004868507385,
"learning_rate": 8.399885322014123e-05,
"loss": 3.3042228698730467,
"step": 3450
},
{
"epoch": 0.6637848222862632,
"grad_norm": 0.5251511335372925,
"learning_rate": 8.35712241625216e-05,
"loss": 3.3040817260742186,
"step": 3455
},
{
"epoch": 0.664745437079731,
"grad_norm": 0.45383018255233765,
"learning_rate": 8.314426566655458e-05,
"loss": 3.3012584686279296,
"step": 3460
},
{
"epoch": 0.6657060518731989,
"grad_norm": 0.3945951461791992,
"learning_rate": 8.271798204214942e-05,
"loss": 3.3004776000976563,
"step": 3465
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.4676642119884491,
"learning_rate": 8.229237759240289e-05,
"loss": 3.2975730895996094,
"step": 3470
},
{
"epoch": 0.6676272814601345,
"grad_norm": 0.5338938236236572,
"learning_rate": 8.186745661355595e-05,
"loss": 3.297288513183594,
"step": 3475
},
{
"epoch": 0.6685878962536023,
"grad_norm": 0.6830979585647583,
"learning_rate": 8.144322339495012e-05,
"loss": 3.3035240173339844,
"step": 3480
},
{
"epoch": 0.6695485110470701,
"grad_norm": 0.4912271201610565,
"learning_rate": 8.101968221898453e-05,
"loss": 3.300017547607422,
"step": 3485
},
{
"epoch": 0.6705091258405379,
"grad_norm": 0.5433835387229919,
"learning_rate": 8.059683736107245e-05,
"loss": 3.304372787475586,
"step": 3490
},
{
"epoch": 0.6714697406340058,
"grad_norm": 0.43095162510871887,
"learning_rate": 8.017469308959823e-05,
"loss": 3.302097702026367,
"step": 3495
},
{
"epoch": 0.6724303554274735,
"grad_norm": 0.5721524953842163,
"learning_rate": 7.97532536658742e-05,
"loss": 3.302288055419922,
"step": 3500
},
{
"epoch": 0.6733909702209414,
"grad_norm": 0.8145022392272949,
"learning_rate": 7.933252334409766e-05,
"loss": 3.302367401123047,
"step": 3505
},
{
"epoch": 0.6743515850144092,
"grad_norm": 0.41017457842826843,
"learning_rate": 7.891250637130779e-05,
"loss": 3.3008522033691405,
"step": 3510
},
{
"epoch": 0.675312199807877,
"grad_norm": 0.4966427981853485,
"learning_rate": 7.849320698734306e-05,
"loss": 3.305078887939453,
"step": 3515
},
{
"epoch": 0.6762728146013448,
"grad_norm": 0.44679656624794006,
"learning_rate": 7.80746294247982e-05,
"loss": 3.297787094116211,
"step": 3520
},
{
"epoch": 0.6772334293948127,
"grad_norm": 0.5052957534790039,
"learning_rate": 7.765677790898155e-05,
"loss": 3.2971946716308596,
"step": 3525
},
{
"epoch": 0.6781940441882806,
"grad_norm": 0.5869050025939941,
"learning_rate": 7.723965665787255e-05,
"loss": 3.3033409118652344,
"step": 3530
},
{
"epoch": 0.6791546589817483,
"grad_norm": 0.5643454194068909,
"learning_rate": 7.682326988207877e-05,
"loss": 3.3006324768066406,
"step": 3535
},
{
"epoch": 0.6801152737752162,
"grad_norm": 0.45685872435569763,
"learning_rate": 7.640762178479382e-05,
"loss": 3.2982528686523436,
"step": 3540
},
{
"epoch": 0.681075888568684,
"grad_norm": 0.42118874192237854,
"learning_rate": 7.599271656175476e-05,
"loss": 3.303014373779297,
"step": 3545
},
{
"epoch": 0.6820365033621518,
"grad_norm": 0.482282817363739,
"learning_rate": 7.557855840119976e-05,
"loss": 3.2992012023925783,
"step": 3550
},
{
"epoch": 0.6829971181556196,
"grad_norm": 0.4499205946922302,
"learning_rate": 7.516515148382576e-05,
"loss": 3.2981277465820313,
"step": 3555
},
{
"epoch": 0.6839577329490875,
"grad_norm": 0.6683313846588135,
"learning_rate": 7.475249998274621e-05,
"loss": 3.298491668701172,
"step": 3560
},
{
"epoch": 0.6849183477425552,
"grad_norm": 0.5663706064224243,
"learning_rate": 7.434060806344936e-05,
"loss": 3.301411819458008,
"step": 3565
},
{
"epoch": 0.6858789625360231,
"grad_norm": 0.43267935514450073,
"learning_rate": 7.392947988375555e-05,
"loss": 3.29864616394043,
"step": 3570
},
{
"epoch": 0.6868395773294909,
"grad_norm": 0.43747061491012573,
"learning_rate": 7.351911959377585e-05,
"loss": 3.298960876464844,
"step": 3575
},
{
"epoch": 0.6878001921229587,
"grad_norm": 0.5621949434280396,
"learning_rate": 7.310953133586981e-05,
"loss": 3.2972442626953127,
"step": 3580
},
{
"epoch": 0.6887608069164265,
"grad_norm": 0.4505578279495239,
"learning_rate": 7.27007192446036e-05,
"loss": 3.2979637145996095,
"step": 3585
},
{
"epoch": 0.6897214217098944,
"grad_norm": 0.7796880602836609,
"learning_rate": 7.229268744670883e-05,
"loss": 3.297809600830078,
"step": 3590
},
{
"epoch": 0.6906820365033621,
"grad_norm": 0.6508268713951111,
"learning_rate": 7.188544006104e-05,
"loss": 3.2982086181640624,
"step": 3595
},
{
"epoch": 0.69164265129683,
"grad_norm": 0.44308528304100037,
"learning_rate": 7.147898119853367e-05,
"loss": 3.2976150512695312,
"step": 3600
},
{
"epoch": 0.6926032660902978,
"grad_norm": 0.4720350205898285,
"learning_rate": 7.107331496216676e-05,
"loss": 3.2995452880859375,
"step": 3605
},
{
"epoch": 0.6935638808837656,
"grad_norm": 0.45068585872650146,
"learning_rate": 7.066844544691474e-05,
"loss": 3.301127243041992,
"step": 3610
},
{
"epoch": 0.6945244956772334,
"grad_norm": 0.42395853996276855,
"learning_rate": 7.026437673971107e-05,
"loss": 3.2964126586914064,
"step": 3615
},
{
"epoch": 0.6954851104707013,
"grad_norm": 0.4400024712085724,
"learning_rate": 6.986111291940511e-05,
"loss": 3.2967502593994142,
"step": 3620
},
{
"epoch": 0.696445725264169,
"grad_norm": 0.410159170627594,
"learning_rate": 6.945865805672154e-05,
"loss": 3.2917686462402345,
"step": 3625
},
{
"epoch": 0.6974063400576369,
"grad_norm": 0.6144173741340637,
"learning_rate": 6.905701621421904e-05,
"loss": 3.300373077392578,
"step": 3630
},
{
"epoch": 0.6983669548511047,
"grad_norm": 0.47794288396835327,
"learning_rate": 6.865619144624914e-05,
"loss": 3.297341156005859,
"step": 3635
},
{
"epoch": 0.6993275696445725,
"grad_norm": 0.520939826965332,
"learning_rate": 6.825618779891577e-05,
"loss": 3.298029327392578,
"step": 3640
},
{
"epoch": 0.7002881844380403,
"grad_norm": 0.5281940698623657,
"learning_rate": 6.785700931003381e-05,
"loss": 3.291861724853516,
"step": 3645
},
{
"epoch": 0.7012487992315082,
"grad_norm": 0.3904149532318115,
"learning_rate": 6.745866000908874e-05,
"loss": 3.296683502197266,
"step": 3650
},
{
"epoch": 0.7022094140249759,
"grad_norm": 0.47106507420539856,
"learning_rate": 6.706114391719586e-05,
"loss": 3.29759521484375,
"step": 3655
},
{
"epoch": 0.7031700288184438,
"grad_norm": 0.4817742705345154,
"learning_rate": 6.66644650470597e-05,
"loss": 3.299455261230469,
"step": 3660
},
{
"epoch": 0.7041306436119116,
"grad_norm": 0.473783940076828,
"learning_rate": 6.626862740293338e-05,
"loss": 3.2957839965820312,
"step": 3665
},
{
"epoch": 0.7050912584053795,
"grad_norm": 0.5512199401855469,
"learning_rate": 6.587363498057845e-05,
"loss": 3.297314453125,
"step": 3670
},
{
"epoch": 0.7060518731988472,
"grad_norm": 0.6732028722763062,
"learning_rate": 6.547949176722437e-05,
"loss": 3.2939273834228517,
"step": 3675
},
{
"epoch": 0.7070124879923151,
"grad_norm": 0.5238297581672668,
"learning_rate": 6.508620174152826e-05,
"loss": 3.298822784423828,
"step": 3680
},
{
"epoch": 0.7079731027857828,
"grad_norm": 0.40396222472190857,
"learning_rate": 6.469376887353491e-05,
"loss": 3.29783935546875,
"step": 3685
},
{
"epoch": 0.7089337175792507,
"grad_norm": 0.4132029116153717,
"learning_rate": 6.430219712463634e-05,
"loss": 3.2966102600097655,
"step": 3690
},
{
"epoch": 0.7098943323727186,
"grad_norm": 0.4176599085330963,
"learning_rate": 6.391149044753238e-05,
"loss": 3.2970817565917967,
"step": 3695
},
{
"epoch": 0.7108549471661864,
"grad_norm": 0.3097979426383972,
"learning_rate": 6.352165278619012e-05,
"loss": 3.294615936279297,
"step": 3700
},
{
"epoch": 0.7118155619596542,
"grad_norm": 0.4419673979282379,
"learning_rate": 6.313268807580462e-05,
"loss": 3.2989078521728517,
"step": 3705
},
{
"epoch": 0.712776176753122,
"grad_norm": 0.5254682302474976,
"learning_rate": 6.274460024275895e-05,
"loss": 3.2966861724853516,
"step": 3710
},
{
"epoch": 0.7137367915465899,
"grad_norm": 0.5654774308204651,
"learning_rate": 6.235739320458442e-05,
"loss": 3.293225860595703,
"step": 3715
},
{
"epoch": 0.7146974063400576,
"grad_norm": 0.35445114970207214,
"learning_rate": 6.197107086992156e-05,
"loss": 3.2962112426757812,
"step": 3720
},
{
"epoch": 0.7156580211335255,
"grad_norm": 0.43310287594795227,
"learning_rate": 6.158563713847994e-05,
"loss": 3.2953845977783205,
"step": 3725
},
{
"epoch": 0.7166186359269933,
"grad_norm": 0.4213480055332184,
"learning_rate": 6.12010959009994e-05,
"loss": 3.2970962524414062,
"step": 3730
},
{
"epoch": 0.7175792507204611,
"grad_norm": 0.3784043490886688,
"learning_rate": 6.081745103921047e-05,
"loss": 3.2936870574951174,
"step": 3735
},
{
"epoch": 0.7185398655139289,
"grad_norm": 0.6028347611427307,
"learning_rate": 6.043470642579516e-05,
"loss": 3.2977680206298827,
"step": 3740
},
{
"epoch": 0.7195004803073968,
"grad_norm": 0.44696855545043945,
"learning_rate": 6.005286592434828e-05,
"loss": 3.293193817138672,
"step": 3745
},
{
"epoch": 0.7204610951008645,
"grad_norm": 0.4338856339454651,
"learning_rate": 5.967193338933778e-05,
"loss": 3.294358825683594,
"step": 3750
},
{
"epoch": 0.7214217098943324,
"grad_norm": 0.4633798599243164,
"learning_rate": 5.9291912666066405e-05,
"loss": 3.2951465606689454,
"step": 3755
},
{
"epoch": 0.7223823246878002,
"grad_norm": 0.42019450664520264,
"learning_rate": 5.891280759063265e-05,
"loss": 3.292225646972656,
"step": 3760
},
{
"epoch": 0.723342939481268,
"grad_norm": 0.418599396944046,
"learning_rate": 5.853462198989184e-05,
"loss": 3.2958885192871095,
"step": 3765
},
{
"epoch": 0.7243035542747358,
"grad_norm": 0.35753360390663147,
"learning_rate": 5.815735968141813e-05,
"loss": 3.2968238830566405,
"step": 3770
},
{
"epoch": 0.7252641690682037,
"grad_norm": 0.4825795888900757,
"learning_rate": 5.778102447346514e-05,
"loss": 3.2957897186279297,
"step": 3775
},
{
"epoch": 0.7262247838616714,
"grad_norm": 0.41493552923202515,
"learning_rate": 5.740562016492811e-05,
"loss": 3.2921653747558595,
"step": 3780
},
{
"epoch": 0.7271853986551393,
"grad_norm": 0.5309422612190247,
"learning_rate": 5.703115054530537e-05,
"loss": 3.293100357055664,
"step": 3785
},
{
"epoch": 0.7281460134486071,
"grad_norm": 0.36943376064300537,
"learning_rate": 5.665761939466008e-05,
"loss": 3.294559860229492,
"step": 3790
},
{
"epoch": 0.729106628242075,
"grad_norm": 0.5610998272895813,
"learning_rate": 5.628503048358207e-05,
"loss": 3.292363739013672,
"step": 3795
},
{
"epoch": 0.7300672430355427,
"grad_norm": 0.5705908536911011,
"learning_rate": 5.591338757314968e-05,
"loss": 3.2965774536132812,
"step": 3800
},
{
"epoch": 0.7310278578290106,
"grad_norm": 0.5056052803993225,
"learning_rate": 5.554269441489204e-05,
"loss": 3.295191192626953,
"step": 3805
},
{
"epoch": 0.7319884726224783,
"grad_norm": 0.4820818603038788,
"learning_rate": 5.517295475075102e-05,
"loss": 3.2937217712402345,
"step": 3810
},
{
"epoch": 0.7329490874159462,
"grad_norm": 0.3636917471885681,
"learning_rate": 5.4804172313043465e-05,
"loss": 3.291709136962891,
"step": 3815
},
{
"epoch": 0.733909702209414,
"grad_norm": 0.3684285283088684,
"learning_rate": 5.443635082442363e-05,
"loss": 3.293587493896484,
"step": 3820
},
{
"epoch": 0.7348703170028819,
"grad_norm": 0.741382896900177,
"learning_rate": 5.4069493997845356e-05,
"loss": 3.2945404052734375,
"step": 3825
},
{
"epoch": 0.7358309317963496,
"grad_norm": 0.41890212893486023,
"learning_rate": 5.3703605536524905e-05,
"loss": 3.2955181121826174,
"step": 3830
},
{
"epoch": 0.7367915465898175,
"grad_norm": 0.3295837938785553,
"learning_rate": 5.333868913390338e-05,
"loss": 3.290005111694336,
"step": 3835
},
{
"epoch": 0.7377521613832853,
"grad_norm": 0.3231881558895111,
"learning_rate": 5.2974748473609505e-05,
"loss": 3.291563034057617,
"step": 3840
},
{
"epoch": 0.7387127761767531,
"grad_norm": 0.44054171442985535,
"learning_rate": 5.261178722942242e-05,
"loss": 3.2941848754882814,
"step": 3845
},
{
"epoch": 0.7396733909702209,
"grad_norm": 0.3981585204601288,
"learning_rate": 5.224980906523462e-05,
"loss": 3.292535400390625,
"step": 3850
},
{
"epoch": 0.7406340057636888,
"grad_norm": 0.36969125270843506,
"learning_rate": 5.188881763501486e-05,
"loss": 3.2916770935058595,
"step": 3855
},
{
"epoch": 0.7415946205571565,
"grad_norm": 0.414196252822876,
"learning_rate": 5.152881658277147e-05,
"loss": 3.293859100341797,
"step": 3860
},
{
"epoch": 0.7425552353506244,
"grad_norm": 0.3872639536857605,
"learning_rate": 5.1169809542515404e-05,
"loss": 3.296031188964844,
"step": 3865
},
{
"epoch": 0.7435158501440923,
"grad_norm": 0.4767828583717346,
"learning_rate": 5.081180013822368e-05,
"loss": 3.292156219482422,
"step": 3870
},
{
"epoch": 0.74447646493756,
"grad_norm": 0.41197365522384644,
"learning_rate": 5.045479198380272e-05,
"loss": 3.292400360107422,
"step": 3875
},
{
"epoch": 0.7454370797310279,
"grad_norm": 0.40532881021499634,
"learning_rate": 5.009878868305171e-05,
"loss": 3.2940216064453125,
"step": 3880
},
{
"epoch": 0.7463976945244957,
"grad_norm": 0.3654780685901642,
"learning_rate": 4.9743793829626736e-05,
"loss": 3.293231964111328,
"step": 3885
},
{
"epoch": 0.7473583093179635,
"grad_norm": 0.2990121841430664,
"learning_rate": 4.9389811007003834e-05,
"loss": 3.292677307128906,
"step": 3890
},
{
"epoch": 0.7483189241114313,
"grad_norm": 0.3309316039085388,
"learning_rate": 4.903684378844333e-05,
"loss": 3.294384765625,
"step": 3895
},
{
"epoch": 0.7492795389048992,
"grad_norm": 0.2731437683105469,
"learning_rate": 4.86848957369536e-05,
"loss": 3.292652893066406,
"step": 3900
},
{
"epoch": 0.7502401536983669,
"grad_norm": 0.3116244971752167,
"learning_rate": 4.8333970405254904e-05,
"loss": 3.288296127319336,
"step": 3905
},
{
"epoch": 0.7512007684918348,
"grad_norm": 0.42631855607032776,
"learning_rate": 4.798407133574405e-05,
"loss": 3.2939361572265624,
"step": 3910
},
{
"epoch": 0.7521613832853026,
"grad_norm": 0.35764989256858826,
"learning_rate": 4.7635202060457945e-05,
"loss": 3.2916053771972655,
"step": 3915
},
{
"epoch": 0.7531219980787704,
"grad_norm": 0.2865849733352661,
"learning_rate": 4.72873661010385e-05,
"loss": 3.2932079315185545,
"step": 3920
},
{
"epoch": 0.7540826128722382,
"grad_norm": 0.3286271095275879,
"learning_rate": 4.694056696869688e-05,
"loss": 3.295194625854492,
"step": 3925
},
{
"epoch": 0.7550432276657061,
"grad_norm": 0.346123605966568,
"learning_rate": 4.659480816417785e-05,
"loss": 3.288124847412109,
"step": 3930
},
{
"epoch": 0.7560038424591738,
"grad_norm": 0.4188701808452606,
"learning_rate": 4.6250093177725e-05,
"loss": 3.2938987731933596,
"step": 3935
},
{
"epoch": 0.7569644572526417,
"grad_norm": 0.44901224970817566,
"learning_rate": 4.590642548904479e-05,
"loss": 3.2915252685546874,
"step": 3940
},
{
"epoch": 0.7579250720461095,
"grad_norm": 0.4020468592643738,
"learning_rate": 4.5563808567272e-05,
"loss": 3.290658187866211,
"step": 3945
},
{
"epoch": 0.7588856868395774,
"grad_norm": 0.32219162583351135,
"learning_rate": 4.52222458709345e-05,
"loss": 3.2903762817382813,
"step": 3950
},
{
"epoch": 0.7598463016330451,
"grad_norm": 0.25904515385627747,
"learning_rate": 4.4881740847918155e-05,
"loss": 3.288920593261719,
"step": 3955
},
{
"epoch": 0.760806916426513,
"grad_norm": 0.29081347584724426,
"learning_rate": 4.454229693543251e-05,
"loss": 3.293811798095703,
"step": 3960
},
{
"epoch": 0.7617675312199808,
"grad_norm": 0.24549178779125214,
"learning_rate": 4.420391755997548e-05,
"loss": 3.2932552337646483,
"step": 3965
},
{
"epoch": 0.7627281460134486,
"grad_norm": 0.459926038980484,
"learning_rate": 4.386660613729925e-05,
"loss": 3.2938629150390626,
"step": 3970
},
{
"epoch": 0.7636887608069164,
"grad_norm": 0.4720708429813385,
"learning_rate": 4.35303660723756e-05,
"loss": 3.2878982543945314,
"step": 3975
},
{
"epoch": 0.7646493756003843,
"grad_norm": 0.28443172574043274,
"learning_rate": 4.3195200759361455e-05,
"loss": 3.2919075012207033,
"step": 3980
},
{
"epoch": 0.765609990393852,
"grad_norm": 0.33102986216545105,
"learning_rate": 4.2861113581564884e-05,
"loss": 3.2893836975097654,
"step": 3985
},
{
"epoch": 0.7665706051873199,
"grad_norm": 0.348117858171463,
"learning_rate": 4.252810791141054e-05,
"loss": 3.293404769897461,
"step": 3990
},
{
"epoch": 0.7675312199807877,
"grad_norm": 0.3010897636413574,
"learning_rate": 4.2196187110406054e-05,
"loss": 3.2900314331054688,
"step": 3995
},
{
"epoch": 0.7684918347742555,
"grad_norm": 0.274684339761734,
"learning_rate": 4.186535452910784e-05,
"loss": 3.2879261016845702,
"step": 4000
},
{
"epoch": 0.7694524495677233,
"grad_norm": 0.29900944232940674,
"learning_rate": 4.153561350708732e-05,
"loss": 3.292841339111328,
"step": 4005
},
{
"epoch": 0.7704130643611912,
"grad_norm": 0.30298614501953125,
"learning_rate": 4.12069673728973e-05,
"loss": 3.2894565582275392,
"step": 4010
},
{
"epoch": 0.7713736791546589,
"grad_norm": 0.31402137875556946,
"learning_rate": 4.087941944403815e-05,
"loss": 3.2861660003662108,
"step": 4015
},
{
"epoch": 0.7723342939481268,
"grad_norm": 0.4134189784526825,
"learning_rate": 4.0552973026924625e-05,
"loss": 3.289139175415039,
"step": 4020
},
{
"epoch": 0.7732949087415946,
"grad_norm": 0.3398532569408417,
"learning_rate": 4.022763141685226e-05,
"loss": 3.2890396118164062,
"step": 4025
},
{
"epoch": 0.7742555235350624,
"grad_norm": 0.33045315742492676,
"learning_rate": 3.990339789796418e-05,
"loss": 3.2925796508789062,
"step": 4030
},
{
"epoch": 0.7752161383285303,
"grad_norm": 0.28469014167785645,
"learning_rate": 3.958027574321794e-05,
"loss": 3.2899627685546875,
"step": 4035
},
{
"epoch": 0.7761767531219981,
"grad_norm": 0.23531687259674072,
"learning_rate": 3.9258268214352566e-05,
"loss": 3.286402130126953,
"step": 4040
},
{
"epoch": 0.777137367915466,
"grad_norm": 0.3813510239124298,
"learning_rate": 3.893737856185538e-05,
"loss": 3.2916938781738283,
"step": 4045
},
{
"epoch": 0.7780979827089337,
"grad_norm": 0.3439390957355499,
"learning_rate": 3.861761002492952e-05,
"loss": 3.288800811767578,
"step": 4050
},
{
"epoch": 0.7790585975024016,
"grad_norm": 0.2623279392719269,
"learning_rate": 3.8298965831461024e-05,
"loss": 3.288500213623047,
"step": 4055
},
{
"epoch": 0.7800192122958693,
"grad_norm": 0.3051926791667938,
"learning_rate": 3.798144919798631e-05,
"loss": 3.2871044158935545,
"step": 4060
},
{
"epoch": 0.7809798270893372,
"grad_norm": 0.2598731815814972,
"learning_rate": 3.766506332965976e-05,
"loss": 3.2878665924072266,
"step": 4065
},
{
"epoch": 0.781940441882805,
"grad_norm": 0.3285099267959595,
"learning_rate": 3.734981142022117e-05,
"loss": 3.287023162841797,
"step": 4070
},
{
"epoch": 0.7829010566762729,
"grad_norm": 0.2698347568511963,
"learning_rate": 3.70356966519638e-05,
"loss": 3.2910301208496096,
"step": 4075
},
{
"epoch": 0.7838616714697406,
"grad_norm": 0.2935537099838257,
"learning_rate": 3.672272219570199e-05,
"loss": 3.2882232666015625,
"step": 4080
},
{
"epoch": 0.7848222862632085,
"grad_norm": 0.2180212140083313,
"learning_rate": 3.641089121073934e-05,
"loss": 3.288380432128906,
"step": 4085
},
{
"epoch": 0.7857829010566763,
"grad_norm": 0.24223865568637848,
"learning_rate": 3.610020684483674e-05,
"loss": 3.2879989624023436,
"step": 4090
},
{
"epoch": 0.7867435158501441,
"grad_norm": 0.1990312784910202,
"learning_rate": 3.579067223418046e-05,
"loss": 3.2866798400878907,
"step": 4095
},
{
"epoch": 0.7877041306436119,
"grad_norm": 0.4838339388370514,
"learning_rate": 3.548229050335089e-05,
"loss": 3.290237808227539,
"step": 4100
},
{
"epoch": 0.7886647454370798,
"grad_norm": 0.27035945653915405,
"learning_rate": 3.517506476529045e-05,
"loss": 3.2898136138916017,
"step": 4105
},
{
"epoch": 0.7896253602305475,
"grad_norm": 0.2790147066116333,
"learning_rate": 3.486899812127264e-05,
"loss": 3.2898269653320313,
"step": 4110
},
{
"epoch": 0.7905859750240154,
"grad_norm": 0.2595069110393524,
"learning_rate": 3.456409366087054e-05,
"loss": 3.28946533203125,
"step": 4115
},
{
"epoch": 0.7915465898174832,
"grad_norm": 0.3146030306816101,
"learning_rate": 3.426035446192546e-05,
"loss": 3.2875953674316407,
"step": 4120
},
{
"epoch": 0.792507204610951,
"grad_norm": 0.28389808535575867,
"learning_rate": 3.395778359051634e-05,
"loss": 3.290841293334961,
"step": 4125
},
{
"epoch": 0.7934678194044188,
"grad_norm": 0.23182038962841034,
"learning_rate": 3.365638410092819e-05,
"loss": 3.289868927001953,
"step": 4130
},
{
"epoch": 0.7944284341978867,
"grad_norm": 0.2726062834262848,
"learning_rate": 3.3356159035621746e-05,
"loss": 3.287432098388672,
"step": 4135
},
{
"epoch": 0.7953890489913544,
"grad_norm": 0.21947945654392242,
"learning_rate": 3.3057111425202614e-05,
"loss": 3.286570358276367,
"step": 4140
},
{
"epoch": 0.7963496637848223,
"grad_norm": 0.28224751353263855,
"learning_rate": 3.275924428839043e-05,
"loss": 3.2863037109375,
"step": 4145
},
{
"epoch": 0.7973102785782901,
"grad_norm": 0.22240881621837616,
"learning_rate": 3.246256063198895e-05,
"loss": 3.2870025634765625,
"step": 4150
},
{
"epoch": 0.7982708933717579,
"grad_norm": 0.2880660891532898,
"learning_rate": 3.216706345085499e-05,
"loss": 3.2907535552978517,
"step": 4155
},
{
"epoch": 0.7992315081652257,
"grad_norm": 0.2571323812007904,
"learning_rate": 3.187275572786878e-05,
"loss": 3.283245849609375,
"step": 4160
},
{
"epoch": 0.8001921229586936,
"grad_norm": 0.3757617771625519,
"learning_rate": 3.15796404339036e-05,
"loss": 3.286081314086914,
"step": 4165
},
{
"epoch": 0.8011527377521613,
"grad_norm": 0.34448450803756714,
"learning_rate": 3.128772052779569e-05,
"loss": 3.2928192138671877,
"step": 4170
},
{
"epoch": 0.8021133525456292,
"grad_norm": 0.29771631956100464,
"learning_rate": 3.099699895631474e-05,
"loss": 3.2850051879882813,
"step": 4175
},
{
"epoch": 0.803073967339097,
"grad_norm": 0.19579406082630157,
"learning_rate": 3.0707478654133706e-05,
"loss": 3.2885662078857423,
"step": 4180
},
{
"epoch": 0.8040345821325648,
"grad_norm": 0.21206071972846985,
"learning_rate": 3.041916254379949e-05,
"loss": 3.289264678955078,
"step": 4185
},
{
"epoch": 0.8049951969260326,
"grad_norm": 0.2306758463382721,
"learning_rate": 3.0132053535703342e-05,
"loss": 3.289895248413086,
"step": 4190
},
{
"epoch": 0.8059558117195005,
"grad_norm": 0.24324500560760498,
"learning_rate": 2.984615452805147e-05,
"loss": 3.289009857177734,
"step": 4195
},
{
"epoch": 0.8069164265129684,
"grad_norm": 0.24182389676570892,
"learning_rate": 2.9561468406835865e-05,
"loss": 3.2901374816894533,
"step": 4200
},
{
"epoch": 0.8078770413064361,
"grad_norm": 0.19510437548160553,
"learning_rate": 2.927799804580495e-05,
"loss": 3.288174057006836,
"step": 4205
},
{
"epoch": 0.808837656099904,
"grad_norm": 0.20101343095302582,
"learning_rate": 2.8995746306434853e-05,
"loss": 3.2845272064208983,
"step": 4210
},
{
"epoch": 0.8097982708933718,
"grad_norm": 0.219615176320076,
"learning_rate": 2.871471603790035e-05,
"loss": 3.288011932373047,
"step": 4215
},
{
"epoch": 0.8107588856868396,
"grad_norm": 0.2699833810329437,
"learning_rate": 2.8434910077046163e-05,
"loss": 3.2884559631347656,
"step": 4220
},
{
"epoch": 0.8117195004803074,
"grad_norm": 0.2704545855522156,
"learning_rate": 2.8156331248358295e-05,
"loss": 3.283679962158203,
"step": 4225
},
{
"epoch": 0.8126801152737753,
"grad_norm": 0.22715161740779877,
"learning_rate": 2.787898236393556e-05,
"loss": 3.285501480102539,
"step": 4230
},
{
"epoch": 0.813640730067243,
"grad_norm": 0.2033311128616333,
"learning_rate": 2.7602866223461044e-05,
"loss": 3.284902572631836,
"step": 4235
},
{
"epoch": 0.8146013448607109,
"grad_norm": 0.2283061444759369,
"learning_rate": 2.7327985614174143e-05,
"loss": 3.2900623321533202,
"step": 4240
},
{
"epoch": 0.8155619596541787,
"grad_norm": 0.21356871724128723,
"learning_rate": 2.7054343310842115e-05,
"loss": 3.283802032470703,
"step": 4245
},
{
"epoch": 0.8165225744476465,
"grad_norm": 0.21679487824440002,
"learning_rate": 2.6781942075732294e-05,
"loss": 3.284503936767578,
"step": 4250
},
{
"epoch": 0.8174831892411143,
"grad_norm": 0.2814090847969055,
"learning_rate": 2.65107846585841e-05,
"loss": 3.2889778137207033,
"step": 4255
},
{
"epoch": 0.8184438040345822,
"grad_norm": 0.20454630255699158,
"learning_rate": 2.624087379658123e-05,
"loss": 3.2832550048828124,
"step": 4260
},
{
"epoch": 0.8194044188280499,
"grad_norm": 0.25826799869537354,
"learning_rate": 2.5972212214324162e-05,
"loss": 3.2887802124023438,
"step": 4265
},
{
"epoch": 0.8203650336215178,
"grad_norm": 0.24519048631191254,
"learning_rate": 2.5704802623802595e-05,
"loss": 3.2866302490234376,
"step": 4270
},
{
"epoch": 0.8213256484149856,
"grad_norm": 0.1912376880645752,
"learning_rate": 2.5438647724368054e-05,
"loss": 3.2893089294433593,
"step": 4275
},
{
"epoch": 0.8222862632084534,
"grad_norm": 0.18562142550945282,
"learning_rate": 2.5173750202706666e-05,
"loss": 3.2848739624023438,
"step": 4280
},
{
"epoch": 0.8232468780019212,
"grad_norm": 0.21541374921798706,
"learning_rate": 2.491011273281189e-05,
"loss": 3.285577392578125,
"step": 4285
},
{
"epoch": 0.8242074927953891,
"grad_norm": 0.2195151448249817,
"learning_rate": 2.4647737975957954e-05,
"loss": 3.2902549743652343,
"step": 4290
},
{
"epoch": 0.8251681075888568,
"grad_norm": 0.2502996623516083,
"learning_rate": 2.4386628580672396e-05,
"loss": 3.2855270385742186,
"step": 4295
},
{
"epoch": 0.8261287223823247,
"grad_norm": 0.168674036860466,
"learning_rate": 2.4126787182709796e-05,
"loss": 3.2874530792236327,
"step": 4300
},
{
"epoch": 0.8270893371757925,
"grad_norm": 0.22533266246318817,
"learning_rate": 2.3868216405025002e-05,
"loss": 3.286944580078125,
"step": 4305
},
{
"epoch": 0.8280499519692603,
"grad_norm": 0.21998296678066254,
"learning_rate": 2.361091885774652e-05,
"loss": 3.286793518066406,
"step": 4310
},
{
"epoch": 0.8290105667627281,
"grad_norm": 0.2057517021894455,
"learning_rate": 2.3354897138150536e-05,
"loss": 3.2883201599121095,
"step": 4315
},
{
"epoch": 0.829971181556196,
"grad_norm": 0.2263990193605423,
"learning_rate": 2.3100153830634218e-05,
"loss": 3.2877071380615233,
"step": 4320
},
{
"epoch": 0.8309317963496637,
"grad_norm": 0.17990660667419434,
"learning_rate": 2.284669150669001e-05,
"loss": 3.2851654052734376,
"step": 4325
},
{
"epoch": 0.8318924111431316,
"grad_norm": 0.19730253517627716,
"learning_rate": 2.259451272487957e-05,
"loss": 3.2849620819091796,
"step": 4330
},
{
"epoch": 0.8328530259365994,
"grad_norm": 0.1865607649087906,
"learning_rate": 2.234362003080772e-05,
"loss": 3.2859230041503906,
"step": 4335
},
{
"epoch": 0.8338136407300673,
"grad_norm": 0.21946489810943604,
"learning_rate": 2.2094015957097215e-05,
"loss": 3.2830989837646483,
"step": 4340
},
{
"epoch": 0.834774255523535,
"grad_norm": 0.22765038907527924,
"learning_rate": 2.1845703023362647e-05,
"loss": 3.2854949951171877,
"step": 4345
},
{
"epoch": 0.8357348703170029,
"grad_norm": 0.1913105696439743,
"learning_rate": 2.159868373618544e-05,
"loss": 3.2875335693359373,
"step": 4350
},
{
"epoch": 0.8366954851104706,
"grad_norm": 0.21100343763828278,
"learning_rate": 2.13529605890883e-05,
"loss": 3.2848331451416017,
"step": 4355
},
{
"epoch": 0.8376560999039385,
"grad_norm": 0.20388315618038177,
"learning_rate": 2.110853606251004e-05,
"loss": 3.283430480957031,
"step": 4360
},
{
"epoch": 0.8386167146974063,
"grad_norm": 0.18785250186920166,
"learning_rate": 2.0865412623780858e-05,
"loss": 3.2853065490722657,
"step": 4365
},
{
"epoch": 0.8395773294908742,
"grad_norm": 0.19692327082157135,
"learning_rate": 2.0623592727096916e-05,
"loss": 3.2828216552734375,
"step": 4370
},
{
"epoch": 0.840537944284342,
"grad_norm": 0.2322104126214981,
"learning_rate": 2.0383078813496e-05,
"loss": 3.2829490661621095,
"step": 4375
},
{
"epoch": 0.8414985590778098,
"grad_norm": 0.2523021697998047,
"learning_rate": 2.014387331083268e-05,
"loss": 3.2841728210449217,
"step": 4380
},
{
"epoch": 0.8424591738712777,
"grad_norm": 0.2998299300670624,
"learning_rate": 1.990597863375389e-05,
"loss": 3.2823081970214845,
"step": 4385
},
{
"epoch": 0.8434197886647454,
"grad_norm": 0.22866974771022797,
"learning_rate": 1.966939718367444e-05,
"loss": 3.2830490112304687,
"step": 4390
},
{
"epoch": 0.8443804034582133,
"grad_norm": 0.2374458909034729,
"learning_rate": 1.9434131348752842e-05,
"loss": 3.2861083984375,
"step": 4395
},
{
"epoch": 0.8453410182516811,
"grad_norm": 0.271164208650589,
"learning_rate": 1.920018350386725e-05,
"loss": 3.2853363037109373,
"step": 4400
},
{
"epoch": 0.8463016330451489,
"grad_norm": 0.26674726605415344,
"learning_rate": 1.8967556010591423e-05,
"loss": 3.2835784912109376,
"step": 4405
},
{
"epoch": 0.8472622478386167,
"grad_norm": 0.2277594804763794,
"learning_rate": 1.873625121717089e-05,
"loss": 3.287038040161133,
"step": 4410
},
{
"epoch": 0.8482228626320846,
"grad_norm": 0.21174593269824982,
"learning_rate": 1.850627145849926e-05,
"loss": 3.281999206542969,
"step": 4415
},
{
"epoch": 0.8491834774255523,
"grad_norm": 0.19385212659835815,
"learning_rate": 1.8277619056094684e-05,
"loss": 3.283008575439453,
"step": 4420
},
{
"epoch": 0.8501440922190202,
"grad_norm": 0.22816501557826996,
"learning_rate": 1.805029631807632e-05,
"loss": 3.282820129394531,
"step": 4425
},
{
"epoch": 0.851104707012488,
"grad_norm": 0.20349720120429993,
"learning_rate": 1.7824305539141165e-05,
"loss": 3.2845672607421874,
"step": 4430
},
{
"epoch": 0.8520653218059558,
"grad_norm": 0.17361263930797577,
"learning_rate": 1.7599649000540828e-05,
"loss": 3.284697341918945,
"step": 4435
},
{
"epoch": 0.8530259365994236,
"grad_norm": 0.19036982953548431,
"learning_rate": 1.7376328970058382e-05,
"loss": 3.2811737060546875,
"step": 4440
},
{
"epoch": 0.8539865513928915,
"grad_norm": 0.18866802752017975,
"learning_rate": 1.715434770198582e-05,
"loss": 3.2872802734375,
"step": 4445
},
{
"epoch": 0.8549471661863592,
"grad_norm": 0.17417368292808533,
"learning_rate": 1.6933707437100852e-05,
"loss": 3.2829563140869142,
"step": 4450
},
{
"epoch": 0.8559077809798271,
"grad_norm": 0.1611868292093277,
"learning_rate": 1.67144104026446e-05,
"loss": 3.281053161621094,
"step": 4455
},
{
"epoch": 0.8568683957732949,
"grad_norm": 0.18905870616436005,
"learning_rate": 1.6496458812299073e-05,
"loss": 3.281689453125,
"step": 4460
},
{
"epoch": 0.8578290105667628,
"grad_norm": 0.1868743747472763,
"learning_rate": 1.6279854866164586e-05,
"loss": 3.2842777252197264,
"step": 4465
},
{
"epoch": 0.8587896253602305,
"grad_norm": 0.16863790154457092,
"learning_rate": 1.6064600750737995e-05,
"loss": 3.2836139678955076,
"step": 4470
},
{
"epoch": 0.8597502401536984,
"grad_norm": 0.15220773220062256,
"learning_rate": 1.5850698638890093e-05,
"loss": 3.28204460144043,
"step": 4475
},
{
"epoch": 0.8607108549471661,
"grad_norm": 0.16620075702667236,
"learning_rate": 1.563815068984418e-05,
"loss": 3.2853729248046877,
"step": 4480
},
{
"epoch": 0.861671469740634,
"grad_norm": 0.14935848116874695,
"learning_rate": 1.54269590491539e-05,
"loss": 3.2866542816162108,
"step": 4485
},
{
"epoch": 0.8626320845341018,
"grad_norm": 0.1583867073059082,
"learning_rate": 1.521712584868166e-05,
"loss": 3.283392333984375,
"step": 4490
},
{
"epoch": 0.8635926993275697,
"grad_norm": 0.1681807041168213,
"learning_rate": 1.5008653206577376e-05,
"loss": 3.281229782104492,
"step": 4495
},
{
"epoch": 0.8645533141210374,
"grad_norm": 0.1857963651418686,
"learning_rate": 1.4801543227256685e-05,
"loss": 3.2861068725585936,
"step": 4500
},
{
"epoch": 0.8655139289145053,
"grad_norm": 0.16560649871826172,
"learning_rate": 1.4595798001379965e-05,
"loss": 3.2863983154296874,
"step": 4505
},
{
"epoch": 0.866474543707973,
"grad_norm": 0.17330175638198853,
"learning_rate": 1.4391419605831218e-05,
"loss": 3.284455490112305,
"step": 4510
},
{
"epoch": 0.8674351585014409,
"grad_norm": 0.15583863854408264,
"learning_rate": 1.4188410103696957e-05,
"loss": 3.283338165283203,
"step": 4515
},
{
"epoch": 0.8683957732949087,
"grad_norm": 0.1486099660396576,
"learning_rate": 1.398677154424559e-05,
"loss": 3.282981109619141,
"step": 4520
},
{
"epoch": 0.8693563880883766,
"grad_norm": 0.16416342556476593,
"learning_rate": 1.3786505962906475e-05,
"loss": 3.281314468383789,
"step": 4525
},
{
"epoch": 0.8703170028818443,
"grad_norm": 0.17155486345291138,
"learning_rate": 1.3587615381249622e-05,
"loss": 3.2852855682373048,
"step": 4530
},
{
"epoch": 0.8712776176753122,
"grad_norm": 0.17230698466300964,
"learning_rate": 1.3390101806965165e-05,
"loss": 3.2855682373046875,
"step": 4535
},
{
"epoch": 0.8722382324687801,
"grad_norm": 0.16426202654838562,
"learning_rate": 1.3193967233843083e-05,
"loss": 3.284606170654297,
"step": 4540
},
{
"epoch": 0.8731988472622478,
"grad_norm": 0.18724007904529572,
"learning_rate": 1.2999213641753164e-05,
"loss": 3.281108093261719,
"step": 4545
},
{
"epoch": 0.8741594620557157,
"grad_norm": 0.171888530254364,
"learning_rate": 1.280584299662486e-05,
"loss": 3.2837615966796876,
"step": 4550
},
{
"epoch": 0.8751200768491835,
"grad_norm": 0.15725582838058472,
"learning_rate": 1.2613857250427622e-05,
"loss": 3.281300354003906,
"step": 4555
},
{
"epoch": 0.8760806916426513,
"grad_norm": 0.1605101078748703,
"learning_rate": 1.2423258341151088e-05,
"loss": 3.2830268859863283,
"step": 4560
},
{
"epoch": 0.8770413064361191,
"grad_norm": 0.16305667161941528,
"learning_rate": 1.2234048192785539e-05,
"loss": 3.28362922668457,
"step": 4565
},
{
"epoch": 0.878001921229587,
"grad_norm": 0.1558937281370163,
"learning_rate": 1.2046228715302509e-05,
"loss": 3.28128662109375,
"step": 4570
},
{
"epoch": 0.8789625360230547,
"grad_norm": 0.14114581048488617,
"learning_rate": 1.1859801804635471e-05,
"loss": 3.2840221405029295,
"step": 4575
},
{
"epoch": 0.8799231508165226,
"grad_norm": 0.14987899363040924,
"learning_rate": 1.167476934266065e-05,
"loss": 3.2838886260986326,
"step": 4580
},
{
"epoch": 0.8808837656099904,
"grad_norm": 0.15901191532611847,
"learning_rate": 1.1491133197178177e-05,
"loss": 3.2861114501953126,
"step": 4585
},
{
"epoch": 0.8818443804034583,
"grad_norm": 0.1510273516178131,
"learning_rate": 1.1308895221893088e-05,
"loss": 3.2844474792480467,
"step": 4590
},
{
"epoch": 0.882804995196926,
"grad_norm": 0.14691267907619476,
"learning_rate": 1.1128057256396684e-05,
"loss": 3.2848670959472654,
"step": 4595
},
{
"epoch": 0.8837656099903939,
"grad_norm": 0.16120979189872742,
"learning_rate": 1.0948621126147978e-05,
"loss": 3.2807106018066405,
"step": 4600
},
{
"epoch": 0.8847262247838616,
"grad_norm": 0.161734938621521,
"learning_rate": 1.0770588642455092e-05,
"loss": 3.279821014404297,
"step": 4605
},
{
"epoch": 0.8856868395773295,
"grad_norm": 0.17960962653160095,
"learning_rate": 1.0593961602457346e-05,
"loss": 3.2869110107421875,
"step": 4610
},
{
"epoch": 0.8866474543707973,
"grad_norm": 0.13539910316467285,
"learning_rate": 1.041874178910666e-05,
"loss": 3.283498001098633,
"step": 4615
},
{
"epoch": 0.8876080691642652,
"grad_norm": 0.147013857960701,
"learning_rate": 1.0244930971149918e-05,
"loss": 3.28480224609375,
"step": 4620
},
{
"epoch": 0.8885686839577329,
"grad_norm": 0.16810062527656555,
"learning_rate": 1.0072530903110942e-05,
"loss": 3.284687805175781,
"step": 4625
},
{
"epoch": 0.8895292987512008,
"grad_norm": 0.16976973414421082,
"learning_rate": 9.901543325272753e-06,
"loss": 3.282553863525391,
"step": 4630
},
{
"epoch": 0.8904899135446686,
"grad_norm": 0.16181102395057678,
"learning_rate": 9.731969963660224e-06,
"loss": 3.2854312896728515,
"step": 4635
},
{
"epoch": 0.8914505283381364,
"grad_norm": 0.16076242923736572,
"learning_rate": 9.56381253002233e-06,
"loss": 3.2812950134277346,
"step": 4640
},
{
"epoch": 0.8924111431316042,
"grad_norm": 0.14188607037067413,
"learning_rate": 9.397072721815113e-06,
"loss": 3.284113311767578,
"step": 4645
},
{
"epoch": 0.8933717579250721,
"grad_norm": 0.16452723741531372,
"learning_rate": 9.231752222184496e-06,
"loss": 3.2799339294433594,
"step": 4650
},
{
"epoch": 0.8943323727185398,
"grad_norm": 0.13960616290569305,
"learning_rate": 9.067852699949197e-06,
"loss": 3.2871246337890625,
"step": 4655
},
{
"epoch": 0.8952929875120077,
"grad_norm": 0.1369549185037613,
"learning_rate": 8.905375809584053e-06,
"loss": 3.283050537109375,
"step": 4660
},
{
"epoch": 0.8962536023054755,
"grad_norm": 0.13010026514530182,
"learning_rate": 8.744323191203129e-06,
"loss": 3.2822250366210937,
"step": 4665
},
{
"epoch": 0.8972142170989433,
"grad_norm": 0.14469455182552338,
"learning_rate": 8.5846964705433e-06,
"loss": 3.2816314697265625,
"step": 4670
},
{
"epoch": 0.8981748318924111,
"grad_norm": 0.15112441778182983,
"learning_rate": 8.426497258947813e-06,
"loss": 3.279371643066406,
"step": 4675
},
{
"epoch": 0.899135446685879,
"grad_norm": 0.14177513122558594,
"learning_rate": 8.269727153349915e-06,
"loss": 3.2836181640625,
"step": 4680
},
{
"epoch": 0.9000960614793467,
"grad_norm": 0.1383771449327469,
"learning_rate": 8.114387736256966e-06,
"loss": 3.2833229064941407,
"step": 4685
},
{
"epoch": 0.9010566762728146,
"grad_norm": 0.1385938823223114,
"learning_rate": 7.960480575734162e-06,
"loss": 3.283488464355469,
"step": 4690
},
{
"epoch": 0.9020172910662824,
"grad_norm": 0.1253698468208313,
"learning_rate": 7.808007225388963e-06,
"loss": 3.282206726074219,
"step": 4695
},
{
"epoch": 0.9029779058597502,
"grad_norm": 0.14205971360206604,
"learning_rate": 7.656969224355285e-06,
"loss": 3.2821189880371096,
"step": 4700
},
{
"epoch": 0.9039385206532181,
"grad_norm": 0.13180512189865112,
"learning_rate": 7.507368097277994e-06,
"loss": 3.278270721435547,
"step": 4705
},
{
"epoch": 0.9048991354466859,
"grad_norm": 0.1405053585767746,
"learning_rate": 7.359205354297499e-06,
"loss": 3.279631423950195,
"step": 4710
},
{
"epoch": 0.9058597502401537,
"grad_norm": 0.1261477768421173,
"learning_rate": 7.21248249103451e-06,
"loss": 3.2792625427246094,
"step": 4715
},
{
"epoch": 0.9068203650336215,
"grad_norm": 0.1316026747226715,
"learning_rate": 7.067200988574983e-06,
"loss": 3.2806537628173826,
"step": 4720
},
{
"epoch": 0.9077809798270894,
"grad_norm": 0.1313539296388626,
"learning_rate": 6.923362313455094e-06,
"loss": 3.2771621704101563,
"step": 4725
},
{
"epoch": 0.9087415946205571,
"grad_norm": 0.1397256702184677,
"learning_rate": 6.780967917646518e-06,
"loss": 3.279193115234375,
"step": 4730
},
{
"epoch": 0.909702209414025,
"grad_norm": 0.13661499321460724,
"learning_rate": 6.640019238541727e-06,
"loss": 3.2864913940429688,
"step": 4735
},
{
"epoch": 0.9106628242074928,
"grad_norm": 0.13143934309482574,
"learning_rate": 6.5005176989394335e-06,
"loss": 3.2848949432373047,
"step": 4740
},
{
"epoch": 0.9116234390009607,
"grad_norm": 0.12046822905540466,
"learning_rate": 6.362464707030334e-06,
"loss": 3.283034896850586,
"step": 4745
},
{
"epoch": 0.9125840537944284,
"grad_norm": 0.12852244079113007,
"learning_rate": 6.225861656382825e-06,
"loss": 3.2837677001953125,
"step": 4750
},
{
"epoch": 0.9135446685878963,
"grad_norm": 0.13605345785617828,
"learning_rate": 6.090709925928938e-06,
"loss": 3.2784072875976564,
"step": 4755
},
{
"epoch": 0.914505283381364,
"grad_norm": 0.1370062530040741,
"learning_rate": 5.957010879950446e-06,
"loss": 3.279316711425781,
"step": 4760
},
{
"epoch": 0.9154658981748319,
"grad_norm": 0.14122441411018372,
"learning_rate": 5.824765868065101e-06,
"loss": 3.2786293029785156,
"step": 4765
},
{
"epoch": 0.9164265129682997,
"grad_norm": 0.1247406154870987,
"learning_rate": 5.69397622521291e-06,
"loss": 3.2822948455810548,
"step": 4770
},
{
"epoch": 0.9173871277617676,
"grad_norm": 0.13668857514858246,
"learning_rate": 5.564643271642799e-06,
"loss": 3.282684326171875,
"step": 4775
},
{
"epoch": 0.9183477425552353,
"grad_norm": 0.14488086104393005,
"learning_rate": 5.436768312899226e-06,
"loss": 3.2865325927734377,
"step": 4780
},
{
"epoch": 0.9193083573487032,
"grad_norm": 0.13214252889156342,
"learning_rate": 5.310352639808968e-06,
"loss": 3.2808204650878907,
"step": 4785
},
{
"epoch": 0.920268972142171,
"grad_norm": 0.12839952111244202,
"learning_rate": 5.185397528468155e-06,
"loss": 3.2781848907470703,
"step": 4790
},
{
"epoch": 0.9212295869356388,
"grad_norm": 0.12066492438316345,
"learning_rate": 5.061904240229309e-06,
"loss": 3.285063934326172,
"step": 4795
},
{
"epoch": 0.9221902017291066,
"grad_norm": 0.1341644525527954,
"learning_rate": 4.939874021688739e-06,
"loss": 3.2813148498535156,
"step": 4800
},
{
"epoch": 0.9231508165225745,
"grad_norm": 0.11596546322107315,
"learning_rate": 4.819308104673769e-06,
"loss": 3.281114196777344,
"step": 4805
},
{
"epoch": 0.9241114313160422,
"grad_norm": 0.1205928698182106,
"learning_rate": 4.700207706230513e-06,
"loss": 3.2828353881835937,
"step": 4810
},
{
"epoch": 0.9250720461095101,
"grad_norm": 0.1389501690864563,
"learning_rate": 4.582574028611435e-06,
"loss": 3.2821121215820312,
"step": 4815
},
{
"epoch": 0.9260326609029779,
"grad_norm": 0.11629052460193634,
"learning_rate": 4.466408259263243e-06,
"loss": 3.2819660186767576,
"step": 4820
},
{
"epoch": 0.9269932756964457,
"grad_norm": 0.13381557166576385,
"learning_rate": 4.351711570815014e-06,
"loss": 3.278516387939453,
"step": 4825
},
{
"epoch": 0.9279538904899135,
"grad_norm": 0.12584474682807922,
"learning_rate": 4.238485121066154e-06,
"loss": 3.281688690185547,
"step": 4830
},
{
"epoch": 0.9289145052833814,
"grad_norm": 0.12782976031303406,
"learning_rate": 4.126730052974908e-06,
"loss": 3.279840087890625,
"step": 4835
},
{
"epoch": 0.9298751200768491,
"grad_norm": 0.1391313374042511,
"learning_rate": 4.016447494646718e-06,
"loss": 3.2828041076660157,
"step": 4840
},
{
"epoch": 0.930835734870317,
"grad_norm": 0.11799421906471252,
"learning_rate": 3.907638559322817e-06,
"loss": 3.284708786010742,
"step": 4845
},
{
"epoch": 0.9317963496637848,
"grad_norm": 0.14186686277389526,
"learning_rate": 3.8003043453691207e-06,
"loss": 3.2787437438964844,
"step": 4850
},
{
"epoch": 0.9327569644572526,
"grad_norm": 0.11602967977523804,
"learning_rate": 3.69444593626495e-06,
"loss": 3.28221435546875,
"step": 4855
},
{
"epoch": 0.9337175792507204,
"grad_norm": 0.13371752202510834,
"learning_rate": 3.5900644005922465e-06,
"loss": 3.2848297119140626,
"step": 4860
},
{
"epoch": 0.9346781940441883,
"grad_norm": 0.12414630502462387,
"learning_rate": 3.48716079202474e-06,
"loss": 3.2800872802734373,
"step": 4865
},
{
"epoch": 0.9356388088376562,
"grad_norm": 0.11889371275901794,
"learning_rate": 3.385736149317264e-06,
"loss": 3.2823089599609374,
"step": 4870
},
{
"epoch": 0.9365994236311239,
"grad_norm": 0.1263454407453537,
"learning_rate": 3.2857914962953926e-06,
"loss": 3.2813446044921877,
"step": 4875
},
{
"epoch": 0.9375600384245918,
"grad_norm": 0.12067841738462448,
"learning_rate": 3.1873278418449e-06,
"loss": 3.2796573638916016,
"step": 4880
},
{
"epoch": 0.9385206532180596,
"grad_norm": 0.137226864695549,
"learning_rate": 3.090346179901837e-06,
"loss": 3.2818023681640627,
"step": 4885
},
{
"epoch": 0.9394812680115274,
"grad_norm": 0.11869537830352783,
"learning_rate": 2.99484748944227e-06,
"loss": 3.2809097290039064,
"step": 4890
},
{
"epoch": 0.9404418828049952,
"grad_norm": 0.11975245177745819,
"learning_rate": 2.9008327344724913e-06,
"loss": 3.282526397705078,
"step": 4895
},
{
"epoch": 0.9414024975984631,
"grad_norm": 0.11280205845832825,
"learning_rate": 2.808302864019374e-06,
"loss": 3.279458999633789,
"step": 4900
},
{
"epoch": 0.9423631123919308,
"grad_norm": 0.13222162425518036,
"learning_rate": 2.7172588121205983e-06,
"loss": 3.2817481994628905,
"step": 4905
},
{
"epoch": 0.9433237271853987,
"grad_norm": 0.09990071505308151,
"learning_rate": 2.62770149781541e-06,
"loss": 3.281599426269531,
"step": 4910
},
{
"epoch": 0.9442843419788665,
"grad_norm": 0.1150190606713295,
"learning_rate": 2.539631825135191e-06,
"loss": 3.2885650634765624,
"step": 4915
},
{
"epoch": 0.9452449567723343,
"grad_norm": 0.13486024737358093,
"learning_rate": 2.4530506830944876e-06,
"loss": 3.2844482421875,
"step": 4920
},
{
"epoch": 0.9462055715658021,
"grad_norm": 0.11893726140260696,
"learning_rate": 2.367958945681897e-06,
"loss": 3.28011474609375,
"step": 4925
},
{
"epoch": 0.94716618635927,
"grad_norm": 0.1119316965341568,
"learning_rate": 2.2843574718513114e-06,
"loss": 3.2823932647705076,
"step": 4930
},
{
"epoch": 0.9481268011527377,
"grad_norm": 0.09861624985933304,
"learning_rate": 2.2022471055132553e-06,
"loss": 3.281215286254883,
"step": 4935
},
{
"epoch": 0.9490874159462056,
"grad_norm": 0.12267674505710602,
"learning_rate": 2.1216286755263434e-06,
"loss": 3.281680679321289,
"step": 4940
},
{
"epoch": 0.9500480307396734,
"grad_norm": 0.11797958612442017,
"learning_rate": 2.042502995688905e-06,
"loss": 3.2819839477539063,
"step": 4945
},
{
"epoch": 0.9510086455331412,
"grad_norm": 0.10683488100767136,
"learning_rate": 1.964870864730789e-06,
"loss": 3.2817310333251952,
"step": 4950
},
{
"epoch": 0.951969260326609,
"grad_norm": 0.12260305136442184,
"learning_rate": 1.8887330663053536e-06,
"loss": 3.2814239501953124,
"step": 4955
},
{
"epoch": 0.9529298751200769,
"grad_norm": 0.12215402722358704,
"learning_rate": 1.8140903689814079e-06,
"loss": 3.283163070678711,
"step": 4960
},
{
"epoch": 0.9538904899135446,
"grad_norm": 0.1066419780254364,
"learning_rate": 1.740943526235583e-06,
"loss": 3.2810131072998048,
"step": 4965
},
{
"epoch": 0.9548511047070125,
"grad_norm": 0.10814165323972702,
"learning_rate": 1.6692932764447054e-06,
"loss": 3.281442642211914,
"step": 4970
},
{
"epoch": 0.9558117195004803,
"grad_norm": 0.10270337015390396,
"learning_rate": 1.5991403428783188e-06,
"loss": 3.283908462524414,
"step": 4975
},
{
"epoch": 0.9567723342939481,
"grad_norm": 0.10882619768381119,
"learning_rate": 1.5304854336913752e-06,
"loss": 3.2817230224609375,
"step": 4980
},
{
"epoch": 0.9577329490874159,
"grad_norm": 0.10903850197792053,
"learning_rate": 1.463329241917105e-06,
"loss": 3.282952880859375,
"step": 4985
},
{
"epoch": 0.9586935638808838,
"grad_norm": 0.11845772713422775,
"learning_rate": 1.397672445460024e-06,
"loss": 3.2793148040771483,
"step": 4990
},
{
"epoch": 0.9596541786743515,
"grad_norm": 0.10239589214324951,
"learning_rate": 1.333515707089089e-06,
"loss": 3.28063850402832,
"step": 4995
},
{
"epoch": 0.9606147934678194,
"grad_norm": 0.10683233290910721,
"learning_rate": 1.2708596744309685e-06,
"loss": 3.2818748474121096,
"step": 5000
},
{
"epoch": 0.9615754082612872,
"grad_norm": 0.1100321039557457,
"learning_rate": 1.209704979963616e-06,
"loss": 3.287214660644531,
"step": 5005
},
{
"epoch": 0.962536023054755,
"grad_norm": 0.10722316801548004,
"learning_rate": 1.1500522410096912e-06,
"loss": 3.2820858001708983,
"step": 5010
},
{
"epoch": 0.9634966378482228,
"grad_norm": 0.09480059891939163,
"learning_rate": 1.0919020597305649e-06,
"loss": 3.281991195678711,
"step": 5015
},
{
"epoch": 0.9644572526416907,
"grad_norm": 0.09626810252666473,
"learning_rate": 1.0352550231200407e-06,
"loss": 3.282924270629883,
"step": 5020
},
{
"epoch": 0.9654178674351584,
"grad_norm": 0.09960578382015228,
"learning_rate": 9.801117029985596e-07,
"loss": 3.280469512939453,
"step": 5025
},
{
"epoch": 0.9663784822286263,
"grad_norm": 0.10461635142564774,
"learning_rate": 9.264726560073377e-07,
"loss": 3.282293701171875,
"step": 5030
},
{
"epoch": 0.9673390970220941,
"grad_norm": 0.09554687887430191,
"learning_rate": 8.743384236028051e-07,
"loss": 3.2805267333984376,
"step": 5035
},
{
"epoch": 0.968299711815562,
"grad_norm": 0.09053874015808105,
"learning_rate": 8.237095320511589e-07,
"loss": 3.282093048095703,
"step": 5040
},
{
"epoch": 0.9692603266090298,
"grad_norm": 0.09056618809700012,
"learning_rate": 7.745864924229351e-07,
"loss": 3.2803783416748047,
"step": 5045
},
{
"epoch": 0.9702209414024976,
"grad_norm": 0.09100056439638138,
"learning_rate": 7.26969800588012e-07,
"loss": 3.279576873779297,
"step": 5050
},
{
"epoch": 0.9711815561959655,
"grad_norm": 0.10496609658002853,
"learning_rate": 6.808599372104817e-07,
"loss": 3.2778125762939454,
"step": 5055
},
{
"epoch": 0.9721421709894332,
"grad_norm": 0.09851839393377304,
"learning_rate": 6.362573677438199e-07,
"loss": 3.2856407165527344,
"step": 5060
},
{
"epoch": 0.9731027857829011,
"grad_norm": 0.1031249463558197,
"learning_rate": 5.931625424262731e-07,
"loss": 3.281835174560547,
"step": 5065
},
{
"epoch": 0.9740634005763689,
"grad_norm": 0.0869988277554512,
"learning_rate": 5.51575896276163e-07,
"loss": 3.2821426391601562,
"step": 5070
},
{
"epoch": 0.9750240153698367,
"grad_norm": 0.10024792701005936,
"learning_rate": 5.11497849087622e-07,
"loss": 3.278700256347656,
"step": 5075
},
{
"epoch": 0.9759846301633045,
"grad_norm": 0.08857379108667374,
"learning_rate": 4.7292880542634805e-07,
"loss": 3.2780426025390623,
"step": 5080
},
{
"epoch": 0.9769452449567724,
"grad_norm": 0.08493303507566452,
"learning_rate": 4.358691546254067e-07,
"loss": 3.279518890380859,
"step": 5085
},
{
"epoch": 0.9779058597502401,
"grad_norm": 0.09684525430202484,
"learning_rate": 4.0031927078145176e-07,
"loss": 3.281083679199219,
"step": 5090
},
{
"epoch": 0.978866474543708,
"grad_norm": 0.09520915895700455,
"learning_rate": 3.662795127508111e-07,
"loss": 3.2831382751464844,
"step": 5095
},
{
"epoch": 0.9798270893371758,
"grad_norm": 0.09378495812416077,
"learning_rate": 3.3375022414598994e-07,
"loss": 3.280741882324219,
"step": 5100
},
{
"epoch": 0.9807877041306436,
"grad_norm": 0.0972544252872467,
"learning_rate": 3.027317333321233e-07,
"loss": 3.282391357421875,
"step": 5105
},
{
"epoch": 0.9817483189241114,
"grad_norm": 0.09050152450799942,
"learning_rate": 2.7322435342364556e-07,
"loss": 3.2794769287109373,
"step": 5110
},
{
"epoch": 0.9827089337175793,
"grad_norm": 0.098331019282341,
"learning_rate": 2.452283822812262e-07,
"loss": 3.283687210083008,
"step": 5115
},
{
"epoch": 0.983669548511047,
"grad_norm": 0.09614613652229309,
"learning_rate": 2.1874410250863893e-07,
"loss": 3.2843692779541014,
"step": 5120
},
{
"epoch": 0.9846301633045149,
"grad_norm": 0.08628969639539719,
"learning_rate": 1.9377178145003059e-07,
"loss": 3.2782211303710938,
"step": 5125
},
{
"epoch": 0.9855907780979827,
"grad_norm": 0.09115748107433319,
"learning_rate": 1.7031167118708998e-07,
"loss": 3.282467269897461,
"step": 5130
},
{
"epoch": 0.9865513928914506,
"grad_norm": 0.08609933406114578,
"learning_rate": 1.4836400853666662e-07,
"loss": 3.280436706542969,
"step": 5135
},
{
"epoch": 0.9875120076849183,
"grad_norm": 0.08899597823619843,
"learning_rate": 1.2792901504820595e-07,
"loss": 3.2786022186279298,
"step": 5140
},
{
"epoch": 0.9884726224783862,
"grad_norm": 0.0838359072804451,
"learning_rate": 1.0900689700166776e-07,
"loss": 3.283920669555664,
"step": 5145
},
{
"epoch": 0.989433237271854,
"grad_norm": 0.08638288825750351,
"learning_rate": 9.159784540531124e-08,
"loss": 3.2787384033203124,
"step": 5150
},
{
"epoch": 0.9903938520653218,
"grad_norm": 0.08724058419466019,
"learning_rate": 7.57020359938798e-08,
"loss": 3.2797054290771483,
"step": 5155
},
{
"epoch": 0.9913544668587896,
"grad_norm": 0.08608481287956238,
"learning_rate": 6.131962922673595e-08,
"loss": 3.2824764251708984,
"step": 5160
},
{
"epoch": 0.9923150816522575,
"grad_norm": 0.08680781722068787,
"learning_rate": 4.845077028631239e-08,
"loss": 3.2829864501953123,
"step": 5165
},
{
"epoch": 0.9932756964457252,
"grad_norm": 0.08766987174749374,
"learning_rate": 3.709558907659671e-08,
"loss": 3.279438018798828,
"step": 5170
},
{
"epoch": 0.9942363112391931,
"grad_norm": 0.08375875651836395,
"learning_rate": 2.7254200221848988e-08,
"loss": 3.28199462890625,
"step": 5175
},
{
"epoch": 0.9951969260326609,
"grad_norm": 0.08243842422962189,
"learning_rate": 1.8926703065436087e-08,
"loss": 3.281136322021484,
"step": 5180
},
{
"epoch": 0.9961575408261287,
"grad_norm": 0.08867152035236359,
"learning_rate": 1.2113181668815808e-08,
"loss": 3.2808319091796876,
"step": 5185
},
{
"epoch": 0.9971181556195965,
"grad_norm": 0.08233336359262466,
"learning_rate": 6.813704810704201e-09,
"loss": 3.28259162902832,
"step": 5190
},
{
"epoch": 0.9980787704130644,
"grad_norm": 0.08597059547901154,
"learning_rate": 3.028325986392799e-09,
"loss": 3.281261444091797,
"step": 5195
},
{
"epoch": 0.9990393852065321,
"grad_norm": 0.08740798383951187,
"learning_rate": 7.570834071823905e-10,
"loss": 3.2809608459472654,
"step": 5200
},
{
"epoch": 1.0,
"grad_norm": 0.08548780530691147,
"learning_rate": 0.0,
"loss": 3.2842376708984373,
"step": 5205
}
],
"logging_steps": 5,
"max_steps": 5205,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 9.770678188442963e+19,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}