{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 230,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008705114254624592,
"grad_norm": 1.703125,
"learning_rate": 5e-05,
"loss": 0.8632,
"step": 1
},
{
"epoch": 0.017410228509249184,
"grad_norm": 2.015625,
"learning_rate": 4.9956521739130436e-05,
"loss": 0.7891,
"step": 2
},
{
"epoch": 0.026115342763873776,
"grad_norm": 8.375,
"learning_rate": 4.9913043478260876e-05,
"loss": 0.7896,
"step": 3
},
{
"epoch": 0.03482045701849837,
"grad_norm": 3.328125,
"learning_rate": 4.986956521739131e-05,
"loss": 0.9271,
"step": 4
},
{
"epoch": 0.04352557127312296,
"grad_norm": 1.640625,
"learning_rate": 4.9826086956521736e-05,
"loss": 0.9154,
"step": 5
},
{
"epoch": 0.05223068552774755,
"grad_norm": 1.5390625,
"learning_rate": 4.9782608695652176e-05,
"loss": 0.9267,
"step": 6
},
{
"epoch": 0.060935799782372145,
"grad_norm": 2.71875,
"learning_rate": 4.973913043478261e-05,
"loss": 0.8189,
"step": 7
},
{
"epoch": 0.06964091403699674,
"grad_norm": 1.5859375,
"learning_rate": 4.969565217391304e-05,
"loss": 0.8018,
"step": 8
},
{
"epoch": 0.07834602829162132,
"grad_norm": 1.359375,
"learning_rate": 4.9652173913043483e-05,
"loss": 0.9439,
"step": 9
},
{
"epoch": 0.08705114254624592,
"grad_norm": 2.375,
"learning_rate": 4.960869565217392e-05,
"loss": 0.8444,
"step": 10
},
{
"epoch": 0.0957562568008705,
"grad_norm": 2.125,
"learning_rate": 4.956521739130435e-05,
"loss": 1.013,
"step": 11
},
{
"epoch": 0.1044613710554951,
"grad_norm": 1.7109375,
"learning_rate": 4.9521739130434784e-05,
"loss": 1.1719,
"step": 12
},
{
"epoch": 0.11316648531011969,
"grad_norm": 1.4296875,
"learning_rate": 4.947826086956522e-05,
"loss": 1.0034,
"step": 13
},
{
"epoch": 0.12187159956474429,
"grad_norm": 1.25,
"learning_rate": 4.943478260869566e-05,
"loss": 1.0015,
"step": 14
},
{
"epoch": 0.1305767138193689,
"grad_norm": 1.25,
"learning_rate": 4.939130434782609e-05,
"loss": 0.9413,
"step": 15
},
{
"epoch": 0.13928182807399347,
"grad_norm": 1.5546875,
"learning_rate": 4.9347826086956524e-05,
"loss": 1.0435,
"step": 16
},
{
"epoch": 0.14798694232861806,
"grad_norm": 2.265625,
"learning_rate": 4.930434782608696e-05,
"loss": 0.9285,
"step": 17
},
{
"epoch": 0.15669205658324264,
"grad_norm": 1.3046875,
"learning_rate": 4.926086956521739e-05,
"loss": 0.91,
"step": 18
},
{
"epoch": 0.16539717083786726,
"grad_norm": 1.296875,
"learning_rate": 4.9217391304347824e-05,
"loss": 0.937,
"step": 19
},
{
"epoch": 0.17410228509249184,
"grad_norm": 1.28125,
"learning_rate": 4.9173913043478265e-05,
"loss": 0.9194,
"step": 20
},
{
"epoch": 0.18280739934711643,
"grad_norm": 1.203125,
"learning_rate": 4.91304347826087e-05,
"loss": 0.9392,
"step": 21
},
{
"epoch": 0.191512513601741,
"grad_norm": 1.3203125,
"learning_rate": 4.908695652173913e-05,
"loss": 0.935,
"step": 22
},
{
"epoch": 0.20021762785636563,
"grad_norm": 1.2421875,
"learning_rate": 4.904347826086957e-05,
"loss": 0.7352,
"step": 23
},
{
"epoch": 0.2089227421109902,
"grad_norm": 1.2578125,
"learning_rate": 4.9e-05,
"loss": 0.8006,
"step": 24
},
{
"epoch": 0.2176278563656148,
"grad_norm": 1.3046875,
"learning_rate": 4.895652173913044e-05,
"loss": 1.1003,
"step": 25
},
{
"epoch": 0.22633297062023938,
"grad_norm": 1.578125,
"learning_rate": 4.891304347826087e-05,
"loss": 0.8786,
"step": 26
},
{
"epoch": 0.235038084874864,
"grad_norm": 1.390625,
"learning_rate": 4.8869565217391305e-05,
"loss": 0.9133,
"step": 27
},
{
"epoch": 0.24374319912948858,
"grad_norm": 1.109375,
"learning_rate": 4.8826086956521746e-05,
"loss": 0.9822,
"step": 28
},
{
"epoch": 0.25244831338411317,
"grad_norm": 1.6640625,
"learning_rate": 4.878260869565218e-05,
"loss": 1.0016,
"step": 29
},
{
"epoch": 0.2611534276387378,
"grad_norm": 1.34375,
"learning_rate": 4.873913043478261e-05,
"loss": 1.1215,
"step": 30
},
{
"epoch": 0.26985854189336234,
"grad_norm": 1.171875,
"learning_rate": 4.8695652173913046e-05,
"loss": 1.1457,
"step": 31
},
{
"epoch": 0.27856365614798695,
"grad_norm": 1.3671875,
"learning_rate": 4.865217391304348e-05,
"loss": 1.0368,
"step": 32
},
{
"epoch": 0.28726877040261156,
"grad_norm": 1.140625,
"learning_rate": 4.860869565217391e-05,
"loss": 0.9539,
"step": 33
},
{
"epoch": 0.2959738846572361,
"grad_norm": 1.4296875,
"learning_rate": 4.856521739130435e-05,
"loss": 0.9165,
"step": 34
},
{
"epoch": 0.30467899891186073,
"grad_norm": 1.2265625,
"learning_rate": 4.8521739130434786e-05,
"loss": 1.03,
"step": 35
},
{
"epoch": 0.3133841131664853,
"grad_norm": 1.5703125,
"learning_rate": 4.847826086956522e-05,
"loss": 1.2848,
"step": 36
},
{
"epoch": 0.3220892274211099,
"grad_norm": 1.21875,
"learning_rate": 4.843478260869565e-05,
"loss": 0.8663,
"step": 37
},
{
"epoch": 0.3307943416757345,
"grad_norm": 1.125,
"learning_rate": 4.839130434782609e-05,
"loss": 0.9547,
"step": 38
},
{
"epoch": 0.3394994559303591,
"grad_norm": 1.328125,
"learning_rate": 4.834782608695652e-05,
"loss": 0.872,
"step": 39
},
{
"epoch": 0.3482045701849837,
"grad_norm": 1.140625,
"learning_rate": 4.830434782608696e-05,
"loss": 0.8424,
"step": 40
},
{
"epoch": 0.35690968443960824,
"grad_norm": 1.2578125,
"learning_rate": 4.8260869565217394e-05,
"loss": 0.8246,
"step": 41
},
{
"epoch": 0.36561479869423286,
"grad_norm": 1.3046875,
"learning_rate": 4.8217391304347834e-05,
"loss": 0.8326,
"step": 42
},
{
"epoch": 0.37431991294885747,
"grad_norm": 1.2109375,
"learning_rate": 4.817391304347826e-05,
"loss": 0.9043,
"step": 43
},
{
"epoch": 0.383025027203482,
"grad_norm": 1.296875,
"learning_rate": 4.8130434782608694e-05,
"loss": 0.8888,
"step": 44
},
{
"epoch": 0.39173014145810664,
"grad_norm": 1.2265625,
"learning_rate": 4.8086956521739134e-05,
"loss": 0.778,
"step": 45
},
{
"epoch": 0.40043525571273125,
"grad_norm": 1.0546875,
"learning_rate": 4.804347826086957e-05,
"loss": 0.8684,
"step": 46
},
{
"epoch": 0.4091403699673558,
"grad_norm": 1.5078125,
"learning_rate": 4.8e-05,
"loss": 0.9351,
"step": 47
},
{
"epoch": 0.4178454842219804,
"grad_norm": 1.2421875,
"learning_rate": 4.795652173913044e-05,
"loss": 0.8708,
"step": 48
},
{
"epoch": 0.426550598476605,
"grad_norm": 1.28125,
"learning_rate": 4.7913043478260875e-05,
"loss": 0.9919,
"step": 49
},
{
"epoch": 0.4352557127312296,
"grad_norm": 1.1953125,
"learning_rate": 4.78695652173913e-05,
"loss": 0.8166,
"step": 50
},
{
"epoch": 0.4439608269858542,
"grad_norm": 1.3203125,
"learning_rate": 4.782608695652174e-05,
"loss": 0.9542,
"step": 51
},
{
"epoch": 0.45266594124047876,
"grad_norm": 1.2109375,
"learning_rate": 4.7782608695652175e-05,
"loss": 1.0959,
"step": 52
},
{
"epoch": 0.4613710554951034,
"grad_norm": 1.140625,
"learning_rate": 4.773913043478261e-05,
"loss": 0.8695,
"step": 53
},
{
"epoch": 0.470076169749728,
"grad_norm": 1.5234375,
"learning_rate": 4.769565217391305e-05,
"loss": 1.1411,
"step": 54
},
{
"epoch": 0.47878128400435255,
"grad_norm": 1.125,
"learning_rate": 4.765217391304348e-05,
"loss": 0.8354,
"step": 55
},
{
"epoch": 0.48748639825897716,
"grad_norm": 1.390625,
"learning_rate": 4.7608695652173916e-05,
"loss": 0.9133,
"step": 56
},
{
"epoch": 0.4961915125136017,
"grad_norm": 1.2578125,
"learning_rate": 4.756521739130435e-05,
"loss": 0.9042,
"step": 57
},
{
"epoch": 0.5048966267682263,
"grad_norm": 1.28125,
"learning_rate": 4.752173913043478e-05,
"loss": 0.9707,
"step": 58
},
{
"epoch": 0.5136017410228509,
"grad_norm": 1.2734375,
"learning_rate": 4.747826086956522e-05,
"loss": 0.7845,
"step": 59
},
{
"epoch": 0.5223068552774756,
"grad_norm": 1.3046875,
"learning_rate": 4.7434782608695656e-05,
"loss": 0.955,
"step": 60
},
{
"epoch": 0.5310119695321001,
"grad_norm": 1.546875,
"learning_rate": 4.739130434782609e-05,
"loss": 0.9521,
"step": 61
},
{
"epoch": 0.5397170837867247,
"grad_norm": 1.2578125,
"learning_rate": 4.734782608695652e-05,
"loss": 1.0155,
"step": 62
},
{
"epoch": 0.5484221980413493,
"grad_norm": 1.3125,
"learning_rate": 4.7304347826086956e-05,
"loss": 1.0093,
"step": 63
},
{
"epoch": 0.5571273122959739,
"grad_norm": 1.234375,
"learning_rate": 4.726086956521739e-05,
"loss": 0.8372,
"step": 64
},
{
"epoch": 0.5658324265505985,
"grad_norm": 1.0703125,
"learning_rate": 4.721739130434783e-05,
"loss": 0.8776,
"step": 65
},
{
"epoch": 0.5745375408052231,
"grad_norm": 1.2265625,
"learning_rate": 4.7173913043478264e-05,
"loss": 0.8854,
"step": 66
},
{
"epoch": 0.5832426550598476,
"grad_norm": 1.1171875,
"learning_rate": 4.71304347826087e-05,
"loss": 0.8966,
"step": 67
},
{
"epoch": 0.5919477693144722,
"grad_norm": 1.296875,
"learning_rate": 4.708695652173914e-05,
"loss": 0.9694,
"step": 68
},
{
"epoch": 0.6006528835690969,
"grad_norm": 1.1875,
"learning_rate": 4.7043478260869564e-05,
"loss": 1.062,
"step": 69
},
{
"epoch": 0.6093579978237215,
"grad_norm": 1.078125,
"learning_rate": 4.7e-05,
"loss": 0.8369,
"step": 70
},
{
"epoch": 0.6180631120783461,
"grad_norm": 1.1953125,
"learning_rate": 4.695652173913044e-05,
"loss": 0.7927,
"step": 71
},
{
"epoch": 0.6267682263329706,
"grad_norm": 1.4140625,
"learning_rate": 4.691304347826087e-05,
"loss": 1.3626,
"step": 72
},
{
"epoch": 0.6354733405875952,
"grad_norm": 1.1328125,
"learning_rate": 4.686956521739131e-05,
"loss": 0.7737,
"step": 73
},
{
"epoch": 0.6441784548422198,
"grad_norm": 1.0859375,
"learning_rate": 4.6826086956521745e-05,
"loss": 0.9044,
"step": 74
},
{
"epoch": 0.6528835690968444,
"grad_norm": 1.078125,
"learning_rate": 4.678260869565218e-05,
"loss": 0.9451,
"step": 75
},
{
"epoch": 0.661588683351469,
"grad_norm": 1.234375,
"learning_rate": 4.673913043478261e-05,
"loss": 1.0073,
"step": 76
},
{
"epoch": 0.6702937976060935,
"grad_norm": 1.1484375,
"learning_rate": 4.6695652173913045e-05,
"loss": 1.0206,
"step": 77
},
{
"epoch": 0.6789989118607181,
"grad_norm": 1.25,
"learning_rate": 4.665217391304348e-05,
"loss": 0.7854,
"step": 78
},
{
"epoch": 0.6877040261153428,
"grad_norm": 1.0859375,
"learning_rate": 4.660869565217392e-05,
"loss": 0.9661,
"step": 79
},
{
"epoch": 0.6964091403699674,
"grad_norm": 1.296875,
"learning_rate": 4.656521739130435e-05,
"loss": 1.0657,
"step": 80
},
{
"epoch": 0.705114254624592,
"grad_norm": 1.0390625,
"learning_rate": 4.6521739130434785e-05,
"loss": 0.6992,
"step": 81
},
{
"epoch": 0.7138193688792165,
"grad_norm": 1.0703125,
"learning_rate": 4.647826086956522e-05,
"loss": 0.8135,
"step": 82
},
{
"epoch": 0.7225244831338411,
"grad_norm": 1.359375,
"learning_rate": 4.643478260869565e-05,
"loss": 1.1033,
"step": 83
},
{
"epoch": 0.7312295973884657,
"grad_norm": 1.3828125,
"learning_rate": 4.6391304347826086e-05,
"loss": 0.8077,
"step": 84
},
{
"epoch": 0.7399347116430903,
"grad_norm": 1.0859375,
"learning_rate": 4.6347826086956526e-05,
"loss": 0.8582,
"step": 85
},
{
"epoch": 0.7486398258977149,
"grad_norm": 1.140625,
"learning_rate": 4.630434782608696e-05,
"loss": 1.0223,
"step": 86
},
{
"epoch": 0.7573449401523396,
"grad_norm": 1.2734375,
"learning_rate": 4.62608695652174e-05,
"loss": 0.7866,
"step": 87
},
{
"epoch": 0.766050054406964,
"grad_norm": 1.1484375,
"learning_rate": 4.6217391304347826e-05,
"loss": 0.804,
"step": 88
},
{
"epoch": 0.7747551686615887,
"grad_norm": 0.96484375,
"learning_rate": 4.617391304347826e-05,
"loss": 0.681,
"step": 89
},
{
"epoch": 0.7834602829162133,
"grad_norm": 1.0078125,
"learning_rate": 4.61304347826087e-05,
"loss": 0.9594,
"step": 90
},
{
"epoch": 0.7921653971708379,
"grad_norm": 1.0546875,
"learning_rate": 4.608695652173913e-05,
"loss": 0.9842,
"step": 91
},
{
"epoch": 0.8008705114254625,
"grad_norm": 1.09375,
"learning_rate": 4.6043478260869567e-05,
"loss": 0.9142,
"step": 92
},
{
"epoch": 0.809575625680087,
"grad_norm": 1.15625,
"learning_rate": 4.600000000000001e-05,
"loss": 0.7379,
"step": 93
},
{
"epoch": 0.8182807399347116,
"grad_norm": 1.3984375,
"learning_rate": 4.595652173913044e-05,
"loss": 1.0656,
"step": 94
},
{
"epoch": 0.8269858541893362,
"grad_norm": 1.15625,
"learning_rate": 4.591304347826087e-05,
"loss": 0.9404,
"step": 95
},
{
"epoch": 0.8356909684439608,
"grad_norm": 1.109375,
"learning_rate": 4.586956521739131e-05,
"loss": 1.0705,
"step": 96
},
{
"epoch": 0.8443960826985855,
"grad_norm": 1.1171875,
"learning_rate": 4.582608695652174e-05,
"loss": 1.1848,
"step": 97
},
{
"epoch": 0.85310119695321,
"grad_norm": 1.15625,
"learning_rate": 4.5782608695652174e-05,
"loss": 0.8771,
"step": 98
},
{
"epoch": 0.8618063112078346,
"grad_norm": 1.265625,
"learning_rate": 4.5739130434782614e-05,
"loss": 0.9586,
"step": 99
},
{
"epoch": 0.8705114254624592,
"grad_norm": 1.1015625,
"learning_rate": 4.569565217391305e-05,
"loss": 0.7516,
"step": 100
},
{
"epoch": 0.8792165397170838,
"grad_norm": 1.3125,
"learning_rate": 4.565217391304348e-05,
"loss": 0.8131,
"step": 101
},
{
"epoch": 0.8879216539717084,
"grad_norm": 1.109375,
"learning_rate": 4.5608695652173914e-05,
"loss": 0.9118,
"step": 102
},
{
"epoch": 0.8966267682263329,
"grad_norm": 1.4140625,
"learning_rate": 4.556521739130435e-05,
"loss": 0.8855,
"step": 103
},
{
"epoch": 0.9053318824809575,
"grad_norm": 1.1796875,
"learning_rate": 4.552173913043479e-05,
"loss": 0.7721,
"step": 104
},
{
"epoch": 0.9140369967355821,
"grad_norm": 1.2265625,
"learning_rate": 4.547826086956522e-05,
"loss": 0.7768,
"step": 105
},
{
"epoch": 0.9227421109902068,
"grad_norm": 1.0703125,
"learning_rate": 4.5434782608695655e-05,
"loss": 0.9557,
"step": 106
},
{
"epoch": 0.9314472252448314,
"grad_norm": 1.0625,
"learning_rate": 4.539130434782609e-05,
"loss": 0.8868,
"step": 107
},
{
"epoch": 0.940152339499456,
"grad_norm": 1.15625,
"learning_rate": 4.534782608695652e-05,
"loss": 0.8889,
"step": 108
},
{
"epoch": 0.9488574537540805,
"grad_norm": 1.15625,
"learning_rate": 4.5304347826086955e-05,
"loss": 1.1122,
"step": 109
},
{
"epoch": 0.9575625680087051,
"grad_norm": 1.1875,
"learning_rate": 4.5260869565217395e-05,
"loss": 0.7954,
"step": 110
},
{
"epoch": 0.9662676822633297,
"grad_norm": 1.1796875,
"learning_rate": 4.521739130434783e-05,
"loss": 0.8551,
"step": 111
},
{
"epoch": 0.9749727965179543,
"grad_norm": 1.1953125,
"learning_rate": 4.517391304347826e-05,
"loss": 1.0285,
"step": 112
},
{
"epoch": 0.9836779107725789,
"grad_norm": 1.25,
"learning_rate": 4.51304347826087e-05,
"loss": 1.0737,
"step": 113
},
{
"epoch": 0.9923830250272034,
"grad_norm": 1.015625,
"learning_rate": 4.508695652173913e-05,
"loss": 0.8448,
"step": 114
},
{
"epoch": 1.0,
"grad_norm": 1.2421875,
"learning_rate": 4.504347826086956e-05,
"loss": 0.9171,
"step": 115
},
{
"epoch": 1.0087051142546246,
"grad_norm": 1.234375,
"learning_rate": 4.5e-05,
"loss": 0.7003,
"step": 116
},
{
"epoch": 1.0174102285092492,
"grad_norm": 1.5078125,
"learning_rate": 4.4956521739130436e-05,
"loss": 0.7938,
"step": 117
},
{
"epoch": 1.0261153427638738,
"grad_norm": 1.09375,
"learning_rate": 4.4913043478260876e-05,
"loss": 0.7323,
"step": 118
},
{
"epoch": 1.0348204570184985,
"grad_norm": 1.2421875,
"learning_rate": 4.486956521739131e-05,
"loss": 0.5957,
"step": 119
},
{
"epoch": 1.043525571273123,
"grad_norm": 1.453125,
"learning_rate": 4.482608695652174e-05,
"loss": 0.7519,
"step": 120
},
{
"epoch": 1.0522306855277475,
"grad_norm": 1.4453125,
"learning_rate": 4.478260869565218e-05,
"loss": 0.7003,
"step": 121
},
{
"epoch": 1.060935799782372,
"grad_norm": 1.4296875,
"learning_rate": 4.473913043478261e-05,
"loss": 0.8537,
"step": 122
},
{
"epoch": 1.0696409140369967,
"grad_norm": 1.2265625,
"learning_rate": 4.4695652173913044e-05,
"loss": 0.7305,
"step": 123
},
{
"epoch": 1.0783460282916213,
"grad_norm": 1.3203125,
"learning_rate": 4.4652173913043484e-05,
"loss": 0.6122,
"step": 124
},
{
"epoch": 1.087051142546246,
"grad_norm": 1.296875,
"learning_rate": 4.460869565217392e-05,
"loss": 0.9224,
"step": 125
},
{
"epoch": 1.0957562568008705,
"grad_norm": 0.94921875,
"learning_rate": 4.456521739130435e-05,
"loss": 0.6261,
"step": 126
},
{
"epoch": 1.1044613710554951,
"grad_norm": 1.125,
"learning_rate": 4.4521739130434784e-05,
"loss": 0.7336,
"step": 127
},
{
"epoch": 1.1131664853101197,
"grad_norm": 1.125,
"learning_rate": 4.447826086956522e-05,
"loss": 0.6978,
"step": 128
},
{
"epoch": 1.1218715995647444,
"grad_norm": 1.125,
"learning_rate": 4.443478260869565e-05,
"loss": 0.6352,
"step": 129
},
{
"epoch": 1.130576713819369,
"grad_norm": 1.125,
"learning_rate": 4.439130434782609e-05,
"loss": 0.4918,
"step": 130
},
{
"epoch": 1.1392818280739934,
"grad_norm": 1.25,
"learning_rate": 4.4347826086956525e-05,
"loss": 0.9074,
"step": 131
},
{
"epoch": 1.147986942328618,
"grad_norm": 1.296875,
"learning_rate": 4.430434782608696e-05,
"loss": 0.7653,
"step": 132
},
{
"epoch": 1.1566920565832426,
"grad_norm": 1.046875,
"learning_rate": 4.426086956521739e-05,
"loss": 0.6195,
"step": 133
},
{
"epoch": 1.1653971708378672,
"grad_norm": 1.1953125,
"learning_rate": 4.4217391304347825e-05,
"loss": 0.5887,
"step": 134
},
{
"epoch": 1.1741022850924918,
"grad_norm": 2.15625,
"learning_rate": 4.4173913043478265e-05,
"loss": 0.7727,
"step": 135
},
{
"epoch": 1.1828073993471164,
"grad_norm": 1.0703125,
"learning_rate": 4.41304347826087e-05,
"loss": 0.598,
"step": 136
},
{
"epoch": 1.191512513601741,
"grad_norm": 1.2265625,
"learning_rate": 4.408695652173913e-05,
"loss": 0.7572,
"step": 137
},
{
"epoch": 1.2002176278563657,
"grad_norm": 1.2265625,
"learning_rate": 4.404347826086957e-05,
"loss": 0.6791,
"step": 138
},
{
"epoch": 1.2089227421109903,
"grad_norm": 1.125,
"learning_rate": 4.4000000000000006e-05,
"loss": 0.7195,
"step": 139
},
{
"epoch": 1.2176278563656149,
"grad_norm": 0.984375,
"learning_rate": 4.395652173913043e-05,
"loss": 0.5021,
"step": 140
},
{
"epoch": 1.2263329706202395,
"grad_norm": 1.2578125,
"learning_rate": 4.391304347826087e-05,
"loss": 0.6567,
"step": 141
},
{
"epoch": 1.2350380848748639,
"grad_norm": 1.1875,
"learning_rate": 4.3869565217391306e-05,
"loss": 0.8551,
"step": 142
},
{
"epoch": 1.2437431991294885,
"grad_norm": 1.046875,
"learning_rate": 4.382608695652174e-05,
"loss": 0.7298,
"step": 143
},
{
"epoch": 1.252448313384113,
"grad_norm": 1.3203125,
"learning_rate": 4.378260869565218e-05,
"loss": 0.9639,
"step": 144
},
{
"epoch": 1.2611534276387377,
"grad_norm": 1.0546875,
"learning_rate": 4.373913043478261e-05,
"loss": 0.7321,
"step": 145
},
{
"epoch": 1.2698585418933623,
"grad_norm": 0.9921875,
"learning_rate": 4.3695652173913046e-05,
"loss": 0.6488,
"step": 146
},
{
"epoch": 1.278563656147987,
"grad_norm": 1.0703125,
"learning_rate": 4.365217391304348e-05,
"loss": 0.7783,
"step": 147
},
{
"epoch": 1.2872687704026116,
"grad_norm": 1.1484375,
"learning_rate": 4.360869565217391e-05,
"loss": 0.6042,
"step": 148
},
{
"epoch": 1.2959738846572362,
"grad_norm": 1.09375,
"learning_rate": 4.3565217391304353e-05,
"loss": 0.7951,
"step": 149
},
{
"epoch": 1.3046789989118608,
"grad_norm": 1.09375,
"learning_rate": 4.352173913043479e-05,
"loss": 0.6017,
"step": 150
},
{
"epoch": 1.3133841131664852,
"grad_norm": 1.2109375,
"learning_rate": 4.347826086956522e-05,
"loss": 0.6653,
"step": 151
},
{
"epoch": 1.32208922742111,
"grad_norm": 1.3828125,
"learning_rate": 4.3434782608695654e-05,
"loss": 0.9719,
"step": 152
},
{
"epoch": 1.3307943416757344,
"grad_norm": 1.015625,
"learning_rate": 4.339130434782609e-05,
"loss": 0.6013,
"step": 153
},
{
"epoch": 1.339499455930359,
"grad_norm": 1.1015625,
"learning_rate": 4.334782608695652e-05,
"loss": 0.6483,
"step": 154
},
{
"epoch": 1.3482045701849836,
"grad_norm": 1.0859375,
"learning_rate": 4.330434782608696e-05,
"loss": 0.6805,
"step": 155
},
{
"epoch": 1.3569096844396082,
"grad_norm": 1.140625,
"learning_rate": 4.3260869565217394e-05,
"loss": 0.7598,
"step": 156
},
{
"epoch": 1.3656147986942329,
"grad_norm": 1.046875,
"learning_rate": 4.321739130434783e-05,
"loss": 0.569,
"step": 157
},
{
"epoch": 1.3743199129488575,
"grad_norm": 1.0625,
"learning_rate": 4.317391304347827e-05,
"loss": 0.7223,
"step": 158
},
{
"epoch": 1.383025027203482,
"grad_norm": 1.1328125,
"learning_rate": 4.3130434782608695e-05,
"loss": 0.7309,
"step": 159
},
{
"epoch": 1.3917301414581067,
"grad_norm": 1.21875,
"learning_rate": 4.308695652173913e-05,
"loss": 0.9004,
"step": 160
},
{
"epoch": 1.4004352557127313,
"grad_norm": 1.125,
"learning_rate": 4.304347826086957e-05,
"loss": 0.6035,
"step": 161
},
{
"epoch": 1.4091403699673557,
"grad_norm": 1.2578125,
"learning_rate": 4.3e-05,
"loss": 0.957,
"step": 162
},
{
"epoch": 1.4178454842219805,
"grad_norm": 2.6875,
"learning_rate": 4.2956521739130435e-05,
"loss": 0.5841,
"step": 163
},
{
"epoch": 1.426550598476605,
"grad_norm": 1.1640625,
"learning_rate": 4.2913043478260875e-05,
"loss": 0.8018,
"step": 164
},
{
"epoch": 1.4352557127312295,
"grad_norm": 1.078125,
"learning_rate": 4.286956521739131e-05,
"loss": 0.6603,
"step": 165
},
{
"epoch": 1.4439608269858542,
"grad_norm": 1.40625,
"learning_rate": 4.282608695652174e-05,
"loss": 0.7107,
"step": 166
},
{
"epoch": 1.4526659412404788,
"grad_norm": 1.0390625,
"learning_rate": 4.2782608695652176e-05,
"loss": 0.7577,
"step": 167
},
{
"epoch": 1.4613710554951034,
"grad_norm": 1.0859375,
"learning_rate": 4.273913043478261e-05,
"loss": 0.7573,
"step": 168
},
{
"epoch": 1.470076169749728,
"grad_norm": 1.265625,
"learning_rate": 4.269565217391305e-05,
"loss": 0.7008,
"step": 169
},
{
"epoch": 1.4787812840043526,
"grad_norm": 1.1953125,
"learning_rate": 4.265217391304348e-05,
"loss": 0.6966,
"step": 170
},
{
"epoch": 1.4874863982589772,
"grad_norm": 1.1015625,
"learning_rate": 4.2608695652173916e-05,
"loss": 0.6855,
"step": 171
},
{
"epoch": 1.4961915125136018,
"grad_norm": 1.109375,
"learning_rate": 4.256521739130435e-05,
"loss": 0.67,
"step": 172
},
{
"epoch": 1.5048966267682262,
"grad_norm": 1.15625,
"learning_rate": 4.252173913043478e-05,
"loss": 0.8798,
"step": 173
},
{
"epoch": 1.513601741022851,
"grad_norm": 1.15625,
"learning_rate": 4.2478260869565216e-05,
"loss": 0.7921,
"step": 174
},
{
"epoch": 1.5223068552774754,
"grad_norm": 1.1171875,
"learning_rate": 4.2434782608695657e-05,
"loss": 0.6351,
"step": 175
},
{
"epoch": 1.5310119695321,
"grad_norm": 1.609375,
"learning_rate": 4.239130434782609e-05,
"loss": 0.6681,
"step": 176
},
{
"epoch": 1.5397170837867247,
"grad_norm": 1.265625,
"learning_rate": 4.2347826086956523e-05,
"loss": 0.7897,
"step": 177
},
{
"epoch": 1.5484221980413493,
"grad_norm": 1.2734375,
"learning_rate": 4.230434782608696e-05,
"loss": 0.772,
"step": 178
},
{
"epoch": 1.557127312295974,
"grad_norm": 1.0546875,
"learning_rate": 4.226086956521739e-05,
"loss": 0.5812,
"step": 179
},
{
"epoch": 1.5658324265505985,
"grad_norm": 1.0703125,
"learning_rate": 4.221739130434783e-05,
"loss": 0.5453,
"step": 180
},
{
"epoch": 1.5745375408052231,
"grad_norm": 1.0078125,
"learning_rate": 4.2173913043478264e-05,
"loss": 0.6831,
"step": 181
},
{
"epoch": 1.5832426550598475,
"grad_norm": 1.0625,
"learning_rate": 4.21304347826087e-05,
"loss": 0.767,
"step": 182
},
{
"epoch": 1.5919477693144723,
"grad_norm": 1.1484375,
"learning_rate": 4.208695652173914e-05,
"loss": 0.6145,
"step": 183
},
{
"epoch": 1.6006528835690967,
"grad_norm": 1.1015625,
"learning_rate": 4.204347826086957e-05,
"loss": 0.8171,
"step": 184
},
{
"epoch": 1.6093579978237216,
"grad_norm": 1.4609375,
"learning_rate": 4.2e-05,
"loss": 0.6039,
"step": 185
},
{
"epoch": 1.618063112078346,
"grad_norm": 1.1171875,
"learning_rate": 4.195652173913044e-05,
"loss": 0.7126,
"step": 186
},
{
"epoch": 1.6267682263329706,
"grad_norm": 1.1328125,
"learning_rate": 4.191304347826087e-05,
"loss": 0.5621,
"step": 187
},
{
"epoch": 1.6354733405875952,
"grad_norm": 1.0703125,
"learning_rate": 4.1869565217391305e-05,
"loss": 0.7605,
"step": 188
},
{
"epoch": 1.6441784548422198,
"grad_norm": 1.0625,
"learning_rate": 4.1826086956521745e-05,
"loss": 0.8704,
"step": 189
},
{
"epoch": 1.6528835690968444,
"grad_norm": 1.359375,
"learning_rate": 4.178260869565218e-05,
"loss": 0.7108,
"step": 190
},
{
"epoch": 1.661588683351469,
"grad_norm": 1.0859375,
"learning_rate": 4.1739130434782605e-05,
"loss": 0.67,
"step": 191
},
{
"epoch": 1.6702937976060936,
"grad_norm": 1.1171875,
"learning_rate": 4.1695652173913045e-05,
"loss": 0.9325,
"step": 192
},
{
"epoch": 1.678998911860718,
"grad_norm": 1.2578125,
"learning_rate": 4.165217391304348e-05,
"loss": 0.8226,
"step": 193
},
{
"epoch": 1.6877040261153429,
"grad_norm": 1.03125,
"learning_rate": 4.160869565217391e-05,
"loss": 0.8652,
"step": 194
},
{
"epoch": 1.6964091403699673,
"grad_norm": 1.171875,
"learning_rate": 4.156521739130435e-05,
"loss": 0.6873,
"step": 195
},
{
"epoch": 1.705114254624592,
"grad_norm": 1.1953125,
"learning_rate": 4.1521739130434786e-05,
"loss": 0.7681,
"step": 196
},
{
"epoch": 1.7138193688792165,
"grad_norm": 1.0625,
"learning_rate": 4.147826086956522e-05,
"loss": 0.6852,
"step": 197
},
{
"epoch": 1.722524483133841,
"grad_norm": 1.0546875,
"learning_rate": 4.143478260869565e-05,
"loss": 0.8035,
"step": 198
},
{
"epoch": 1.7312295973884657,
"grad_norm": 1.140625,
"learning_rate": 4.1391304347826086e-05,
"loss": 0.6638,
"step": 199
},
{
"epoch": 1.7399347116430903,
"grad_norm": 1.2265625,
"learning_rate": 4.1347826086956526e-05,
"loss": 0.8636,
"step": 200
},
{
"epoch": 1.748639825897715,
"grad_norm": 1.140625,
"learning_rate": 4.130434782608696e-05,
"loss": 0.9844,
"step": 201
},
{
"epoch": 1.7573449401523396,
"grad_norm": 1.1796875,
"learning_rate": 4.126086956521739e-05,
"loss": 0.7461,
"step": 202
},
{
"epoch": 1.7660500544069642,
"grad_norm": 1.1484375,
"learning_rate": 4.1217391304347827e-05,
"loss": 0.677,
"step": 203
},
{
"epoch": 1.7747551686615886,
"grad_norm": 1.234375,
"learning_rate": 4.117391304347826e-05,
"loss": 0.5882,
"step": 204
},
{
"epoch": 1.7834602829162134,
"grad_norm": 1.2109375,
"learning_rate": 4.1130434782608693e-05,
"loss": 0.7051,
"step": 205
},
{
"epoch": 1.7921653971708378,
"grad_norm": 1.2109375,
"learning_rate": 4.1086956521739134e-05,
"loss": 0.7658,
"step": 206
},
{
"epoch": 1.8008705114254626,
"grad_norm": 1.328125,
"learning_rate": 4.104347826086957e-05,
"loss": 0.8158,
"step": 207
},
{
"epoch": 1.809575625680087,
"grad_norm": 1.4765625,
"learning_rate": 4.1e-05,
"loss": 0.7081,
"step": 208
},
{
"epoch": 1.8182807399347116,
"grad_norm": 1.046875,
"learning_rate": 4.095652173913044e-05,
"loss": 0.648,
"step": 209
},
{
"epoch": 1.8269858541893362,
"grad_norm": 1.2734375,
"learning_rate": 4.091304347826087e-05,
"loss": 0.7721,
"step": 210
},
{
"epoch": 1.8356909684439608,
"grad_norm": 1.03125,
"learning_rate": 4.086956521739131e-05,
"loss": 0.5612,
"step": 211
},
{
"epoch": 1.8443960826985855,
"grad_norm": 1.1015625,
"learning_rate": 4.082608695652174e-05,
"loss": 0.635,
"step": 212
},
{
"epoch": 1.8531011969532099,
"grad_norm": 1.1015625,
"learning_rate": 4.0782608695652174e-05,
"loss": 0.7699,
"step": 213
},
{
"epoch": 1.8618063112078347,
"grad_norm": 1.234375,
"learning_rate": 4.0739130434782615e-05,
"loss": 0.7569,
"step": 214
},
{
"epoch": 1.870511425462459,
"grad_norm": 1.203125,
"learning_rate": 4.069565217391305e-05,
"loss": 0.7783,
"step": 215
},
{
"epoch": 1.879216539717084,
"grad_norm": 1.203125,
"learning_rate": 4.065217391304348e-05,
"loss": 0.7102,
"step": 216
},
{
"epoch": 1.8879216539717083,
"grad_norm": 2.53125,
"learning_rate": 4.0608695652173915e-05,
"loss": 1.0202,
"step": 217
},
{
"epoch": 1.896626768226333,
"grad_norm": 1.2109375,
"learning_rate": 4.056521739130435e-05,
"loss": 0.682,
"step": 218
},
{
"epoch": 1.9053318824809575,
"grad_norm": 1.03125,
"learning_rate": 4.052173913043478e-05,
"loss": 0.648,
"step": 219
},
{
"epoch": 1.9140369967355821,
"grad_norm": 1.234375,
"learning_rate": 4.047826086956522e-05,
"loss": 0.8244,
"step": 220
},
{
"epoch": 1.9227421109902068,
"grad_norm": 1.078125,
"learning_rate": 4.0434782608695655e-05,
"loss": 0.6392,
"step": 221
},
{
"epoch": 1.9314472252448314,
"grad_norm": 1.09375,
"learning_rate": 4.039130434782609e-05,
"loss": 0.7142,
"step": 222
},
{
"epoch": 1.940152339499456,
"grad_norm": 1.1328125,
"learning_rate": 4.034782608695652e-05,
"loss": 0.637,
"step": 223
},
{
"epoch": 1.9488574537540804,
"grad_norm": 1.03125,
"learning_rate": 4.0304347826086956e-05,
"loss": 0.7092,
"step": 224
},
{
"epoch": 1.9575625680087052,
"grad_norm": 1.0390625,
"learning_rate": 4.026086956521739e-05,
"loss": 0.6927,
"step": 225
},
{
"epoch": 1.9662676822633296,
"grad_norm": 1.125,
"learning_rate": 4.021739130434783e-05,
"loss": 0.744,
"step": 226
},
{
"epoch": 1.9749727965179544,
"grad_norm": 1.1484375,
"learning_rate": 4.017391304347826e-05,
"loss": 0.6604,
"step": 227
},
{
"epoch": 1.9836779107725788,
"grad_norm": 1.0625,
"learning_rate": 4.01304347826087e-05,
"loss": 0.7406,
"step": 228
},
{
"epoch": 1.9923830250272034,
"grad_norm": 1.078125,
"learning_rate": 4.008695652173913e-05,
"loss": 0.645,
"step": 229
},
{
"epoch": 2.0,
"grad_norm": 1.4765625,
"learning_rate": 4.004347826086956e-05,
"loss": 0.7339,
"step": 230
}
],
"logging_steps": 1,
"max_steps": 1150,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.183779735553966e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}