{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.079365079365079,
  "eval_steps": 10.0,
  "global_step": 80,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 2.6884286403656006,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.6802,
      "step": 1
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 2.6933367252349854,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.6777,
      "step": 2
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 2.5917274951934814,
      "learning_rate": 1.5e-06,
      "loss": 0.661,
      "step": 3
    },
    {
      "epoch": 0.25396825396825395,
      "grad_norm": 2.6264889240264893,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6622,
      "step": 4
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 2.366546630859375,
      "learning_rate": 2.5e-06,
      "loss": 0.6453,
      "step": 5
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 1.9140746593475342,
      "learning_rate": 3e-06,
      "loss": 0.6394,
      "step": 6
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.7500569820404053,
      "learning_rate": 3.5e-06,
      "loss": 0.6208,
      "step": 7
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 1.7186416387557983,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5705,
      "step": 8
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 1.8733755350112915,
      "learning_rate": 4.5e-06,
      "loss": 0.5859,
      "step": 9
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 2.6948869228363037,
      "learning_rate": 5e-06,
      "loss": 0.5696,
      "step": 10
    },
    {
      "epoch": 0.6984126984126984,
      "grad_norm": 2.541510820388794,
      "learning_rate": 4.99847706754774e-06,
      "loss": 0.5778,
      "step": 11
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.8197821378707886,
      "learning_rate": 4.993910125649561e-06,
      "loss": 0.5576,
      "step": 12
    },
    {
      "epoch": 0.8253968253968254,
      "grad_norm": 1.3835104703903198,
      "learning_rate": 4.986304738420684e-06,
      "loss": 0.5198,
      "step": 13
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.6187756061553955,
      "learning_rate": 4.975670171853926e-06,
      "loss": 0.5107,
      "step": 14
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 1.2050142288208008,
      "learning_rate": 4.962019382530521e-06,
      "loss": 0.4788,
      "step": 15
    },
    {
      "epoch": 1.0158730158730158,
      "grad_norm": 0.8275009393692017,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 0.4735,
      "step": 16
    },
    {
      "epoch": 1.0793650793650793,
      "grad_norm": 0.7280167937278748,
      "learning_rate": 4.925739315689991e-06,
      "loss": 0.4579,
      "step": 17
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.741590678691864,
      "learning_rate": 4.903154239845798e-06,
      "loss": 0.4433,
      "step": 18
    },
    {
      "epoch": 1.2063492063492063,
      "grad_norm": 0.7426633238792419,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.439,
      "step": 19
    },
    {
      "epoch": 1.2698412698412698,
      "grad_norm": 0.6695029735565186,
      "learning_rate": 4.849231551964771e-06,
      "loss": 0.4305,
      "step": 20
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.6360190510749817,
      "learning_rate": 4.817959636416969e-06,
      "loss": 0.4392,
      "step": 21
    },
    {
      "epoch": 1.3968253968253967,
      "grad_norm": 0.5890939831733704,
      "learning_rate": 4.783863644106502e-06,
      "loss": 0.4257,
      "step": 22
    },
    {
      "epoch": 1.4603174603174602,
      "grad_norm": 0.5338059663772583,
      "learning_rate": 4.746985115747918e-06,
      "loss": 0.4061,
      "step": 23
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 0.531510591506958,
      "learning_rate": 4.707368982147318e-06,
      "loss": 0.4151,
      "step": 24
    },
    {
      "epoch": 1.5873015873015874,
      "grad_norm": 0.5541301369667053,
      "learning_rate": 4.665063509461098e-06,
      "loss": 0.4113,
      "step": 25
    },
    {
      "epoch": 1.6507936507936507,
      "grad_norm": 0.5271514058113098,
      "learning_rate": 4.620120240391065e-06,
      "loss": 0.404,
      "step": 26
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.4665882885456085,
      "learning_rate": 4.572593931387604e-06,
      "loss": 0.4029,
      "step": 27
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.483142226934433,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.3984,
      "step": 28
    },
    {
      "epoch": 1.8412698412698414,
      "grad_norm": 0.4771871566772461,
      "learning_rate": 4.470026884016805e-06,
      "loss": 0.3975,
      "step": 29
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 0.4467179775238037,
      "learning_rate": 4.415111107797445e-06,
      "loss": 0.3774,
      "step": 30
    },
    {
      "epoch": 1.9682539682539684,
      "grad_norm": 0.44207000732421875,
      "learning_rate": 4.357862063693486e-06,
      "loss": 0.3938,
      "step": 31
    },
    {
      "epoch": 2.0317460317460316,
      "grad_norm": 0.42705363035202026,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 0.3566,
      "step": 32
    },
    {
      "epoch": 2.0952380952380953,
      "grad_norm": 0.4670058488845825,
      "learning_rate": 4.236645926147493e-06,
      "loss": 0.3548,
      "step": 33
    },
    {
      "epoch": 2.1587301587301586,
      "grad_norm": 0.46382439136505127,
      "learning_rate": 4.172826515897146e-06,
      "loss": 0.3704,
      "step": 34
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 0.4274114668369293,
      "learning_rate": 4.106969024216348e-06,
      "loss": 0.337,
      "step": 35
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.425061970949173,
      "learning_rate": 4.039153688314146e-06,
      "loss": 0.3357,
      "step": 36
    },
    {
      "epoch": 2.3492063492063493,
      "grad_norm": 0.5009689331054688,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.3412,
      "step": 37
    },
    {
      "epoch": 2.4126984126984126,
      "grad_norm": 0.4565686583518982,
      "learning_rate": 3.897982258676867e-06,
      "loss": 0.339,
      "step": 38
    },
    {
      "epoch": 2.4761904761904763,
      "grad_norm": 0.4084237515926361,
      "learning_rate": 3.824798160583012e-06,
      "loss": 0.3267,
      "step": 39
    },
    {
      "epoch": 2.5396825396825395,
      "grad_norm": 0.41756659746170044,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.3273,
      "step": 40
    },
    {
      "epoch": 2.6031746031746033,
      "grad_norm": 0.4654518961906433,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 0.3357,
      "step": 41
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.4124338626861572,
      "learning_rate": 3.595927866972694e-06,
      "loss": 0.3152,
      "step": 42
    },
    {
      "epoch": 2.7301587301587302,
      "grad_norm": 0.3937598466873169,
      "learning_rate": 3.516841607689501e-06,
      "loss": 0.322,
      "step": 43
    },
    {
      "epoch": 2.7936507936507935,
      "grad_norm": 0.40822404623031616,
      "learning_rate": 3.436516483539781e-06,
      "loss": 0.312,
      "step": 44
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.4208524823188782,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 0.3178,
      "step": 45
    },
    {
      "epoch": 2.9206349206349205,
      "grad_norm": 0.4314095377922058,
      "learning_rate": 3.272542485937369e-06,
      "loss": 0.3177,
      "step": 46
    },
    {
      "epoch": 2.984126984126984,
      "grad_norm": 0.4072750508785248,
      "learning_rate": 3.189093389542498e-06,
      "loss": 0.3083,
      "step": 47
    },
    {
      "epoch": 3.0476190476190474,
      "grad_norm": 0.4380381405353546,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 0.3011,
      "step": 48
    },
    {
      "epoch": 3.111111111111111,
      "grad_norm": 0.522165834903717,
      "learning_rate": 3.019779227044398e-06,
      "loss": 0.2746,
      "step": 49
    },
    {
      "epoch": 3.1746031746031744,
      "grad_norm": 0.4826960861682892,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 0.2858,
      "step": 50
    },
    {
      "epoch": 3.238095238095238,
      "grad_norm": 0.43253302574157715,
      "learning_rate": 2.847932752400164e-06,
      "loss": 0.2823,
      "step": 51
    },
    {
      "epoch": 3.3015873015873014,
      "grad_norm": 0.47018662095069885,
      "learning_rate": 2.761321158169134e-06,
      "loss": 0.2721,
      "step": 52
    },
    {
      "epoch": 3.365079365079365,
      "grad_norm": 0.497387170791626,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 0.2795,
      "step": 53
    },
    {
      "epoch": 3.4285714285714284,
      "grad_norm": 0.42762213945388794,
      "learning_rate": 2.587248741756253e-06,
      "loss": 0.2788,
      "step": 54
    },
    {
      "epoch": 3.492063492063492,
      "grad_norm": 0.4315107464790344,
      "learning_rate": 2.5e-06,
      "loss": 0.2763,
      "step": 55
    },
    {
      "epoch": 3.5555555555555554,
      "grad_norm": 0.4290776550769806,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 0.2824,
      "step": 56
    },
    {
      "epoch": 3.619047619047619,
      "grad_norm": 0.43770331144332886,
      "learning_rate": 2.325608815639687e-06,
      "loss": 0.2682,
      "step": 57
    },
    {
      "epoch": 3.682539682539683,
      "grad_norm": 0.41665223240852356,
      "learning_rate": 2.238678841830867e-06,
      "loss": 0.2679,
      "step": 58
    },
    {
      "epoch": 3.746031746031746,
      "grad_norm": 0.4484512507915497,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 0.2756,
      "step": 59
    },
    {
      "epoch": 3.8095238095238093,
      "grad_norm": 0.4341048300266266,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 0.2728,
      "step": 60
    },
    {
      "epoch": 3.873015873015873,
      "grad_norm": 0.4349490702152252,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 0.272,
      "step": 61
    },
    {
      "epoch": 3.9365079365079367,
      "grad_norm": 0.41879743337631226,
      "learning_rate": 1.895195261000831e-06,
      "loss": 0.2613,
      "step": 62
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.43113768100738525,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 0.2829,
      "step": 63
    },
    {
      "epoch": 4.063492063492063,
      "grad_norm": 0.5257534384727478,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.248,
      "step": 64
    },
    {
      "epoch": 4.1269841269841265,
      "grad_norm": 0.47642043232917786,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 0.2485,
      "step": 65
    },
    {
      "epoch": 4.190476190476191,
      "grad_norm": 0.45349231362342834,
      "learning_rate": 1.56348351646022e-06,
      "loss": 0.2522,
      "step": 66
    },
    {
      "epoch": 4.253968253968254,
      "grad_norm": 0.4770345389842987,
      "learning_rate": 1.4831583923105e-06,
      "loss": 0.2518,
      "step": 67
    },
    {
      "epoch": 4.317460317460317,
      "grad_norm": 0.5023510456085205,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 0.2411,
      "step": 68
    },
    {
      "epoch": 4.380952380952381,
      "grad_norm": 0.49374037981033325,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 0.2343,
      "step": 69
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 0.49705836176872253,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 0.2387,
      "step": 70
    },
    {
      "epoch": 4.507936507936508,
      "grad_norm": 0.4583221673965454,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 0.2418,
      "step": 71
    },
    {
      "epoch": 4.571428571428571,
      "grad_norm": 0.44446587562561035,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 0.2459,
      "step": 72
    },
    {
      "epoch": 4.634920634920634,
      "grad_norm": 0.4567837417125702,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.2408,
      "step": 73
    },
    {
      "epoch": 4.698412698412699,
      "grad_norm": 0.4733292758464813,
      "learning_rate": 9.608463116858544e-07,
      "loss": 0.2431,
      "step": 74
    },
    {
      "epoch": 4.761904761904762,
      "grad_norm": 0.4715614318847656,
      "learning_rate": 8.930309757836517e-07,
      "loss": 0.2394,
      "step": 75
    },
    {
      "epoch": 4.825396825396825,
      "grad_norm": 0.4606616199016571,
      "learning_rate": 8.271734841028553e-07,
      "loss": 0.2327,
      "step": 76
    },
    {
      "epoch": 4.888888888888889,
      "grad_norm": 0.45062100887298584,
      "learning_rate": 7.633540738525066e-07,
      "loss": 0.2492,
      "step": 77
    },
    {
      "epoch": 4.9523809523809526,
      "grad_norm": 0.4597299098968506,
      "learning_rate": 7.016504991533727e-07,
      "loss": 0.2393,
      "step": 78
    },
    {
      "epoch": 5.015873015873016,
      "grad_norm": 0.4504229724407196,
      "learning_rate": 6.421379363065142e-07,
      "loss": 0.2276,
      "step": 79
    },
    {
      "epoch": 5.079365079365079,
      "grad_norm": 0.4797239899635315,
      "learning_rate": 5.848888922025553e-07,
      "loss": 0.2185,
      "step": 80
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 10,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8886781958723994e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}