{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 590,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.3389830508474576,
      "grad_norm": 12.3125,
      "learning_rate": 4.8305084745762714e-05,
      "loss": 1.176,
      "step": 20
    },
    {
      "epoch": 0.6779661016949152,
      "grad_norm": 4.5625,
      "learning_rate": 4.6610169491525425e-05,
      "loss": 0.4758,
      "step": 40
    },
    {
      "epoch": 1.0169491525423728,
      "grad_norm": 1.3671875,
      "learning_rate": 4.491525423728814e-05,
      "loss": 0.1471,
      "step": 60
    },
    {
      "epoch": 1.3559322033898304,
      "grad_norm": 1.3515625,
      "learning_rate": 4.3220338983050854e-05,
      "loss": 0.0574,
      "step": 80
    },
    {
      "epoch": 1.694915254237288,
      "grad_norm": 1.234375,
      "learning_rate": 4.152542372881356e-05,
      "loss": 0.0611,
      "step": 100
    },
    {
      "epoch": 2.0338983050847457,
      "grad_norm": 0.76171875,
      "learning_rate": 3.983050847457627e-05,
      "loss": 0.0558,
      "step": 120
    },
    {
      "epoch": 2.3728813559322033,
      "grad_norm": 0.91796875,
      "learning_rate": 3.813559322033898e-05,
      "loss": 0.0327,
      "step": 140
    },
    {
      "epoch": 2.711864406779661,
      "grad_norm": 0.734375,
      "learning_rate": 3.644067796610169e-05,
      "loss": 0.032,
      "step": 160
    },
    {
      "epoch": 3.0508474576271185,
      "grad_norm": 1.03125,
      "learning_rate": 3.474576271186441e-05,
      "loss": 0.0317,
      "step": 180
    },
    {
      "epoch": 3.389830508474576,
      "grad_norm": 0.423828125,
      "learning_rate": 3.305084745762712e-05,
      "loss": 0.0212,
      "step": 200
    },
    {
      "epoch": 3.7288135593220337,
      "grad_norm": 0.75,
      "learning_rate": 3.135593220338983e-05,
      "loss": 0.0206,
      "step": 220
    },
    {
      "epoch": 4.067796610169491,
      "grad_norm": 0.625,
      "learning_rate": 2.9661016949152544e-05,
      "loss": 0.0192,
      "step": 240
    },
    {
      "epoch": 4.406779661016949,
      "grad_norm": 0.625,
      "learning_rate": 2.7966101694915255e-05,
      "loss": 0.0139,
      "step": 260
    },
    {
      "epoch": 4.745762711864407,
      "grad_norm": 0.63671875,
      "learning_rate": 2.627118644067797e-05,
      "loss": 0.0152,
      "step": 280
    },
    {
      "epoch": 5.084745762711864,
      "grad_norm": 0.4140625,
      "learning_rate": 2.457627118644068e-05,
      "loss": 0.0138,
      "step": 300
    },
    {
      "epoch": 5.423728813559322,
      "grad_norm": 0.5390625,
      "learning_rate": 2.2881355932203392e-05,
      "loss": 0.0107,
      "step": 320
    },
    {
      "epoch": 5.762711864406779,
      "grad_norm": 0.578125,
      "learning_rate": 2.1186440677966103e-05,
      "loss": 0.0102,
      "step": 340
    },
    {
      "epoch": 6.101694915254237,
      "grad_norm": 0.373046875,
      "learning_rate": 1.9491525423728814e-05,
      "loss": 0.011,
      "step": 360
    },
    {
      "epoch": 6.440677966101695,
      "grad_norm": 0.3671875,
      "learning_rate": 1.7796610169491526e-05,
      "loss": 0.009,
      "step": 380
    },
    {
      "epoch": 6.779661016949152,
      "grad_norm": 0.64453125,
      "learning_rate": 1.6101694915254237e-05,
      "loss": 0.0099,
      "step": 400
    },
    {
      "epoch": 7.11864406779661,
      "grad_norm": 0.56640625,
      "learning_rate": 1.440677966101695e-05,
      "loss": 0.0082,
      "step": 420
    },
    {
      "epoch": 7.4576271186440675,
      "grad_norm": 0.62890625,
      "learning_rate": 1.2711864406779661e-05,
      "loss": 0.0068,
      "step": 440
    },
    {
      "epoch": 7.796610169491525,
      "grad_norm": 0.26953125,
      "learning_rate": 1.1016949152542374e-05,
      "loss": 0.0079,
      "step": 460
    },
    {
      "epoch": 8.135593220338983,
      "grad_norm": 0.251953125,
      "learning_rate": 9.322033898305085e-06,
      "loss": 0.0087,
      "step": 480
    },
    {
      "epoch": 8.474576271186441,
      "grad_norm": 0.5703125,
      "learning_rate": 7.627118644067798e-06,
      "loss": 0.0072,
      "step": 500
    },
    {
      "epoch": 8.813559322033898,
      "grad_norm": 0.435546875,
      "learning_rate": 5.932203389830509e-06,
      "loss": 0.0078,
      "step": 520
    },
    {
      "epoch": 9.152542372881356,
      "grad_norm": 0.244140625,
      "learning_rate": 4.23728813559322e-06,
      "loss": 0.0074,
      "step": 540
    },
    {
      "epoch": 9.491525423728813,
      "grad_norm": 0.36328125,
      "learning_rate": 2.5423728813559323e-06,
      "loss": 0.0072,
      "step": 560
    },
    {
      "epoch": 9.830508474576272,
      "grad_norm": 0.2734375,
      "learning_rate": 8.474576271186441e-07,
      "loss": 0.0084,
      "step": 580
    }
  ],
  "logging_steps": 20,
  "max_steps": 590,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.08306100740096e+19,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}
|
|