{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9981515711645101,
  "eval_steps": 500,
  "global_step": 270,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018484288354898338,
      "grad_norm": 0.00118255615234375,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.0001,
      "step": 5
    },
    {
      "epoch": 0.036968576709796676,
      "grad_norm": 0.000579833984375,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.0001,
      "step": 10
    },
    {
      "epoch": 0.05545286506469501,
      "grad_norm": 0.000614166259765625,
      "learning_rate": 1.1111111111111113e-05,
      "loss": 0.0,
      "step": 15
    },
    {
      "epoch": 0.07393715341959335,
      "grad_norm": 0.000850677490234375,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.0,
      "step": 20
    },
    {
      "epoch": 0.09242144177449169,
      "grad_norm": 0.000446319580078125,
      "learning_rate": 1.851851851851852e-05,
      "loss": 0.0,
      "step": 25
    },
    {
      "epoch": 0.11090573012939002,
      "grad_norm": 0.00075531005859375,
      "learning_rate": 1.9992479525042305e-05,
      "loss": 0.0,
      "step": 30
    },
    {
      "epoch": 0.12939001848428835,
      "grad_norm": 0.0003108978271484375,
      "learning_rate": 1.9946562024066018e-05,
      "loss": 0.0,
      "step": 35
    },
    {
      "epoch": 0.1478743068391867,
      "grad_norm": 0.0003833770751953125,
      "learning_rate": 1.9859096633447965e-05,
      "loss": 0.0,
      "step": 40
    },
    {
      "epoch": 0.16635859519408502,
      "grad_norm": 0.000499725341796875,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.0,
      "step": 45
    },
    {
      "epoch": 0.18484288354898337,
      "grad_norm": 0.000118255615234375,
      "learning_rate": 1.95611556177388e-05,
      "loss": 0.0,
      "step": 50
    },
    {
      "epoch": 0.2033271719038817,
      "grad_norm": 7.724761962890625e-05,
      "learning_rate": 1.93519245252219e-05,
      "loss": 0.0,
      "step": 55
    },
    {
      "epoch": 0.22181146025878004,
      "grad_norm": 0.0002689361572265625,
      "learning_rate": 1.9103629409661468e-05,
      "loss": 0.0,
      "step": 60
    },
    {
      "epoch": 0.24029574861367836,
      "grad_norm": 8.821487426757812e-05,
      "learning_rate": 1.881730742721608e-05,
      "loss": 0.0,
      "step": 65
    },
    {
      "epoch": 0.2587800369685767,
      "grad_norm": 0.00016307830810546875,
      "learning_rate": 1.8494154576472976e-05,
      "loss": 0.0,
      "step": 70
    },
    {
      "epoch": 0.27726432532347506,
      "grad_norm": 9.441375732421875e-05,
      "learning_rate": 1.8135520702629677e-05,
      "loss": 0.0,
      "step": 75
    },
    {
      "epoch": 0.2957486136783734,
      "grad_norm": 0.0002689361572265625,
      "learning_rate": 1.7742903859041324e-05,
      "loss": 0.0,
      "step": 80
    },
    {
      "epoch": 0.3142329020332717,
      "grad_norm": 5.793571472167969e-05,
      "learning_rate": 1.7317944049686125e-05,
      "loss": 0.0,
      "step": 85
    },
    {
      "epoch": 0.33271719038817005,
      "grad_norm": 0.00018596649169921875,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.0,
      "step": 90
    },
    {
      "epoch": 0.3512014787430684,
      "grad_norm": 0.00012111663818359375,
      "learning_rate": 1.637822363550706e-05,
      "loss": 0.0,
      "step": 95
    },
    {
      "epoch": 0.36968576709796674,
      "grad_norm": 0.00013446807861328125,
      "learning_rate": 1.586738834678418e-05,
      "loss": 0.0,
      "step": 100
    },
    {
      "epoch": 0.38817005545286504,
      "grad_norm": 6.246566772460938e-05,
      "learning_rate": 1.5332044328016916e-05,
      "loss": 0.0,
      "step": 105
    },
    {
      "epoch": 0.4066543438077634,
      "grad_norm": 0.00013828277587890625,
      "learning_rate": 1.4774427770379492e-05,
      "loss": 0.0,
      "step": 110
    },
    {
      "epoch": 0.42513863216266173,
      "grad_norm": 0.0001544952392578125,
      "learning_rate": 1.4196867899904292e-05,
      "loss": 0.0,
      "step": 115
    },
    {
      "epoch": 0.4436229205175601,
      "grad_norm": 0.00011968612670898438,
      "learning_rate": 1.3601777248047105e-05,
      "loss": 0.0,
      "step": 120
    },
    {
      "epoch": 0.46210720887245843,
      "grad_norm": 6.532669067382812e-05,
      "learning_rate": 1.2991641574276419e-05,
      "loss": 0.0,
      "step": 125
    },
    {
      "epoch": 0.4805914972273567,
      "grad_norm": 9.965896606445312e-05,
      "learning_rate": 1.2369009482781191e-05,
      "loss": 0.0,
      "step": 130
    },
    {
      "epoch": 0.49907578558225507,
      "grad_norm": 5.698204040527344e-05,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.0,
      "step": 135
    },
    {
      "epoch": 0.5175600739371534,
      "grad_norm": 4.124641418457031e-05,
      "learning_rate": 1.1096700594125318e-05,
      "loss": 0.0,
      "step": 140
    },
    {
      "epoch": 0.5360443622920518,
      "grad_norm": 9.5367431640625e-05,
      "learning_rate": 1.0452338371907065e-05,
      "loss": 0.0,
      "step": 145
    },
    {
      "epoch": 0.5545286506469501,
      "grad_norm": 5.698204040527344e-05,
      "learning_rate": 9.806086682281759e-06,
      "loss": 0.0,
      "step": 150
    },
    {
      "epoch": 0.5730129390018485,
      "grad_norm": 9.441375732421875e-05,
      "learning_rate": 9.160644990030932e-06,
      "loss": 0.0,
      "step": 155
    },
    {
      "epoch": 0.5914972273567468,
      "grad_norm": 0.00011777877807617188,
      "learning_rate": 8.518709376487515e-06,
      "loss": 0.0,
      "step": 160
    },
    {
      "epoch": 0.609981515711645,
      "grad_norm": 0.0001583099365234375,
      "learning_rate": 7.882961277705897e-06,
      "loss": 0.0,
      "step": 165
    },
    {
      "epoch": 0.6284658040665434,
      "grad_norm": 0.000141143798828125,
      "learning_rate": 7.256056283806987e-06,
      "loss": 0.0,
      "step": 170
    },
    {
      "epoch": 0.6469500924214417,
      "grad_norm": 0.0001697540283203125,
      "learning_rate": 6.640613046284581e-06,
      "loss": 0.0,
      "step": 175
    },
    {
      "epoch": 0.6654343807763401,
      "grad_norm": 0.000431060791015625,
      "learning_rate": 6.039202339608432e-06,
      "loss": 0.0,
      "step": 180
    },
    {
      "epoch": 0.6839186691312384,
      "grad_norm": 8.106231689453125e-05,
      "learning_rate": 5.454336322814995e-06,
      "loss": 0.0,
      "step": 185
    },
    {
      "epoch": 0.7024029574861368,
      "grad_norm": 9.918212890625e-05,
      "learning_rate": 4.888458045941269e-06,
      "loss": 0.0,
      "step": 190
    },
    {
      "epoch": 0.7208872458410351,
      "grad_norm": 4.76837158203125e-05,
      "learning_rate": 4.343931245134616e-06,
      "loss": 0.0,
      "step": 195
    },
    {
      "epoch": 0.7393715341959335,
      "grad_norm": 0.00012493133544921875,
      "learning_rate": 3.823030469065431e-06,
      "loss": 0.0,
      "step": 200
    },
    {
      "epoch": 0.7578558225508318,
      "grad_norm": 5.340576171875e-05,
      "learning_rate": 3.3279315778858034e-06,
      "loss": 0.0,
      "step": 205
    },
    {
      "epoch": 0.7763401109057301,
      "grad_norm": 7.05718994140625e-05,
      "learning_rate": 2.8607026544210115e-06,
      "loss": 0.0,
      "step": 210
    },
    {
      "epoch": 0.7948243992606284,
      "grad_norm": 0.0002117156982421875,
      "learning_rate": 2.423295365558821e-06,
      "loss": 0.0,
      "step": 215
    },
    {
      "epoch": 0.8133086876155268,
      "grad_norm": 4.38690185546875e-05,
      "learning_rate": 2.01753680992107e-06,
      "loss": 0.0,
      "step": 220
    },
    {
      "epoch": 0.8317929759704251,
      "grad_norm": 0.00010395050048828125,
      "learning_rate": 1.6451218858706374e-06,
      "loss": 0.0,
      "step": 225
    },
    {
      "epoch": 0.8502772643253235,
      "grad_norm": 0.00018978118896484375,
      "learning_rate": 1.307606211733522e-06,
      "loss": 0.0,
      "step": 230
    },
    {
      "epoch": 0.8687615526802218,
      "grad_norm": 5.53131103515625e-05,
      "learning_rate": 1.0063996278090704e-06,
      "loss": 0.0,
      "step": 235
    },
    {
      "epoch": 0.8872458410351202,
      "grad_norm": 6.389617919921875e-05,
      "learning_rate": 7.427603073110967e-07,
      "loss": 0.0,
      "step": 240
    },
    {
      "epoch": 0.9057301293900185,
      "grad_norm": 0.00011205673217773438,
      "learning_rate": 5.177895008392353e-07,
      "loss": 0.0,
      "step": 245
    },
    {
      "epoch": 0.9242144177449169,
      "grad_norm": 0.000293731689453125,
      "learning_rate": 3.3242693633337986e-07,
      "loss": 0.0,
      "step": 250
    },
    {
      "epoch": 0.9426987060998152,
      "grad_norm": 0.000179290771484375,
      "learning_rate": 1.874468937261531e-07,
      "loss": 0.0,
      "step": 255
    },
    {
      "epoch": 0.9611829944547134,
      "grad_norm": 0.002349853515625,
      "learning_rate": 8.345497068998897e-08,
      "loss": 0.0,
      "step": 260
    },
    {
      "epoch": 0.9796672828096118,
      "grad_norm": 6.437301635742188e-05,
      "learning_rate": 2.088555298867978e-08,
      "loss": 0.0,
      "step": 265
    },
    {
      "epoch": 0.9981515711645101,
      "grad_norm": 9.1552734375e-05,
      "learning_rate": 0.0,
      "loss": 0.0,
      "step": 270
    },
    {
      "epoch": 0.9981515711645101,
      "eval_loss": 1.3726342331210617e-05,
      "eval_runtime": 52.2001,
      "eval_samples_per_second": 18.601,
      "eval_steps_per_second": 2.337,
      "step": 270
    },
    {
      "epoch": 0.9981515711645101,
      "step": 270,
      "total_flos": 5.260333472022528e+16,
      "train_loss": 1.8035727003330572e-05,
      "train_runtime": 2689.0507,
      "train_samples_per_second": 3.217,
      "train_steps_per_second": 0.1
    }
  ],
  "logging_steps": 5,
  "max_steps": 270,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.260333472022528e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}