{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9993906154783668,
  "eval_steps": 500,
  "global_step": 410,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02437538086532602,
      "grad_norm": 0.7390244007110596,
      "learning_rate": 1.0975609756097562e-05,
      "loss": 3.3584,
      "step": 10
    },
    {
      "epoch": 0.04875076173065204,
      "grad_norm": 0.30350491404533386,
      "learning_rate": 2.1951219512195124e-05,
      "loss": 3.1749,
      "step": 20
    },
    {
      "epoch": 0.07312614259597806,
      "grad_norm": 0.3087550699710846,
      "learning_rate": 3.292682926829269e-05,
      "loss": 2.9963,
      "step": 30
    },
    {
      "epoch": 0.09750152346130408,
      "grad_norm": 0.20843257009983063,
      "learning_rate": 4.51219512195122e-05,
      "loss": 2.8865,
      "step": 40
    },
    {
      "epoch": 0.1218769043266301,
      "grad_norm": 0.2306743711233139,
      "learning_rate": 4.996738892723075e-05,
      "loss": 2.8482,
      "step": 50
    },
    {
      "epoch": 0.14625228519195613,
      "grad_norm": 0.37552645802497864,
      "learning_rate": 4.976840705816688e-05,
      "loss": 2.8334,
      "step": 60
    },
    {
      "epoch": 0.17062766605728213,
      "grad_norm": 0.207833930850029,
      "learning_rate": 4.939000027687756e-05,
      "loss": 2.7419,
      "step": 70
    },
    {
      "epoch": 0.19500304692260817,
      "grad_norm": 0.213443323969841,
      "learning_rate": 4.8834909801373264e-05,
      "loss": 2.6897,
      "step": 80
    },
    {
      "epoch": 0.21937842778793418,
      "grad_norm": 0.31744861602783203,
      "learning_rate": 4.810715676468894e-05,
      "loss": 2.6547,
      "step": 90
    },
    {
      "epoch": 0.2437538086532602,
      "grad_norm": 0.3662812113761902,
      "learning_rate": 4.7212013085378824e-05,
      "loss": 2.6865,
      "step": 100
    },
    {
      "epoch": 0.2681291895185862,
      "grad_norm": 0.2681748867034912,
      "learning_rate": 4.615596327719111e-05,
      "loss": 2.6123,
      "step": 110
    },
    {
      "epoch": 0.29250457038391225,
      "grad_norm": 0.22331994771957397,
      "learning_rate": 4.507424333013069e-05,
      "loss": 2.5791,
      "step": 120
    },
    {
      "epoch": 0.3168799512492383,
      "grad_norm": 0.3823238015174866,
      "learning_rate": 4.373446685572683e-05,
      "loss": 2.5722,
      "step": 130
    },
    {
      "epoch": 0.34125533211456427,
      "grad_norm": 1.0558502674102783,
      "learning_rate": 4.225897595837744e-05,
      "loss": 2.5763,
      "step": 140
    },
    {
      "epoch": 0.3656307129798903,
      "grad_norm": 0.2607211470603943,
      "learning_rate": 4.0658459247330766e-05,
      "loss": 2.5901,
      "step": 150
    },
    {
      "epoch": 0.39000609384521634,
      "grad_norm": 0.5509762763977051,
      "learning_rate": 3.8944511031809865e-05,
      "loss": 2.5185,
      "step": 160
    },
    {
      "epoch": 0.4143814747105424,
      "grad_norm": 0.2571032643318176,
      "learning_rate": 3.712954733063284e-05,
      "loss": 2.4729,
      "step": 170
    },
    {
      "epoch": 0.43875685557586835,
      "grad_norm": 0.25064775347709656,
      "learning_rate": 3.5226715929283506e-05,
      "loss": 2.5748,
      "step": 180
    },
    {
      "epoch": 0.4631322364411944,
      "grad_norm": 0.2534530758857727,
      "learning_rate": 3.324980113598824e-05,
      "loss": 2.5046,
      "step": 190
    },
    {
      "epoch": 0.4875076173065204,
      "grad_norm": 0.5358842015266418,
      "learning_rate": 3.121312392675618e-05,
      "loss": 2.5345,
      "step": 200
    },
    {
      "epoch": 0.5118829981718465,
      "grad_norm": 0.22321267426013947,
      "learning_rate": 2.9131438202742124e-05,
      "loss": 2.4931,
      "step": 210
    },
    {
      "epoch": 0.5362583790371724,
      "grad_norm": 0.48219671845436096,
      "learning_rate": 2.7019823911454812e-05,
      "loss": 2.4892,
      "step": 220
    },
    {
      "epoch": 0.5606337599024985,
      "grad_norm": 0.22396139800548553,
      "learning_rate": 2.4893577806051537e-05,
      "loss": 2.5281,
      "step": 230
    },
    {
      "epoch": 0.5850091407678245,
      "grad_norm": 0.28134435415267944,
      "learning_rate": 2.2768102634070147e-05,
      "loss": 2.4737,
      "step": 240
    },
    {
      "epoch": 0.6093845216331505,
      "grad_norm": 0.32630693912506104,
      "learning_rate": 2.0658795558326743e-05,
      "loss": 2.4923,
      "step": 250
    },
    {
      "epoch": 0.6337599024984766,
      "grad_norm": 0.27705642580986023,
      "learning_rate": 1.8580936618269696e-05,
      "loss": 2.4491,
      "step": 260
    },
    {
      "epoch": 0.6581352833638026,
      "grad_norm": 1.0377610921859741,
      "learning_rate": 1.6549578039787436e-05,
      "loss": 2.5303,
      "step": 270
    },
    {
      "epoch": 0.6825106642291285,
      "grad_norm": 0.212926983833313,
      "learning_rate": 1.4579435195321434e-05,
      "loss": 2.4304,
      "step": 280
    },
    {
      "epoch": 0.7068860450944546,
      "grad_norm": 1.0300441980361938,
      "learning_rate": 1.2684780004180414e-05,
      "loss": 2.4408,
      "step": 290
    },
    {
      "epoch": 0.7312614259597806,
      "grad_norm": 0.33628296852111816,
      "learning_rate": 1.0879337545275165e-05,
      "loss": 2.4091,
      "step": 300
    },
    {
      "epoch": 0.7556368068251066,
      "grad_norm": 0.2797647714614868,
      "learning_rate": 9.176186631221958e-06,
      "loss": 2.3872,
      "step": 310
    },
    {
      "epoch": 0.7800121876904327,
      "grad_norm": 0.21739082038402557,
      "learning_rate": 7.587665064066085e-06,
      "loss": 2.4045,
      "step": 320
    },
    {
      "epoch": 0.8043875685557587,
      "grad_norm": 0.508171796798706,
      "learning_rate": 6.125280258962873e-06,
      "loss": 2.4953,
      "step": 330
    },
    {
      "epoch": 0.8287629494210847,
      "grad_norm": 0.2408667802810669,
      "learning_rate": 4.799625883267672e-06,
      "loss": 2.3752,
      "step": 340
    },
    {
      "epoch": 0.8531383302864107,
      "grad_norm": 0.2731145918369293,
      "learning_rate": 3.6203051149095973e-06,
      "loss": 2.3658,
      "step": 350
    },
    {
      "epoch": 0.8775137111517367,
      "grad_norm": 0.3211069703102112,
      "learning_rate": 2.595861075973613e-06,
      "loss": 2.4198,
      "step": 360
    },
    {
      "epoch": 0.9018890920170628,
      "grad_norm": 0.2237868458032608,
      "learning_rate": 1.733714945437212e-06,
      "loss": 2.4113,
      "step": 370
    },
    {
      "epoch": 0.9262644728823888,
      "grad_norm": 0.2606222629547119,
      "learning_rate": 1.0401121993794033e-06,
      "loss": 2.4011,
      "step": 380
    },
    {
      "epoch": 0.9506398537477148,
      "grad_norm": 0.47373390197753906,
      "learning_rate": 5.20077368103597e-07,
      "loss": 2.4523,
      "step": 390
    },
    {
      "epoch": 0.9750152346130408,
      "grad_norm": 0.5579819679260254,
      "learning_rate": 1.77377637918405e-07,
      "loss": 2.4289,
      "step": 400
    },
    {
      "epoch": 0.9993906154783668,
      "grad_norm": 0.25126489996910095,
      "learning_rate": 1.4495561248931145e-08,
      "loss": 2.4421,
      "step": 410
    },
    {
      "epoch": 0.9993906154783668,
      "step": 410,
      "total_flos": 5.610936315118879e+18,
      "train_loss": 2.5786649890062288,
      "train_runtime": 34559.7112,
      "train_samples_per_second": 0.38,
      "train_steps_per_second": 0.012
    }
  ],
  "logging_steps": 10,
  "max_steps": 410,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.610936315118879e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}