{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "eval_steps": 500,
  "global_step": 107800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.23,
      "grad_norm": 0.16410106420516968,
      "learning_rate": 3.110505565862709e-05,
      "loss": 0.8386,
      "step": 500
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.42184513807296753,
      "learning_rate": 3.0960111317254176e-05,
      "loss": 0.8027,
      "step": 1000
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.24314333498477936,
      "learning_rate": 3.0815166975881264e-05,
      "loss": 0.782,
      "step": 1500
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.6049493551254272,
      "learning_rate": 3.067022263450835e-05,
      "loss": 0.7584,
      "step": 2000
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.6963891386985779,
      "learning_rate": 3.0525278293135433e-05,
      "loss": 0.7384,
      "step": 2500
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.4310186803340912,
      "learning_rate": 3.038033395176252e-05,
      "loss": 0.7254,
      "step": 3000
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.713209331035614,
      "learning_rate": 3.0235389610389613e-05,
      "loss": 0.7181,
      "step": 3500
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.6885315775871277,
      "learning_rate": 3.0090445269016697e-05,
      "loss": 0.703,
      "step": 4000
    },
    {
      "epoch": 2.09,
      "grad_norm": 0.48805415630340576,
      "learning_rate": 2.9945500927643785e-05,
      "loss": 0.6923,
      "step": 4500
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.40450435876846313,
      "learning_rate": 2.9800556586270873e-05,
      "loss": 0.691,
      "step": 5000
    },
    {
      "epoch": 2.55,
      "grad_norm": 0.5716784596443176,
      "learning_rate": 2.965561224489796e-05,
      "loss": 0.6831,
      "step": 5500
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.512924075126648,
      "learning_rate": 2.951066790352505e-05,
      "loss": 0.6783,
      "step": 6000
    },
    {
      "epoch": 3.01,
      "grad_norm": 0.5809201598167419,
      "learning_rate": 2.9365723562152134e-05,
      "loss": 0.6682,
      "step": 6500
    },
    {
      "epoch": 3.25,
      "grad_norm": 1.9776902198791504,
      "learning_rate": 2.922077922077922e-05,
      "loss": 0.6654,
      "step": 7000
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.5193782448768616,
      "learning_rate": 2.9075834879406306e-05,
      "loss": 0.6649,
      "step": 7500
    },
    {
      "epoch": 3.71,
      "grad_norm": 0.5309423804283142,
      "learning_rate": 2.8930890538033397e-05,
      "loss": 0.6616,
      "step": 8000
    },
    {
      "epoch": 3.94,
      "grad_norm": 0.5730230212211609,
      "learning_rate": 2.8785946196660482e-05,
      "loss": 0.6559,
      "step": 8500
    },
    {
      "epoch": 4.17,
      "grad_norm": 2.4345550537109375,
      "learning_rate": 2.864100185528757e-05,
      "loss": 0.6535,
      "step": 9000
    },
    {
      "epoch": 4.41,
      "grad_norm": 0.6113856434822083,
      "learning_rate": 2.8496057513914658e-05,
      "loss": 0.6479,
      "step": 9500
    },
    {
      "epoch": 4.64,
      "grad_norm": 0.5557487607002258,
      "learning_rate": 2.8351113172541746e-05,
      "loss": 0.6416,
      "step": 10000
    },
    {
      "epoch": 4.87,
      "grad_norm": 0.5874636173248291,
      "learning_rate": 2.8206168831168834e-05,
      "loss": 0.6399,
      "step": 10500
    },
    {
      "epoch": 5.1,
      "grad_norm": 0.4734204411506653,
      "learning_rate": 2.8061224489795918e-05,
      "loss": 0.6375,
      "step": 11000
    },
    {
      "epoch": 5.33,
      "grad_norm": 0.5736048221588135,
      "learning_rate": 2.7916280148423006e-05,
      "loss": 0.6336,
      "step": 11500
    },
    {
      "epoch": 5.57,
      "grad_norm": 0.5358075499534607,
      "learning_rate": 2.777133580705009e-05,
      "loss": 0.6255,
      "step": 12000
    },
    {
      "epoch": 5.8,
      "grad_norm": 0.39945241808891296,
      "learning_rate": 2.7626391465677182e-05,
      "loss": 0.6245,
      "step": 12500
    },
    {
      "epoch": 6.03,
      "grad_norm": 0.6585483551025391,
      "learning_rate": 2.7481447124304266e-05,
      "loss": 0.619,
      "step": 13000
    },
    {
      "epoch": 6.26,
      "grad_norm": 0.5212920308113098,
      "learning_rate": 2.7336502782931354e-05,
      "loss": 0.6195,
      "step": 13500
    },
    {
      "epoch": 6.49,
      "grad_norm": 0.6320118308067322,
      "learning_rate": 2.7191558441558442e-05,
      "loss": 0.6114,
      "step": 14000
    },
    {
      "epoch": 6.73,
      "grad_norm": 0.7715293169021606,
      "learning_rate": 2.704661410018553e-05,
      "loss": 0.6088,
      "step": 14500
    },
    {
      "epoch": 6.96,
      "grad_norm": 0.571773111820221,
      "learning_rate": 2.6901669758812618e-05,
      "loss": 0.6103,
      "step": 15000
    },
    {
      "epoch": 7.19,
      "grad_norm": 0.6421319246292114,
      "learning_rate": 2.6756725417439703e-05,
      "loss": 0.6025,
      "step": 15500
    },
    {
      "epoch": 7.42,
      "grad_norm": 0.5680719017982483,
      "learning_rate": 2.661178107606679e-05,
      "loss": 0.5998,
      "step": 16000
    },
    {
      "epoch": 7.65,
      "grad_norm": 0.5537917613983154,
      "learning_rate": 2.646683673469388e-05,
      "loss": 0.5981,
      "step": 16500
    },
    {
      "epoch": 7.88,
      "grad_norm": 0.4245034158229828,
      "learning_rate": 2.6321892393320966e-05,
      "loss": 0.5959,
      "step": 17000
    },
    {
      "epoch": 8.12,
      "grad_norm": 0.48047196865081787,
      "learning_rate": 2.617694805194805e-05,
      "loss": 0.5952,
      "step": 17500
    },
    {
      "epoch": 8.35,
      "grad_norm": 0.4809471368789673,
      "learning_rate": 2.603200371057514e-05,
      "loss": 0.5923,
      "step": 18000
    },
    {
      "epoch": 8.58,
      "grad_norm": 0.4537654519081116,
      "learning_rate": 2.5887059369202227e-05,
      "loss": 0.5878,
      "step": 18500
    },
    {
      "epoch": 8.81,
      "grad_norm": 0.5159969329833984,
      "learning_rate": 2.5742115027829315e-05,
      "loss": 0.5933,
      "step": 19000
    },
    {
      "epoch": 9.04,
      "grad_norm": 0.9971024990081787,
      "learning_rate": 2.5597170686456403e-05,
      "loss": 0.588,
      "step": 19500
    },
    {
      "epoch": 9.28,
      "grad_norm": 0.4496227204799652,
      "learning_rate": 2.5452226345083487e-05,
      "loss": 0.5877,
      "step": 20000
    },
    {
      "epoch": 9.51,
      "grad_norm": 0.5145105719566345,
      "learning_rate": 2.5307282003710575e-05,
      "loss": 0.5821,
      "step": 20500
    },
    {
      "epoch": 9.74,
      "grad_norm": 0.5195399522781372,
      "learning_rate": 2.5162337662337663e-05,
      "loss": 0.5827,
      "step": 21000
    },
    {
      "epoch": 9.97,
      "grad_norm": 0.5431232452392578,
      "learning_rate": 2.501739332096475e-05,
      "loss": 0.5845,
      "step": 21500
    },
    {
      "epoch": 10.2,
      "grad_norm": 0.4878028333187103,
      "learning_rate": 2.4872448979591835e-05,
      "loss": 0.5828,
      "step": 22000
    },
    {
      "epoch": 10.44,
      "grad_norm": 0.591066837310791,
      "learning_rate": 2.4727504638218923e-05,
      "loss": 0.5819,
      "step": 22500
    },
    {
      "epoch": 10.67,
      "grad_norm": 16.133996963500977,
      "learning_rate": 2.458256029684601e-05,
      "loss": 0.5765,
      "step": 23000
    },
    {
      "epoch": 10.9,
      "grad_norm": 0.5531517863273621,
      "learning_rate": 2.44376159554731e-05,
      "loss": 0.5781,
      "step": 23500
    },
    {
      "epoch": 11.13,
      "grad_norm": 0.4941064417362213,
      "learning_rate": 2.4292671614100187e-05,
      "loss": 0.5774,
      "step": 24000
    },
    {
      "epoch": 11.36,
      "grad_norm": 0.5101198554039001,
      "learning_rate": 2.414772727272727e-05,
      "loss": 0.5717,
      "step": 24500
    },
    {
      "epoch": 11.6,
      "grad_norm": 0.40433865785598755,
      "learning_rate": 2.400278293135436e-05,
      "loss": 0.5729,
      "step": 25000
    },
    {
      "epoch": 11.83,
      "grad_norm": 0.5296735763549805,
      "learning_rate": 2.3857838589981448e-05,
      "loss": 0.5766,
      "step": 25500
    },
    {
      "epoch": 12.06,
      "grad_norm": 0.40421661734580994,
      "learning_rate": 2.3712894248608535e-05,
      "loss": 0.5674,
      "step": 26000
    },
    {
      "epoch": 12.29,
      "grad_norm": 0.48923459649086,
      "learning_rate": 2.356794990723562e-05,
      "loss": 0.5673,
      "step": 26500
    },
    {
      "epoch": 12.52,
      "grad_norm": 0.4543541371822357,
      "learning_rate": 2.3423005565862708e-05,
      "loss": 0.5682,
      "step": 27000
    },
    {
      "epoch": 12.76,
      "grad_norm": 0.405658096075058,
      "learning_rate": 2.32780612244898e-05,
      "loss": 0.5654,
      "step": 27500
    },
    {
      "epoch": 12.99,
      "grad_norm": 0.4983844459056854,
      "learning_rate": 2.3133116883116884e-05,
      "loss": 0.5657,
      "step": 28000
    },
    {
      "epoch": 13.22,
      "grad_norm": 1.2178577184677124,
      "learning_rate": 2.2988172541743972e-05,
      "loss": 0.5638,
      "step": 28500
    },
    {
      "epoch": 13.45,
      "grad_norm": 0.4428790509700775,
      "learning_rate": 2.2843228200371056e-05,
      "loss": 0.5658,
      "step": 29000
    },
    {
      "epoch": 13.68,
      "grad_norm": 0.5377063155174255,
      "learning_rate": 2.2698283858998144e-05,
      "loss": 0.5609,
      "step": 29500
    },
    {
      "epoch": 13.91,
      "grad_norm": 0.4518877863883972,
      "learning_rate": 2.2553339517625232e-05,
      "loss": 0.5629,
      "step": 30000
    },
    {
      "epoch": 14.15,
      "grad_norm": 0.5720136761665344,
      "learning_rate": 2.240839517625232e-05,
      "loss": 0.5639,
      "step": 30500
    },
    {
      "epoch": 14.38,
      "grad_norm": 0.6480860114097595,
      "learning_rate": 2.2263450834879408e-05,
      "loss": 0.5622,
      "step": 31000
    },
    {
      "epoch": 14.61,
      "grad_norm": 0.40764591097831726,
      "learning_rate": 2.2118506493506492e-05,
      "loss": 0.5615,
      "step": 31500
    },
    {
      "epoch": 14.84,
      "grad_norm": 0.60052889585495,
      "learning_rate": 2.1973562152133584e-05,
      "loss": 0.5542,
      "step": 32000
    },
    {
      "epoch": 15.07,
      "grad_norm": 0.5227585434913635,
      "learning_rate": 2.182861781076067e-05,
      "loss": 0.5556,
      "step": 32500
    },
    {
      "epoch": 15.31,
      "grad_norm": 0.5119400024414062,
      "learning_rate": 2.1683673469387756e-05,
      "loss": 0.556,
      "step": 33000
    },
    {
      "epoch": 15.54,
      "grad_norm": 0.4895058870315552,
      "learning_rate": 2.153872912801484e-05,
      "loss": 0.5569,
      "step": 33500
    },
    {
      "epoch": 15.77,
      "grad_norm": 0.5205515623092651,
      "learning_rate": 2.1393784786641932e-05,
      "loss": 0.555,
      "step": 34000
    },
    {
      "epoch": 16.0,
      "grad_norm": 0.4375595152378082,
      "learning_rate": 2.1248840445269017e-05,
      "loss": 0.5544,
      "step": 34500
    },
    {
      "epoch": 16.23,
      "grad_norm": 0.4849925637245178,
      "learning_rate": 2.1103896103896105e-05,
      "loss": 0.5536,
      "step": 35000
    },
    {
      "epoch": 16.47,
      "grad_norm": 0.5083441734313965,
      "learning_rate": 2.0958951762523192e-05,
      "loss": 0.5522,
      "step": 35500
    },
    {
      "epoch": 16.7,
      "grad_norm": 0.561335027217865,
      "learning_rate": 2.0814007421150277e-05,
      "loss": 0.5523,
      "step": 36000
    },
    {
      "epoch": 16.93,
      "grad_norm": 0.463858962059021,
      "learning_rate": 2.066906307977737e-05,
      "loss": 0.5506,
      "step": 36500
    },
    {
      "epoch": 17.16,
      "grad_norm": 0.5114089250564575,
      "learning_rate": 2.0524118738404453e-05,
      "loss": 0.5512,
      "step": 37000
    },
    {
      "epoch": 17.39,
      "grad_norm": 0.9562047123908997,
      "learning_rate": 2.037917439703154e-05,
      "loss": 0.5506,
      "step": 37500
    },
    {
      "epoch": 17.63,
      "grad_norm": 0.755348265171051,
      "learning_rate": 2.0234230055658625e-05,
      "loss": 0.5473,
      "step": 38000
    },
    {
      "epoch": 17.86,
      "grad_norm": 0.5000788569450378,
      "learning_rate": 2.0089285714285717e-05,
      "loss": 0.5443,
      "step": 38500
    },
    {
      "epoch": 18.09,
      "grad_norm": 0.5531164407730103,
      "learning_rate": 1.99443413729128e-05,
      "loss": 0.548,
      "step": 39000
    },
    {
      "epoch": 18.32,
      "grad_norm": 0.49754297733306885,
      "learning_rate": 1.979939703153989e-05,
      "loss": 0.5475,
      "step": 39500
    },
    {
      "epoch": 18.55,
      "grad_norm": 0.462971955537796,
      "learning_rate": 1.9654452690166977e-05,
      "loss": 0.5459,
      "step": 40000
    },
    {
      "epoch": 18.78,
      "grad_norm": 0.9089665412902832,
      "learning_rate": 1.950950834879406e-05,
      "loss": 0.5491,
      "step": 40500
    },
    {
      "epoch": 19.02,
      "grad_norm": 0.47444915771484375,
      "learning_rate": 1.9364564007421153e-05,
      "loss": 0.5433,
      "step": 41000
    },
    {
      "epoch": 19.25,
      "grad_norm": 0.45020779967308044,
      "learning_rate": 1.9219619666048237e-05,
      "loss": 0.5423,
      "step": 41500
    },
    {
      "epoch": 19.48,
      "grad_norm": 0.6115174293518066,
      "learning_rate": 1.9074675324675325e-05,
      "loss": 0.5455,
      "step": 42000
    },
    {
      "epoch": 19.71,
      "grad_norm": 2.4381721019744873,
      "learning_rate": 1.892973098330241e-05,
      "loss": 0.5433,
      "step": 42500
    },
    {
      "epoch": 19.94,
      "grad_norm": 0.5207477807998657,
      "learning_rate": 1.87847866419295e-05,
      "loss": 0.5414,
      "step": 43000
    },
    {
      "epoch": 20.18,
      "grad_norm": 0.4906873106956482,
      "learning_rate": 1.8639842300556586e-05,
      "loss": 0.5428,
      "step": 43500
    },
    {
      "epoch": 20.41,
      "grad_norm": 0.4558812081813812,
      "learning_rate": 1.8494897959183674e-05,
      "loss": 0.5464,
      "step": 44000
    },
    {
      "epoch": 20.64,
      "grad_norm": 0.5274574756622314,
      "learning_rate": 1.834995361781076e-05,
      "loss": 0.5387,
      "step": 44500
    },
    {
      "epoch": 20.87,
      "grad_norm": 0.46367010474205017,
      "learning_rate": 1.820500927643785e-05,
      "loss": 0.5407,
      "step": 45000
    },
    {
      "epoch": 21.1,
      "grad_norm": 0.45425930619239807,
      "learning_rate": 1.8060064935064937e-05,
      "loss": 0.5444,
      "step": 45500
    },
    {
      "epoch": 21.34,
      "grad_norm": 0.4426890015602112,
      "learning_rate": 1.7915120593692022e-05,
      "loss": 0.5409,
      "step": 46000
    },
    {
      "epoch": 21.57,
      "grad_norm": 0.4001689851284027,
      "learning_rate": 1.777017625231911e-05,
      "loss": 0.5375,
      "step": 46500
    },
    {
      "epoch": 21.8,
      "grad_norm": 0.45908644795417786,
      "learning_rate": 1.7625231910946194e-05,
      "loss": 0.5375,
      "step": 47000
    },
    {
      "epoch": 22.03,
      "grad_norm": 0.41069209575653076,
      "learning_rate": 1.7480287569573286e-05,
      "loss": 0.5364,
      "step": 47500
    },
    {
      "epoch": 22.26,
      "grad_norm": 0.5071306824684143,
      "learning_rate": 1.733534322820037e-05,
      "loss": 0.5392,
      "step": 48000
    },
    {
      "epoch": 22.5,
      "grad_norm": 0.5278964638710022,
      "learning_rate": 1.7190398886827458e-05,
      "loss": 0.5351,
      "step": 48500
    },
    {
      "epoch": 22.73,
      "grad_norm": 0.5334340929985046,
      "learning_rate": 1.7045454545454546e-05,
      "loss": 0.5347,
      "step": 49000
    },
    {
      "epoch": 22.96,
      "grad_norm": 0.47415098547935486,
      "learning_rate": 1.6900510204081634e-05,
      "loss": 0.5378,
      "step": 49500
    },
    {
      "epoch": 23.19,
      "grad_norm": 0.4583585858345032,
      "learning_rate": 1.6755565862708722e-05,
      "loss": 0.5396,
      "step": 50000
    },
    {
      "epoch": 23.42,
      "grad_norm": 0.42494043707847595,
      "learning_rate": 1.6610621521335807e-05,
      "loss": 0.5354,
      "step": 50500
    },
    {
      "epoch": 23.65,
      "grad_norm": 0.46691572666168213,
      "learning_rate": 1.6465677179962894e-05,
      "loss": 0.5383,
      "step": 51000
    },
    {
      "epoch": 23.89,
      "grad_norm": 0.5475151538848877,
      "learning_rate": 1.6320732838589982e-05,
      "loss": 0.5351,
      "step": 51500
    },
    {
      "epoch": 24.12,
      "grad_norm": 0.4535447061061859,
      "learning_rate": 1.617578849721707e-05,
      "loss": 0.5346,
      "step": 52000
    },
    {
      "epoch": 24.35,
      "grad_norm": 0.5584075450897217,
      "learning_rate": 1.6030844155844155e-05,
      "loss": 0.5345,
      "step": 52500
    },
    {
      "epoch": 24.58,
      "grad_norm": 0.7527787685394287,
      "learning_rate": 1.5885899814471243e-05,
      "loss": 0.5273,
      "step": 53000
    },
    {
      "epoch": 24.81,
      "grad_norm": 0.39385178685188293,
      "learning_rate": 1.574095547309833e-05,
      "loss": 0.5365,
      "step": 53500
    },
    {
      "epoch": 25.05,
      "grad_norm": 0.5498375296592712,
      "learning_rate": 1.559601113172542e-05,
      "loss": 0.5375,
      "step": 54000
    },
    {
      "epoch": 25.28,
      "grad_norm": 0.41572290658950806,
      "learning_rate": 1.5451066790352507e-05,
      "loss": 0.5335,
      "step": 54500
    },
    {
      "epoch": 25.51,
      "grad_norm": 0.4475056827068329,
      "learning_rate": 1.530612244897959e-05,
      "loss": 0.5335,
      "step": 55000
    },
    {
      "epoch": 25.74,
      "grad_norm": 0.48311153054237366,
      "learning_rate": 1.516117810760668e-05,
      "loss": 0.5329,
      "step": 55500
    },
    {
      "epoch": 25.97,
      "grad_norm": 0.48509538173675537,
      "learning_rate": 1.5016233766233767e-05,
      "loss": 0.5307,
      "step": 56000
    },
    {
      "epoch": 26.21,
      "grad_norm": 0.5727821588516235,
      "learning_rate": 1.4871289424860855e-05,
      "loss": 0.5316,
      "step": 56500
    },
    {
      "epoch": 26.44,
      "grad_norm": 0.46595117449760437,
      "learning_rate": 1.4726345083487941e-05,
      "loss": 0.5309,
      "step": 57000
    },
    {
      "epoch": 26.67,
      "grad_norm": 0.5022342801094055,
      "learning_rate": 1.4581400742115027e-05,
      "loss": 0.5354,
      "step": 57500
    },
    {
      "epoch": 26.9,
      "grad_norm": 0.5400568842887878,
      "learning_rate": 1.4436456400742115e-05,
      "loss": 0.5268,
      "step": 58000
    },
    {
      "epoch": 27.13,
      "grad_norm": 0.49959874153137207,
      "learning_rate": 1.4291512059369201e-05,
      "loss": 0.5316,
      "step": 58500
    },
    {
      "epoch": 27.37,
      "grad_norm": 0.5435352921485901,
      "learning_rate": 1.414656771799629e-05,
      "loss": 0.53,
      "step": 59000
    },
    {
      "epoch": 27.6,
      "grad_norm": 0.5168443918228149,
      "learning_rate": 1.4001623376623376e-05,
      "loss": 0.5272,
      "step": 59500
    },
    {
      "epoch": 27.83,
      "grad_norm": 0.60955411195755,
      "learning_rate": 1.3856679035250465e-05,
      "loss": 0.5283,
      "step": 60000
    },
    {
      "epoch": 28.06,
      "grad_norm": 0.5063374042510986,
      "learning_rate": 1.3711734693877551e-05,
      "loss": 0.5308,
      "step": 60500
    },
    {
      "epoch": 28.29,
      "grad_norm": 0.46151864528656006,
      "learning_rate": 1.356679035250464e-05,
      "loss": 0.5282,
      "step": 61000
    },
    {
      "epoch": 28.53,
      "grad_norm": 0.5224559903144836,
      "learning_rate": 1.3421846011131726e-05,
      "loss": 0.5261,
      "step": 61500
    },
    {
      "epoch": 28.76,
      "grad_norm": 0.43721848726272583,
      "learning_rate": 1.3276901669758814e-05,
      "loss": 0.525,
      "step": 62000
    },
    {
      "epoch": 28.99,
      "grad_norm": 0.5033512115478516,
      "learning_rate": 1.31319573283859e-05,
      "loss": 0.5304,
      "step": 62500
    },
    {
      "epoch": 29.22,
      "grad_norm": 0.5284975171089172,
      "learning_rate": 1.2987012987012988e-05,
      "loss": 0.5291,
      "step": 63000
    },
    {
      "epoch": 29.45,
      "grad_norm": 0.6417981386184692,
      "learning_rate": 1.2842068645640074e-05,
      "loss": 0.5267,
      "step": 63500
    },
    {
      "epoch": 29.68,
      "grad_norm": 0.5176807045936584,
      "learning_rate": 1.269712430426716e-05,
      "loss": 0.5246,
      "step": 64000
    },
    {
      "epoch": 29.92,
      "grad_norm": 0.46041619777679443,
      "learning_rate": 1.255217996289425e-05,
      "loss": 0.527,
      "step": 64500
    },
    {
      "epoch": 30.15,
      "grad_norm": 0.5162688493728638,
      "learning_rate": 1.2407235621521336e-05,
      "loss": 0.5242,
      "step": 65000
    },
    {
      "epoch": 30.38,
      "grad_norm": 0.46508926153182983,
      "learning_rate": 1.2262291280148424e-05,
      "loss": 0.5234,
      "step": 65500
    },
    {
      "epoch": 30.61,
      "grad_norm": 0.4786316156387329,
      "learning_rate": 1.211734693877551e-05,
      "loss": 0.5287,
      "step": 66000
    },
    {
      "epoch": 30.84,
      "grad_norm": 0.5851900577545166,
      "learning_rate": 1.1972402597402598e-05,
      "loss": 0.5252,
      "step": 66500
    },
    {
      "epoch": 31.08,
      "grad_norm": 0.5234825611114502,
      "learning_rate": 1.1827458256029684e-05,
      "loss": 0.5238,
      "step": 67000
    },
    {
      "epoch": 31.31,
      "grad_norm": 0.4396039843559265,
      "learning_rate": 1.1682513914656772e-05,
      "loss": 0.523,
      "step": 67500
    },
    {
      "epoch": 31.54,
      "grad_norm": 0.4660816788673401,
      "learning_rate": 1.1537569573283858e-05,
      "loss": 0.5254,
      "step": 68000
    },
    {
      "epoch": 31.77,
      "grad_norm": 0.4561219811439514,
      "learning_rate": 1.1392625231910948e-05,
      "loss": 0.5202,
      "step": 68500
    },
    {
      "epoch": 32.0,
      "grad_norm": 0.4771936535835266,
      "learning_rate": 1.1247680890538034e-05,
      "loss": 0.526,
      "step": 69000
    },
    {
      "epoch": 32.24,
      "grad_norm": 0.5595078468322754,
      "learning_rate": 1.110273654916512e-05,
      "loss": 0.5233,
      "step": 69500
    },
    {
      "epoch": 32.47,
      "grad_norm": 0.5574076175689697,
      "learning_rate": 1.0957792207792208e-05,
      "loss": 0.5242,
      "step": 70000
    },
    {
      "epoch": 32.7,
      "grad_norm": 0.5540217161178589,
      "learning_rate": 1.0812847866419295e-05,
      "loss": 0.5207,
      "step": 70500
    },
    {
      "epoch": 32.93,
      "grad_norm": 0.4957340359687805,
      "learning_rate": 1.0667903525046383e-05,
      "loss": 0.5234,
      "step": 71000
    },
    {
      "epoch": 33.16,
      "grad_norm": 0.5124360918998718,
      "learning_rate": 1.0522959183673469e-05,
      "loss": 0.5215,
      "step": 71500
    },
    {
      "epoch": 33.4,
      "grad_norm": 0.46945714950561523,
      "learning_rate": 1.0378014842300557e-05,
      "loss": 0.5253,
      "step": 72000
    },
    {
      "epoch": 33.63,
      "grad_norm": 0.379711389541626,
      "learning_rate": 1.0233070500927643e-05,
      "loss": 0.5217,
      "step": 72500
    },
    {
      "epoch": 33.86,
      "grad_norm": 0.6671276092529297,
      "learning_rate": 1.0088126159554733e-05,
      "loss": 0.5184,
      "step": 73000
    },
    {
      "epoch": 34.09,
      "grad_norm": 0.5381418466567993,
      "learning_rate": 9.943181818181819e-06,
      "loss": 0.5273,
      "step": 73500
    },
    {
      "epoch": 34.32,
      "grad_norm": 0.6995874643325806,
      "learning_rate": 9.798237476808907e-06,
      "loss": 0.5203,
      "step": 74000
    },
    {
      "epoch": 34.55,
      "grad_norm": 0.4824086129665375,
      "learning_rate": 9.653293135435993e-06,
      "loss": 0.5186,
      "step": 74500
    },
    {
      "epoch": 34.79,
      "grad_norm": 0.5302020311355591,
      "learning_rate": 9.50834879406308e-06,
      "loss": 0.5194,
      "step": 75000
    },
    {
      "epoch": 35.02,
      "grad_norm": 0.5452972054481506,
      "learning_rate": 9.363404452690167e-06,
      "loss": 0.5244,
      "step": 75500
    },
    {
      "epoch": 35.25,
      "grad_norm": 0.6910521984100342,
      "learning_rate": 9.218460111317253e-06,
      "loss": 0.5185,
      "step": 76000
    },
    {
      "epoch": 35.48,
      "grad_norm": 0.5710629224777222,
      "learning_rate": 9.073515769944341e-06,
      "loss": 0.519,
      "step": 76500
    },
    {
      "epoch": 35.71,
      "grad_norm": 0.4669000804424286,
      "learning_rate": 8.928571428571428e-06,
      "loss": 0.5171,
      "step": 77000
    },
    {
      "epoch": 35.95,
      "grad_norm": 0.508492112159729,
      "learning_rate": 8.783627087198517e-06,
      "loss": 0.5201,
      "step": 77500
    },
    {
      "epoch": 36.18,
      "grad_norm": 0.5005605816841125,
      "learning_rate": 8.638682745825603e-06,
      "loss": 0.5181,
      "step": 78000
    },
    {
      "epoch": 36.41,
      "grad_norm": 0.5115915536880493,
      "learning_rate": 8.493738404452691e-06,
      "loss": 0.517,
      "step": 78500
    },
    {
      "epoch": 36.64,
      "grad_norm": 0.5187366604804993,
      "learning_rate": 8.348794063079778e-06,
      "loss": 0.5197,
      "step": 79000
    },
    {
      "epoch": 36.87,
      "grad_norm": 0.5845080614089966,
      "learning_rate": 8.203849721706865e-06,
      "loss": 0.5209,
      "step": 79500
    },
    {
      "epoch": 37.11,
      "grad_norm": 0.6533055901527405,
      "learning_rate": 8.058905380333952e-06,
      "loss": 0.521,
      "step": 80000
    },
    {
      "epoch": 37.34,
      "grad_norm": 0.4986008107662201,
      "learning_rate": 7.913961038961038e-06,
      "loss": 0.5172,
      "step": 80500
    },
    {
      "epoch": 37.57,
      "grad_norm": 0.557759165763855,
      "learning_rate": 7.769016697588126e-06,
      "loss": 0.5169,
      "step": 81000
    },
    {
      "epoch": 37.8,
      "grad_norm": 0.651156485080719,
      "learning_rate": 7.624072356215214e-06,
      "loss": 0.5179,
      "step": 81500
    },
    {
      "epoch": 38.03,
      "grad_norm": 0.5263897180557251,
      "learning_rate": 7.479128014842301e-06,
      "loss": 0.5202,
      "step": 82000
    },
    {
      "epoch": 38.27,
      "grad_norm": 0.497855007648468,
      "learning_rate": 7.334183673469388e-06,
      "loss": 0.5169,
      "step": 82500
    },
    {
      "epoch": 38.5,
      "grad_norm": 0.5888237357139587,
      "learning_rate": 7.189239332096476e-06,
      "loss": 0.5172,
      "step": 83000
    },
    {
      "epoch": 38.73,
      "grad_norm": 0.7321680188179016,
      "learning_rate": 7.044294990723562e-06,
      "loss": 0.5175,
      "step": 83500
    },
    {
      "epoch": 38.96,
      "grad_norm": 0.5042610168457031,
      "learning_rate": 6.899350649350649e-06,
      "loss": 0.5175,
      "step": 84000
    },
    {
      "epoch": 39.19,
      "grad_norm": 0.6449227333068848,
      "learning_rate": 6.754406307977736e-06,
      "loss": 0.5185,
      "step": 84500
    },
    {
      "epoch": 39.42,
      "grad_norm": 0.4842873215675354,
      "learning_rate": 6.609461966604823e-06,
      "loss": 0.5174,
      "step": 85000
    },
    {
      "epoch": 39.66,
      "grad_norm": 0.5853040218353271,
      "learning_rate": 6.464517625231911e-06,
      "loss": 0.5158,
      "step": 85500
    },
    {
      "epoch": 39.89,
      "grad_norm": 0.6231125593185425,
      "learning_rate": 6.319573283858998e-06,
      "loss": 0.518,
      "step": 86000
    },
    {
      "epoch": 40.12,
      "grad_norm": 0.7321327328681946,
      "learning_rate": 6.174628942486085e-06,
      "loss": 0.5101,
      "step": 86500
    },
    {
      "epoch": 40.35,
      "grad_norm": 0.48840710520744324,
      "learning_rate": 6.0296846011131725e-06,
      "loss": 0.5188,
      "step": 87000
    },
    {
      "epoch": 40.58,
      "grad_norm": 0.4609430134296417,
      "learning_rate": 5.88474025974026e-06,
      "loss": 0.5156,
      "step": 87500
    },
    {
      "epoch": 40.82,
      "grad_norm": 0.7623891234397888,
      "learning_rate": 5.7397959183673475e-06,
      "loss": 0.5135,
      "step": 88000
    },
    {
      "epoch": 41.05,
      "grad_norm": 0.6465628147125244,
      "learning_rate": 5.5948515769944346e-06,
      "loss": 0.5148,
      "step": 88500
    },
    {
      "epoch": 41.28,
      "grad_norm": 0.6189027428627014,
      "learning_rate": 5.449907235621521e-06,
      "loss": 0.5162,
      "step": 89000
    },
    {
      "epoch": 41.51,
      "grad_norm": 0.48868080973625183,
      "learning_rate": 5.304962894248609e-06,
      "loss": 0.516,
      "step": 89500
    },
    {
      "epoch": 41.74,
      "grad_norm": 0.5011306405067444,
      "learning_rate": 5.160018552875696e-06,
      "loss": 0.513,
      "step": 90000
    },
    {
      "epoch": 41.98,
      "grad_norm": 0.596338152885437,
      "learning_rate": 5.015074211502783e-06,
      "loss": 0.5149,
      "step": 90500
    },
    {
      "epoch": 42.21,
      "grad_norm": 0.681685209274292,
      "learning_rate": 4.87012987012987e-06,
      "loss": 0.5139,
      "step": 91000
    },
    {
      "epoch": 42.44,
      "grad_norm": 0.6109485030174255,
      "learning_rate": 4.725185528756957e-06,
      "loss": 0.5145,
      "step": 91500
    },
    {
      "epoch": 42.67,
      "grad_norm": 0.6468715071678162,
      "learning_rate": 4.580241187384045e-06,
      "loss": 0.5138,
      "step": 92000
    },
    {
      "epoch": 42.9,
      "grad_norm": 0.47962233424186707,
      "learning_rate": 4.435296846011132e-06,
      "loss": 0.5095,
      "step": 92500
    },
    {
      "epoch": 43.14,
      "grad_norm": 0.6052845120429993,
      "learning_rate": 4.290352504638219e-06,
      "loss": 0.5129,
      "step": 93000
    },
    {
      "epoch": 43.37,
      "grad_norm": 0.49540799856185913,
      "learning_rate": 4.145408163265306e-06,
      "loss": 0.5161,
      "step": 93500
    },
    {
      "epoch": 43.6,
      "grad_norm": 0.5836204290390015,
      "learning_rate": 4.000463821892394e-06,
      "loss": 0.509,
      "step": 94000
    },
    {
      "epoch": 43.83,
      "grad_norm": 0.42629340291023254,
      "learning_rate": 3.85551948051948e-06,
      "loss": 0.5135,
      "step": 94500
    },
    {
      "epoch": 44.06,
      "grad_norm": 0.4765370488166809,
      "learning_rate": 3.710575139146568e-06,
      "loss": 0.5107,
      "step": 95000
    },
    {
      "epoch": 44.29,
      "grad_norm": 0.6870307326316833,
      "learning_rate": 3.565630797773655e-06,
      "loss": 0.5134,
      "step": 95500
    },
    {
      "epoch": 44.53,
      "grad_norm": 0.4927411675453186,
      "learning_rate": 3.4206864564007424e-06,
      "loss": 0.5083,
      "step": 96000
    },
    {
      "epoch": 44.76,
      "grad_norm": 0.5409857034683228,
      "learning_rate": 3.275742115027829e-06,
      "loss": 0.5133,
      "step": 96500
    },
    {
      "epoch": 44.99,
      "grad_norm": 0.5635824799537659,
      "learning_rate": 3.1307977736549166e-06,
      "loss": 0.5111,
      "step": 97000
    },
    {
      "epoch": 45.22,
      "grad_norm": 0.6565483212471008,
      "learning_rate": 2.9858534322820037e-06,
      "loss": 0.5099,
      "step": 97500
    },
    {
      "epoch": 45.45,
      "grad_norm": 0.659949779510498,
      "learning_rate": 2.840909090909091e-06,
      "loss": 0.5124,
      "step": 98000
    },
    {
      "epoch": 45.69,
      "grad_norm": 0.551925539970398,
      "learning_rate": 2.6959647495361782e-06,
      "loss": 0.5105,
      "step": 98500
    },
    {
      "epoch": 45.92,
      "grad_norm": 0.6591493487358093,
      "learning_rate": 2.5510204081632653e-06,
      "loss": 0.5106,
      "step": 99000
    },
    {
      "epoch": 46.15,
      "grad_norm": 0.6934979557991028,
      "learning_rate": 2.4060760667903524e-06,
      "loss": 0.5109,
      "step": 99500
    },
    {
      "epoch": 46.38,
      "grad_norm": 0.5429938435554504,
      "learning_rate": 2.26113172541744e-06,
      "loss": 0.5081,
      "step": 100000
    },
    {
      "epoch": 46.61,
      "grad_norm": 0.5906868577003479,
      "learning_rate": 2.116187384044527e-06,
      "loss": 0.5106,
      "step": 100500
    },
    {
      "epoch": 46.85,
      "grad_norm": 0.7781776785850525,
      "learning_rate": 1.971243042671614e-06,
      "loss": 0.5131,
      "step": 101000
    },
    {
      "epoch": 47.08,
      "grad_norm": 0.5847499966621399,
      "learning_rate": 1.8262987012987013e-06,
      "loss": 0.5101,
      "step": 101500
    },
    {
      "epoch": 47.31,
      "grad_norm": 0.49553003907203674,
      "learning_rate": 1.6813543599257886e-06,
      "loss": 0.5129,
      "step": 102000
    },
    {
      "epoch": 47.54,
      "grad_norm": 0.581206738948822,
      "learning_rate": 1.5364100185528757e-06,
      "loss": 0.5122,
      "step": 102500
    },
    {
      "epoch": 47.77,
      "grad_norm": 0.7169119715690613,
      "learning_rate": 1.391465677179963e-06,
      "loss": 0.5075,
      "step": 103000
    },
    {
      "epoch": 48.01,
      "grad_norm": 0.7762653231620789,
      "learning_rate": 1.24652133580705e-06,
      "loss": 0.5121,
      "step": 103500
    },
    {
      "epoch": 48.24,
      "grad_norm": 0.6211891770362854,
      "learning_rate": 1.1015769944341372e-06,
      "loss": 0.5096,
      "step": 104000
    },
    {
      "epoch": 48.47,
      "grad_norm": 0.6150048971176147,
      "learning_rate": 9.566326530612244e-07,
      "loss": 0.5141,
      "step": 104500
    },
    {
      "epoch": 48.7,
      "grad_norm": 0.5785318613052368,
      "learning_rate": 8.116883116883117e-07,
      "loss": 0.5066,
      "step": 105000
    },
    {
      "epoch": 48.93,
      "grad_norm": 0.64094078540802,
      "learning_rate": 6.667439703153989e-07,
      "loss": 0.5066,
      "step": 105500
    },
    {
      "epoch": 49.17,
      "grad_norm": 0.5458254218101501,
      "learning_rate": 5.217996289424861e-07,
      "loss": 0.5083,
      "step": 106000
    },
    {
      "epoch": 49.4,
      "grad_norm": 0.6012634634971619,
      "learning_rate": 3.768552875695733e-07,
      "loss": 0.5112,
      "step": 106500
    },
    {
      "epoch": 49.63,
      "grad_norm": 0.6441764235496521,
      "learning_rate": 2.319109461966605e-07,
      "loss": 0.5094,
      "step": 107000
    },
    {
      "epoch": 49.86,
      "grad_norm": 0.5359529852867126,
      "learning_rate": 8.696660482374769e-08,
      "loss": 0.5095,
      "step": 107500
    },
    {
      "epoch": 50.0,
      "step": 107800,
      "total_flos": 8.716413062401229e+19,
      "train_loss": 0.5550972292020724,
      "train_runtime": 13976.6946,
      "train_samples_per_second": 61.703,
      "train_steps_per_second": 7.713
    }
  ],
  "logging_steps": 500,
  "max_steps": 107800,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 1000000000,
  "total_flos": 8.716413062401229e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}