{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.283331606754377,
"eval_steps": 500,
"global_step": 51000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.05179736869367036,
"grad_norm": 3.8771300836515366e-08,
"learning_rate": 1.9852007518018084e-05,
"loss": 0.0996,
"step": 500
},
{
"epoch": 0.10359473738734072,
"grad_norm": 9.604158321963041e-07,
"learning_rate": 1.970401503603617e-05,
"loss": 0.256,
"step": 1000
},
{
"epoch": 0.1553921060810111,
"grad_norm": 1.8179751350544393e-05,
"learning_rate": 1.9556022554054253e-05,
"loss": 0.1367,
"step": 1500
},
{
"epoch": 0.20718947477468144,
"grad_norm": 8.53914680192247e-05,
"learning_rate": 1.9408030072072343e-05,
"loss": 0.063,
"step": 2000
},
{
"epoch": 0.2589868434683518,
"grad_norm": 4.6534645662177354e-05,
"learning_rate": 1.9260037590090425e-05,
"loss": 0.0201,
"step": 2500
},
{
"epoch": 0.3107842121620222,
"grad_norm": 6.902104843220513e-08,
"learning_rate": 1.911204510810851e-05,
"loss": 0.106,
"step": 3000
},
{
"epoch": 0.36258158085569253,
"grad_norm": 2.192794745781157e-08,
"learning_rate": 1.8964052626126594e-05,
"loss": 0.0797,
"step": 3500
},
{
"epoch": 0.4143789495493629,
"grad_norm": 4.58152558859698e-13,
"learning_rate": 1.881606014414468e-05,
"loss": 0.0264,
"step": 4000
},
{
"epoch": 0.46617631824303324,
"grad_norm": 4.8503436119062826e-05,
"learning_rate": 1.8668067662162763e-05,
"loss": 0.0329,
"step": 4500
},
{
"epoch": 0.5179736869367036,
"grad_norm": 9.135746950050816e-05,
"learning_rate": 1.852007518018085e-05,
"loss": 0.023,
"step": 5000
},
{
"epoch": 0.569771055630374,
"grad_norm": 2.3920888381212535e-08,
"learning_rate": 1.8372082698198932e-05,
"loss": 0.044,
"step": 5500
},
{
"epoch": 0.6215684243240444,
"grad_norm": 1.1770172932301648e-05,
"learning_rate": 1.8224090216217018e-05,
"loss": 0.0097,
"step": 6000
},
{
"epoch": 0.6733657930177147,
"grad_norm": 5.798747224616818e-05,
"learning_rate": 1.8076097734235104e-05,
"loss": 0.0628,
"step": 6500
},
{
"epoch": 0.7251631617113851,
"grad_norm": 0.0055756960064172745,
"learning_rate": 1.7928105252253187e-05,
"loss": 0.0167,
"step": 7000
},
{
"epoch": 0.7769605304050554,
"grad_norm": 6.234566535567865e-05,
"learning_rate": 1.7780112770271273e-05,
"loss": 0.0342,
"step": 7500
},
{
"epoch": 0.8287578990987258,
"grad_norm": 2.065628723357804e-05,
"learning_rate": 1.7632120288289356e-05,
"loss": 0.0278,
"step": 8000
},
{
"epoch": 0.8805552677923961,
"grad_norm": 0.00034213648177683353,
"learning_rate": 1.7484127806307442e-05,
"loss": 0.0777,
"step": 8500
},
{
"epoch": 0.9323526364860665,
"grad_norm": 0.0024548424407839775,
"learning_rate": 1.7336135324325525e-05,
"loss": 0.0117,
"step": 9000
},
{
"epoch": 0.9841500051797368,
"grad_norm": 2.010272328334395e-05,
"learning_rate": 1.718814284234361e-05,
"loss": 0.0149,
"step": 9500
},
{
"epoch": 1.0359473738734073,
"grad_norm": 3.7440368032548577e-05,
"learning_rate": 1.7040150360361697e-05,
"loss": 0.0059,
"step": 10000
},
{
"epoch": 1.0877447425670776,
"grad_norm": 9.011640031530987e-06,
"learning_rate": 1.689215787837978e-05,
"loss": 0.0245,
"step": 10500
},
{
"epoch": 1.139542111260748,
"grad_norm": 3.9126422052504495e-05,
"learning_rate": 1.6744165396397866e-05,
"loss": 0.0001,
"step": 11000
},
{
"epoch": 1.1913394799544184,
"grad_norm": 8.866464668244589e-06,
"learning_rate": 1.659617291441595e-05,
"loss": 0.0,
"step": 11500
},
{
"epoch": 1.2431368486480887,
"grad_norm": 0.00013718219997826964,
"learning_rate": 1.6448180432434035e-05,
"loss": 0.0181,
"step": 12000
},
{
"epoch": 1.294934217341759,
"grad_norm": 3.808485416811891e-06,
"learning_rate": 1.6300187950452117e-05,
"loss": 0.0,
"step": 12500
},
{
"epoch": 1.3467315860354294,
"grad_norm": 7.217061011033366e-07,
"learning_rate": 1.6152195468470203e-05,
"loss": 0.0266,
"step": 13000
},
{
"epoch": 1.3985289547290998,
"grad_norm": 7.131046731956303e-05,
"learning_rate": 1.600420298648829e-05,
"loss": 0.0266,
"step": 13500
},
{
"epoch": 1.4503263234227701,
"grad_norm": 9.411406608705875e-06,
"learning_rate": 1.5856210504506372e-05,
"loss": 0.0079,
"step": 14000
},
{
"epoch": 1.5021236921164405,
"grad_norm": 7.976142660481855e-05,
"learning_rate": 1.570821802252446e-05,
"loss": 0.022,
"step": 14500
},
{
"epoch": 1.5539210608101108,
"grad_norm": 1.0580498610579525e-06,
"learning_rate": 1.556022554054254e-05,
"loss": 0.0093,
"step": 15000
},
{
"epoch": 1.6057184295037812,
"grad_norm": 6.298066182353068e-06,
"learning_rate": 1.5412233058560627e-05,
"loss": 0.0,
"step": 15500
},
{
"epoch": 1.6575157981974515,
"grad_norm": 2.102418066030065e-12,
"learning_rate": 1.526424057657871e-05,
"loss": 0.0024,
"step": 16000
},
{
"epoch": 1.709313166891122,
"grad_norm": 3.009020701938425e-06,
"learning_rate": 1.5116248094596794e-05,
"loss": 0.009,
"step": 16500
},
{
"epoch": 1.7611105355847922,
"grad_norm": 6.2723142946197186e-06,
"learning_rate": 1.4968255612614882e-05,
"loss": 0.023,
"step": 17000
},
{
"epoch": 1.8129079042784626,
"grad_norm": 5.638932634610683e-06,
"learning_rate": 1.4820263130632967e-05,
"loss": 0.0099,
"step": 17500
},
{
"epoch": 1.8647052729721332,
"grad_norm": 3.8804391806479543e-05,
"learning_rate": 1.4672270648651051e-05,
"loss": 0.0185,
"step": 18000
},
{
"epoch": 1.9165026416658035,
"grad_norm": 3.445857828410226e-06,
"learning_rate": 1.4524278166669134e-05,
"loss": 0.0,
"step": 18500
},
{
"epoch": 1.9683000103594739,
"grad_norm": 0.0029530602041631937,
"learning_rate": 1.4376285684687218e-05,
"loss": 0.0243,
"step": 19000
},
{
"epoch": 2.0200973790531442,
"grad_norm": 7.133132271519571e-07,
"learning_rate": 1.4228293202705303e-05,
"loss": 0.0189,
"step": 19500
},
{
"epoch": 2.0718947477468146,
"grad_norm": 0.00042451228364370763,
"learning_rate": 1.4080300720723387e-05,
"loss": 0.0062,
"step": 20000
},
{
"epoch": 2.123692116440485,
"grad_norm": 1.8360736930844723e-06,
"learning_rate": 1.3932308238741471e-05,
"loss": 0.0067,
"step": 20500
},
{
"epoch": 2.1754894851341553,
"grad_norm": 0.0001334488915745169,
"learning_rate": 1.378431575675956e-05,
"loss": 0.006,
"step": 21000
},
{
"epoch": 2.2272868538278257,
"grad_norm": 4.610120413417462e-06,
"learning_rate": 1.3636323274777644e-05,
"loss": 0.0061,
"step": 21500
},
{
"epoch": 2.279084222521496,
"grad_norm": 2.7200339900446124e-06,
"learning_rate": 1.3488330792795728e-05,
"loss": 0.0,
"step": 22000
},
{
"epoch": 2.3308815912151664,
"grad_norm": 3.3594403703318676e-06,
"learning_rate": 1.3340338310813813e-05,
"loss": 0.009,
"step": 22500
},
{
"epoch": 2.3826789599088367,
"grad_norm": 5.500828137883218e-06,
"learning_rate": 1.3192345828831897e-05,
"loss": 0.0083,
"step": 23000
},
{
"epoch": 2.434476328602507,
"grad_norm": 1103.414306640625,
"learning_rate": 1.304435334684998e-05,
"loss": 0.0058,
"step": 23500
},
{
"epoch": 2.4862736972961774,
"grad_norm": 1.7569537931194645e-06,
"learning_rate": 1.2896360864868064e-05,
"loss": 0.0028,
"step": 24000
},
{
"epoch": 2.5380710659898478,
"grad_norm": 9.93580897556967e-07,
"learning_rate": 1.2748368382886152e-05,
"loss": 0.0052,
"step": 24500
},
{
"epoch": 2.589868434683518,
"grad_norm": 2.71925017225616e-12,
"learning_rate": 1.2600375900904236e-05,
"loss": 0.0,
"step": 25000
},
{
"epoch": 2.6416658033771885,
"grad_norm": 1.8420889318804257e-05,
"learning_rate": 1.245238341892232e-05,
"loss": 0.0195,
"step": 25500
},
{
"epoch": 2.693463172070859,
"grad_norm": 8.20615071006614e-07,
"learning_rate": 1.2304390936940405e-05,
"loss": 0.008,
"step": 26000
},
{
"epoch": 2.745260540764529,
"grad_norm": 6.169057451188564e-05,
"learning_rate": 1.215639845495849e-05,
"loss": 0.0005,
"step": 26500
},
{
"epoch": 2.7970579094581995,
"grad_norm": 2.9846903544239467e-06,
"learning_rate": 1.2008405972976574e-05,
"loss": 0.0037,
"step": 27000
},
{
"epoch": 2.84885527815187,
"grad_norm": 8.764583071751986e-06,
"learning_rate": 1.1860413490994659e-05,
"loss": 0.0156,
"step": 27500
},
{
"epoch": 2.9006526468455403,
"grad_norm": 5.1639810408232734e-05,
"learning_rate": 1.1712421009012743e-05,
"loss": 0.0,
"step": 28000
},
{
"epoch": 2.9524500155392106,
"grad_norm": 8.454779163002968e-06,
"learning_rate": 1.1564428527030829e-05,
"loss": 0.0,
"step": 28500
},
{
"epoch": 3.004247384232881,
"grad_norm": 1.0601724653724887e-07,
"learning_rate": 1.1416436045048913e-05,
"loss": 0.0,
"step": 29000
},
{
"epoch": 3.0560447529265513,
"grad_norm": 3.302725417597685e-06,
"learning_rate": 1.1268443563066998e-05,
"loss": 0.0,
"step": 29500
},
{
"epoch": 3.1078421216202217,
"grad_norm": 8.728113243705593e-06,
"learning_rate": 1.1120451081085082e-05,
"loss": 0.0067,
"step": 30000
},
{
"epoch": 3.159639490313892,
"grad_norm": 2.4715068320801947e-06,
"learning_rate": 1.0972458599103167e-05,
"loss": 0.0,
"step": 30500
},
{
"epoch": 3.2114368590075624,
"grad_norm": 6.171033419377636e-06,
"learning_rate": 1.0824466117121251e-05,
"loss": 0.0,
"step": 31000
},
{
"epoch": 3.2632342277012327,
"grad_norm": 2.5147855922114104e-06,
"learning_rate": 1.0676473635139336e-05,
"loss": 0.0,
"step": 31500
},
{
"epoch": 3.315031596394903,
"grad_norm": 2.676899021025747e-05,
"learning_rate": 1.052848115315742e-05,
"loss": 0.006,
"step": 32000
},
{
"epoch": 3.3668289650885734,
"grad_norm": 2.081859747704584e-05,
"learning_rate": 1.0380488671175506e-05,
"loss": 0.0028,
"step": 32500
},
{
"epoch": 3.418626333782244,
"grad_norm": 2.3868110474722926e-06,
"learning_rate": 1.023249618919359e-05,
"loss": 0.0001,
"step": 33000
},
{
"epoch": 3.470423702475914,
"grad_norm": 2.7923347261094023e-06,
"learning_rate": 1.0084503707211675e-05,
"loss": 0.0,
"step": 33500
},
{
"epoch": 3.5222210711695845,
"grad_norm": 4.678757704823511e-06,
"learning_rate": 9.93651122522976e-06,
"loss": 0.0,
"step": 34000
},
{
"epoch": 3.574018439863255,
"grad_norm": 3.305537575215567e-06,
"learning_rate": 9.788518743247844e-06,
"loss": 0.0,
"step": 34500
},
{
"epoch": 3.625815808556925,
"grad_norm": 2.4619773739686934e-06,
"learning_rate": 9.640526261265928e-06,
"loss": 0.0,
"step": 35000
},
{
"epoch": 3.6776131772505956,
"grad_norm": 2.973723951527063e-07,
"learning_rate": 9.492533779284013e-06,
"loss": 0.0,
"step": 35500
},
{
"epoch": 3.729410545944266,
"grad_norm": 5.624352183986048e-07,
"learning_rate": 9.344541297302097e-06,
"loss": 0.012,
"step": 36000
},
{
"epoch": 3.7812079146379363,
"grad_norm": 1.8933849332825048e-07,
"learning_rate": 9.196548815320182e-06,
"loss": 0.0,
"step": 36500
},
{
"epoch": 3.8330052833316066,
"grad_norm": 3.9811013266444206e-05,
"learning_rate": 9.048556333338268e-06,
"loss": 0.0,
"step": 37000
},
{
"epoch": 3.884802652025277,
"grad_norm": 2.1272378944559023e-05,
"learning_rate": 8.900563851356352e-06,
"loss": 0.0073,
"step": 37500
},
{
"epoch": 3.9366000207189473,
"grad_norm": 8.419656296609901e-07,
"learning_rate": 8.752571369374436e-06,
"loss": 0.0045,
"step": 38000
},
{
"epoch": 3.9883973894126177,
"grad_norm": 1.4807918660153518e-06,
"learning_rate": 8.604578887392521e-06,
"loss": 0.0,
"step": 38500
},
{
"epoch": 4.0401947581062885,
"grad_norm": 2.789050199680787e-07,
"learning_rate": 8.456586405410605e-06,
"loss": 0.0,
"step": 39000
},
{
"epoch": 4.091992126799958,
"grad_norm": 7.712332603659888e-07,
"learning_rate": 8.30859392342869e-06,
"loss": 0.0,
"step": 39500
},
{
"epoch": 4.143789495493629,
"grad_norm": 1.126294500863878e-05,
"learning_rate": 8.160601441446774e-06,
"loss": 0.0,
"step": 40000
},
{
"epoch": 4.195586864187299,
"grad_norm": 1.1078836905653588e-05,
"learning_rate": 8.01260895946486e-06,
"loss": 0.0,
"step": 40500
},
{
"epoch": 4.24738423288097,
"grad_norm": 4.333252491051098e-06,
"learning_rate": 7.864616477482945e-06,
"loss": 0.0,
"step": 41000
},
{
"epoch": 4.29918160157464,
"grad_norm": 7.190360065578716e-06,
"learning_rate": 7.71662399550103e-06,
"loss": 0.0,
"step": 41500
},
{
"epoch": 4.350978970268311,
"grad_norm": 6.172657776915003e-06,
"learning_rate": 7.568631513519114e-06,
"loss": 0.0,
"step": 42000
},
{
"epoch": 4.4027763389619805,
"grad_norm": 1.1028377144839396e-07,
"learning_rate": 7.420639031537199e-06,
"loss": 0.0,
"step": 42500
},
{
"epoch": 4.454573707655651,
"grad_norm": 7.63295773253958e-08,
"learning_rate": 7.272646549555283e-06,
"loss": 0.0,
"step": 43000
},
{
"epoch": 4.506371076349321,
"grad_norm": 8.308877568197204e-07,
"learning_rate": 7.124654067573368e-06,
"loss": 0.0,
"step": 43500
},
{
"epoch": 4.558168445042992,
"grad_norm": 8.788915550894671e-08,
"learning_rate": 6.976661585591452e-06,
"loss": 0.0,
"step": 44000
},
{
"epoch": 4.609965813736662,
"grad_norm": 5.980305104458239e-07,
"learning_rate": 6.828669103609537e-06,
"loss": 0.0,
"step": 44500
},
{
"epoch": 4.661763182430333,
"grad_norm": 0.00010543836833676323,
"learning_rate": 6.680676621627622e-06,
"loss": 0.0034,
"step": 45000
},
{
"epoch": 4.713560551124003,
"grad_norm": 2.6961990442941897e-05,
"learning_rate": 6.532684139645706e-06,
"loss": 0.0,
"step": 45500
},
{
"epoch": 4.765357919817673,
"grad_norm": 2.6214322133455426e-05,
"learning_rate": 6.384691657663791e-06,
"loss": 0.0006,
"step": 46000
},
{
"epoch": 4.817155288511343,
"grad_norm": 6.838554782007122e-06,
"learning_rate": 6.236699175681876e-06,
"loss": 0.0,
"step": 46500
},
{
"epoch": 4.868952657205014,
"grad_norm": 1.3388408660830464e-05,
"learning_rate": 6.08870669369996e-06,
"loss": 0.0,
"step": 47000
},
{
"epoch": 4.920750025898684,
"grad_norm": 1.1914085007447284e-06,
"learning_rate": 5.940714211718045e-06,
"loss": 0.0,
"step": 47500
},
{
"epoch": 4.972547394592355,
"grad_norm": 1.9319197235745378e-05,
"learning_rate": 5.792721729736129e-06,
"loss": 0.0,
"step": 48000
},
{
"epoch": 5.024344763286025,
"grad_norm": 7.528370815634844e-07,
"learning_rate": 5.6447292477542145e-06,
"loss": 0.0058,
"step": 48500
},
{
"epoch": 5.0761421319796955,
"grad_norm": 1.075523073268414e-06,
"learning_rate": 5.496736765772299e-06,
"loss": 0.0,
"step": 49000
},
{
"epoch": 5.1279395006733655,
"grad_norm": 4.6377437001865474e-07,
"learning_rate": 5.348744283790383e-06,
"loss": 0.0,
"step": 49500
},
{
"epoch": 5.179736869367036,
"grad_norm": 6.992227667979023e-07,
"learning_rate": 5.2007518018084694e-06,
"loss": 0.0,
"step": 50000
},
{
"epoch": 5.231534238060706,
"grad_norm": 2.332795929760323e-06,
"learning_rate": 5.052759319826553e-06,
"loss": 0.0,
"step": 50500
},
{
"epoch": 5.283331606754377,
"grad_norm": 5.32125454810739e-07,
"learning_rate": 4.9047668378446374e-06,
"loss": 0.0,
"step": 51000
}
],
"logging_steps": 500,
"max_steps": 67571,
"num_input_tokens_seen": 0,
"num_train_epochs": 7,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.4080058755834675e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}