|
{
  "best_metric": 0.8732394366197183,
  "best_model_checkpoint": "deit-base-distilled-patch16-224-65-fold5/checkpoint-260",
  "epoch": 92.3076923076923,
  "eval_steps": 500,
  "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.9230769230769231,
      "eval_accuracy": 0.49295774647887325,
      "eval_loss": 0.7424866557121277,
      "eval_runtime": 0.9428,
      "eval_samples_per_second": 75.31,
      "eval_steps_per_second": 3.182,
      "step": 3
    },
    {
      "epoch": 1.8461538461538463,
      "eval_accuracy": 0.5633802816901409,
      "eval_loss": 0.7193098664283752,
      "eval_runtime": 0.9623,
      "eval_samples_per_second": 73.782,
      "eval_steps_per_second": 3.118,
      "step": 6
    },
    {
      "epoch": 2.769230769230769,
      "eval_accuracy": 0.5915492957746479,
      "eval_loss": 0.6808025240898132,
      "eval_runtime": 0.9647,
      "eval_samples_per_second": 73.601,
      "eval_steps_per_second": 3.11,
      "step": 9
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 6.528295040130615,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.7309,
      "step": 10
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.5915492957746479,
      "eval_loss": 0.6252564787864685,
      "eval_runtime": 0.9206,
      "eval_samples_per_second": 77.124,
      "eval_steps_per_second": 3.259,
      "step": 13
    },
    {
      "epoch": 4.923076923076923,
      "eval_accuracy": 0.676056338028169,
      "eval_loss": 0.6021668910980225,
      "eval_runtime": 0.9277,
      "eval_samples_per_second": 76.533,
      "eval_steps_per_second": 3.234,
      "step": 16
    },
    {
      "epoch": 5.846153846153846,
      "eval_accuracy": 0.647887323943662,
      "eval_loss": 0.5588602423667908,
      "eval_runtime": 0.9212,
      "eval_samples_per_second": 77.074,
      "eval_steps_per_second": 3.257,
      "step": 19
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 2.3774571418762207,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.6449,
      "step": 20
    },
    {
      "epoch": 6.769230769230769,
      "eval_accuracy": 0.7183098591549296,
      "eval_loss": 0.5559040904045105,
      "eval_runtime": 0.942,
      "eval_samples_per_second": 75.374,
      "eval_steps_per_second": 3.185,
      "step": 22
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.7183098591549296,
      "eval_loss": 0.49099108576774597,
      "eval_runtime": 0.9333,
      "eval_samples_per_second": 76.075,
      "eval_steps_per_second": 3.214,
      "step": 26
    },
    {
      "epoch": 8.923076923076923,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.4996400773525238,
      "eval_runtime": 0.9274,
      "eval_samples_per_second": 76.562,
      "eval_steps_per_second": 3.235,
      "step": 29
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 4.510417461395264,
      "learning_rate": 5e-05,
      "loss": 0.5494,
      "step": 30
    },
    {
      "epoch": 9.846153846153847,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.490305095911026,
      "eval_runtime": 0.9249,
      "eval_samples_per_second": 76.765,
      "eval_steps_per_second": 3.244,
      "step": 32
    },
    {
      "epoch": 10.76923076923077,
      "eval_accuracy": 0.6619718309859155,
      "eval_loss": 0.7331178188323975,
      "eval_runtime": 0.925,
      "eval_samples_per_second": 76.755,
      "eval_steps_per_second": 3.243,
      "step": 35
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.6901408450704225,
      "eval_loss": 0.5053162574768066,
      "eval_runtime": 0.9365,
      "eval_samples_per_second": 75.815,
      "eval_steps_per_second": 3.203,
      "step": 39
    },
    {
      "epoch": 12.307692307692308,
      "grad_norm": 8.283596992492676,
      "learning_rate": 4.814814814814815e-05,
      "loss": 0.4793,
      "step": 40
    },
    {
      "epoch": 12.923076923076923,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.4781084656715393,
      "eval_runtime": 0.9253,
      "eval_samples_per_second": 76.728,
      "eval_steps_per_second": 3.242,
      "step": 42
    },
    {
      "epoch": 13.846153846153847,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.49969470500946045,
      "eval_runtime": 0.9696,
      "eval_samples_per_second": 73.226,
      "eval_steps_per_second": 3.094,
      "step": 45
    },
    {
      "epoch": 14.76923076923077,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.5196781158447266,
      "eval_runtime": 0.9285,
      "eval_samples_per_second": 76.466,
      "eval_steps_per_second": 3.231,
      "step": 48
    },
    {
      "epoch": 15.384615384615385,
      "grad_norm": 2.751551389694214,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.4327,
      "step": 50
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.5338966846466064,
      "eval_runtime": 0.9263,
      "eval_samples_per_second": 76.647,
      "eval_steps_per_second": 3.239,
      "step": 52
    },
    {
      "epoch": 16.923076923076923,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.4475107491016388,
      "eval_runtime": 0.9361,
      "eval_samples_per_second": 75.843,
      "eval_steps_per_second": 3.205,
      "step": 55
    },
    {
      "epoch": 17.846153846153847,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.48082780838012695,
      "eval_runtime": 0.9281,
      "eval_samples_per_second": 76.496,
      "eval_steps_per_second": 3.232,
      "step": 58
    },
    {
      "epoch": 18.46153846153846,
      "grad_norm": 4.935845375061035,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.3747,
      "step": 60
    },
    {
      "epoch": 18.76923076923077,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.48682263493537903,
      "eval_runtime": 0.9333,
      "eval_samples_per_second": 76.07,
      "eval_steps_per_second": 3.214,
      "step": 61
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.704225352112676,
      "eval_loss": 0.6205573081970215,
      "eval_runtime": 0.9305,
      "eval_samples_per_second": 76.299,
      "eval_steps_per_second": 3.224,
      "step": 65
    },
    {
      "epoch": 20.923076923076923,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.5270622968673706,
      "eval_runtime": 0.9317,
      "eval_samples_per_second": 76.209,
      "eval_steps_per_second": 3.22,
      "step": 68
    },
    {
      "epoch": 21.53846153846154,
      "grad_norm": 7.510834217071533,
      "learning_rate": 4.259259259259259e-05,
      "loss": 0.3474,
      "step": 70
    },
    {
      "epoch": 21.846153846153847,
      "eval_accuracy": 0.6901408450704225,
      "eval_loss": 0.5227216482162476,
      "eval_runtime": 0.9233,
      "eval_samples_per_second": 76.898,
      "eval_steps_per_second": 3.249,
      "step": 71
    },
    {
      "epoch": 22.76923076923077,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.5078049302101135,
      "eval_runtime": 0.9339,
      "eval_samples_per_second": 76.025,
      "eval_steps_per_second": 3.212,
      "step": 74
    },
    {
      "epoch": 24.0,
      "eval_accuracy": 0.6901408450704225,
      "eval_loss": 0.5842347741127014,
      "eval_runtime": 0.9296,
      "eval_samples_per_second": 76.378,
      "eval_steps_per_second": 3.227,
      "step": 78
    },
    {
      "epoch": 24.615384615384617,
      "grad_norm": 2.7830913066864014,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.267,
      "step": 80
    },
    {
      "epoch": 24.923076923076923,
      "eval_accuracy": 0.7183098591549296,
      "eval_loss": 0.601521372795105,
      "eval_runtime": 0.9344,
      "eval_samples_per_second": 75.981,
      "eval_steps_per_second": 3.21,
      "step": 81
    },
    {
      "epoch": 25.846153846153847,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.653270423412323,
      "eval_runtime": 0.9381,
      "eval_samples_per_second": 75.688,
      "eval_steps_per_second": 3.198,
      "step": 84
    },
    {
      "epoch": 26.76923076923077,
      "eval_accuracy": 0.7323943661971831,
      "eval_loss": 0.5763739943504333,
      "eval_runtime": 0.9321,
      "eval_samples_per_second": 76.173,
      "eval_steps_per_second": 3.219,
      "step": 87
    },
    {
      "epoch": 27.692307692307693,
      "grad_norm": 6.375743865966797,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.2333,
      "step": 90
    },
    {
      "epoch": 28.0,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.4862464666366577,
      "eval_runtime": 0.9495,
      "eval_samples_per_second": 74.776,
      "eval_steps_per_second": 3.16,
      "step": 91
    },
    {
      "epoch": 28.923076923076923,
      "eval_accuracy": 0.7183098591549296,
      "eval_loss": 0.6232538223266602,
      "eval_runtime": 0.9407,
      "eval_samples_per_second": 75.474,
      "eval_steps_per_second": 3.189,
      "step": 94
    },
    {
      "epoch": 29.846153846153847,
      "eval_accuracy": 0.7464788732394366,
      "eval_loss": 0.754891037940979,
      "eval_runtime": 0.9293,
      "eval_samples_per_second": 76.398,
      "eval_steps_per_second": 3.228,
      "step": 97
    },
    {
      "epoch": 30.76923076923077,
      "grad_norm": 2.766845226287842,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2635,
      "step": 100
    },
    {
      "epoch": 30.76923076923077,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.48901528120040894,
      "eval_runtime": 0.94,
      "eval_samples_per_second": 75.533,
      "eval_steps_per_second": 3.192,
      "step": 100
    },
    {
      "epoch": 32.0,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.5615856647491455,
      "eval_runtime": 0.9614,
      "eval_samples_per_second": 73.849,
      "eval_steps_per_second": 3.12,
      "step": 104
    },
    {
      "epoch": 32.92307692307692,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.550128698348999,
      "eval_runtime": 0.9431,
      "eval_samples_per_second": 75.286,
      "eval_steps_per_second": 3.181,
      "step": 107
    },
    {
      "epoch": 33.84615384615385,
      "grad_norm": 4.919862747192383,
      "learning_rate": 3.518518518518519e-05,
      "loss": 0.192,
      "step": 110
    },
    {
      "epoch": 33.84615384615385,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.4845168888568878,
      "eval_runtime": 0.9359,
      "eval_samples_per_second": 75.862,
      "eval_steps_per_second": 3.205,
      "step": 110
    },
    {
      "epoch": 34.76923076923077,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.511615514755249,
      "eval_runtime": 0.9324,
      "eval_samples_per_second": 76.147,
      "eval_steps_per_second": 3.217,
      "step": 113
    },
    {
      "epoch": 36.0,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5016549825668335,
      "eval_runtime": 0.9526,
      "eval_samples_per_second": 74.529,
      "eval_steps_per_second": 3.149,
      "step": 117
    },
    {
      "epoch": 36.92307692307692,
      "grad_norm": 2.7866628170013428,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.1763,
      "step": 120
    },
    {
      "epoch": 36.92307692307692,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.47983619570732117,
      "eval_runtime": 0.9561,
      "eval_samples_per_second": 74.258,
      "eval_steps_per_second": 3.138,
      "step": 120
    },
    {
      "epoch": 37.84615384615385,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.5327603220939636,
      "eval_runtime": 0.9331,
      "eval_samples_per_second": 76.093,
      "eval_steps_per_second": 3.215,
      "step": 123
    },
    {
      "epoch": 38.76923076923077,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.6393086910247803,
      "eval_runtime": 0.9362,
      "eval_samples_per_second": 75.837,
      "eval_steps_per_second": 3.204,
      "step": 126
    },
    {
      "epoch": 40.0,
      "grad_norm": 3.5983285903930664,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.172,
      "step": 130
    },
    {
      "epoch": 40.0,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.5480958223342896,
      "eval_runtime": 0.932,
      "eval_samples_per_second": 76.18,
      "eval_steps_per_second": 3.219,
      "step": 130
    },
    {
      "epoch": 40.92307692307692,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.5867339372634888,
      "eval_runtime": 0.9467,
      "eval_samples_per_second": 74.998,
      "eval_steps_per_second": 3.169,
      "step": 133
    },
    {
      "epoch": 41.84615384615385,
      "eval_accuracy": 0.704225352112676,
      "eval_loss": 0.9222853779792786,
      "eval_runtime": 0.9412,
      "eval_samples_per_second": 75.436,
      "eval_steps_per_second": 3.187,
      "step": 136
    },
    {
      "epoch": 42.76923076923077,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.6261908411979675,
      "eval_runtime": 0.9446,
      "eval_samples_per_second": 75.163,
      "eval_steps_per_second": 3.176,
      "step": 139
    },
    {
      "epoch": 43.07692307692308,
      "grad_norm": 6.744588851928711,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.1832,
      "step": 140
    },
    {
      "epoch": 44.0,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.6090897917747498,
      "eval_runtime": 0.9537,
      "eval_samples_per_second": 74.447,
      "eval_steps_per_second": 3.146,
      "step": 143
    },
    {
      "epoch": 44.92307692307692,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.5836533308029175,
      "eval_runtime": 0.936,
      "eval_samples_per_second": 75.851,
      "eval_steps_per_second": 3.205,
      "step": 146
    },
    {
      "epoch": 45.84615384615385,
      "eval_accuracy": 0.7605633802816901,
      "eval_loss": 0.5465303063392639,
      "eval_runtime": 0.9256,
      "eval_samples_per_second": 76.703,
      "eval_steps_per_second": 3.241,
      "step": 149
    },
    {
      "epoch": 46.15384615384615,
      "grad_norm": 5.820557594299316,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.1641,
      "step": 150
    },
    {
      "epoch": 46.76923076923077,
      "eval_accuracy": 0.7746478873239436,
      "eval_loss": 0.6745445132255554,
      "eval_runtime": 0.9266,
      "eval_samples_per_second": 76.622,
      "eval_steps_per_second": 3.238,
      "step": 152
    },
    {
      "epoch": 48.0,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.5397518873214722,
      "eval_runtime": 0.9297,
      "eval_samples_per_second": 76.369,
      "eval_steps_per_second": 3.227,
      "step": 156
    },
    {
      "epoch": 48.92307692307692,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5387323498725891,
      "eval_runtime": 0.943,
      "eval_samples_per_second": 75.293,
      "eval_steps_per_second": 3.181,
      "step": 159
    },
    {
      "epoch": 49.23076923076923,
      "grad_norm": 1.1889840364456177,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.1366,
      "step": 160
    },
    {
      "epoch": 49.84615384615385,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.5737282037734985,
      "eval_runtime": 0.9304,
      "eval_samples_per_second": 76.315,
      "eval_steps_per_second": 3.225,
      "step": 162
    },
    {
      "epoch": 50.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5254746675491333,
      "eval_runtime": 0.9335,
      "eval_samples_per_second": 76.061,
      "eval_steps_per_second": 3.214,
      "step": 165
    },
    {
      "epoch": 52.0,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.6485558152198792,
      "eval_runtime": 0.9328,
      "eval_samples_per_second": 76.117,
      "eval_steps_per_second": 3.216,
      "step": 169
    },
    {
      "epoch": 52.30769230769231,
      "grad_norm": 3.566678285598755,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 0.149,
      "step": 170
    },
    {
      "epoch": 52.92307692307692,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5404361486434937,
      "eval_runtime": 0.9413,
      "eval_samples_per_second": 75.426,
      "eval_steps_per_second": 3.187,
      "step": 172
    },
    {
      "epoch": 53.84615384615385,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5654813051223755,
      "eval_runtime": 0.9373,
      "eval_samples_per_second": 75.746,
      "eval_steps_per_second": 3.201,
      "step": 175
    },
    {
      "epoch": 54.76923076923077,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.6121301054954529,
      "eval_runtime": 0.9398,
      "eval_samples_per_second": 75.547,
      "eval_steps_per_second": 3.192,
      "step": 178
    },
    {
      "epoch": 55.38461538461539,
      "grad_norm": 3.30234694480896,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.1196,
      "step": 180
    },
    {
      "epoch": 56.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.6182261109352112,
      "eval_runtime": 0.9409,
      "eval_samples_per_second": 75.462,
      "eval_steps_per_second": 3.189,
      "step": 182
    },
    {
      "epoch": 56.92307692307692,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.6174789071083069,
      "eval_runtime": 0.944,
      "eval_samples_per_second": 75.21,
      "eval_steps_per_second": 3.178,
      "step": 185
    },
    {
      "epoch": 57.84615384615385,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5920690894126892,
      "eval_runtime": 0.9418,
      "eval_samples_per_second": 75.387,
      "eval_steps_per_second": 3.185,
      "step": 188
    },
    {
      "epoch": 58.46153846153846,
      "grad_norm": 2.1697278022766113,
      "learning_rate": 2.037037037037037e-05,
      "loss": 0.1202,
      "step": 190
    },
    {
      "epoch": 58.76923076923077,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5953019261360168,
      "eval_runtime": 0.9459,
      "eval_samples_per_second": 75.059,
      "eval_steps_per_second": 3.172,
      "step": 191
    },
    {
      "epoch": 60.0,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.6065444350242615,
      "eval_runtime": 0.9408,
      "eval_samples_per_second": 75.471,
      "eval_steps_per_second": 3.189,
      "step": 195
    },
    {
      "epoch": 60.92307692307692,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5448285341262817,
      "eval_runtime": 0.9326,
      "eval_samples_per_second": 76.133,
      "eval_steps_per_second": 3.217,
      "step": 198
    },
    {
      "epoch": 61.53846153846154,
      "grad_norm": 3.645998001098633,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1289,
      "step": 200
    },
    {
      "epoch": 61.84615384615385,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5258444547653198,
      "eval_runtime": 0.9405,
      "eval_samples_per_second": 75.491,
      "eval_steps_per_second": 3.19,
      "step": 201
    },
    {
      "epoch": 62.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5439636707305908,
      "eval_runtime": 0.9352,
      "eval_samples_per_second": 75.923,
      "eval_steps_per_second": 3.208,
      "step": 204
    },
    {
      "epoch": 64.0,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.6082146763801575,
      "eval_runtime": 0.9371,
      "eval_samples_per_second": 75.764,
      "eval_steps_per_second": 3.201,
      "step": 208
    },
    {
      "epoch": 64.61538461538461,
      "grad_norm": 3.992480993270874,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.1262,
      "step": 210
    },
    {
      "epoch": 64.92307692307692,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.6357871890068054,
      "eval_runtime": 0.9342,
      "eval_samples_per_second": 76.002,
      "eval_steps_per_second": 3.211,
      "step": 211
    },
    {
      "epoch": 65.84615384615384,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5981662273406982,
      "eval_runtime": 0.9361,
      "eval_samples_per_second": 75.846,
      "eval_steps_per_second": 3.205,
      "step": 214
    },
    {
      "epoch": 66.76923076923077,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5850167870521545,
      "eval_runtime": 0.965,
      "eval_samples_per_second": 73.577,
      "eval_steps_per_second": 3.109,
      "step": 217
    },
    {
      "epoch": 67.6923076923077,
      "grad_norm": 2.3649914264678955,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.124,
      "step": 220
    },
    {
      "epoch": 68.0,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5732671022415161,
      "eval_runtime": 0.9312,
      "eval_samples_per_second": 76.246,
      "eval_steps_per_second": 3.222,
      "step": 221
    },
    {
      "epoch": 68.92307692307692,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.5631324648857117,
      "eval_runtime": 0.9307,
      "eval_samples_per_second": 76.283,
      "eval_steps_per_second": 3.223,
      "step": 224
    },
    {
      "epoch": 69.84615384615384,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5375062227249146,
      "eval_runtime": 0.9261,
      "eval_samples_per_second": 76.662,
      "eval_steps_per_second": 3.239,
      "step": 227
    },
    {
      "epoch": 70.76923076923077,
      "grad_norm": 2.5271434783935547,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.1208,
      "step": 230
    },
    {
      "epoch": 70.76923076923077,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5157544016838074,
      "eval_runtime": 0.9385,
      "eval_samples_per_second": 75.656,
      "eval_steps_per_second": 3.197,
      "step": 230
    },
    {
      "epoch": 72.0,
      "eval_accuracy": 0.8169014084507042,
      "eval_loss": 0.5430620908737183,
      "eval_runtime": 0.9444,
      "eval_samples_per_second": 75.177,
      "eval_steps_per_second": 3.176,
      "step": 234
    },
    {
      "epoch": 72.92307692307692,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5099484920501709,
      "eval_runtime": 0.9479,
      "eval_samples_per_second": 74.905,
      "eval_steps_per_second": 3.165,
      "step": 237
    },
    {
      "epoch": 73.84615384615384,
      "grad_norm": 2.769530773162842,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.1126,
      "step": 240
    },
    {
      "epoch": 73.84615384615384,
      "eval_accuracy": 0.7887323943661971,
      "eval_loss": 0.5803037285804749,
      "eval_runtime": 0.9361,
      "eval_samples_per_second": 75.848,
      "eval_steps_per_second": 3.205,
      "step": 240
    },
    {
      "epoch": 74.76923076923077,
      "eval_accuracy": 0.8028169014084507,
      "eval_loss": 0.5416265726089478,
      "eval_runtime": 0.9298,
      "eval_samples_per_second": 76.361,
      "eval_steps_per_second": 3.227,
      "step": 243
    },
    {
      "epoch": 76.0,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5834996104240417,
      "eval_runtime": 0.9267,
      "eval_samples_per_second": 76.616,
      "eval_steps_per_second": 3.237,
      "step": 247
    },
    {
      "epoch": 76.92307692307692,
      "grad_norm": 5.331729412078857,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.1089,
      "step": 250
    },
    {
      "epoch": 76.92307692307692,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5923494100570679,
      "eval_runtime": 0.9331,
      "eval_samples_per_second": 76.087,
      "eval_steps_per_second": 3.215,
      "step": 250
    },
    {
      "epoch": 77.84615384615384,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5228320360183716,
      "eval_runtime": 0.9263,
      "eval_samples_per_second": 76.652,
      "eval_steps_per_second": 3.239,
      "step": 253
    },
    {
      "epoch": 78.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5466788411140442,
      "eval_runtime": 0.9292,
      "eval_samples_per_second": 76.41,
      "eval_steps_per_second": 3.229,
      "step": 256
    },
    {
      "epoch": 80.0,
      "grad_norm": 3.553982973098755,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.0965,
      "step": 260
    },
    {
      "epoch": 80.0,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.5155981183052063,
      "eval_runtime": 0.9345,
      "eval_samples_per_second": 75.975,
      "eval_steps_per_second": 3.21,
      "step": 260
    },
    {
      "epoch": 80.92307692307692,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5082412958145142,
      "eval_runtime": 0.9291,
      "eval_samples_per_second": 76.42,
      "eval_steps_per_second": 3.229,
      "step": 263
    },
    {
      "epoch": 81.84615384615384,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5070865154266357,
      "eval_runtime": 0.9317,
      "eval_samples_per_second": 76.206,
      "eval_steps_per_second": 3.22,
      "step": 266
    },
    {
      "epoch": 82.76923076923077,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.5070091485977173,
      "eval_runtime": 0.932,
      "eval_samples_per_second": 76.176,
      "eval_steps_per_second": 3.219,
      "step": 269
    },
    {
      "epoch": 83.07692307692308,
      "grad_norm": 1.3318876028060913,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.0947,
      "step": 270
    },
    {
      "epoch": 84.0,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.5268462300300598,
      "eval_runtime": 0.9334,
      "eval_samples_per_second": 76.068,
      "eval_steps_per_second": 3.214,
      "step": 273
    },
    {
      "epoch": 84.92307692307692,
      "eval_accuracy": 0.8591549295774648,
      "eval_loss": 0.528256356716156,
      "eval_runtime": 0.938,
      "eval_samples_per_second": 75.693,
      "eval_steps_per_second": 3.198,
      "step": 276
    },
    {
      "epoch": 85.84615384615384,
      "eval_accuracy": 0.8450704225352113,
      "eval_loss": 0.5261198282241821,
      "eval_runtime": 0.9373,
      "eval_samples_per_second": 75.752,
      "eval_steps_per_second": 3.201,
      "step": 279
    },
    {
      "epoch": 86.15384615384616,
      "grad_norm": 1.2073172330856323,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.0751,
      "step": 280
    },
    {
      "epoch": 86.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.528601884841919,
      "eval_runtime": 0.9192,
      "eval_samples_per_second": 77.242,
      "eval_steps_per_second": 3.264,
      "step": 282
    },
    {
      "epoch": 88.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5414741039276123,
      "eval_runtime": 0.9317,
      "eval_samples_per_second": 76.207,
      "eval_steps_per_second": 3.22,
      "step": 286
    },
    {
      "epoch": 88.92307692307692,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5511464476585388,
      "eval_runtime": 0.9273,
      "eval_samples_per_second": 76.567,
      "eval_steps_per_second": 3.235,
      "step": 289
    },
    {
      "epoch": 89.23076923076923,
      "grad_norm": 1.936850905418396,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.0912,
      "step": 290
    },
    {
      "epoch": 89.84615384615384,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5541730523109436,
      "eval_runtime": 0.9344,
      "eval_samples_per_second": 75.984,
      "eval_steps_per_second": 3.211,
      "step": 292
    },
    {
      "epoch": 90.76923076923077,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5464221239089966,
      "eval_runtime": 0.9256,
      "eval_samples_per_second": 76.711,
      "eval_steps_per_second": 3.241,
      "step": 295
    },
    {
      "epoch": 92.0,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.5409799218177795,
      "eval_runtime": 0.9348,
      "eval_samples_per_second": 75.953,
      "eval_steps_per_second": 3.209,
      "step": 299
    },
    {
      "epoch": 92.3076923076923,
      "grad_norm": 2.42252516746521,
      "learning_rate": 0.0,
      "loss": 0.104,
      "step": 300
    },
    {
      "epoch": 92.3076923076923,
      "eval_accuracy": 0.8309859154929577,
      "eval_loss": 0.540703535079956,
      "eval_runtime": 0.9779,
      "eval_samples_per_second": 72.607,
      "eval_steps_per_second": 3.068,
      "step": 300
    },
    {
      "epoch": 92.3076923076923,
      "step": 300,
      "total_flos": 2.8402872494292173e+18,
      "train_loss": 0.23062967757383981,
      "train_runtime": 1594.1954,
      "train_samples_per_second": 24.903,
      "train_steps_per_second": 0.188
    },
    {
      "epoch": 92.3076923076923,
      "eval_accuracy": 0.8732394366197183,
      "eval_loss": 0.5155981183052063,
      "eval_runtime": 0.9923,
      "eval_samples_per_second": 71.551,
      "eval_steps_per_second": 3.023,
      "step": 300
    }
  ],
  "logging_steps": 10,
  "max_steps": 300,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 100,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.8402872494292173e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|