TinyPixel committed on
Commit f815502 · 1 Parent(s): c6f4590

Upload folder using huggingface_hub

README.md CHANGED
@@ -199,41 +199,6 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
  [More Information Needed]


- ## Training procedure
-
-
- The following `bitsandbytes` quantization config was used during training:
- - quant_method: bitsandbytes
- - load_in_8bit: False
- - load_in_4bit: True
- - llm_int8_threshold: 6.0
- - llm_int8_skip_modules: None
- - llm_int8_enable_fp32_cpu_offload: False
- - llm_int8_has_fp16_weight: False
- - bnb_4bit_quant_type: nf4
- - bnb_4bit_use_double_quant: False
- - bnb_4bit_compute_dtype: float16
-
- ### Framework versions
-
-
- - PEFT 0.6.3.dev0
- ## Training procedure
-
-
- The following `bitsandbytes` quantization config was used during training:
- - quant_method: bitsandbytes
- - load_in_8bit: False
- - load_in_4bit: True
- - llm_int8_threshold: 6.0
- - llm_int8_skip_modules: None
- - llm_int8_enable_fp32_cpu_offload: False
- - llm_int8_has_fp16_weight: False
- - bnb_4bit_quant_type: nf4
- - bnb_4bit_use_double_quant: False
- - bnb_4bit_compute_dtype: float16
-
  ### Framework versions

-
- - PEFT 0.6.3.dev0
+ - PEFT 0.7.1.dev0
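
The `bitsandbytes` settings removed from the README above (4-bit NF4 weights, fp16 compute, no double quantization) map directly onto `transformers.BitsAndBytesConfig`. A minimal sketch of loading a base model with that config; the model name below is a placeholder, since this commit does not say which base model the adapter was trained on:

```python
# Sketch only: rebuilds the quantization settings listed in the removed README
# section. "tiiuae/falcon-rw-1b" is a placeholder, not the confirmed base model.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # load_in_4bit: True
    bnb_4bit_quant_type="nf4",             # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=False,       # bnb_4bit_use_double_quant: False
    bnb_4bit_compute_dtype=torch.float16,  # bnb_4bit_compute_dtype: float16
    llm_int8_threshold=6.0,                # llm_int8_threshold: 6.0
)

model = AutoModelForCausalLM.from_pretrained(
    "tiiuae/falcon-rw-1b",          # placeholder base model
    quantization_config=bnb_config,
    device_map="auto",
)
```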
 
adapter_config.json CHANGED
@@ -8,18 +8,21 @@
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
+ "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0.1,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "dense_4h_to_h",
- "query_key_value",
  "dense_h_to_4h",
- "dense"
+ "query_key_value",
+ "dense",
+ "dense_4h_to_h"
  ],
  "task_type": "CAUSAL_LM"
  }
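
The updated `adapter_config.json` adds fields that newer PEFT releases write out (`loftq_config`, `megatron_config`, `megatron_core`) and lists four target module types typical of Falcon/GPT-NeoX-style blocks. A minimal sketch of the equivalent `peft.LoraConfig`; the base model name is again only a placeholder chosen because its modules share these names:

```python
# Sketch only: mirrors the LoRA hyperparameters in the updated adapter_config.json.
# "tiiuae/falcon-rw-1b" is a placeholder base model, not confirmed by this commit.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base_model = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-rw-1b")

lora_config = LoraConfig(
    r=64,
    lora_alpha=16,
    lora_dropout=0.1,
    target_modules=["dense_h_to_4h", "query_key_value", "dense", "dense_4h_to_h"],
    task_type="CAUSAL_LM",
)

peft_model = get_peft_model(base_model, lora_config)
peft_model.print_trainable_parameters()  # reports trainable vs. total parameters
```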
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9036f43c39a2582ab7b4e4c213d637071022070052c18b5c90acc70e6a2bfb76
+ oid sha256:64f47eb614eeeac02198516c8e09fcfbc5db7f3c337305b2b9461ad77a8adf1f
  size 134235712
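
Like the other binaries in this commit, `adapter_model.safetensors` is tracked with Git LFS, so the diff only touches the pointer file: a `version` line, the blob's `oid sha256:` digest, and its `size` in bytes. A small sketch (with illustrative paths, not this repository's layout) for parsing such a pointer and verifying a downloaded blob against it:

```python
# Sketch only: parses a Git LFS pointer file (version / oid / size, as in the
# diff above) and checks a local blob against it. Paths are illustrative.
import hashlib
from pathlib import Path


def parse_lfs_pointer(pointer_path: str) -> dict:
    lines = Path(pointer_path).read_text().splitlines()
    fields = dict(line.split(" ", 1) for line in lines if line)
    return {"oid": fields["oid"].split(":", 1)[1], "size": int(fields["size"])}


def verify_blob(pointer_path: str, blob_path: str) -> bool:
    pointer = parse_lfs_pointer(pointer_path)
    blob = Path(blob_path).read_bytes()
    return len(blob) == pointer["size"] and hashlib.sha256(blob).hexdigest() == pointer["oid"]


# Illustrative usage:
# verify_blob("adapter_model.safetensors", "downloads/adapter_model.safetensors")
```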
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:324791c173c9b18a5049fda7fe59a0a003329287ea39d462c7959f92e5787fb2
- size 268515002
+ oid sha256:58c667c86b48949dd4dfb32952fa167e0583f8e5686af20a6e7232dd094f3d48
+ size 268514874
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f77d4c6b053aeb6d4ad04533053b2fc23c8cdef6fe687a06889e574fbaa1660f
+ oid sha256:337e893fff51aad090563eae7b251d31e2705a96eb109dfc6fdadfb33b6a6240
  size 14244
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:74485e67705dc36efbfb69b1e54f842e1ff07894d01bb0e36d6d2526a318b300
+ oid sha256:68ca608482c17f9314e1c94cd309a18be088851d4c0591a9306e6a01c952c9f3
  size 1064
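
`optimizer.pt`, `rng_state.pth`, and `scheduler.pt` are the extra state the `Trainer` saves alongside the adapter so a run can be resumed from this checkpoint; only their LFS digests change here. A sketch of peeking at their contents once downloaded (they are written with `torch.save`, so only load checkpoints you trust):

```python
# Sketch only: inspects the resume-state files saved next to the adapter.
# torch.load can unpickle arbitrary objects; only use trusted checkpoints.
import torch

optimizer_state = torch.load("optimizer.pt", map_location="cpu")
scheduler_state = torch.load("scheduler.pt", map_location="cpu")

print(optimizer_state.keys())  # typically dict_keys(['state', 'param_groups'])
print(scheduler_state)         # the LR scheduler's state_dict
```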
trainer_state.json CHANGED
@@ -1,919 +1,451 @@
  {
  "best_metric": null,
  "best_model_checkpoint": null,
- "epoch": 2.983219390926041,
  "eval_steps": 500,
- "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
  {
12
- "epoch": 0.02,
13
- "learning_rate": 4.4444444444444447e-05,
14
- "loss": 2.5615,
15
  "step": 2
16
  },
17
  {
18
- "epoch": 0.04,
19
- "learning_rate": 8.888888888888889e-05,
20
- "loss": 2.5015,
21
  "step": 4
22
  },
23
  {
24
- "epoch": 0.06,
25
- "learning_rate": 0.00013333333333333334,
26
- "loss": 2.4608,
27
  "step": 6
28
  },
29
  {
30
- "epoch": 0.08,
31
- "learning_rate": 0.00017777777777777779,
32
- "loss": 2.3143,
33
  "step": 8
34
  },
35
  {
36
- "epoch": 0.1,
37
- "learning_rate": 0.00019999417253661235,
38
- "loss": 2.4708,
39
  "step": 10
40
  },
41
  {
42
- "epoch": 0.12,
43
- "learning_rate": 0.00019994755690455152,
44
- "loss": 2.4855,
45
  "step": 12
46
  },
47
  {
48
- "epoch": 0.14,
49
- "learning_rate": 0.0001998543473718677,
50
- "loss": 2.3822,
51
  "step": 14
52
  },
53
  {
54
- "epoch": 0.16,
55
- "learning_rate": 0.00019971458739130598,
56
- "loss": 2.4777,
57
  "step": 16
58
  },
59
  {
60
- "epoch": 0.18,
61
- "learning_rate": 0.0001995283421166614,
62
- "loss": 2.101,
63
  "step": 18
64
  },
65
  {
66
- "epoch": 0.2,
67
- "learning_rate": 0.00019929569837240564,
68
- "loss": 1.9602,
69
  "step": 20
70
  },
71
  {
72
- "epoch": 0.22,
73
- "learning_rate": 0.00019901676461321068,
74
- "loss": 2.0819,
75
  "step": 22
76
  },
77
  {
78
- "epoch": 0.24,
79
- "learning_rate": 0.00019869167087338907,
80
- "loss": 2.0897,
81
  "step": 24
82
  },
83
  {
84
- "epoch": 0.26,
85
- "learning_rate": 0.00019832056870627417,
86
- "loss": 2.2203,
87
  "step": 26
88
  },
89
  {
90
- "epoch": 0.28,
91
- "learning_rate": 0.00019790363111356837,
92
- "loss": 2.3596,
93
  "step": 28
94
  },
95
  {
96
- "epoch": 0.3,
97
- "learning_rate": 0.00019744105246469263,
98
- "loss": 2.2905,
99
  "step": 30
100
  },
101
  {
102
- "epoch": 0.32,
103
- "learning_rate": 0.00019693304840617457,
104
- "loss": 2.2056,
105
  "step": 32
106
  },
107
  {
108
- "epoch": 0.34,
109
- "learning_rate": 0.00019637985576111778,
110
- "loss": 2.3075,
111
  "step": 34
112
  },
113
  {
114
- "epoch": 0.36,
115
- "learning_rate": 0.00019578173241879872,
116
- "loss": 2.1565,
117
  "step": 36
118
  },
119
  {
120
- "epoch": 0.38,
121
- "learning_rate": 0.00019513895721444286,
122
- "loss": 2.187,
123
  "step": 38
124
  },
125
  {
126
- "epoch": 0.4,
127
- "learning_rate": 0.00019445182979923654,
128
- "loss": 2.2355,
129
  "step": 40
130
  },
131
  {
132
- "epoch": 0.42,
133
- "learning_rate": 0.00019372067050063438,
134
- "loss": 2.2479,
135
  "step": 42
136
  },
137
  {
138
- "epoch": 0.44,
139
- "learning_rate": 0.00019294582017302797,
140
- "loss": 2.1294,
141
  "step": 44
142
  },
143
  {
144
- "epoch": 0.46,
145
- "learning_rate": 0.0001921276400388451,
146
- "loss": 2.1352,
147
  "step": 46
148
  },
149
  {
150
- "epoch": 0.48,
151
- "learning_rate": 0.00019126651152015403,
152
- "loss": 2.1577,
153
  "step": 48
154
  },
155
  {
156
- "epoch": 0.5,
157
- "learning_rate": 0.00019036283606085053,
158
- "loss": 2.1704,
159
  "step": 50
160
  },
161
  {
162
- "epoch": 0.52,
163
- "learning_rate": 0.00018941703493951164,
164
- "loss": 2.2652,
165
  "step": 52
166
  },
167
  {
168
- "epoch": 0.54,
169
- "learning_rate": 0.00018842954907300236,
170
- "loss": 2.1431,
171
  "step": 54
172
  },
173
  {
174
- "epoch": 0.56,
175
- "learning_rate": 0.0001874008388109276,
176
- "loss": 2.3157,
177
  "step": 56
178
  },
179
  {
180
- "epoch": 0.58,
181
- "learning_rate": 0.00018633138372102468,
182
- "loss": 2.079,
183
  "step": 58
184
  },
185
  {
186
- "epoch": 0.6,
187
- "learning_rate": 0.00018522168236559695,
188
- "loss": 2.2428,
189
  "step": 60
190
  },
191
  {
192
- "epoch": 0.62,
193
- "learning_rate": 0.00018407225206909208,
194
- "loss": 2.1969,
195
  "step": 62
196
  },
197
  {
198
- "epoch": 0.64,
199
- "learning_rate": 0.00018288362867693414,
200
- "loss": 2.1989,
201
  "step": 64
202
  },
203
  {
204
- "epoch": 0.66,
205
- "learning_rate": 0.0001816563663057211,
206
- "loss": 2.2052,
207
  "step": 66
208
  },
209
  {
210
- "epoch": 0.68,
211
- "learning_rate": 0.000180391037084905,
212
- "loss": 2.0672,
213
  "step": 68
214
  },
215
  {
216
- "epoch": 0.7,
217
- "learning_rate": 0.00017908823089007457,
218
- "loss": 2.0967,
219
  "step": 70
220
  },
221
  {
222
- "epoch": 0.72,
223
- "learning_rate": 0.00017774855506796496,
224
- "loss": 2.1023,
225
  "step": 72
226
  },
227
  {
228
- "epoch": 0.74,
229
- "learning_rate": 0.0001763726341533227,
230
- "loss": 2.158,
231
  "step": 74
232
  },
233
  {
234
- "epoch": 0.76,
235
- "learning_rate": 0.0001749611095777581,
236
- "loss": 2.1181,
237
  "step": 76
238
  },
239
  {
240
- "epoch": 0.78,
241
- "learning_rate": 0.00017351463937072004,
242
- "loss": 2.3851,
243
  "step": 78
244
  },
245
  {
246
- "epoch": 0.8,
247
- "learning_rate": 0.000172033897852734,
248
- "loss": 2.1762,
249
  "step": 80
250
  },
251
  {
252
- "epoch": 0.82,
253
- "learning_rate": 0.0001705195753210446,
254
- "loss": 2.1297,
255
  "step": 82
256
  },
257
  {
258
- "epoch": 0.84,
259
- "learning_rate": 0.00016897237772781044,
260
- "loss": 2.2054,
261
  "step": 84
262
  },
263
  {
264
- "epoch": 0.86,
265
- "learning_rate": 0.00016739302635100108,
266
- "loss": 2.2042,
267
  "step": 86
268
  },
269
  {
270
- "epoch": 0.88,
271
- "learning_rate": 0.00016578225745814907,
272
- "loss": 2.2659,
273
  "step": 88
274
  },
275
  {
276
- "epoch": 0.89,
277
- "learning_rate": 0.000164140821963114,
278
- "loss": 2.1998,
279
  "step": 90
280
  },
281
  {
282
- "epoch": 0.91,
283
- "learning_rate": 0.00016246948507601914,
284
- "loss": 2.1428,
285
  "step": 92
286
  },
287
  {
288
- "epoch": 0.93,
289
- "learning_rate": 0.0001607690259465229,
290
- "loss": 2.0276,
291
  "step": 94
292
  },
293
  {
294
- "epoch": 0.95,
295
- "learning_rate": 0.00015904023730059228,
296
- "loss": 2.1533,
297
  "step": 96
298
  },
299
  {
300
- "epoch": 0.97,
301
- "learning_rate": 0.000157283925070947,
302
- "loss": 2.1315,
303
  "step": 98
304
  },
305
  {
306
- "epoch": 0.99,
307
- "learning_rate": 0.000155500908021347,
308
- "loss": 1.9304,
309
  "step": 100
310
  },
311
  {
312
- "epoch": 1.01,
313
- "learning_rate": 0.0001536920173648984,
314
- "loss": 2.0876,
315
  "step": 102
316
  },
317
  {
318
- "epoch": 1.03,
319
- "learning_rate": 0.0001518580963765555,
320
- "loss": 2.0998,
321
  "step": 104
322
  },
323
  {
324
- "epoch": 1.05,
325
- "learning_rate": 0.00015000000000000001,
326
- "loss": 2.1599,
327
  "step": 106
328
  },
329
  {
330
- "epoch": 1.07,
331
- "learning_rate": 0.00014811859444908052,
332
- "loss": 2.1889,
333
  "step": 108
334
  },
335
  {
336
- "epoch": 1.09,
337
- "learning_rate": 0.0001462147568039977,
338
- "loss": 2.2714,
339
  "step": 110
340
  },
341
  {
342
- "epoch": 1.11,
343
- "learning_rate": 0.00014428937460242417,
344
- "loss": 2.1796,
345
  "step": 112
346
  },
347
  {
348
- "epoch": 1.13,
349
- "learning_rate": 0.00014234334542574906,
350
- "loss": 2.0842,
351
  "step": 114
352
  },
353
  {
354
- "epoch": 1.15,
355
- "learning_rate": 0.00014037757648064018,
356
- "loss": 1.9804,
357
  "step": 116
358
  },
359
  {
360
- "epoch": 1.17,
361
- "learning_rate": 0.00013839298417611963,
362
- "loss": 2.12,
363
  "step": 118
364
  },
365
  {
366
- "epoch": 1.19,
367
- "learning_rate": 0.00013639049369634876,
368
- "loss": 2.0456,
369
  "step": 120
370
  },
371
  {
372
- "epoch": 1.21,
373
- "learning_rate": 0.00013437103856932264,
374
- "loss": 2.0077,
375
  "step": 122
376
  },
377
  {
378
- "epoch": 1.23,
379
- "learning_rate": 0.00013233556023167485,
380
- "loss": 2.0013,
381
  "step": 124
382
  },
383
  {
384
- "epoch": 1.25,
385
- "learning_rate": 0.00013028500758979506,
386
- "loss": 2.1535,
387
  "step": 126
388
  },
389
  {
390
- "epoch": 1.27,
391
- "learning_rate": 0.00012822033657746478,
392
- "loss": 2.2161,
393
  "step": 128
394
  },
395
  {
396
- "epoch": 1.29,
397
- "learning_rate": 0.00012614250971021657,
398
- "loss": 2.1199,
399
  "step": 130
400
  },
401
  {
402
- "epoch": 1.31,
403
- "learning_rate": 0.00012405249563662537,
404
- "loss": 2.2261,
405
  "step": 132
406
  },
407
  {
408
- "epoch": 1.33,
409
- "learning_rate": 0.00012195126868674051,
410
- "loss": 2.2076,
411
  "step": 134
412
  },
413
  {
414
- "epoch": 1.35,
415
- "learning_rate": 0.000119839808417869,
416
- "loss": 2.1557,
417
  "step": 136
418
  },
419
  {
420
- "epoch": 1.37,
421
- "learning_rate": 0.0001177190991579223,
422
- "loss": 2.0887,
423
  "step": 138
424
  },
425
  {
426
- "epoch": 1.39,
427
- "learning_rate": 0.00011559012954653865,
428
- "loss": 2.2226,
429
  "step": 140
430
  },
431
  {
432
- "epoch": 1.41,
433
- "learning_rate": 0.00011345389207419588,
434
- "loss": 2.1355,
435
  "step": 142
436
  },
437
  {
438
- "epoch": 1.43,
439
- "learning_rate": 0.00011131138261952845,
440
- "loss": 1.9611,
441
- "step": 144
442
- },
443
- {
444
- "epoch": 1.45,
445
- "learning_rate": 0.0001091635999850655,
446
- "loss": 1.9342,
447
- "step": 146
448
- },
449
- {
450
- "epoch": 1.47,
451
- "learning_rate": 0.00010701154543160541,
452
- "loss": 1.9842,
453
- "step": 148
454
- },
455
- {
456
- "epoch": 1.49,
457
- "learning_rate": 0.00010485622221144484,
458
- "loss": 2.0796,
459
- "step": 150
460
- },
461
- {
462
- "epoch": 1.51,
463
- "learning_rate": 0.00010269863510067872,
464
- "loss": 1.9456,
465
- "step": 152
466
- },
467
- {
468
- "epoch": 1.53,
469
- "learning_rate": 0.00010053978993079045,
470
- "loss": 2.2969,
471
- "step": 154
472
- },
473
- {
474
- "epoch": 1.55,
475
- "learning_rate": 9.838069311974986e-05,
476
- "loss": 2.1745,
477
- "step": 156
478
- },
479
- {
480
- "epoch": 1.57,
481
- "learning_rate": 9.622235120283769e-05,
482
- "loss": 2.0905,
483
- "step": 158
484
- },
485
- {
486
- "epoch": 1.59,
487
- "learning_rate": 9.406577036341548e-05,
488
- "loss": 2.1926,
489
- "step": 160
490
- },
491
- {
492
- "epoch": 1.61,
493
- "learning_rate": 9.19119559638596e-05,
494
- "loss": 2.1519,
495
- "step": 162
496
- },
497
- {
498
- "epoch": 1.63,
499
- "learning_rate": 8.976191207687775e-05,
500
- "loss": 2.0953,
501
- "step": 164
502
- },
503
- {
504
- "epoch": 1.65,
505
- "learning_rate": 8.7616641017427e-05,
506
- "loss": 2.0602,
507
- "step": 166
508
- },
509
- {
510
- "epoch": 1.67,
511
- "learning_rate": 8.5477142875451e-05,
512
- "loss": 2.1189,
513
- "step": 168
514
- },
515
- {
516
- "epoch": 1.69,
517
- "learning_rate": 8.334441504965455e-05,
518
- "loss": 1.8349,
519
- "step": 170
520
- },
521
- {
522
- "epoch": 1.71,
523
- "learning_rate": 8.1219451782533e-05,
524
- "loss": 2.0207,
525
- "step": 172
526
- },
527
- {
528
- "epoch": 1.73,
529
- "learning_rate": 7.91032436968725e-05,
530
- "loss": 2.0835,
531
- "step": 174
532
- },
533
- {
534
- "epoch": 1.75,
535
- "learning_rate": 7.699677733393826e-05,
536
- "loss": 2.1297,
537
- "step": 176
538
- },
539
- {
540
- "epoch": 1.77,
541
- "learning_rate": 7.490103469356513e-05,
542
- "loss": 2.2253,
543
- "step": 178
544
- },
545
- {
546
- "epoch": 1.79,
547
- "learning_rate": 7.281699277636572e-05,
548
- "loss": 2.1392,
549
- "step": 180
550
- },
551
- {
552
- "epoch": 1.81,
553
- "learning_rate": 7.07456231282686e-05,
554
- "loss": 2.2366,
555
- "step": 182
556
- },
557
- {
558
- "epoch": 1.83,
559
- "learning_rate": 6.868789138759976e-05,
560
- "loss": 2.2417,
561
- "step": 184
562
- },
563
- {
564
- "epoch": 1.85,
565
- "learning_rate": 6.664475683491796e-05,
566
- "loss": 2.1114,
567
- "step": 186
568
- },
569
- {
570
- "epoch": 1.87,
571
- "learning_rate": 6.461717194581393e-05,
572
- "loss": 2.1858,
573
- "step": 188
574
- },
575
- {
576
- "epoch": 1.89,
577
- "learning_rate": 6.260608194688206e-05,
578
- "loss": 2.2032,
579
- "step": 190
580
- },
581
- {
582
- "epoch": 1.91,
583
- "learning_rate": 6.061242437507131e-05,
584
- "loss": 2.1147,
585
- "step": 192
586
- },
587
- {
588
- "epoch": 1.93,
589
- "learning_rate": 5.863712864062089e-05,
590
- "loss": 1.9731,
591
- "step": 194
592
- },
593
- {
594
- "epoch": 1.95,
595
- "learning_rate": 5.668111559378471e-05,
596
- "loss": 2.0307,
597
- "step": 196
598
- },
599
- {
600
- "epoch": 1.97,
601
- "learning_rate": 5.474529709554612e-05,
602
- "loss": 2.0143,
603
- "step": 198
604
- },
605
- {
606
- "epoch": 1.99,
607
- "learning_rate": 5.283057559252341e-05,
608
- "loss": 1.8633,
609
- "step": 200
610
- },
611
- {
612
- "epoch": 2.01,
613
- "learning_rate": 5.0937843696263966e-05,
614
- "loss": 1.8288,
615
- "step": 202
616
- },
617
- {
618
- "epoch": 2.03,
619
- "learning_rate": 4.9067983767123736e-05,
620
- "loss": 2.2213,
621
- "step": 204
622
- },
623
- {
624
- "epoch": 2.05,
625
- "learning_rate": 4.722186750292511e-05,
626
- "loss": 2.1991,
627
- "step": 206
628
- },
629
- {
630
- "epoch": 2.07,
631
- "learning_rate": 4.540035553258619e-05,
632
- "loss": 1.986,
633
- "step": 208
634
- },
635
- {
636
- "epoch": 2.09,
637
- "learning_rate": 4.360429701490934e-05,
638
- "loss": 2.1149,
639
- "step": 210
640
- },
641
- {
642
- "epoch": 2.11,
643
- "learning_rate": 4.183452924271776e-05,
644
- "loss": 2.0299,
645
- "step": 212
646
- },
647
- {
648
- "epoch": 2.13,
649
- "learning_rate": 4.009187725252309e-05,
650
- "loss": 2.182,
651
- "step": 214
652
- },
653
- {
654
- "epoch": 2.15,
655
- "learning_rate": 3.8377153439907266e-05,
656
- "loss": 2.125,
657
- "step": 216
658
- },
659
- {
660
- "epoch": 2.17,
661
- "learning_rate": 3.669115718079702e-05,
662
- "loss": 2.0164,
663
- "step": 218
664
- },
665
- {
666
- "epoch": 2.19,
667
- "learning_rate": 3.503467445880789e-05,
668
- "loss": 1.9398,
669
- "step": 220
670
- },
671
- {
672
- "epoch": 2.21,
673
- "learning_rate": 3.340847749883191e-05,
674
- "loss": 1.9637,
675
- "step": 222
676
- },
677
- {
678
- "epoch": 2.23,
679
- "learning_rate": 3.1813324407038825e-05,
680
- "loss": 1.99,
681
- "step": 224
682
- },
683
- {
684
- "epoch": 2.25,
685
- "learning_rate": 3.0249958817459722e-05,
686
- "loss": 1.9515,
687
- "step": 226
688
- },
689
- {
690
- "epoch": 2.27,
691
- "learning_rate": 2.8719109545317103e-05,
692
- "loss": 2.0532,
693
- "step": 228
694
- },
695
- {
696
- "epoch": 2.29,
697
- "learning_rate": 2.722149024726307e-05,
698
- "loss": 2.1069,
699
- "step": 230
700
- },
701
- {
702
- "epoch": 2.31,
703
- "learning_rate": 2.5757799088684654e-05,
704
- "loss": 2.0917,
705
- "step": 232
706
- },
707
- {
708
- "epoch": 2.33,
709
- "learning_rate": 2.432871841823047e-05,
710
- "loss": 2.2022,
711
- "step": 234
712
- },
713
- {
714
- "epoch": 2.35,
715
- "learning_rate": 2.2934914449711087e-05,
716
- "loss": 2.1084,
717
- "step": 236
718
- },
719
- {
720
- "epoch": 2.37,
721
- "learning_rate": 2.157703695152109e-05,
722
- "loss": 2.1929,
723
- "step": 238
724
- },
725
- {
726
- "epoch": 2.39,
727
- "learning_rate": 2.025571894372794e-05,
728
- "loss": 2.0758,
729
- "step": 240
730
- },
731
- {
732
- "epoch": 2.41,
733
- "learning_rate": 1.897157640296825e-05,
734
- "loss": 2.0788,
735
- "step": 242
736
- },
737
- {
738
- "epoch": 2.43,
739
- "learning_rate": 1.772520797528988e-05,
740
- "loss": 1.966,
741
- "step": 244
742
- },
743
- {
744
- "epoch": 2.45,
745
- "learning_rate": 1.65171946970729e-05,
746
- "loss": 2.0296,
747
- "step": 246
748
- },
749
- {
750
- "epoch": 2.47,
751
- "learning_rate": 1.534809972415998e-05,
752
- "loss": 1.967,
753
- "step": 248
754
- },
755
- {
756
- "epoch": 2.49,
757
- "learning_rate": 1.4218468069322578e-05,
758
- "loss": 2.0128,
759
- "step": 250
760
- },
761
- {
762
- "epoch": 2.51,
763
- "learning_rate": 1.3128826348184887e-05,
764
- "loss": 1.8105,
765
- "step": 252
766
- },
767
- {
768
- "epoch": 2.53,
769
- "learning_rate": 1.2079682533724379e-05,
770
- "loss": 2.1251,
771
- "step": 254
772
- },
773
- {
774
- "epoch": 2.55,
775
- "learning_rate": 1.1071525719463095e-05,
776
- "loss": 2.2662,
777
- "step": 256
778
- },
779
- {
780
- "epoch": 2.57,
781
- "learning_rate": 1.010482589146048e-05,
782
- "loss": 2.172,
783
- "step": 258
784
- },
785
- {
786
- "epoch": 2.59,
787
- "learning_rate": 9.180033709213454e-06,
788
- "loss": 2.1301,
789
- "step": 260
790
- },
791
- {
792
- "epoch": 2.61,
793
- "learning_rate": 8.297580295566575e-06,
794
- "loss": 2.0283,
795
- "step": 262
796
- },
797
- {
798
- "epoch": 2.63,
799
- "learning_rate": 7.457877035729588e-06,
800
- "loss": 2.092,
801
- "step": 264
802
- },
803
- {
804
- "epoch": 2.65,
805
- "learning_rate": 6.661315385496425e-06,
806
- "loss": 2.1478,
807
- "step": 266
808
- },
809
- {
810
- "epoch": 2.67,
811
- "learning_rate": 5.908266688755049e-06,
812
- "loss": 2.0034,
813
- "step": 268
814
- },
815
- {
816
- "epoch": 2.68,
817
- "learning_rate": 5.199082004372957e-06,
818
- "loss": 2.0571,
819
- "step": 270
820
- },
821
- {
822
- "epoch": 2.7,
823
- "learning_rate": 4.534091942539475e-06,
824
- "loss": 1.8802,
825
- "step": 272
826
- },
827
- {
828
- "epoch": 2.72,
829
- "learning_rate": 3.913606510640644e-06,
830
- "loss": 2.1067,
831
- "step": 274
832
- },
833
- {
834
- "epoch": 2.74,
835
- "learning_rate": 3.3379149687388867e-06,
836
- "loss": 1.9134,
837
- "step": 276
838
- },
839
- {
840
- "epoch": 2.76,
841
- "learning_rate": 2.8072856947248037e-06,
842
- "loss": 2.1052,
843
- "step": 278
844
- },
845
- {
846
- "epoch": 2.78,
847
- "learning_rate": 2.3219660592038285e-06,
848
- "loss": 1.9601,
849
- "step": 280
850
- },
851
- {
852
- "epoch": 2.8,
853
- "learning_rate": 1.882182310176095e-06,
854
- "loss": 2.0907,
855
- "step": 282
856
- },
857
- {
858
- "epoch": 2.82,
859
- "learning_rate": 1.488139467563354e-06,
860
- "loss": 2.0213,
861
- "step": 284
862
- },
863
- {
864
- "epoch": 2.84,
865
- "learning_rate": 1.1400212276321376e-06,
866
- "loss": 2.2126,
867
- "step": 286
868
- },
869
- {
870
- "epoch": 2.86,
871
- "learning_rate": 8.379898773574924e-07,
872
- "loss": 2.2776,
873
- "step": 288
874
- },
875
- {
876
- "epoch": 2.88,
877
- "learning_rate": 5.821862187675775e-07,
878
- "loss": 2.0965,
879
- "step": 290
880
- },
881
- {
882
- "epoch": 2.9,
883
- "learning_rate": 3.727295033040035e-07,
884
- "loss": 2.0892,
885
- "step": 292
886
- },
887
- {
888
- "epoch": 2.92,
889
- "learning_rate": 2.0971737622883515e-07,
890
- "loss": 1.9065,
891
- "step": 294
892
- },
893
- {
894
- "epoch": 2.94,
895
- "learning_rate": 9.32258311039269e-08,
896
- "loss": 1.9225,
897
- "step": 296
898
- },
899
- {
900
- "epoch": 2.96,
901
- "learning_rate": 2.3309174364027907e-08,
902
- "loss": 1.9352,
903
- "step": 298
- },
- {
- "epoch": 2.98,
  "learning_rate": 0.0,
- "loss": 1.9156,
- "step": 300
  }
  ],
  "logging_steps": 2,
- "max_steps": 300,
  "num_train_epochs": 3,
  "save_steps": 500,
- "total_flos": 2.577856434393907e+16,
  "trial_name": null,
  "trial_params": null
  }
 
  {
  "best_metric": null,
  "best_model_checkpoint": null,
+ "epoch": 2.953846153846154,
  "eval_steps": 500,
+ "global_step": 144,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
  {
12
+ "epoch": 0.04,
13
+ "learning_rate": 8e-05,
14
+ "loss": 2.331,
15
  "step": 2
16
  },
17
  {
18
+ "epoch": 0.08,
19
+ "learning_rate": 0.00016,
20
+ "loss": 2.4252,
21
  "step": 4
22
  },
23
  {
24
+ "epoch": 0.12,
25
+ "learning_rate": 0.0001999744599547812,
26
+ "loss": 2.5627,
27
  "step": 6
28
  },
29
  {
30
+ "epoch": 0.16,
31
+ "learning_rate": 0.00019977021786163598,
32
+ "loss": 2.47,
33
  "step": 8
34
  },
35
  {
36
+ "epoch": 0.21,
37
+ "learning_rate": 0.00019936215093023884,
38
+ "loss": 2.4888,
39
  "step": 10
40
  },
41
  {
42
+ "epoch": 0.25,
43
+ "learning_rate": 0.00019875109281794825,
44
+ "loss": 2.6307,
45
  "step": 12
46
  },
47
  {
48
+ "epoch": 0.29,
49
+ "learning_rate": 0.00019793829188147406,
50
+ "loss": 2.2032,
51
  "step": 14
52
  },
53
  {
54
+ "epoch": 0.33,
55
+ "learning_rate": 0.00019692540862655585,
56
+ "loss": 2.2254,
57
  "step": 16
58
  },
59
  {
60
+ "epoch": 0.37,
61
+ "learning_rate": 0.00019571451231564525,
62
+ "loss": 2.3134,
63
  "step": 18
64
  },
65
  {
66
+ "epoch": 0.41,
67
+ "learning_rate": 0.00019430807674052092,
68
+ "loss": 2.4247,
69
  "step": 20
70
  },
71
  {
72
+ "epoch": 0.45,
73
+ "learning_rate": 0.00019270897516847403,
74
+ "loss": 2.4047,
75
  "step": 22
76
  },
77
  {
78
+ "epoch": 0.49,
79
+ "learning_rate": 0.00019092047447238773,
80
+ "loss": 2.1955,
81
  "step": 24
82
  },
83
  {
84
+ "epoch": 0.53,
85
+ "learning_rate": 0.00018894622845670283,
86
+ "loss": 2.1532,
87
  "step": 26
88
  },
89
  {
90
+ "epoch": 0.57,
91
+ "learning_rate": 0.00018679027039290497,
92
+ "loss": 2.22,
93
  "step": 28
94
  },
95
  {
96
+ "epoch": 0.62,
97
+ "learning_rate": 0.00018445700477978205,
98
+ "loss": 2.3129,
99
  "step": 30
100
  },
101
  {
102
+ "epoch": 0.66,
103
+ "learning_rate": 0.00018195119834528534,
104
+ "loss": 2.319,
105
  "step": 32
106
  },
107
  {
108
+ "epoch": 0.7,
109
+ "learning_rate": 0.00017927797030837768,
110
+ "loss": 2.3718,
111
  "step": 34
112
  },
113
  {
114
+ "epoch": 0.74,
115
+ "learning_rate": 0.0001764427819207624,
116
+ "loss": 2.2181,
117
  "step": 36
118
  },
119
  {
120
+ "epoch": 0.78,
121
+ "learning_rate": 0.00017345142530985887,
122
+ "loss": 2.2146,
123
  "step": 38
124
  },
125
  {
126
+ "epoch": 0.82,
127
+ "learning_rate": 0.00017031001164581828,
128
+ "loss": 2.2694,
129
  "step": 40
130
  },
131
  {
132
+ "epoch": 0.86,
133
+ "learning_rate": 0.0001670249586567531,
134
+ "loss": 2.475,
135
  "step": 42
136
  },
137
  {
138
+ "epoch": 0.9,
139
+ "learning_rate": 0.0001636029775176862,
140
+ "loss": 2.4209,
141
  "step": 44
142
  },
143
  {
144
+ "epoch": 0.94,
145
+ "learning_rate": 0.00016005105914000507,
146
+ "loss": 2.227,
147
  "step": 46
148
  },
149
  {
150
+ "epoch": 0.98,
151
+ "learning_rate": 0.0001563764598894301,
152
+ "loss": 2.3764,
153
  "step": 48
154
  },
155
  {
156
+ "epoch": 1.03,
157
+ "learning_rate": 0.00015258668676167546,
158
+ "loss": 2.3207,
159
  "step": 50
160
  },
161
  {
162
+ "epoch": 1.07,
163
+ "learning_rate": 0.000148689482046087,
164
+ "loss": 2.1737,
165
  "step": 52
166
  },
167
  {
168
+ "epoch": 1.11,
169
+ "learning_rate": 0.00014469280750858854,
170
+ "loss": 2.3004,
171
  "step": 54
172
  },
173
  {
174
+ "epoch": 1.15,
175
+ "learning_rate": 0.00014060482812625055,
176
+ "loss": 2.3923,
177
  "step": 56
178
  },
179
  {
180
+ "epoch": 1.19,
181
+ "learning_rate": 0.00013643389540670962,
182
+ "loss": 2.3233,
183
  "step": 58
184
  },
185
  {
186
+ "epoch": 1.23,
187
+ "learning_rate": 0.0001321885303265172,
188
+ "loss": 2.2213,
189
  "step": 60
190
  },
191
  {
192
+ "epoch": 1.27,
193
+ "learning_rate": 0.0001278774059232723,
194
+ "loss": 2.1091,
195
  "step": 62
196
  },
197
  {
198
+ "epoch": 1.31,
199
+ "learning_rate": 0.0001235093295771032,
200
+ "loss": 2.1015,
201
  "step": 64
202
  },
203
  {
204
+ "epoch": 1.35,
205
+ "learning_rate": 0.00011909322501769406,
206
+ "loss": 2.2032,
207
  "step": 66
208
  },
209
  {
210
+ "epoch": 1.39,
211
+ "learning_rate": 0.00011463811409361667,
212
+ "loss": 2.3454,
213
  "step": 68
214
  },
215
  {
216
+ "epoch": 1.44,
217
+ "learning_rate": 0.00011015309834121081,
218
+ "loss": 2.2736,
219
  "step": 70
220
  },
221
  {
222
+ "epoch": 1.48,
223
+ "learning_rate": 0.00010564734039066699,
224
+ "loss": 2.308,
225
  "step": 72
226
  },
227
  {
228
+ "epoch": 1.52,
229
+ "learning_rate": 0.00010113004524729799,
230
+ "loss": 2.1137,
231
  "step": 74
232
  },
233
  {
234
+ "epoch": 1.56,
235
+ "learning_rate": 9.661044148624037e-05,
236
+ "loss": 2.1818,
237
  "step": 76
238
  },
239
  {
240
+ "epoch": 1.6,
241
+ "learning_rate": 9.209776239900453e-05,
242
+ "loss": 2.3509,
243
  "step": 78
244
  },
245
  {
246
+ "epoch": 1.64,
247
+ "learning_rate": 8.760122713038881e-05,
248
+ "loss": 2.3443,
249
  "step": 80
250
  },
251
  {
252
+ "epoch": 1.68,
253
+ "learning_rate": 8.313002184429529e-05,
254
+ "loss": 2.401,
255
  "step": 82
256
  },
257
  {
258
+ "epoch": 1.72,
259
+ "learning_rate": 7.869328095692312e-05,
260
+ "loss": 2.3839,
261
  "step": 84
262
  },
263
  {
264
+ "epoch": 1.76,
265
+ "learning_rate": 7.430006847567972e-05,
266
+ "loss": 2.0637,
267
  "step": 86
268
  },
269
  {
270
+ "epoch": 1.81,
271
+ "learning_rate": 6.995935948193294e-05,
272
+ "loss": 2.2678,
273
  "step": 88
274
  },
275
  {
276
+ "epoch": 1.85,
277
+ "learning_rate": 6.568002179543409e-05,
278
+ "loss": 2.1899,
279
  "step": 90
280
  },
281
  {
282
+ "epoch": 1.89,
283
+ "learning_rate": 6.147079785787038e-05,
284
+ "loss": 2.3489,
285
  "step": 92
286
  },
287
  {
288
+ "epoch": 1.93,
289
+ "learning_rate": 5.734028687255751e-05,
290
+ "loss": 2.2695,
291
  "step": 94
292
  },
293
  {
294
+ "epoch": 1.97,
295
+ "learning_rate": 5.329692723675994e-05,
296
+ "loss": 2.046,
297
  "step": 96
298
  },
299
  {
300
+ "epoch": 2.01,
301
+ "learning_rate": 4.934897930252886e-05,
302
+ "loss": 2.0616,
303
  "step": 98
304
  },
305
  {
306
+ "epoch": 2.05,
307
+ "learning_rate": 4.550450850127625e-05,
308
+ "loss": 2.2179,
309
  "step": 100
310
  },
311
  {
312
+ "epoch": 2.09,
313
+ "learning_rate": 4.1771368866560665e-05,
314
+ "loss": 2.2652,
315
  "step": 102
316
  },
317
  {
318
+ "epoch": 2.13,
319
+ "learning_rate": 3.815718698874672e-05,
320
+ "loss": 2.444,
321
  "step": 104
322
  },
323
  {
324
+ "epoch": 2.17,
325
+ "learning_rate": 3.466934643431795e-05,
326
+ "loss": 2.2312,
327
  "step": 106
328
  },
329
  {
330
+ "epoch": 2.22,
331
+ "learning_rate": 3.131497266167357e-05,
332
+ "loss": 2.2545,
333
  "step": 108
334
  },
335
  {
336
+ "epoch": 2.26,
337
+ "learning_rate": 2.81009184642253e-05,
338
+ "loss": 1.9974,
339
  "step": 110
340
  },
341
  {
342
+ "epoch": 2.3,
343
+ "learning_rate": 2.5033749970533015e-05,
344
+ "loss": 2.1236,
345
  "step": 112
346
  },
347
  {
348
+ "epoch": 2.34,
349
+ "learning_rate": 2.2119733230080408e-05,
350
+ "loss": 2.0792,
351
  "step": 114
352
  },
353
  {
354
+ "epoch": 2.38,
355
+ "learning_rate": 1.9364821412094857e-05,
356
+ "loss": 2.4105,
357
  "step": 116
358
  },
359
  {
360
+ "epoch": 2.42,
361
+ "learning_rate": 1.6774642643563953e-05,
362
+ "loss": 2.3441,
363
  "step": 118
364
  },
365
  {
366
+ "epoch": 2.46,
367
+ "learning_rate": 1.4354488511294417e-05,
368
+ "loss": 1.9161,
369
  "step": 120
370
  },
371
  {
372
+ "epoch": 2.5,
373
+ "learning_rate": 1.2109303251503434e-05,
374
+ "loss": 1.8637,
375
  "step": 122
376
  },
377
  {
378
+ "epoch": 2.54,
379
+ "learning_rate": 1.0043673649027518e-05,
380
+ "loss": 2.1511,
381
  "step": 124
382
  },
383
  {
384
+ "epoch": 2.58,
385
+ "learning_rate": 8.161819666783888e-06,
386
+ "loss": 2.1222,
387
  "step": 126
388
  },
389
  {
390
+ "epoch": 2.63,
391
+ "learning_rate": 6.467585824627887e-06,
392
+ "loss": 2.3351,
393
  "step": 128
394
  },
395
  {
396
+ "epoch": 2.67,
397
+ "learning_rate": 4.964433345219355e-06,
398
+ "loss": 2.217,
399
  "step": 130
400
  },
401
  {
402
+ "epoch": 2.71,
403
+ "learning_rate": 3.655433082942972e-06,
404
+ "loss": 2.362,
405
  "step": 132
406
  },
407
  {
408
+ "epoch": 2.75,
409
+ "learning_rate": 2.5432592503288e-06,
410
+ "loss": 2.1953,
411
  "step": 134
412
  },
413
  {
414
+ "epoch": 2.79,
415
+ "learning_rate": 1.6301839547892328e-06,
416
+ "loss": 2.1478,
417
  "step": 136
418
  },
419
  {
420
+ "epoch": 2.83,
421
+ "learning_rate": 9.180725568338044e-07,
422
+ "loss": 2.1976,
423
  "step": 138
424
  },
425
  {
426
+ "epoch": 2.87,
427
+ "learning_rate": 4.0837985924448984e-07,
428
+ "loss": 2.1874,
429
  "step": 140
430
  },
431
  {
432
+ "epoch": 2.91,
433
+ "learning_rate": 1.0214713499706597e-07,
434
+ "loss": 2.3805,
435
  "step": 142
436
  },
  {
+ "epoch": 2.95,
  "learning_rate": 0.0,
+ "loss": 2.2218,
+ "step": 144
  }
  ],
  "logging_steps": 2,
+ "max_steps": 144,
  "num_train_epochs": 3,
  "save_steps": 500,
+ "total_flos": 6408232920367104.0,
  "trial_name": null,
  "trial_params": null
  }
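
`trainer_state.json` is the `Trainer`'s own progress log. The diff above swaps the previous 300-step run (epoch ≈ 2.98) for a shorter 144-step run (epoch ≈ 2.95, learning rate annealed to 0 by the final step), so the entire `log_history` array is rewritten. A minimal sketch for pulling the logged loss curve out of such a file:

```python
# Sketch only: reads the Trainer's trainer_state.json and extracts the logged
# (step, loss) pairs, e.g. for plotting the curve of this 144-step run.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]

print(f"global_step={state['global_step']}  epoch={state['epoch']:.2f}")
for step, loss in losses[-5:]:  # last few logged losses
    print(f"step {step:>4}  loss {loss:.4f}")
```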
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ca8759ffec6837da55acb02e1d1d81ed7c0fc802ba0aa05b0b5d182e5fd0c5db
+ oid sha256:dd87892201410819e4011448b9fe7093db86d6a66f410aa57e3570916e8f2bf6
  size 4600
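
`training_args.bin` is the serialized `TrainingArguments` object and likewise only shows up here as an LFS pointer change. A sketch of inspecting it after download (it is a pickle, so the same trust caveat applies):

```python
# Sketch only: loads the pickled TrainingArguments saved by the Trainer.
# weights_only=False is needed on recent PyTorch; only use trusted files.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs)  # 3, matching trainer_state.json
print(args.logging_steps)     # 2, matching trainer_state.json
print(args.learning_rate)
```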