luis-espinosa committed on
Commit
f86f65d
·
verified ·
1 Parent(s): 07d8df4

Upload trained SetFit model

Browse files
1_Pooling/config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
- "word_embedding_dimension": 896,
3
- "pooling_mode_cls_token": false,
4
- "pooling_mode_mean_tokens": true,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
 
1
  {
2
+ "word_embedding_dimension": 1024,
3
+ "pooling_mode_cls_token": true,
4
+ "pooling_mode_mean_tokens": false,
5
  "pooling_mode_max_tokens": false,
6
  "pooling_mode_mean_sqrt_len_tokens": false,
7
  "pooling_mode_weightedmean_tokens": false,
README.md CHANGED
@@ -5,20 +5,21 @@ tags:
5
  - text-classification
6
  - generated_from_setfit_trainer
7
  widget:
8
- - text: Be.EV partners with Paua to add more than 700 charge points to the Paua network
9
- - text: UAE’s Artificial Intelligence Office, Mastercard and First Abu Dhabi Bank
10
- Launch Joint AI Challenge
11
- - text: 'Supply Licence Review: Ofgem''s role in enforcing industry codes'
12
- - text: Air Astana, Neos Enter into Strategic Partnership
13
- - text: Ofgem protects customers of failed supplier Rutherford Energy Supply Limited
 
14
  metrics:
15
  - accuracy
16
  pipeline_tag: text-classification
17
  library_name: setfit
18
  inference: false
19
- base_model: HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5
20
  model-index:
21
- - name: SetFit with HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5
22
  results:
23
  - task:
24
  type: text-classification
@@ -29,13 +30,13 @@ model-index:
29
  split: test
30
  metrics:
31
  - type: accuracy
32
- value: 0.6441441441441441
33
  name: Accuracy
34
  ---
35
 
36
- # SetFit with HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5
37
 
38
- This is a [SetFit](https://github.com/huggingface/setfit) model that can be used for Text Classification. This SetFit model uses [HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5](https://huggingface.co/HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5) as the Sentence Transformer embedding model. A OneVsRestClassifier instance is used for classification.
39
 
40
  The model has been trained using an efficient few-shot learning technique that involves:
41
 
@@ -46,9 +47,9 @@ The model has been trained using an efficient few-shot learning technique that i
46
 
47
  ### Model Description
48
  - **Model Type:** SetFit
49
- - **Sentence Transformer body:** [HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5](https://huggingface.co/HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5)
50
  - **Classification head:** a OneVsRestClassifier instance
51
- - **Maximum Sequence Length:** 32768 tokens
52
  <!-- - **Number of Classes:** Unknown -->
53
  <!-- - **Training Dataset:** [Unknown](https://huggingface.co/datasets/unknown) -->
54
  <!-- - **Language:** Unknown -->
@@ -65,7 +66,7 @@ The model has been trained using an efficient few-shot learning technique that i
65
  ### Metrics
66
  | Label | Accuracy |
67
  |:--------|:---------|
68
- | **all** | 0.6441 |
69
 
70
  ## Uses
71
 
@@ -85,7 +86,7 @@ from setfit import SetFitModel
85
  # Download from the 🤗 Hub
86
  model = SetFitModel.from_pretrained("amplyfi/all-labels")
87
  # Run inference
88
- preds = model("Air Astana, Neos Enter into Strategic Partnership")
89
  ```
90
 
91
  <!--
@@ -115,16 +116,16 @@ preds = model("Air Astana, Neos Enter into Strategic Partnership")
115
  ## Training Details
116
 
117
  ### Training Set Metrics
118
- | Training set | Min | Median | Max |
119
- |:-------------|:----|:-------|:----|
120
- | Word count | 4 | 9.9797 | 30 |
121
 
122
  ### Training Hyperparameters
123
  - batch_size: (16, 16)
124
- - num_epochs: (2, 2)
125
  - max_steps: -1
126
  - sampling_strategy: oversampling
127
- - num_iterations: 5
128
  - body_learning_rate: (2e-05, 2e-05)
129
  - head_learning_rate: 2e-05
130
  - loss: CosineSimilarityLoss
@@ -139,31 +140,230 @@ preds = model("Air Astana, Neos Enter into Strategic Partnership")
139
  - load_best_model_at_end: False
140
 
141
  ### Training Results
142
- | Epoch | Step | Training Loss | Validation Loss |
143
- |:------:|:----:|:-------------:|:---------------:|
144
- | 0.0018 | 1 | 0.3185 | - |
145
- | 0.0903 | 50 | 0.2296 | - |
146
- | 0.1805 | 100 | 0.1307 | - |
147
- | 0.2708 | 150 | 0.0955 | - |
148
- | 0.3610 | 200 | 0.08 | - |
149
- | 0.4513 | 250 | 0.0687 | - |
150
- | 0.5415 | 300 | 0.0591 | - |
151
- | 0.6318 | 350 | 0.0545 | - |
152
- | 0.7220 | 400 | 0.0538 | - |
153
- | 0.8123 | 450 | 0.0482 | - |
154
- | 0.9025 | 500 | 0.0327 | - |
155
- | 0.9928 | 550 | 0.0332 | - |
156
- | 1.0830 | 600 | 0.0315 | - |
157
- | 1.1733 | 650 | 0.0188 | - |
158
- | 1.2635 | 700 | 0.016 | - |
159
- | 1.3538 | 750 | 0.016 | - |
160
- | 1.4440 | 800 | 0.0167 | - |
161
- | 1.5343 | 850 | 0.0128 | - |
162
- | 1.6245 | 900 | 0.0182 | - |
163
- | 1.7148 | 950 | 0.0113 | - |
164
- | 1.8051 | 1000 | 0.014 | - |
165
- | 1.8953 | 1050 | 0.0151 | - |
166
- | 1.9856 | 1100 | 0.0153 | - |
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
167
 
168
  ### Framework Versions
169
  - Python: 3.10.12
 
5
  - text-classification
6
  - generated_from_setfit_trainer
7
  widget:
8
+ - text: Ofgem protects Usio Energy customers' supplies and credit balances
9
+ - text: Ofgem completes investigation into EDF Energy networks - finds no breach of
10
+ obligations
11
+ - text: 'Cyprus, the tech island: reflections on the Reflect Festival'
12
+ - text: Ofgem appoints preferred bidder for Burbo Bank Extension offshore transmission
13
+ assets
14
+ - text: 'Tech Turmoil: Google Discontinues Google Play Music, Users Left in Limbo'
15
  metrics:
16
  - accuracy
17
  pipeline_tag: text-classification
18
  library_name: setfit
19
  inference: false
20
+ base_model: mixedbread-ai/mxbai-embed-large-v1
21
  model-index:
22
+ - name: SetFit with mixedbread-ai/mxbai-embed-large-v1
23
  results:
24
  - task:
25
  type: text-classification
 
30
  split: test
31
  metrics:
32
  - type: accuracy
33
+ value: 0.6621621621621622
34
  name: Accuracy
35
  ---
36
 
37
+ # SetFit with mixedbread-ai/mxbai-embed-large-v1
38
 
39
+ This is a [SetFit](https://github.com/huggingface/setfit) model that can be used for Text Classification. This SetFit model uses [mixedbread-ai/mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1) as the Sentence Transformer embedding model. A OneVsRestClassifier instance is used for classification.
40
 
41
  The model has been trained using an efficient few-shot learning technique that involves:
42
 
 
47
 
48
  ### Model Description
49
  - **Model Type:** SetFit
50
+ - **Sentence Transformer body:** [mixedbread-ai/mxbai-embed-large-v1](https://huggingface.co/mixedbread-ai/mxbai-embed-large-v1)
51
  - **Classification head:** a OneVsRestClassifier instance
52
+ - **Maximum Sequence Length:** 512 tokens
53
  <!-- - **Number of Classes:** Unknown -->
54
  <!-- - **Training Dataset:** [Unknown](https://huggingface.co/datasets/unknown) -->
55
  <!-- - **Language:** Unknown -->
 
66
  ### Metrics
67
  | Label | Accuracy |
68
  |:--------|:---------|
69
+ | **all** | 0.6622 |
70
 
71
  ## Uses
72
 
 
86
  # Download from the 🤗 Hub
87
  model = SetFitModel.from_pretrained("amplyfi/all-labels")
88
  # Run inference
89
+ preds = model("Cyprus, the tech island: reflections on the Reflect Festival")
90
  ```
91
 
92
  <!--
 
116
  ## Training Details
117
 
118
  ### Training Set Metrics
119
+ | Training set | Min | Median | Max |
120
+ |:-------------|:----|:--------|:----|
121
+ | Word count | 4 | 10.0203 | 30 |
122
 
123
  ### Training Hyperparameters
124
  - batch_size: (16, 16)
125
+ - num_epochs: (10, 10)
126
  - max_steps: -1
127
  - sampling_strategy: oversampling
128
+ - num_iterations: 10
129
  - body_learning_rate: (2e-05, 2e-05)
130
  - head_learning_rate: 2e-05
131
  - loss: CosineSimilarityLoss
 
140
  - load_best_model_at_end: False
141
 
142
  ### Training Results
143
+ | Epoch | Step | Training Loss | Validation Loss |
144
+ |:------:|:-----:|:-------------:|:---------------:|
145
+ | 0.0009 | 1 | 0.2478 | - |
146
+ | 0.0452 | 50 | 0.2177 | - |
147
+ | 0.0903 | 100 | 0.2097 | - |
148
+ | 0.1355 | 150 | 0.1949 | - |
149
+ | 0.1807 | 200 | 0.1787 | - |
150
+ | 0.2258 | 250 | 0.1494 | - |
151
+ | 0.2710 | 300 | 0.1225 | - |
152
+ | 0.3162 | 350 | 0.1146 | - |
153
+ | 0.3613 | 400 | 0.0845 | - |
154
+ | 0.4065 | 450 | 0.0726 | - |
155
+ | 0.4517 | 500 | 0.076 | - |
156
+ | 0.4968 | 550 | 0.0579 | - |
157
+ | 0.5420 | 600 | 0.0688 | - |
158
+ | 0.5872 | 650 | 0.0533 | - |
159
+ | 0.6323 | 700 | 0.0547 | - |
160
+ | 0.6775 | 750 | 0.0492 | - |
161
+ | 0.7227 | 800 | 0.0498 | - |
162
+ | 0.7678 | 850 | 0.037 | - |
163
+ | 0.8130 | 900 | 0.0357 | - |
164
+ | 0.8582 | 950 | 0.0398 | - |
165
+ | 0.9033 | 1000 | 0.0428 | - |
166
+ | 0.9485 | 1050 | 0.0494 | - |
167
+ | 0.9937 | 1100 | 0.0334 | - |
168
+ | 1.0388 | 1150 | 0.0327 | - |
169
+ | 1.0840 | 1200 | 0.0265 | - |
170
+ | 1.1292 | 1250 | 0.0283 | - |
171
+ | 1.1743 | 1300 | 0.0347 | - |
172
+ | 1.2195 | 1350 | 0.0299 | - |
173
+ | 1.2647 | 1400 | 0.0235 | - |
174
+ | 1.3098 | 1450 | 0.0223 | - |
175
+ | 1.3550 | 1500 | 0.0254 | - |
176
+ | 1.4002 | 1550 | 0.0249 | - |
177
+ | 1.4453 | 1600 | 0.0209 | - |
178
+ | 1.4905 | 1650 | 0.0239 | - |
179
+ | 1.5357 | 1700 | 0.0229 | - |
180
+ | 1.5808 | 1750 | 0.0193 | - |
181
+ | 1.6260 | 1800 | 0.017 | - |
182
+ | 1.6712 | 1850 | 0.0205 | - |
183
+ | 1.7164 | 1900 | 0.0162 | - |
184
+ | 1.7615 | 1950 | 0.0179 | - |
185
+ | 1.8067 | 2000 | 0.0196 | - |
186
+ | 1.8519 | 2050 | 0.0138 | - |
187
+ | 1.8970 | 2100 | 0.0196 | - |
188
+ | 1.9422 | 2150 | 0.0118 | - |
189
+ | 1.9874 | 2200 | 0.0166 | - |
190
+ | 2.0325 | 2250 | 0.0104 | - |
191
+ | 2.0777 | 2300 | 0.0139 | - |
192
+ | 2.1229 | 2350 | 0.0139 | - |
193
+ | 2.1680 | 2400 | 0.0122 | - |
194
+ | 2.2132 | 2450 | 0.0128 | - |
195
+ | 2.2584 | 2500 | 0.0111 | - |
196
+ | 2.3035 | 2550 | 0.0136 | - |
197
+ | 2.3487 | 2600 | 0.0108 | - |
198
+ | 2.3939 | 2650 | 0.0112 | - |
199
+ | 2.4390 | 2700 | 0.0117 | - |
200
+ | 2.4842 | 2750 | 0.0147 | - |
201
+ | 2.5294 | 2800 | 0.0139 | - |
202
+ | 2.5745 | 2850 | 0.0137 | - |
203
+ | 2.6197 | 2900 | 0.0124 | - |
204
+ | 2.6649 | 2950 | 0.012 | - |
205
+ | 2.7100 | 3000 | 0.0118 | - |
206
+ | 2.7552 | 3050 | 0.0124 | - |
207
+ | 2.8004 | 3100 | 0.0109 | - |
208
+ | 2.8455 | 3150 | 0.01 | - |
209
+ | 2.8907 | 3200 | 0.0109 | - |
210
+ | 2.9359 | 3250 | 0.0072 | - |
211
+ | 2.9810 | 3300 | 0.0102 | - |
212
+ | 3.0262 | 3350 | 0.0102 | - |
213
+ | 3.0714 | 3400 | 0.0141 | - |
214
+ | 3.1165 | 3450 | 0.0143 | - |
215
+ | 3.1617 | 3500 | 0.0105 | - |
216
+ | 3.2069 | 3550 | 0.0132 | - |
217
+ | 3.2520 | 3600 | 0.011 | - |
218
+ | 3.2972 | 3650 | 0.0104 | - |
219
+ | 3.3424 | 3700 | 0.0104 | - |
220
+ | 3.3875 | 3750 | 0.0078 | - |
221
+ | 3.4327 | 3800 | 0.0095 | - |
222
+ | 3.4779 | 3850 | 0.0118 | - |
223
+ | 3.5230 | 3900 | 0.0076 | - |
224
+ | 3.5682 | 3950 | 0.0087 | - |
225
+ | 3.6134 | 4000 | 0.0098 | - |
226
+ | 3.6585 | 4050 | 0.0114 | - |
227
+ | 3.7037 | 4100 | 0.0086 | - |
228
+ | 3.7489 | 4150 | 0.01 | - |
229
+ | 3.7940 | 4200 | 0.0102 | - |
230
+ | 3.8392 | 4250 | 0.0077 | - |
231
+ | 3.8844 | 4300 | 0.0076 | - |
232
+ | 3.9295 | 4350 | 0.0082 | - |
233
+ | 3.9747 | 4400 | 0.0095 | - |
234
+ | 4.0199 | 4450 | 0.0055 | - |
235
+ | 4.0650 | 4500 | 0.009 | - |
236
+ | 4.1102 | 4550 | 0.0086 | - |
237
+ | 4.1554 | 4600 | 0.0086 | - |
238
+ | 4.2005 | 4650 | 0.0075 | - |
239
+ | 4.2457 | 4700 | 0.009 | - |
240
+ | 4.2909 | 4750 | 0.0068 | - |
241
+ | 4.3360 | 4800 | 0.0096 | - |
242
+ | 4.3812 | 4850 | 0.008 | - |
243
+ | 4.4264 | 4900 | 0.0075 | - |
244
+ | 4.4715 | 4950 | 0.0069 | - |
245
+ | 4.5167 | 5000 | 0.0076 | - |
246
+ | 4.5619 | 5050 | 0.0058 | - |
247
+ | 4.6070 | 5100 | 0.0077 | - |
248
+ | 4.6522 | 5150 | 0.0073 | - |
249
+ | 4.6974 | 5200 | 0.0083 | - |
250
+ | 4.7425 | 5250 | 0.0059 | - |
251
+ | 4.7877 | 5300 | 0.0066 | - |
252
+ | 4.8329 | 5350 | 0.0065 | - |
253
+ | 4.8780 | 5400 | 0.006 | - |
254
+ | 4.9232 | 5450 | 0.008 | - |
255
+ | 4.9684 | 5500 | 0.0073 | - |
256
+ | 5.0136 | 5550 | 0.01 | - |
257
+ | 5.0587 | 5600 | 0.0047 | - |
258
+ | 5.1039 | 5650 | 0.0057 | - |
259
+ | 5.1491 | 5700 | 0.0069 | - |
260
+ | 5.1942 | 5750 | 0.0055 | - |
261
+ | 5.2394 | 5800 | 0.0082 | - |
262
+ | 5.2846 | 5850 | 0.0067 | - |
263
+ | 5.3297 | 5900 | 0.0081 | - |
264
+ | 5.3749 | 5950 | 0.0079 | - |
265
+ | 5.4201 | 6000 | 0.0051 | - |
266
+ | 5.4652 | 6050 | 0.0073 | - |
267
+ | 5.5104 | 6100 | 0.007 | - |
268
+ | 5.5556 | 6150 | 0.0069 | - |
269
+ | 5.6007 | 6200 | 0.0066 | - |
270
+ | 5.6459 | 6250 | 0.0073 | - |
271
+ | 5.6911 | 6300 | 0.0063 | - |
272
+ | 5.7362 | 6350 | 0.0049 | - |
273
+ | 5.7814 | 6400 | 0.0042 | - |
274
+ | 5.8266 | 6450 | 0.0076 | - |
275
+ | 5.8717 | 6500 | 0.0077 | - |
276
+ | 5.9169 | 6550 | 0.0071 | - |
277
+ | 5.9621 | 6600 | 0.0079 | - |
278
+ | 6.0072 | 6650 | 0.0073 | - |
279
+ | 6.0524 | 6700 | 0.0069 | - |
280
+ | 6.0976 | 6750 | 0.0049 | - |
281
+ | 6.1427 | 6800 | 0.0065 | - |
282
+ | 6.1879 | 6850 | 0.0046 | - |
283
+ | 6.2331 | 6900 | 0.0063 | - |
284
+ | 6.2782 | 6950 | 0.0061 | - |
285
+ | 6.3234 | 7000 | 0.0066 | - |
286
+ | 6.3686 | 7050 | 0.0049 | - |
287
+ | 6.4137 | 7100 | 0.0048 | - |
288
+ | 6.4589 | 7150 | 0.0062 | - |
289
+ | 6.5041 | 7200 | 0.0067 | - |
290
+ | 6.5492 | 7250 | 0.0059 | - |
291
+ | 6.5944 | 7300 | 0.0078 | - |
292
+ | 6.6396 | 7350 | 0.0074 | - |
293
+ | 6.6847 | 7400 | 0.0058 | - |
294
+ | 6.7299 | 7450 | 0.007 | - |
295
+ | 6.7751 | 7500 | 0.0059 | - |
296
+ | 6.8202 | 7550 | 0.0061 | - |
297
+ | 6.8654 | 7600 | 0.0057 | - |
298
+ | 6.9106 | 7650 | 0.0062 | - |
299
+ | 6.9557 | 7700 | 0.0056 | - |
300
+ | 7.0009 | 7750 | 0.0054 | - |
301
+ | 7.0461 | 7800 | 0.0063 | - |
302
+ | 7.0912 | 7850 | 0.0066 | - |
303
+ | 7.1364 | 7900 | 0.0051 | - |
304
+ | 7.1816 | 7950 | 0.0063 | - |
305
+ | 7.2267 | 8000 | 0.0053 | - |
306
+ | 7.2719 | 8050 | 0.0045 | - |
307
+ | 7.3171 | 8100 | 0.0065 | - |
308
+ | 7.3622 | 8150 | 0.0057 | - |
309
+ | 7.4074 | 8200 | 0.0068 | - |
310
+ | 7.4526 | 8250 | 0.0058 | - |
311
+ | 7.4977 | 8300 | 0.0077 | - |
312
+ | 7.5429 | 8350 | 0.0062 | - |
313
+ | 7.5881 | 8400 | 0.0057 | - |
314
+ | 7.6332 | 8450 | 0.0047 | - |
315
+ | 7.6784 | 8500 | 0.0051 | - |
316
+ | 7.7236 | 8550 | 0.0063 | - |
317
+ | 7.7687 | 8600 | 0.0043 | - |
318
+ | 7.8139 | 8650 | 0.0041 | - |
319
+ | 7.8591 | 8700 | 0.0055 | - |
320
+ | 7.9042 | 8750 | 0.0049 | - |
321
+ | 7.9494 | 8800 | 0.0066 | - |
322
+ | 7.9946 | 8850 | 0.007 | - |
323
+ | 8.0397 | 8900 | 0.0057 | - |
324
+ | 8.0849 | 8950 | 0.0049 | - |
325
+ | 8.1301 | 9000 | 0.0043 | - |
326
+ | 8.1752 | 9050 | 0.0054 | - |
327
+ | 8.2204 | 9100 | 0.0045 | - |
328
+ | 8.2656 | 9150 | 0.0043 | - |
329
+ | 8.3107 | 9200 | 0.0054 | - |
330
+ | 8.3559 | 9250 | 0.0048 | - |
331
+ | 8.4011 | 9300 | 0.0046 | - |
332
+ | 8.4463 | 9350 | 0.0039 | - |
333
+ | 8.4914 | 9400 | 0.0073 | - |
334
+ | 8.5366 | 9450 | 0.0071 | - |
335
+ | 8.5818 | 9500 | 0.0068 | - |
336
+ | 8.6269 | 9550 | 0.0055 | - |
337
+ | 8.6721 | 9600 | 0.0062 | - |
338
+ | 8.7173 | 9650 | 0.0055 | - |
339
+ | 8.7624 | 9700 | 0.0068 | - |
340
+ | 8.8076 | 9750 | 0.0052 | - |
341
+ | 8.8528 | 9800 | 0.0049 | - |
342
+ | 8.8979 | 9850 | 0.005 | - |
343
+ | 8.9431 | 9900 | 0.0033 | - |
344
+ | 8.9883 | 9950 | 0.0064 | - |
345
+ | 9.0334 | 10000 | 0.0057 | - |
346
+ | 9.0786 | 10050 | 0.0056 | - |
347
+ | 9.1238 | 10100 | 0.0066 | - |
348
+ | 9.1689 | 10150 | 0.0046 | - |
349
+ | 9.2141 | 10200 | 0.0043 | - |
350
+ | 9.2593 | 10250 | 0.0041 | - |
351
+ | 9.3044 | 10300 | 0.0066 | - |
352
+ | 9.3496 | 10350 | 0.0046 | - |
353
+ | 9.3948 | 10400 | 0.0056 | - |
354
+ | 9.4399 | 10450 | 0.0043 | - |
355
+ | 9.4851 | 10500 | 0.0045 | - |
356
+ | 9.5303 | 10550 | 0.0048 | - |
357
+ | 9.5754 | 10600 | 0.0057 | - |
358
+ | 9.6206 | 10650 | 0.0055 | - |
359
+ | 9.6658 | 10700 | 0.0042 | - |
360
+ | 9.7109 | 10750 | 0.0063 | - |
361
+ | 9.7561 | 10800 | 0.0047 | - |
362
+ | 9.8013 | 10850 | 0.0046 | - |
363
+ | 9.8464 | 10900 | 0.0045 | - |
364
+ | 9.8916 | 10950 | 0.0047 | - |
365
+ | 9.9368 | 11000 | 0.0057 | - |
366
+ | 9.9819 | 11050 | 0.0061 | - |
367
 
368
  ### Framework Versions
369
  - Python: 3.10.12
config.json CHANGED
@@ -1,28 +1,26 @@
1
  {
2
- "_name_or_path": "HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5",
3
  "architectures": [
4
- "Qwen2Model"
5
  ],
6
- "attention_dropout": 0.0,
7
- "bos_token_id": 151643,
8
- "eos_token_id": 151643,
9
- "hidden_act": "silu",
10
- "hidden_size": 896,
 
11
  "initializer_range": 0.02,
12
- "intermediate_size": 4864,
13
- "max_position_embeddings": 131072,
14
- "max_window_layers": 24,
15
- "model_type": "qwen2",
16
- "num_attention_heads": 14,
17
  "num_hidden_layers": 24,
18
- "num_key_value_heads": 2,
19
- "rms_norm_eps": 1e-06,
20
- "rope_theta": 1000000.0,
21
- "sliding_window": 131072,
22
- "tie_word_embeddings": true,
23
  "torch_dtype": "float32",
24
  "transformers_version": "4.42.2",
 
25
  "use_cache": false,
26
- "use_sliding_window": false,
27
- "vocab_size": 151936
28
  }
 
1
  {
2
+ "_name_or_path": "mixedbread-ai/mxbai-embed-large-v1",
3
  "architectures": [
4
+ "BertModel"
5
  ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 1024,
12
  "initializer_range": 0.02,
13
+ "intermediate_size": 4096,
14
+ "layer_norm_eps": 1e-12,
15
+ "max_position_embeddings": 512,
16
+ "model_type": "bert",
17
+ "num_attention_heads": 16,
18
  "num_hidden_layers": 24,
19
+ "pad_token_id": 0,
20
+ "position_embedding_type": "absolute",
 
 
 
21
  "torch_dtype": "float32",
22
  "transformers_version": "4.42.2",
23
+ "type_vocab_size": 2,
24
  "use_cache": false,
25
+ "vocab_size": 30522
 
26
  }
config_sentence_transformers.json CHANGED
@@ -5,8 +5,8 @@
5
  "pytorch": "2.5.1+cu124"
6
  },
7
  "prompts": {
8
- "query": "",
9
- "document": ""
10
  },
11
  "default_prompt_name": null,
12
  "similarity_fn_name": "cosine"
 
5
  "pytorch": "2.5.1+cu124"
6
  },
7
  "prompts": {
8
+ "query": "Represent this sentence for searching relevant passages: ",
9
+ "passage": ""
10
  },
11
  "default_prompt_name": null,
12
  "similarity_fn_name": "cosine"
config_setfit.json CHANGED
@@ -1,4 +1,4 @@
1
  {
2
- "normalize_embeddings": false,
3
- "labels": null
4
  }
 
1
  {
2
+ "labels": null,
3
+ "normalize_embeddings": false
4
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5292fdb77075fc7c073101ce5d1de5a8519e07ce5428101ed6891b827ea82938
3
- size 1976161736
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:150360bbdd752edd585148838ca495db682aa0faa17187de04a433b464393541
3
+ size 1340612432
model_head.pkl CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f68b0434348a1d9f0132b0f79b1ddd5f3caafbc7d4de9225d0a8bd1bae5fd447
3
- size 136084
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4efc1af17032e7fd31fed51625cd1174dd20f2dfc06cecbda849450d109958d8
3
+ size 154516
modules.json CHANGED
@@ -10,11 +10,5 @@
10
  "name": "1",
11
  "path": "1_Pooling",
12
  "type": "sentence_transformers.models.Pooling"
13
- },
14
- {
15
- "idx": 2,
16
- "name": "2",
17
- "path": "2_Normalize",
18
- "type": "sentence_transformers.models.Normalize"
19
  }
20
  ]
 
10
  "name": "1",
11
  "path": "1_Pooling",
12
  "type": "sentence_transformers.models.Pooling"
 
 
 
 
 
 
13
  }
14
  ]
sentence_bert_config.json CHANGED
@@ -1,4 +1,4 @@
1
  {
2
- "max_seq_length": 32768,
3
  "do_lower_case": false
4
  }
 
1
  {
2
+ "max_seq_length": 512,
3
  "do_lower_case": false
4
  }
special_tokens_map.json CHANGED
@@ -1,17 +1,34 @@
1
  {
2
- "additional_special_tokens": [
3
- "<|im_start|>",
4
- "<|im_end|>"
5
- ],
6
- "eos_token": {
7
- "content": "<|endoftext|>",
 
 
 
8
  "lstrip": false,
9
  "normalized": false,
10
  "rstrip": false,
11
  "single_word": false
12
  },
13
  "pad_token": {
14
- "content": "<|endoftext|>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  "lstrip": false,
16
  "normalized": false,
17
  "rstrip": false,
 
1
  {
2
+ "cls_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "mask_token": {
10
+ "content": "[MASK]",
11
  "lstrip": false,
12
  "normalized": false,
13
  "rstrip": false,
14
  "single_word": false
15
  },
16
  "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "sep_token": {
24
+ "content": "[SEP]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "[UNK]",
32
  "lstrip": false,
33
  "normalized": false,
34
  "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,24 +1,39 @@
1
  {
2
- "add_prefix_space": false,
3
  "added_tokens_decoder": {
4
- "151643": {
5
- "content": "<|endoftext|>",
6
  "lstrip": false,
7
  "normalized": false,
8
  "rstrip": false,
9
  "single_word": false,
10
  "special": true
11
  },
12
- "151644": {
13
- "content": "<|im_start|>",
14
  "lstrip": false,
15
  "normalized": false,
16
  "rstrip": false,
17
  "single_word": false,
18
  "special": true
19
  },
20
- "151645": {
21
- "content": "<|im_end|>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  "lstrip": false,
23
  "normalized": false,
24
  "rstrip": false,
@@ -26,31 +41,17 @@
26
  "special": true
27
  }
28
  },
29
- "additional_special_tokens": [
30
- "<|im_start|>",
31
- "<|im_end|>"
32
- ],
33
- "auto_map": {
34
- "AutoTokenizer": [
35
- "HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5--tokenization_qwen.Qwen2Tokenizer",
36
- "HIT-TMG/KaLM-embedding-multilingual-mini-instruct-v1.5--tokenization_qwen.Qwen2TokenizerFast"
37
- ]
38
- },
39
- "bos_token": null,
40
- "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
41
- "clean_up_tokenization_spaces": false,
42
- "eos_token": "<|endoftext|>",
43
- "errors": "replace",
44
- "max_length": 512,
45
- "model_max_length": 32768,
46
- "pad_to_multiple_of": null,
47
- "pad_token": "<|endoftext|>",
48
- "pad_token_type_id": 0,
49
- "padding_side": "left",
50
- "split_special_tokens": false,
51
- "stride": 0,
52
- "tokenizer_class": "Qwen2Tokenizer",
53
- "truncation_side": "right",
54
- "truncation_strategy": "longest_first",
55
- "unk_token": null
56
  }
 
1
  {
 
2
  "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
  "lstrip": false,
6
  "normalized": false,
7
  "rstrip": false,
8
  "single_word": false,
9
  "special": true
10
  },
11
+ "100": {
12
+ "content": "[UNK]",
13
  "lstrip": false,
14
  "normalized": false,
15
  "rstrip": false,
16
  "single_word": false,
17
  "special": true
18
  },
19
+ "101": {
20
+ "content": "[CLS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "102": {
28
+ "content": "[SEP]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "103": {
36
+ "content": "[MASK]",
37
  "lstrip": false,
38
  "normalized": false,
39
  "rstrip": false,
 
41
  "special": true
42
  }
43
  },
44
+ "clean_up_tokenization_spaces": true,
45
+ "cls_token": "[CLS]",
46
+ "do_basic_tokenize": true,
47
+ "do_lower_case": true,
48
+ "mask_token": "[MASK]",
49
+ "model_max_length": 512,
50
+ "never_split": null,
51
+ "pad_token": "[PAD]",
52
+ "sep_token": "[SEP]",
53
+ "strip_accents": null,
54
+ "tokenize_chinese_chars": true,
55
+ "tokenizer_class": "BertTokenizer",
56
+ "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  }