xuanricheng committed
Commit 6165312 · verified · 1 Parent(s): 6e8d70e

Add results for yi-vision

yi-vision/results_2025-01-15T14-59-55.592568.json ADDED
@@ -0,0 +1,325 @@
+ {
+     "config_general": {
+         "model_name": "yi-vision",
+         "model_dtype": "float16",
+         "model_size": 0
+     },
+     "results": {
+         "ChartQA": {
+             "acc": 78.88,
+             "acc_stderr": 0,
+             "accuracy": 78.88,
+             "human_test": {
+                 "total": 1250,
+                 "correct": 811,
+                 "accuracy": 64.88
+             },
+             "augmented_test": {
+                 "total": 1250,
+                 "correct": 1161,
+                 "accuracy": 92.88
+             }
+         },
+         "CMMMU": {
+             "acc": 41.22,
+             "acc_stderr": 0,
+             "\u5546\u4e1a": {
+                 "num": 126,
+                 "correct": 30,
+                 "accuracy": 23.81
+             },
+             "\u79d1\u5b66": {
+                 "num": 204,
+                 "correct": 78,
+                 "accuracy": 38.24
+             },
+             "overall": {
+                 "num": 900,
+                 "correct": 371,
+                 "accuracy": 41.22
+             },
+             "accuracy": 41.22,
+             "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+                 "num": 153,
+                 "correct": 77,
+                 "accuracy": 50.33
+             },
+             "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+                 "num": 244,
+                 "correct": 89,
+                 "accuracy": 36.48
+             },
+             "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+                 "num": 88,
+                 "correct": 56,
+                 "accuracy": 63.64
+             },
+             "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+                 "num": 85,
+                 "correct": 41,
+                 "accuracy": 48.24
+             }
+         },
+         "CMMU": {
+             "acc": 22.97,
+             "acc_stderr": 0,
+             "val": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 30,
+                         "accuracy": 20.0
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 361,
+                         "accuracy": 29.96
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 300,
+                         "correct": 44,
+                         "accuracy": 14.67
+                     },
+                     "normal": {
+                         "total": 507,
+                         "correct": 77,
+                         "accuracy": 15.19
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 94,
+                         "correct": 9,
+                         "accuracy": 9.57
+                     },
+                     "normal": {
+                         "total": 33,
+                         "correct": 6,
+                         "accuracy": 18.18
+                     }
+                 }
+             },
+             "test": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 25,
+                         "accuracy": 16.67
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 380,
+                         "accuracy": 31.54
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 296,
+                         "correct": 37,
+                         "accuracy": 12.5
+                     },
+                     "normal": {
+                         "total": 529,
+                         "correct": 76,
+                         "accuracy": 14.37
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 95,
+                         "correct": 8,
+                         "accuracy": 8.42
+                     },
+                     "normal": {
+                         "total": 32,
+                         "correct": 4,
+                         "accuracy": 12.5
+                     }
+                 }
+             },
+             "val-overall": {
+                 "total": 2289,
+                 "correct": 527,
+                 "accuracy": 23.02,
+                 "bias_rate": 7.13
+             },
+             "test-overall": {
+                 "total": 2307,
+                 "correct": 530,
+                 "accuracy": 22.97,
+                 "bias_rate": 5.58
+             }
+         },
+         "MMMU": {
+             "acc": 46.33,
+             "acc_stderr": 0,
+             "accuracy": 46.33,
+             "subject_score": {
+                 "Art": 71.67,
+                 "Math": 46.67,
+                 "Basic": 53.33,
+                 "Music": 30.0,
+                 "Design": 80.0,
+                 "Energy": 30.0,
+                 "Manage": 40.0,
+                 "Public": 46.67,
+                 "Biology": 46.67,
+                 "Finance": 36.67,
+                 "History": 63.33,
+                 "Physics": 40.0,
+                 "Clinical": 53.33,
+                 "Computer": 40.0,
+                 "Pharmacy": 30.0,
+                 "Chemistry": 23.33,
+                 "Economics": 60.0,
+                 "Geography": 60.0,
+                 "Marketing": 53.33,
+                 "Materials": 20.0,
+                 "Sociology": 56.67,
+                 "Accounting": 36.67,
+                 "Literature": 83.33,
+                 "Mechanical": 23.33,
+                 "Psychology": 46.67,
+                 "Agriculture": 53.33,
+                 "Diagnostics": 43.33,
+                 "Electronics": 23.33,
+                 "Architecture": 26.67
+             },
+             "difficulty_score": {
+                 "Easy": 54.92,
+                 "Hard": 30.39,
+                 "Medium": 47.17
+             }
+         },
+         "MMMU_Pro_standard": {
+             "acc": 29.84,
+             "acc_stderr": 0,
+             "accuracy": 29.84,
+             "reject_info": {
+                 "reject_rate": 0.06,
+                 "reject_number": 1,
+                 "total_question": 1730
+             },
+             "subject_score": {
+                 "Art": 47.17,
+                 "Math": 21.67,
+                 "Music": 20.0,
+                 "Design": 47.46,
+                 "Manage": 24.0,
+                 "Biology": 32.2,
+                 "Finance": 25.0,
+                 "History": 42.86,
+                 "Physics": 23.33,
+                 "Pharmacy": 31.58,
+                 "Chemistry": 21.67,
+                 "Economics": 23.73,
+                 "Geography": 36.54,
+                 "Marketing": 33.9,
+                 "Materials": 13.33,
+                 "Sociology": 48.15,
+                 "Accounting": 24.14,
+                 "Art_Theory": 60.0,
+                 "Literature": 73.08,
+                 "Psychology": 28.33,
+                 "Agriculture": 25.0,
+                 "Electronics": 21.67,
+                 "Public_Health": 24.14,
+                 "Computer_Science": 30.0,
+                 "Energy_and_Power": 12.07,
+                 "Clinical_Medicine": 27.12,
+                 "Basic_Medical_Science": 34.62,
+                 "Mechanical_Engineering": 20.34,
+                 "Architecture_and_Engineering": 10.0,
+                 "Diagnostics_and_Laboratory_Medicine": 25.0
+             },
+             "difficulty_score": {
+                 "Easy": 43.75,
+                 "Hard": 17.96,
+                 "Medium": 26.62
+             }
+         },
+         "OCRBench": {
+             "acc": 69.369,
+             "acc_stderr": 0,
+             "accuracy": 69.369,
+             "final_score": [
+                 693,
+                 999
+             ],
+             "reject_info": {
+                 "reject_rate": 0.1,
+                 "reject_number": 1,
+                 "total_question": 1000
+             },
+             "Doc-oriented VQA": [
+                 146,
+                 200
+             ],
+             "Scene Text-centric VQA": [
+                 174,
+                 200
+             ],
+             "Handwriting Recognition": [
+                 28,
+                 50
+             ],
+             "Digit String Recognition": [
+                 21,
+                 50
+             ],
+             "Regular Text Recognition": [
+                 48,
+                 50
+             ],
+             "Artistic Text Recognition": [
+                 42,
+                 50
+             ],
+             "Irregular Text Recognition": [
+                 40,
+                 50
+             ],
+             "Key Information Extraction": [
+                 154,
+                 200
+             ],
+             "Non-Semantic Text Recognition": [
+                 38,
+                 50
+             ],
+             "Handwritten Mathematical Expression Recognition": [
+                 2,
+                 99
+             ]
+         },
+         "MathVision": {
+             "acc": 14.44,
+             "acc_stderr": 0,
+             "accuracy": 14.44
+         },
+         "CII-Bench": {
+             "acc": 45.23,
+             "acc_stderr": 0,
+             "accuracy": 45.23,
+             "domain_score": {
+                 "Art": 49.26,
+                 "CTC": 48.15,
+                 "Env.": 53.7,
+                 "Life": 37.23,
+                 "Society": 45.95,
+                 "Politics": 58.33
+             },
+             "emotion_score": {
+                 "Neutral": 48.5,
+                 "Negative": 42.64,
+                 "Positive": 44.44
+             }
+         }
+     },
+     "versions": {},
+     "config_tasks": {},
+     "summary_tasks": {},
+     "summary_general": {}
+ }
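
Below is a minimal sketch of how this results file can be read back, for anyone pulling it from the repo. It assumes only what is visible in this diff: the file path from this commit, a headline `acc` (in percent) under each benchmark in `results`, and OCRBench's `final_score` stored as a `[correct, total]` pair so that its accuracy can be re-derived as `correct / total * 100` (693 / 999 ≈ 69.369 here). The loop and the cross-check are illustrative, not part of any evaluation harness.

```python
import json

# File path as added in this commit.
PATH = "yi-vision/results_2025-01-15T14-59-55.592568.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

# Each benchmark under "results" carries a headline "acc" in percent.
for name, scores in sorted(data["results"].items()):
    print(f"{name:<20} acc = {scores['acc']:.2f}")

# OCRBench also records final_score as [correct, total]; the reported
# accuracy is simply correct / total * 100 (693 / 999 = 69.369...).
correct, total = data["results"]["OCRBench"]["final_score"]
assert abs(correct / total * 100 - data["results"]["OCRBench"]["acc"]) < 0.01
```

On this file the loop prints one row for each of the eight benchmarks (ChartQA, CMMMU, CMMU, MMMU, MMMU_Pro_standard, OCRBench, MathVision, CII-Bench), and the assertion confirms that the reported 69.369 matches 693/999.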