vlm_results / yi-vision / results_2025-01-15T14-59-55.592568.json
{
"config_general": {
"model_name": "yi-vision",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"ChartQA": {
"acc": 78.88,
"acc_stderr": 0,
"accuracy": 78.88,
"human_test": {
"total": 1250,
"correct": 811,
"accuracy": 64.88
},
"augmented_test": {
"total": 1250,
"correct": 1161,
"accuracy": 92.88
}
},
"CMMMU": {
"acc": 41.22,
"acc_stderr": 0,
"\u5546\u4e1a": {
"num": 126,
"correct": 30,
"accuracy": 23.81
},
"\u79d1\u5b66": {
"num": 204,
"correct": 78,
"accuracy": 38.24
},
"overall": {
"num": 900,
"correct": 371,
"accuracy": 41.22
},
"accuracy": 41.22,
"\u5065\u5eb7\u4e0e\u533b\u5b66": {
"num": 153,
"correct": 77,
"accuracy": 50.33
},
"\u6280\u672f\u4e0e\u5de5\u7a0b": {
"num": 244,
"correct": 89,
"accuracy": 36.48
},
"\u827a\u672f\u4e0e\u8bbe\u8ba1": {
"num": 88,
"correct": 56,
"accuracy": 63.64
},
"\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
"num": 85,
"correct": 41,
"accuracy": 48.24
}
},
"CMMU": {
"acc": 22.97,
"acc_stderr": 0,
"val": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 30,
"accuracy": 20.0
},
"normal": {
"total": 1205,
"correct": 361,
"accuracy": 29.96
}
},
"fill-in-the-blank": {
"hard": {
"total": 300,
"correct": 44,
"accuracy": 14.67
},
"normal": {
"total": 507,
"correct": 77,
"accuracy": 15.19
}
},
"multiple-response": {
"hard": {
"total": 94,
"correct": 9,
"accuracy": 9.57
},
"normal": {
"total": 33,
"correct": 6,
"accuracy": 18.18
}
}
},
"test": {
"multiple-choice": {
"hard": {
"total": 150,
"correct": 25,
"accuracy": 16.67
},
"normal": {
"total": 1205,
"correct": 380,
"accuracy": 31.54
}
},
"fill-in-the-blank": {
"hard": {
"total": 296,
"correct": 37,
"accuracy": 12.5
},
"normal": {
"total": 529,
"correct": 76,
"accuracy": 14.37
}
},
"multiple-response": {
"hard": {
"total": 95,
"correct": 8,
"accuracy": 8.42
},
"normal": {
"total": 32,
"correct": 4,
"accuracy": 12.5
}
}
},
"val-overall": {
"total": 2289,
"correct": 527,
"accuracy": 23.02,
"bias_rate": 7.13
},
"test-overall": {
"total": 2307,
"correct": 530,
"accuracy": 22.97,
"bias_rate": 5.58
}
},
"MMMU": {
"acc": 46.33,
"acc_stderr": 0,
"accuracy": 46.33,
"subject_score": {
"Art": 71.67,
"Math": 46.67,
"Basic": 53.33,
"Music": 30.0,
"Design": 80.0,
"Energy": 30.0,
"Manage": 40.0,
"Public": 46.67,
"Biology": 46.67,
"Finance": 36.67,
"History": 63.33,
"Physics": 40.0,
"Clinical": 53.33,
"Computer": 40.0,
"Pharmacy": 30.0,
"Chemistry": 23.33,
"Economics": 60.0,
"Geography": 60.0,
"Marketing": 53.33,
"Materials": 20.0,
"Sociology": 56.67,
"Accounting": 36.67,
"Literature": 83.33,
"Mechanical": 23.33,
"Psychology": 46.67,
"Agriculture": 53.33,
"Diagnostics": 43.33,
"Electronics": 23.33,
"Architecture": 26.67
},
"difficulty_score": {
"Easy": 54.92,
"Hard": 30.39,
"Medium": 47.17
}
},
"MMMU_Pro_standard": {
"acc": 29.84,
"acc_stderr": 0,
"accuracy": 29.84,
"reject_info": {
"reject_rate": 0.06,
"reject_number": 1,
"total_question": 1730
},
"subject_score": {
"Art": 47.17,
"Math": 21.67,
"Music": 20.0,
"Design": 47.46,
"Manage": 24.0,
"Biology": 32.2,
"Finance": 25.0,
"History": 42.86,
"Physics": 23.33,
"Pharmacy": 31.58,
"Chemistry": 21.67,
"Economics": 23.73,
"Geography": 36.54,
"Marketing": 33.9,
"Materials": 13.33,
"Sociology": 48.15,
"Accounting": 24.14,
"Art_Theory": 60.0,
"Literature": 73.08,
"Psychology": 28.33,
"Agriculture": 25.0,
"Electronics": 21.67,
"Public_Health": 24.14,
"Computer_Science": 30.0,
"Energy_and_Power": 12.07,
"Clinical_Medicine": 27.12,
"Basic_Medical_Science": 34.62,
"Mechanical_Engineering": 20.34,
"Architecture_and_Engineering": 10.0,
"Diagnostics_and_Laboratory_Medicine": 25.0
},
"difficulty_score": {
"Easy": 43.75,
"Hard": 17.96,
"Medium": 26.62
}
},
"MMMU_Pro_vision":{
"acc":53.06
},
"OCRBench": {
"acc": 69.369,
"acc_stderr": 0,
"accuracy": 69.369,
"final_score": [
693,
999
],
"reject_info": {
"reject_rate": 0.1,
"reject_number": 1,
"total_question": 1000
},
"Doc-oriented VQA": [
146,
200
],
"Scene Text-centric VQA": [
174,
200
],
"Handwriting Recognition": [
28,
50
],
"Digit String Recognition": [
21,
50
],
"Regular Text Recognition": [
48,
50
],
"Artistic Text Recognition": [
42,
50
],
"Irregular Text Recognition": [
40,
50
],
"Key Information Extraction": [
154,
200
],
"Non-Semantic Text Recognition": [
38,
50
],
"Handwritten Mathematical Expression Recognition": [
2,
99
]
},
"MathVision": {
"acc": 14.44,
"acc_stderr": 0,
"accuracy": 14.44
},
"CII-Bench": {
"acc": 45.23,
"acc_stderr": 0,
"accuracy": 45.23,
"domain_score": {
"Art": 49.26,
"CTC": 48.15,
"Env.": 53.7,
"Life": 37.23,
"Society": 45.95,
"Politics": 58.33
},
"emotion_score": {
"Neutral": 48.5,
"Negative": 42.64,
"Positive": 44.44
}
},
"Blink":{
"acc":0
},
},
"versions": {},
"config_tasks": {},
"summary_tasks": {},
"summary_general": {}
}
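
A minimal Python sketch of how a results file in this layout can be loaded and sanity-checked. The local path, the per-task loop, and the two consistency checks (ChartQA "acc" as the mean of its human and augmented splits, OCRBench "acc" as final_score[0] / final_score[1] * 100) are assumptions read off the values above, not a documented contract of the dataset.

import json

# Assumed local path to this file; adjust as needed.
path = "yi-vision/results_2025-01-15T14-59-55.592568.json"

with open(path, encoding="utf-8") as f:
    results = json.load(f)["results"]

# Headline accuracy reported for each benchmark.
for task, scores in results.items():
    print(f"{task}: acc = {scores['acc']}")

# Assumed relationships, inferred from the numbers above rather than documented.
chartqa = results["ChartQA"]
mean_split = (chartqa["human_test"]["accuracy"]
              + chartqa["augmented_test"]["accuracy"]) / 2
assert abs(chartqa["acc"] - mean_split) < 0.01

ocr = results["OCRBench"]
num, den = ocr["final_score"]
assert abs(ocr["acc"] - 100 * num / den) < 0.01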