{ "config_general": { "model_name": "glm-4v", "model_dtype": "float16", "model_size": 0 }, "results": { "ChartQA": { "acc": 35.08, "acc_stderr": 0, "accuracy": 35.08, "human_test": { "total": 1250, "correct": 469, "accuracy": 37.52 }, "augmented_test": { "total": 1250, "correct": 408, "accuracy": 32.64 } }, "CMMMU": { "acc": 34.78, "acc_stderr": 0, "\u5546\u4e1a": { "num": 126, "correct": 27, "accuracy": 21.43 }, "\u79d1\u5b66": { "num": 204, "correct": 54, "accuracy": 26.47 }, "overall": { "num": 900, "correct": 313, "accuracy": 34.78 }, "accuracy": 34.78, "\u5065\u5eb7\u4e0e\u533b\u5b66": { "num": 153, "correct": 56, "accuracy": 36.6 }, "\u6280\u672f\u4e0e\u5de5\u7a0b": { "num": 244, "correct": 87, "accuracy": 35.66 }, "\u827a\u672f\u4e0e\u8bbe\u8ba1": { "num": 88, "correct": 51, "accuracy": 57.95 }, "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": { "num": 85, "correct": 38, "accuracy": 44.71 } }, "CMMU": { "acc": 10.58, "acc_stderr": 0, "val": { "multiple-choice": { "hard": { "total": 150, "correct": 6, "accuracy": 4.0 }, "normal": { "total": 1205, "correct": 160, "accuracy": 13.28 } }, "fill-in-the-blank": { "hard": { "total": 300, "correct": 20, "accuracy": 6.67 }, "normal": { "total": 507, "correct": 48, "accuracy": 9.47 } }, "multiple-response": { "hard": { "total": 94, "correct": 0, "accuracy": 0.0 }, "normal": { "total": 33, "correct": 0, "accuracy": 0.0 } } }, "test": { "multiple-choice": { "hard": { "total": 150, "correct": 4, "accuracy": 2.67 }, "normal": { "total": 1205, "correct": 157, "accuracy": 13.03 } }, "fill-in-the-blank": { "hard": { "total": 296, "correct": 25, "accuracy": 8.45 }, "normal": { "total": 529, "correct": 57, "accuracy": 10.78 } }, "multiple-response": { "hard": { "total": 95, "correct": 1, "accuracy": 1.05 }, "normal": { "total": 32, "correct": 0, "accuracy": 0.0 } } }, "val-overall": { "total": 2289, "correct": 234, "accuracy": 10.22, "bias_rate": 128.08 }, "test-overall": { "total": 2307, "correct": 244, "accuracy": 10.58, "bias_rate": 130.25 } }, "MMMU": { "acc": 40.56, "acc_stderr": 0, "accuracy": 40.56, "subject_score": { "Art": 61.67, "Math": 36.67, "Basic": 50.0, "Music": 40.0, "Design": 66.67, "Energy": 46.67, "Manage": 30.0, "Public": 43.33, "Biology": 40.0, "Finance": 20.0, "History": 63.33, "Physics": 6.67, "Clinical": 46.67, "Computer": 36.67, "Pharmacy": 36.67, "Chemistry": 23.33, "Economics": 30.0, "Geography": 30.0, "Marketing": 40.0, "Materials": 26.67, "Sociology": 50.0, "Accounting": 43.33, "Literature": 83.33, "Mechanical": 30.0, "Psychology": 46.67, "Agriculture": 33.33, "Diagnostics": 40.0, "Electronics": 20.0, "Architecture": 33.33 }, "difficulty_score": { "Easy": 49.15, "Hard": 26.52, "Medium": 40.57 } }, "MMMU_Pro_standard": { "acc": 20.58, "acc_stderr": 0, "accuracy": 20.58, "subject_score": { "Art": 24.53, "Math": 21.67, "Music": 23.33, "Design": 31.67, "Manage": 24.0, "Biology": 27.12, "Finance": 16.67, "History": 21.43, "Physics": 3.33, "Pharmacy": 26.32, "Chemistry": 13.33, "Economics": 20.34, "Geography": 25.0, "Marketing": 16.95, "Materials": 15.0, "Sociology": 25.93, "Accounting": 13.79, "Art_Theory": 38.18, "Literature": 48.08, "Psychology": 18.33, "Agriculture": 13.33, "Electronics": 15.0, "Public_Health": 12.07, "Computer_Science": 23.33, "Energy_and_Power": 15.52, "Clinical_Medicine": 28.81, "Basic_Medical_Science": 21.15, "Mechanical_Engineering": 22.03, "Architecture_and_Engineering": 6.67, "Diagnostics_and_Laboratory_Medicine": 11.67 }, "difficulty_score": { "Easy": 27.84, "Hard": 12.47, "Medium": 19.85 } }, 
"MMMU_Pro_vision": { "acc": 16.71, "acc_stderr": 0, "accuracy": 16.71, "subject_score": { "Art": 28.3, "Math": 20.0, "Music": 25.0, "Design": 20.0, "Manage": 22.0, "Biology": 15.25, "Finance": 8.33, "History": 14.29, "Physics": 15.0, "Pharmacy": 17.54, "Chemistry": 11.67, "Economics": 18.64, "Geography": 23.08, "Marketing": 10.17, "Materials": 11.67, "Sociology": 25.93, "Accounting": 13.79, "Art_Theory": 29.09, "Literature": 44.23, "Psychology": 11.67, "Agriculture": 8.33, "Electronics": 8.33, "Public_Health": 13.79, "Computer_Science": 18.33, "Energy_and_Power": 15.52, "Clinical_Medicine": 8.47, "Basic_Medical_Science": 15.38, "Mechanical_Engineering": 15.25, "Architecture_and_Engineering": 11.67, "Diagnostics_and_Laboratory_Medicine": 8.33 } }, "OCRBench": { "acc": 79.1, "acc_stderr": 0, "accuracy": 79.1, "final_score": [ 791, 1000 ], "Doc-oriented VQA": [ 150, 200 ], "Scene Text-centric VQA": [ 178, 200 ], "Handwriting Recognition": [ 34, 50 ], "Digit String Recognition": [ 32, 50 ], "Regular Text Recognition": [ 49, 50 ], "Artistic Text Recognition": [ 47, 50 ], "Irregular Text Recognition": [ 47, 50 ], "Key Information Extraction": [ 166, 200 ], "Non-Semantic Text Recognition": [ 43, 50 ], "Handwritten Mathematical Expression Recognition": [ 45, 100 ] }, "MathVision": { "acc": 16.02, "acc_stderr": 0, "accuracy": 16.02 }, "CII-Bench": { "acc": 53.99, "acc_stderr": 0, "accuracy": 53.99, "domain_score": { "Art": 49.26, "CTC": 49.63, "Env.": 68.52, "Life": 50.65, "Society": 61.08, "Politics": 50.0 }, "emotion_score": { "Neutral": 54.14, "Negative": 55.47, "Positive": 52.14 } }, "Blink": { "acc": 44.66, "acc_stderr": 0, "Jigsaw": 50.0, "IQ Test": 32.0, "Counting": 60.83, "accuracy": 44.66, "Art Style": 46.15, "Relative Depth": 62.9, "Spatial Relation": 74.13, "Visual Similarity": 54.81, "Forensic Detection": 21.21, "Object Localization": 63.11, "Multi-view Reasoning": 60.9, "Relative Reflectance": 41.79, "Visual Correspondence": 25.58, "Semantic Correspondence": 23.02, "Functional Correspondence": 17.69 } }, "versions": {}, "config_tasks": {}, "summary_tasks": {}, "summary_general": {} }