{ "config_general": { "model_name": "Mono-InternVL-2B", "model_dtype": "float16", "model_size": 0 }, "results": { "CMMMU": { "accuracy": 28.56, "acc_stderr": 0, "acc": 28.56 }, "MMMU": { "accuracy": 28.22, "acc_stderr": 0, "acc": 28.22 }, "MMMU_Pro_standard": { "accuracy": 16.53, "acc_stderr": 0, "acc": 16.53 }, "MMMU_Pro_vision": { "accuracy": 10.0, "subject_score": { "History": 12.5, "Art": 9.43, "Design": 6.67, "Literature": 15.38, "Agriculture": 13.33, "Finance": 8.33, "Sociology": 11.11, "Accounting": 8.62, "Energy_and_Power": 3.45, "Pharmacy": 24.56, "Architecture_and_Engineering": 3.33, "Clinical_Medicine": 10.17, "Public_Health": 3.45, "Physics": 11.67, "Art_Theory": 12.73, "Electronics": 1.67, "Psychology": 15.0, "Biology": 11.86, "Manage": 6.0, "Economics": 11.86, "Mechanical_Engineering": 5.08, "Diagnostics_and_Laboratory_Medicine": 13.33, "Basic_Medical_Science": 7.69, "Computer_Science": 10.0, "Math": 18.33, "Music": 10.0, "Materials": 6.67, "Marketing": 8.47, "Chemistry": 6.67, "Geography": 13.46 }, "acc_stderr": 0, "acc": 10.0 }, "MmvetV2": { "accuracy": 32.4371, "capability_scores": { "math": 17.647058823529413, "ocr": 32.83653846153847, "spat": 30.86294416243654, "rec": 30.873786407767, "know": 26.987179487179493, "gen": 28.43636363636366, "seq": 24.642857142857142 }, "capability_detail_scores": { "math_ocr": 3.6363636363636367, "math_ocr_spat": 26.666666666666668, "ocr_math_rec_spat": 25.0, "rec_spat": 42.857142857142854, "ocr_spat": 40.38461538461539, "ocr_rec_spat": 10.0, "ocr_know_spat": 37.5, "ocr_rec": 25.0, "rec_know_spat": 24.0, "ocr": 59.06249999999999, "rec": 48.8135593220339, "rec_know": 23.076923076923077, "rec_gen_know": 27.799999999999997, "ocr_rec_gen_know": 39.23076923076923, "ocr_rec_gen_spat": 30.69767441860466, "ocr_gen_spat": 60.0, "seq_ocr_gen_spat_math": 50.0, "seq_ocr_spat_math_rec": 0.0, "rec_gen_spat": 20.0, "math_ocr_gen_spat": 60.0, "seq_rec_spat": 14.285714285714285, "seq_ocr_rec_spat": 46.666666666666664, "rec_gen_know_spat": 20.0, "rec_gen": 32.64705882352941, "ocr_rec_know_spat": 37.5, "know_ocr_gen_spat_rec": 0.0, "ocr_math_rec": 0.0, "ocr_rec_gen": 26.0, "seq_ocr_rec_gen": 20.0, "ocr_gen": 22.307692307692307, "seq_rec_gen": 25.71428571428572, "seq_rec": 23.333333333333332, "seq_rec_gen_spat": 41.25, "seq_rec_know": 0.0, "seq_rec_gen_know": 0.0, "seq_ocr_gen_spat_rec": 33.33333333333333, "seq_know_ocr_gen_rec": 10.0, "math_rec_know": 0.0, "seq_ocr_rec": 0.0 }, "acc_stderr": 0, "acc": 32.4371 }, "MathVerse": { "Text Dominant": { "accuracy": 23.73, "correct": 187, "total": 788 }, "Total": { "accuracy": 19.49, "correct": 768, "total": 3940 }, "Text Lite": { "accuracy": 21.83, "correct": 172, "total": 788 }, "Vision Intensive": { "accuracy": 22.21, "correct": 175, "total": 788 }, "Vision Dominant": { "accuracy": 16.75, "correct": 132, "total": 788 }, "Vision Only": { "accuracy": 12.94, "correct": 102, "total": 788 }, "accuracy": 19.49, "acc_stderr": 0, "acc": 19.49 }, "Ocrlite": { "final_score": [ 807, 1645 ], "accuracy": 49.058, "Key Information Extraction-Bookshelf": [ 0, 52 ], "Scene Text-centric VQA-diet_constraints": [ 40, 90 ], "Doc-oriented VQA-Control": [ 71, 189 ], "Doc-oriented VQA": [ 62, 204 ], "Scene Text-centric VQA-Fake_logo": [ 55, 119 ], "Handwritten Mathematical Expression Recognition": [ 32, 100 ], "Key Information Extraction": [ 130, 209 ], "Scene Text-centric VQA-Control": [ 135, 200 ], "Scene Text-centric VQA": [ 142, 282 ], "Artistic Text Recognition": [ 32, 50 ], "Irregular Text Recognition": [ 31, 50 ], "Non-Semantic 
Text Recognition": [ 29, 50 ], "Regular Text Recognition": [ 48, 50 ], "acc_stderr": 0, "acc": 49.058 }, "OcrliteZh": { "final_score": [ 48, 234 ], "accuracy": 20.513, "Docvqa": [ 0, 10 ], "Chartqa-human": [ 2, 10 ], "Chartqa-au": [ 1, 10 ], "infographic": [ 2, 10 ], "Key Information Extraction": [ 14, 45 ], "Scene Text-centric VQA": [ 1, 40 ], "Artistic Text Recognition": [ 1, 11 ], "IrRegular Text Recognition": [ 2, 11 ], "Non-semantic Text Recognition": [ 6, 12 ], "Regular Text Recognition": [ 4, 11 ], "Handwriting_CN": [ 7, 20 ], "Chinese Unlimited": [ 8, 44 ], "acc_stderr": 0, "acc": 20.513 }, "CharXiv": { "descriptive": { "Overall Score": 30.85, "By Question": { "Q1": 48.36, "Q2": 58.7, "Q3": 29.61, "Q4": 33.85, "Q5": 35.15, "Q6": 15.66, "Q7": 14.53, "Q8": 20.09, "Q9": 19.4, "Q10": 36.3, "Q11": 36.57, "Q12": 33.52, "Q13": 35.62, "Q14": 35.46, "Q15": 44.09, "Q16": 22.22, "Q17": 4.46, "Q18": 16.6, "Q19": 47.69 }, "By Category": { "Information Extraction": 33.57, "Enumeration": 32.28, "Pattern Recognition": 24.67, "Counting": 36.9, "Compositionality": 4.46 }, "By Subplot": { "1 Subplot": 40.28, "2-4 Subplots": 25.6, "5+ Subplots": 23.83 }, "By Subject": { "Computer Science": 31.55, "Economics": 30.07, "Electrical Engineering and Systems Science": 36.76, "Mathematics": 31.85, "Physics": 31.89, "Quantitative Biology": 24.01, "Quantitative Finance": 29.96, "Statistics": 30.97 }, "By Year": { "2020": 30.47, "2021": 29.79, "2022": 32.48, "2023": 30.75 }, "N_valid": 4000, "N_invalid": 0, "Question Type": "Descriptive" }, "reasoning": { "Overall Score": 14.9, "By Answer Type": { "Text-in-Chart": 17.05, "Text-in-General": 26.26, "Number-in-Chart": 12.07, "Number-in-General": 8.73 }, "By Source": { "GPT-Sourced": 17.39, "GPT-Inspired": 13.43, "Completely Human": 14.67 }, "By Subject": { "Computer Science": 18.25, "Economics": 13.04, "Electrical Engineering and Systems Science": 16.81, "Mathematics": 14.07, "Physics": 18.9, "Quantitative Biology": 11.11, "Quantitative Finance": 12.07, "Statistics": 15.04 }, "By Year": { "2020": 17.0, "2021": 14.56, "2022": 12.3, "2023": 15.73 }, "By Subplot": { "1 Subplot": 17.62, "2-4 Subplots": 15.61, "5+ Subplots": 9.32 }, "N_valid": 1000, "N_invalid": 0, "Question Type": "Reasoning" }, "accuracy": 22.88, "acc_stderr": 0, "acc": 22.88 }, "MathVision": { "accuracy": 12.34, "acc_stderr": 0, "acc": 12.34 }, "CII-Bench": { "accuracy": 23.4, "domain_score": { "Life": 14.72, "Art": 27.94, "CTC": 28.89, "Society": 25.41, "Env.": 24.07, "Politics": 33.33 }, "emotion_score": { "Neutral": 27.82, "Negative": 21.89, "Positive": 20.09 }, "acc_stderr": 0, "acc": 23.4 }, "Blink": { "accuracy": 35.72, "Art Style": 43.59, "Counting": 25.83, "Forensic Detection": 31.06, "Functional Correspondence": 14.62, "IQ Test": 20.0, "Jigsaw": 46.0, "Multi-view Reasoning": 40.6, "Object Localization": 59.84, "Relative Depth": 54.84, "Relative Reflectance": 32.84, "Semantic Correspondence": 25.9, "Spatial Relation": 51.05, "Visual Correspondence": 18.6, "Visual Similarity": 42.96, "acc_stderr": 0, "acc": 35.72 } } }