{ "config_general": { "model_name": "Pixtral-Large-Instruct-2411", "model_dtype": "float16", "model_size": 0 }, "results": { "CMMMU": { "艺术与设计": { "num": 88, "correct": 57, "accuracy": 64.77 }, "overall": { "num": 900, "correct": 386, "accuracy": 42.89 }, "商业": { "num": 126, "correct": 34, "accuracy": 26.98 }, "科学": { "num": 204, "correct": 75, "accuracy": 36.76 }, "健康与医学": { "num": 153, "correct": 73, "accuracy": 47.71 }, "人文社会科学": { "num": 85, "correct": 40, "accuracy": 47.06 }, "技术与工程": { "num": 244, "correct": 107, "accuracy": 43.85 }, "accuracy": 42.89, "acc_stderr": 0, "acc": 42.89 }, "MMMU": { "accuracy": 54.22, "subject_score": { "Accounting": 30.0, "Agriculture": 60.0, "Architecture": 40.0, "Art": 81.67, "Basic": 70.0, "Biology": 43.33, "Chemistry": 30.0, "Clinical": 66.67, "Computer": 50.0, "Design": 76.67, "Diagnostics": 46.67, "Economics": 56.67, "Electronics": 40.0, "Energy": 43.33, "Finance": 36.67, "Geography": 60.0, "History": 66.67, "Literature": 83.33, "Manage": 63.33, "Marketing": 50.0, "Materials": 43.33, "Math": 36.67, "Mechanical": 30.0, "Music": 36.67, "Pharmacy": 50.0, "Physics": 50.0, "Psychology": 66.67, "Public": 70.0, "Sociology": 66.67 }, "difficulty_score": { "Medium": 53.77, "Easy": 64.41, "Hard": 38.67 }, "acc_stderr": 0, "acc": 54.22 }, "MMMU_Pro_standard": { "accuracy": 37.28, "subject_score": { "Literature": 57.69, "Design": 61.67, "History": 50.0, "Sociology": 50.0, "Agriculture": 35.0, "Art": 58.49, "Energy_and_Power": 22.41, "Pharmacy": 38.6, "Architecture_and_Engineering": 28.33, "Clinical_Medicine": 44.07, "Accounting": 31.03, "Physics": 30.0, "Electronics": 35.0, "Public_Health": 43.1, "Art_Theory": 65.45, "Manage": 38.0, "Economics": 37.29, "Diagnostics_and_Laboratory_Medicine": 33.33, "Mechanical_Engineering": 25.42, "Basic_Medical_Science": 40.38, "Finance": 25.0, "Computer_Science": 38.33, "Math": 28.33, "Psychology": 38.33, "Biology": 32.2, "Materials": 20.0, "Music": 25.0, "Marketing": 32.2, "Geography": 38.46, "Chemistry": 25.0 }, "difficulty_score": { "Easy": 47.73, "Hard": 26.93, "Medium": 35.58 }, "acc_stderr": 0, "acc": 37.28 }, "MMMU_Pro_vision": { "accuracy": 34.28, "subject_score": { "Art": 52.83, "Agriculture": 26.67, "Literature": 69.23, "Sociology": 38.89, "History": 39.29, "Clinical_Medicine": 28.81, "Pharmacy": 36.84, "Design": 33.33, "Public_Health": 41.38, "Art_Theory": 58.18, "Accounting": 43.1, "Energy_and_Power": 25.86, "Architecture_and_Engineering": 21.67, "Physics": 25.0, "Psychology": 33.33, "Manage": 34.0, "Biology": 37.29, "Diagnostics_and_Laboratory_Medicine": 20.0, "Mechanical_Engineering": 23.73, "Finance": 43.33, "Economics": 42.37, "Basic_Medical_Science": 32.69, "Electronics": 28.33, "Computer_Science": 25.0, "Math": 23.33, "Music": 23.33, "Marketing": 50.85, "Materials": 26.67, "Chemistry": 30.0, "Geography": 21.15 }, "acc_stderr": 0, "acc": 34.28 }, "MmvetV2": { "accuracy": 66.1315, "capability_scores": { "math": 72.64705882352939, "ocr": 72.74038461538464, "spat": 65.38071065989845, "rec": 62.45145631067969, "know": 62.564102564102555, "gen": 64.10909090909097, "seq": 60.17857142857144 }, "capability_detail_scores": { "math_ocr": 71.81818181818183, "math_spat_ocr": 86.0, "math_spat_ocr_rec": 45.0, "spat_rec": 64.64285714285715, "spat_ocr": 86.15384615384616, "spat_ocr_rec": 33.33333333333333, "spat_know_ocr": 100.0, "ocr_rec": 87.5, "spat_know_rec": 45.0, "ocr": 80.3125, "rec": 64.23728813559322, "know_rec": 66.15384615384615, "know_gen_rec": 59.99999999999998, "know_gen_ocr_rec": 62.30769230769231, 
"spat_gen_ocr_rec": 71.6279069767442, "spat_gen_ocr": 85.00000000000001, "math_gen_ocr_seq_spat": 60.0, "math_ocr_rec_seq_spat": 0.0, "spat_gen_rec": 51.36363636363637, "math_spat_gen_ocr": 40.0, "seq_spat_rec": 50.0, "seq_spat_ocr_rec": 10.0, "spat_know_gen_rec": 63.33333333333333, "gen_rec": 67.94117647058823, "spat_know_ocr_rec": 85.0, "know_gen_ocr_rec_spat": 90.0, "math_ocr_rec": 100.0, "gen_ocr_rec": 86.00000000000001, "seq_gen_ocr_rec": 71.42857142857143, "gen_ocr": 69.23076923076923, "seq_gen_rec": 62.14285714285713, "seq_rec": 83.33333333333334, "seq_spat_gen_rec": 58.75, "seq_know_rec": 100.0, "seq_know_gen_rec": 65.0, "gen_ocr_rec_seq_spat": 53.333333333333336, "know_gen_ocr_rec_seq": 100.0, "math_know_rec": 50.0, "seq_ocr_rec": 0.0 }, "acc_stderr": 0, "acc": 66.1315 }, "MathVerse": { "Vision Intensive": { "accuracy": 27.41, "correct": 216, "total": 788 }, "Total": { "accuracy": 29.06, "correct": 1145, "total": 3940 }, "Vision Only": { "accuracy": 26.9, "correct": 212, "total": 788 }, "Vision Dominant": { "accuracy": 26.02, "correct": 205, "total": 788 }, "Text Lite": { "accuracy": 28.55, "correct": 225, "total": 788 }, "Text Dominant": { "accuracy": 36.42, "correct": 287, "total": 788 }, "accuracy": 29.06, "acc_stderr": 0, "acc": 29.06 }, "Ocrlite": { "final_score": [ 1212, 1645 ], "accuracy": 73.678, "Key Information Extraction-Bookshelf": [ 30, 52 ], "Scene Text-centric VQA-diet_constraints": [ 69, 90 ], "Doc-oriented VQA-Control": [ 137, 189 ], "Doc-oriented VQA": [ 170, 204 ], "Scene Text-centric VQA-Fake_logo": [ 55, 119 ], "Handwritten Mathematical Expression Recognition": [ 26, 100 ], "Key Information Extraction": [ 179, 209 ], "Scene Text-centric VQA-Control": [ 160, 200 ], "Scene Text-centric VQA": [ 224, 282 ], "Artistic Text Recognition": [ 42, 50 ], "Irregular Text Recognition": [ 47, 50 ], "Non-Semantic Text Recognition": [ 24, 50 ], "Regular Text Recognition": [ 49, 50 ], "acc_stderr": 0, "acc": 73.678 }, "OcrliteZh": { "final_score": [ 71, 234 ], "accuracy": 30.342, "Docvqa": [ 4, 10 ], "Chartqa-human": [ 3, 10 ], "Chartqa-au": [ 2, 10 ], "infographic": [ 2, 10 ], "Key Information Extraction": [ 24, 45 ], "Scene Text-centric VQA": [ 20, 40 ], "Artistic Text Recognition": [ 1, 11 ], "IrRegular Text Recognition": [ 0, 11 ], "Non-semantic Text Recognition": [ 0, 12 ], "Regular Text Recognition": [ 0, 11 ], "Handwriting_CN": [ 0, 20 ], "Chinese Unlimited": [ 15, 44 ], "acc_stderr": 0, "acc": 30.342 }, "CharXiv": { "descriptive": { "Overall Score": 86.28, "By Question": { "Q1": 84.84, "Q2": 85.22, "Q3": 75.97, "Q4": 87.55, "Q5": 89.12, "Q6": 83.13, "Q7": 86.32, "Q8": 90.62, "Q9": 87.56, "Q10": 89.73, "Q11": 80.57, "Q12": 84.07, "Q13": 82.19, "Q14": 92.2, "Q15": 96.81, "Q16": 80.56, "Q17": 75.0, "Q18": 90.28, "Q19": 87.69 }, "By Category": { "Information Extraction": 84.64, "Enumeration": 90.56, "Pattern Recognition": 85.81, "Counting": 86.77, "Compositionality": 75.0 }, "By Subplot": { "1 Subplot": 90.09, "2-4 Subplots": 85.85, "5+ Subplots": 80.72 }, "By Subject": { "Computer Science": 86.11, "Economics": 89.86, "Electrical Engineering and Systems Science": 89.29, "Mathematics": 84.63, "Physics": 83.07, "Quantitative Biology": 82.54, "Quantitative Finance": 85.78, "Statistics": 89.16 }, "By Year": { "2020": 85.63, "2021": 86.21, "2022": 87.6, "2023": 85.69 }, "N_valid": 4000, "N_invalid": 0, "Question Type": "Descriptive" }, "reasoning": { "Overall Score": 49.2, "By Answer Type": { "Text-in-Chart": 55.23, "Text-in-General": 51.52, "Number-in-Chart": 47.84, 
"Number-in-General": 37.99 }, "By Source": { "GPT-Sourced": 53.26, "GPT-Inspired": 48.15, "Completely Human": 48.33 }, "By Subject": { "Computer Science": 47.62, "Economics": 53.62, "Electrical Engineering and Systems Science": 44.54, "Mathematics": 52.59, "Physics": 53.54, "Quantitative Biology": 46.83, "Quantitative Finance": 44.83, "Statistics": 48.67 }, "By Year": { "2020": 44.53, "2021": 53.26, "2022": 45.9, "2023": 52.82 }, "By Subplot": { "1 Subplot": 47.93, "2-4 Subplots": 50.53, "5+ Subplots": 49.15 }, "N_valid": 1000, "N_invalid": 0, "Question Type": "Reasoning" }, "accuracy": 67.74, "acc_stderr": 0, "acc": 67.74 }, "MathVision": { "accuracy": 29.74, "acc_stderr": 0, "acc": 29.74 }, "CII-Bench": { "accuracy": 60.65, "domain_score": { "Art": 63.97, "CTC": 56.3, "Society": 60.54, "Env.": 74.07, "Life": 57.14, "Politics": 70.83 }, "emotion_score": { "Negative": 64.53, "Positive": 56.41, "Neutral": 60.53 }, "acc_stderr": 0, "acc": 60.65 }, "Blink": { "accuracy": 59.23, "Art Style": 78.63, "Counting": 63.33, "Forensic Detection": 47.73, "Functional Correspondence": 40.0, "IQ Test": 30.67, "Jigsaw": 65.33, "Multi-view Reasoning": 53.38, "Object Localization": 53.28, "Relative Depth": 75.0, "Relative Reflectance": 34.33, "Semantic Correspondence": 51.08, "Spatial Relation": 83.22, "Visual Correspondence": 74.42, "Visual Similarity": 78.52, "acc_stderr": 0, "acc": 59.23 } } }