vlm_results / Gemma-3-27b-it / results_2025-01-25T10-42-53.190540.json
{
"config_general": {
"model_name": "Gemma-3-27b-it",
"model_dtype": "float16",
"model_size": 0
},
"results": {
"CMMMU": {
"艺术与设计": {
"num": 88,
"correct": 53,
"accuracy": 60.23
},
"overall": {
"num": 900,
"correct": 374,
"accuracy": 41.56
},
"商业": {
"num": 126,
"correct": 32,
"accuracy": 25.4
},
"科学": {
"num": 204,
"correct": 81,
"accuracy": 39.71
},
"健康与医学": {
"num": 153,
"correct": 72,
"accuracy": 47.06
},
"人文社会科学": {
"num": 85,
"correct": 39,
"accuracy": 45.88
},
"技术与工程": {
"num": 244,
"correct": 97,
"accuracy": 39.75
},
"accuracy": 41.56,
"acc_stderr": 0,
"acc": 41.56
},
"MMMU": {
"accuracy": 53.89,
"subject_score": {
"Accounting": 40.0,
"Agriculture": 50.0,
"Architecture": 53.33,
"Art": 71.67,
"Basic": 63.33,
"Biology": 33.33,
"Chemistry": 30.0,
"Clinical": 60.0,
"Computer": 46.67,
"Design": 80.0,
"Diagnostics": 40.0,
"Economics": 70.0,
"Electronics": 30.0,
"Energy": 50.0,
"Finance": 33.33,
"Geography": 40.0,
"History": 76.67,
"Literature": 93.33,
"Manage": 50.0,
"Marketing": 53.33,
"Materials": 46.67,
"Math": 56.67,
"Mechanical": 43.33,
"Music": 33.33,
"Pharmacy": 66.67,
"Physics": 33.33,
"Psychology": 73.33,
"Public": 56.67,
"Sociology": 70.0
},
"difficulty_score": {
"Medium": 52.59,
"Easy": 65.08,
"Hard": 38.67
},
"acc_stderr": 0,
"acc": 53.89
},
"MMMU_Pro_standard": {
"accuracy": 36.07,
"subject_score": {
"History": 41.07,
"Design": 56.67,
"Literature": 63.46,
"Sociology": 50.0,
"Art": 60.38,
"Agriculture": 33.33,
"Pharmacy": 36.84,
"Clinical_Medicine": 32.2,
"Accounting": 25.86,
"Public_Health": 18.97,
"Physics": 31.67,
"Energy_and_Power": 29.31,
"Art_Theory": 67.27,
"Psychology": 30.0,
"Biology": 25.42,
"Economics": 33.9,
"Manage": 32.0,
"Finance": 33.33,
"Architecture_and_Engineering": 26.67,
"Diagnostics_and_Laboratory_Medicine": 21.67,
"Basic_Medical_Science": 40.38,
"Mechanical_Engineering": 38.98,
"Electronics": 45.0,
"Computer_Science": 30.0,
"Math": 41.67,
"Music": 26.67,
"Marketing": 35.59,
"Materials": 18.33,
"Chemistry": 33.33,
"Geography": 30.77
},
"difficulty_score": {
"Medium": 32.46,
"Hard": 29.68,
"Easy": 46.4
},
"acc_stderr": 0,
"acc": 36.07
},
"MMMU_Pro_vision": {
"accuracy": 27.98,
"subject_score": {
"Art": 28.3,
"Sociology": 40.74,
"Agriculture": 25.0,
"Design": 40.0,
"History": 32.14,
"Literature": 61.54,
"Finance": 36.67,
"Pharmacy": 24.56,
"Clinical_Medicine": 23.73,
"Accounting": 34.48,
"Physics": 28.33,
"Public_Health": 29.31,
"Energy_and_Power": 18.97,
"Art_Theory": 34.55,
"Psychology": 31.67,
"Architecture_and_Engineering": 15.0,
"Manage": 26.0,
"Biology": 27.12,
"Economics": 30.51,
"Diagnostics_and_Laboratory_Medicine": 16.67,
"Electronics": 16.67,
"Mechanical_Engineering": 22.03,
"Basic_Medical_Science": 28.85,
"Computer_Science": 30.0,
"Math": 25.0,
"Music": 33.33,
"Marketing": 28.81,
"Materials": 15.0,
"Chemistry": 20.0,
"Geography": 19.23
},
"acc_stderr": 0,
"acc": 27.98
},
"MmvetV2": {
"reject_info": {
"reject_rate": 0.19,
"reject_number": 1,
"total_question": 517
},
"accuracy": 66.2209,
"capability_scores": {
"math": 73.52941176470588,
"ocr": 71.00961538461542,
"spat": 61.167512690355316,
"rec": 63.406326034063355,
"know": 61.025641025641,
"gen": 67.67272727272737,
"seq": 61.999999999999986
},
"capability_detail_scores": {
"math_ocr": 68.18181818181817,
"math_spat_ocr": 90.0,
"math_spat_rec_ocr": 50.0,
"spat_rec": 55.714285714285715,
"spat_ocr": 63.46153846153846,
"spat_rec_ocr": 33.33333333333333,
"spat_know_ocr": 87.5,
"rec_ocr": 70.0,
"spat_rec_know": 45.0,
"ocr": 83.75,
"rec": 72.37288135593221,
"rec_know": 64.61538461538461,
"gen_rec_know": 63.49999999999998,
"gen_rec_know_ocr": 73.84615384615387,
"gen_spat_rec_ocr": 73.25581395348838,
"gen_spat_ocr": 85.00000000000001,
"math_gen_ocr_seq_spat": 100.0,
"math_ocr_seq_spat_rec": 0.0,
"gen_spat_rec": 51.36363636363635,
"math_spat_ocr_gen": 100.0,
"spat_rec_seq": 28.57142857142857,
"spat_rec_ocr_seq": 66.66666666666666,
"gen_spat_rec_know": 56.66666666666668,
"gen_rec": 73.5294117647059,
"spat_rec_know_ocr": 0.0,
"gen_know_ocr_spat_rec": 65.0,
"math_rec_ocr": 100.0,
"gen_rec_ocr": 72.0,
"gen_rec_ocr_seq": 77.14285714285715,
"gen_ocr": 73.84615384615384,
"gen_rec_seq": 62.85714285714287,
"rec_seq": 55.99999999999999,
"gen_spat_rec_seq": 85.00000000000001,
"rec_know_seq": 0.0,
"gen_rec_know_seq": 45.0,
"gen_ocr_seq_spat_rec": 53.333333333333336,
"gen_know_ocr_seq_rec": 90.0,
"math_rec_know": 0.0,
"rec_ocr_seq": 100.0
},
"acc_stderr": 0,
"acc": 66.2209
},
"MathVerse": {
"Text Lite": {
"accuracy": 34.9,
"correct": 275,
"total": 788
},
"Total": {
"accuracy": 32.49,
"correct": 1280,
"total": 3940
},
"Vision Dominant": {
"accuracy": 28.43,
"correct": 224,
"total": 788
},
"Vision Intensive": {
"accuracy": 32.36,
"correct": 255,
"total": 788
},
"Text Dominant": {
"accuracy": 43.15,
"correct": 340,
"total": 788
},
"Vision Only": {
"accuracy": 23.6,
"correct": 186,
"total": 788
},
"accuracy": 32.49,
"acc_stderr": 0,
"acc": 32.49
},
"Ocrlite": {
"final_score": [
1129,
1644
],
"accuracy": 68.674,
"Key Information Extraction-Bookshelf": [
28,
51,
0.549,
{
"Default": [
28,
51,
0.549
]
}
],
"Scene Text-centric VQA-diet_constraints": [
57,
90,
0.633,
{
"Default": [
57,
90,
0.633
]
}
],
"Doc-oriented VQA-Control": [
104,
189,
0.55,
{
"Default": [
104,
189,
0.55
]
}
],
"Doc-oriented VQA": [
139,
204,
0.681,
{
"Default": [
139,
204,
0.681
]
}
],
"Scene Text-centric VQA-Fake_logo": [
54,
119,
0.454,
{
"Default": [
54,
119,
0.454
]
}
],
"Handwritten Mathematical Expression Recognition": [
2,
100,
0.02,
{
"Default": [
2,
100,
0.02
]
}
],
"Key Information Extraction": [
178,
209,
0.852,
{
"Default": [
178,
209,
0.852
]
}
],
"Scene Text-centric VQA-Control": [
171,
200,
0.855,
{
"Default": [
171,
200,
0.855
]
}
],
"Scene Text-centric VQA": [
229,
282,
0.812,
{
"Default": [
229,
282,
0.812
]
}
],
"Artistic Text Recognition": [
40,
50,
0.8,
{
"Default": [
40,
50,
0.8
]
}
],
"Irregular Text Recognition": [
40,
50,
0.8,
{
"Default": [
40,
50,
0.8
]
}
],
"Non-Semantic Text Recognition": [
38,
50,
0.76,
{
"Default": [
38,
50,
0.76
]
}
],
"Regular Text Recognition": [
49,
50,
0.98,
{
"Default": [
49,
50,
0.98
]
}
],
"acc_stderr": 0,
"acc": 68.674
},
"OcrliteZh": {
"final_score": [
97,
234
],
"accuracy": 41.453,
"Docvqa": [
3,
10,
0.3,
{
"Default": [
3,
10,
0.3
]
}
],
"Chartqa-human": [
5,
10,
0.5,
{
"Default": [
5,
10,
0.5
]
}
],
"Chartqa-au": [
3,
10,
0.3,
{
"Default": [
3,
10,
0.3
]
}
],
"infographic": [
4,
10,
0.4,
{
"Default": [
4,
10,
0.4
]
}
],
"Key Information Extraction": [
29,
45,
0.644,
{
"Default": [
29,
45,
0.644
]
}
],
"Scene Text-centric VQA": [
19,
40,
0.475,
{
"Default": [
19,
40,
0.475
]
}
],
"Artistic Text Recognition": [
0,
11,
0.0,
{
"Default": [
0,
11,
0.0
]
}
],
"IrRegular Text Recognition": [
2,
11,
0.182,
{
"Default": [
2,
11,
0.182
]
}
],
"Non-semantic Text Recognition": [
4,
12,
0.333,
{
"Default": [
4,
12,
0.333
]
}
],
"Regular Text Recognition": [
6,
11,
0.545,
{
"Default": [
6,
11,
0.545
]
}
],
"Handwriting_CN": [
6,
20,
0.3,
{
"Default": [
6,
20,
0.3
]
}
],
"Chinese Unlimited": [
16,
44,
0.364,
{
"Default": [
16,
44,
0.364
]
}
],
"acc_stderr": 0,
"acc": 41.453
},
"CharXiv": {
"descriptive": {
"Overall Score": 64.35,
"By Question": {
"Q1": 57.79,
"Q2": 74.78,
"Q3": 63.95,
"Q4": 75.1,
"Q5": 69.46,
"Q6": 60.64,
"Q7": 47.86,
"Q8": 68.3,
"Q9": 65.17,
"Q10": 60.96,
"Q11": 57.71,
"Q12": 57.69,
"Q13": 64.84,
"Q14": 63.12,
"Q15": 67.09,
"Q16": 61.11,
"Q17": 39.73,
"Q18": 86.64,
"Q19": 86.15
},
"By Category": {
"Information Extraction": 64.29,
"Enumeration": 65.7,
"Pattern Recognition": 73.58,
"Counting": 63.61,
"Compositionality": 39.73
},
"By Subplot": {
"1 Subplot": 73.38,
"2-4 Subplots": 63.82,
"5+ Subplots": 50.42
},
"By Subject": {
"Computer Science": 65.48,
"Economics": 65.76,
"Electrical Engineering and Systems Science": 70.38,
"Mathematics": 65.93,
"Physics": 58.86,
"Quantitative Biology": 55.95,
"Quantitative Finance": 66.38,
"Statistics": 66.59
},
"By Year": {
"2020": 66.5,
"2021": 61.3,
"2022": 64.45,
"2023": 65.32
},
"N_valid": 4000,
"N_invalid": 0,
"Question Type": "Descriptive"
},
"reasoning": {
"Overall Score": 28.8,
"By Answer Type": {
"Text-in-Chart": 33.18,
"Text-in-General": 33.33,
"Number-in-Chart": 27.16,
"Number-in-General": 20.09
},
"By Source": {
"GPT-Sourced": 33.15,
"GPT-Inspired": 25.46,
"Completely Human": 28.67
},
"By Subject": {
"Computer Science": 30.16,
"Economics": 27.54,
"Electrical Engineering and Systems Science": 27.73,
"Mathematics": 34.07,
"Physics": 29.92,
"Quantitative Biology": 31.75,
"Quantitative Finance": 20.69,
"Statistics": 27.43
},
"By Year": {
"2020": 21.46,
"2021": 28.74,
"2022": 32.38,
"2023": 32.66
},
"By Subplot": {
"1 Subplot": 30.57,
"2-4 Subplots": 29.37,
"5+ Subplots": 25.0
},
"N_valid": 1000,
"N_invalid": 0,
"Question Type": "Reasoning"
},
"accuracy": 46.57,
"acc_stderr": 0,
"acc": 46.57
},
"MathVision": {
"accuracy": 33.22,
"acc_stderr": 0,
"acc": 33.22
},
"CII-Bench": {
"accuracy": 50.85,
"domain_score": {
"Art": 52.21,
"Env.": 55.56,
"Society": 50.27,
"CTC": 45.19,
"Life": 51.52,
"Politics": 62.5
},
"emotion_score": {
"Negative": 55.09,
"Positive": 47.44,
"Neutral": 49.62
},
"acc_stderr": 0,
"acc": 50.85
},
"Blink": {
"accuracy": 54.29,
"Art Style": 70.94,
"Counting": 55.83,
"Forensic Detection": 37.12,
"Functional Correspondence": 30.0,
"IQ Test": 32.67,
"Jigsaw": 75.33,
"Multi-view Reasoning": 51.88,
"Object Localization": 54.1,
"Relative Depth": 66.94,
"Relative Reflectance": 34.33,
"Semantic Correspondence": 35.25,
"Spatial Relation": 75.52,
"Visual Correspondence": 57.56,
"Visual Similarity": 82.96,
"acc_stderr": 0,
"acc": 54.29
}
}
}
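
The file follows a simple schema: a "config_general" block with model metadata, and a "results" block where each benchmark entry exposes its headline score under "acc" (plus benchmark-specific breakdowns). Below is a minimal sketch, assuming Python 3 with only the standard library and that this file has been downloaded locally under its repository name, of how one might read the per-benchmark scores:

import json

# Load the results file (local path is an assumption for illustration).
with open("results_2025-01-25T10-42-53.190540.json", encoding="utf-8") as f:
    data = json.load(f)

# Model metadata lives under "config_general".
print(data["config_general"]["model_name"])  # "Gemma-3-27b-it"

# Each benchmark entry in "results" carries a top-level "acc" field.
for benchmark, scores in data["results"].items():
    print(f"{benchmark}: {scores['acc']}")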