Commit d100b92
Parent(s): a878d0b

added res

Files changed:
- LsTam/stellialm_mini_qwen_9tasks/results_2024-12-29T10-18-29.558030.json (+0 -0)
- LsTam/stellialm_mini_qwen_9tasks/results_2024-12-29T10-18-29.558030_norm.json (+26 -0)
- LsTam/stellialm_smallfr_qwen7b_9tplus/results_2024-12-28T23-03-48.611775.json (+0 -0)
- LsTam/stellialm_smallfr_qwen7b_9tplus/results_2024-12-28T23-03-48.611775_norm.json (+26 -0)
- LsTam/stellialm_smallfr_qwen7b_lead/results_2024-12-28T22-26-57.398837.json (+0 -0)
- LsTam/stellialm_smallfr_qwen7b_lead/results_2024-12-28T22-26-57.398837_norm.json (+26 -0)
- utter-project/EuroLLM-9B/results_2024-12-28T21-47-18.075168.json (+0 -0)
- utter-project/EuroLLM-9B/results_2024-12-28T21-47-18.075168_norm.json (+26 -0)
LsTam/stellialm_mini_qwen_9tasks/results_2024-12-29T10-18-29.558030.json (ADDED)
The diff for this file is too large to render. See raw diff.
LsTam/stellialm_mini_qwen_9tasks/results_2024-12-29T10-18-29.558030_norm.json (ADDED)
@@ -0,0 +1,26 @@
+{
+    "config": {
+        "model_name": "LsTam/stellialm_mini_qwen_9tasks",
+        "model_dtype": "torch.float16"
+    },
+    "results": {
+        "BBH-fr": {
+            "metric_name": 0.2004
+        },
+        "GPQA-fr": {
+            "metric_name": 0.0919
+        },
+        "IFEval-fr": {
+            "metric_name": 0.1406
+        },
+        "MUSR-fr": {
+            "metric_name": 0.025699999999999997
+        },
+        "MATH Lvl5-fr": {
+            "metric_name": 0.1475
+        },
+        "MMMLU-fr": {
+            "metric_name": 0.46090000000000003
+        }
+    }
+}
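Each `_norm.json` added in this commit follows the same small schema: a `config` block with the model name and dtype, and a `results` block mapping each French benchmark to a single normalized score stored under the key `metric_name`. As a minimal sketch (not part of this commit; the path is simply one of the files added above), such a file could be read like this:

```python
# Sketch: load one of the normalized result files added in this commit and
# print its per-benchmark scores. Path and formatting are illustrative only.
import json

path = "LsTam/stellialm_mini_qwen_9tasks/results_2024-12-29T10-18-29.558030_norm.json"
with open(path) as f:
    data = json.load(f)

# The config block identifies the evaluated model and its dtype.
print(f"model: {data['config']['model_name']} ({data['config']['model_dtype']})")

# Each results entry holds one normalized score under the key "metric_name".
for task, scores in data["results"].items():
    print(f"{task:>12}: {scores['metric_name']:.4f}")
```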
LsTam/stellialm_smallfr_qwen7b_9tplus/results_2024-12-28T23-03-48.611775.json (ADDED)
The diff for this file is too large to render. See raw diff.
LsTam/stellialm_smallfr_qwen7b_9tplus/results_2024-12-28T23-03-48.611775_norm.json (ADDED)
@@ -0,0 +1,26 @@
+{
+    "config": {
+        "model_name": "LsTam/stellialm_smallfr_qwen7b_9tplus",
+        "model_dtype": "torch.float16"
+    },
+    "results": {
+        "BBH-fr": {
+            "metric_name": 0.24780000000000002
+        },
+        "GPQA-fr": {
+            "metric_name": 0.1449
+        },
+        "IFEval-fr": {
+            "metric_name": 0.1242
+        },
+        "MUSR-fr": {
+            "metric_name": 0.0436
+        },
+        "MATH Lvl5-fr": {
+            "metric_name": 0.21530000000000002
+        },
+        "MMMLU-fr": {
+            "metric_name": 0.5478999999999999
+        }
+    }
+}
LsTam/stellialm_smallfr_qwen7b_lead/results_2024-12-28T22-26-57.398837.json (ADDED)
The diff for this file is too large to render. See raw diff.
LsTam/stellialm_smallfr_qwen7b_lead/results_2024-12-28T22-26-57.398837_norm.json (ADDED)
@@ -0,0 +1,26 @@
+{
+    "config": {
+        "model_name": "LsTam/stellialm_smallfr_qwen7b_lead",
+        "model_dtype": "torch.float16"
+    },
+    "results": {
+        "BBH-fr": {
+            "metric_name": 0.2422
+        },
+        "GPQA-fr": {
+            "metric_name": 0.12960000000000002
+        },
+        "IFEval-fr": {
+            "metric_name": 0.1195
+        },
+        "MUSR-fr": {
+            "metric_name": 0.038
+        },
+        "MATH Lvl5-fr": {
+            "metric_name": 0.22519999999999998
+        },
+        "MMMLU-fr": {
+            "metric_name": 0.5555
+        }
+    }
+}
utter-project/EuroLLM-9B/results_2024-12-28T21-47-18.075168.json (ADDED)
The diff for this file is too large to render. See raw diff.
utter-project/EuroLLM-9B/results_2024-12-28T21-47-18.075168_norm.json (ADDED)
@@ -0,0 +1,26 @@
+{
+    "config": {
+        "model_name": "utter-project/EuroLLM-9B",
+        "model_dtype": "torch.float16"
+    },
+    "results": {
+        "BBH-fr": {
+            "metric_name": 0.18789999999999998
+        },
+        "GPQA-fr": {
+            "metric_name": 0.045899999999999996
+        },
+        "IFEval-fr": {
+            "metric_name": 0.10640000000000001
+        },
+        "MUSR-fr": {
+            "metric_name": 0.0452
+        },
+        "MATH Lvl5-fr": {
+            "metric_name": 0.0478
+        },
+        "MMMLU-fr": {
+            "metric_name": 0.4451
+        }
+    }
+}
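Since all four normalized files added in this commit share the same layout, they can be collected into one per-benchmark comparison. A hedged sketch, assuming the files sit on disk under the same relative paths as in this repository (the glob pattern, column widths, and benchmark ordering are illustrative, not part of the commit):

```python
# Sketch: gather every *_norm.json file added here and print a comparison
# table of normalized scores, one row per model and one column per benchmark.
import glob
import json

rows = {}
for path in glob.glob("**/results_*_norm.json", recursive=True):
    with open(path) as f:
        data = json.load(f)
    model = data["config"]["model_name"]
    rows[model] = {task: v["metric_name"] for task, v in data["results"].items()}

tasks = ["BBH-fr", "GPQA-fr", "IFEval-fr", "MUSR-fr", "MATH Lvl5-fr", "MMMLU-fr"]
print("model".ljust(42) + "".join(t.rjust(14) for t in tasks))
for model, scores in sorted(rows.items()):
    # Missing benchmarks (if any) are shown as NaN rather than raising.
    print(model.ljust(42) + "".join(f"{scores.get(t, float('nan')):14.4f}" for t in tasks))
```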