Commit cd88ae0 by lvkaokao
Parent(s): 1374c54
init evaluation results.
This view is limited to 50 files because it contains too many changes.
- ISTA-DASLab/results_2024-05-14-21-00-56.json +651 -0
- ISTA-DASLab/results_2024-05-15-05-17-12.json +651 -0
- ISTA-DASLab/results_2024-05-19-14-09-41.json +586 -0
- ISTA-DASLab/results_2024-05-20-03-47-37.json +586 -0
- ISTA-DASLab/results_2024-05-20-10-51-30.json +592 -0
- Intel/results_2024-04-30-16-11-38.json +596 -0
- Intel/results_2024-04-30-18-06-33.json +596 -0
- Intel/results_2024-04-30-18-42-01.json +596 -0
- Intel/results_2024-04-30-20-44-29.json +596 -0
- Intel/results_2024-04-30-21-43-07.json +596 -0
- Intel/results_2024-04-30-23-05-41.json +580 -0
- Intel/results_2024-05-01-00-22-37.json +596 -0
- Intel/results_2024-05-01-00-56-14.json +596 -0
- Intel/results_2024-05-01-02-20-44.json +596 -0
- Intel/results_2024-05-01-04-10-41.json +596 -0
- Intel/results_2024-05-01-05-22-27.json +596 -0
- Intel/results_2024-05-06-18-48-05.json +596 -0
- Intel/results_2024-05-06-20-48-10.json +596 -0
- Intel/results_2024-05-09-05-59-04.json +596 -0
- Intel/results_2024-05-10-10-37-29.json +596 -0
- Intel/results_2024-05-18-13-12-26.json +595 -0
- Intel/results_2024-05-18-15-50-33.json +595 -0
- Nan-Do/results_2024-05-21-12-09-53.json +579 -0
- PrunaAI/results_2024-05-13-21-54-31.json +579 -0
- Qwen/results_2024-04-26-20-25-12.json +599 -0
- Qwen/results_2024-04-26-21-31-31.json +583 -0
- Qwen/results_2024-04-28-05-05-52.json +583 -0
- Qwen/results_2024-04-28-13-06-41.json +599 -0
- Qwen/results_2024-05-05-22-20-44.json +579 -0
- Qwen/results_2024-05-07-09-41-53.json +579 -0
- SanctumAI/results_2024-05-03-22-24-42.json +579 -0
- TechxGenus/results_2024-05-01-22-34-56.json +583 -0
- TechxGenus/results_2024-05-02-00-07-14.json +588 -0
- TheBloke/results_2024-04-27-02-47-01.json +586 -0
- TheBloke/results_2024-04-27-08-48-07.json +582 -0
- TheBloke/results_2024-04-27-23-05-56.json +579 -0
- TheBloke/results_2024-04-29-01-54-05.json +583 -0
- TheBloke/results_2024-04-29-22-05-21.json +579 -0
- TheBloke/results_2024-05-02-16-52-29.json +582 -0
- TheBloke/results_2024-05-03-08-18-06.json +579 -0
- TheBloke/results_2024-05-07-15-11-38.json +586 -0
- TheBloke/results_2024-05-07-21-25-07.json +579 -0
- TheBloke/results_2024-05-11-15-26-38.json +582 -0
- TheBloke/results_2024-05-11-21-17-09.json +586 -0
- TheBloke/results_2024-05-12-19-26-44.json +586 -0
- alokabhishek/results_2024-05-08-02-05-18.json +589 -0
- astronomer/results_2024-05-13-17-16-12.json +588 -0
- baichuan-inc/results_2024-05-13-19-42-01.json +586 -0
- baichuan-inc/results_2024-05-14-00-28-25.json +586 -0
- casperhansen/results_2024-05-08-20-07-49.json +582 -0
ISTA-DASLab/results_2024-05-14-21-00-56.json
ADDED
@@ -0,0 +1,651 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-14-21-00-56",
    "total_evaluation_time_secondes": "",
    "model_name": "ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8",
    "model_sha": "",
    "model_dtype": "2bit",
    "model_size": 2.27,
    "model_params": 7,
    "quant_type": "AQLM",
    "precision": "2bit"
  },
  "results": {
    "harness|winogrande|0": {
      "acc,none": 0.6408839779005525,
      "acc_stderr,none": 0.013483115202120241,
      "alias": "winogrande"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.5616411073491336,
      "acc_stderr,none": 0.004951717622007965,
      "acc_norm,none": 0.7385978888667596,
      "acc_norm_stderr,none": 0.00438500499892336,
      "alias": "hellaswag"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.264,
      "acc_stderr,none": 0.019732885585922098,
      "acc_norm,none": 0.376,
      "acc_norm_stderr,none": 0.021683827539286115,
      "alias": "openbookqa"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.4638922888616891,
      "acc_stderr,none": 0.017457800422268625,
      "alias": "truthfulqa_mc1"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.6316639380666816,
      "acc_stderr,none": 0.015414325790023395,
      "alias": "truthfulqa_mc2"
    },
    "harness|mmlu|0": {
      "acc,none": 0.4016521862982481,
      "acc_stderr,none": 0.004063454732088467,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.3742826780021254,
      "acc_stderr,none": 0.006918946610656743
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.23809523809523808,
      "acc_stderr,none": 0.03809523809523809
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.503030303030303,
      "acc_stderr,none": 0.03904272341431857
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.49019607843137253,
      "acc_stderr,none": 0.03508637358630572
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.5485232067510548,
      "acc_stderr,none": 0.0323936001739747
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.5950413223140496,
      "acc_stderr,none": 0.04481137755942469
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.4444444444444444,
      "acc_stderr,none": 0.04803752235190193
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.44785276073619634,
      "acc_stderr,none": 0.03906947479456602
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.4479768786127168,
      "acc_stderr,none": 0.02677299065336182
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.23798882681564246,
      "acc_stderr,none": 0.014242630070574884
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.42765273311897106,
      "acc_stderr,none": 0.028099240775809563
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.3888888888888889,
      "acc_stderr,none": 0.027125115513166854
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.33833116036505867,
      "acc_stderr,none": 0.0120842656263442
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.4619883040935672,
      "acc_stderr,none": 0.03823727092882307
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.4280656581911812,
      "acc_stderr,none": 0.00874063365417062
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.35,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.4075471698113208,
      "acc_stderr,none": 0.030242233800854498
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.35260115606936415,
      "acc_stderr,none": 0.03643037168958548
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.27,
      "acc_stderr,none": 0.0446196043338474
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.42152466367713004,
      "acc_stderr,none": 0.033141902221106564
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.4854368932038835,
      "acc_stderr,none": 0.049486373240266356
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.6239316239316239,
      "acc_stderr,none": 0.031733936329694824
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.36,
      "acc_stderr,none": 0.048241815132442176
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.51213282247765,
      "acc_stderr,none": 0.017874698667491345
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.434640522875817,
      "acc_stderr,none": 0.028384256704883044
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.32978723404255317,
      "acc_stderr,none": 0.0280459469420424
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.34191176470588236,
      "acc_stderr,none": 0.02881472242225418
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.3192771084337349,
      "acc_stderr,none": 0.036293353299478595
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.46246343841403964,
      "acc_stderr,none": 0.008844738621946012
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.2719298245614035,
      "acc_stderr,none": 0.04185774424022056
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.5202020202020202,
      "acc_stderr,none": 0.03559443565563919
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.5544041450777202,
      "acc_stderr,none": 0.03587014986075659
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.36923076923076925,
      "acc_stderr,none": 0.024468615241478916
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.41596638655462187,
      "acc_stderr,none": 0.03201650100739615
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.44954128440366975,
      "acc_stderr,none": 0.02132788141782337
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.3969465648854962,
      "acc_stderr,none": 0.04291135671009225
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.4035947712418301,
      "acc_stderr,none": 0.019848280168401164
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.5818181818181818,
      "acc_stderr,none": 0.047245774057315705
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.5591836734693878,
      "acc_stderr,none": 0.03178419114175363
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.6268656716417911,
      "acc_stderr,none": 0.03419832608176008
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.68,
      "acc_stderr,none": 0.046882617226215034
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.35712020298128766,
      "acc_stderr,none": 0.008497330653183912
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.35,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.45185185185185184,
      "acc_stderr,none": 0.04299268905480864
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.39473684210526316,
      "acc_stderr,none": 0.039777499346220734
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.375,
      "acc_stderr,none": 0.04048439222695598
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.25,
      "acc_stderr,none": 0.04351941398892446
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.38,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.31,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.23529411764705882,
      "acc_stderr,none": 0.04220773659171451
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.47,
      "acc_stderr,none": 0.050161355804659205
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.3446808510638298,
      "acc_stderr,none": 0.03106898596312215
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.3931034482758621,
      "acc_stderr,none": 0.040703290137070705
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.35714285714285715,
      "acc_stderr,none": 0.024677862841332783
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.4161290322580645,
      "acc_stderr,none": 0.028040981380761543
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.3251231527093596,
      "acc_stderr,none": 0.032957975663112704
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.48,
      "acc_stderr,none": 0.050211673156867795
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.3,
      "acc_stderr,none": 0.02794045713622841
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.3509933774834437,
      "acc_stderr,none": 0.03896981964257375
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.3101851851851852,
      "acc_stderr,none": 0.03154696285656628
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.30357142857142855,
      "acc_stderr,none": 0.04364226155841044
    },
    "harness|arc:easy|0": {
      "acc,none": 0.7529461279461279,
      "acc_stderr,none": 0.00885005516145924,
      "acc_norm,none": 0.7066498316498316,
      "acc_norm_stderr,none": 0.009342508331708563,
      "alias": "arc_easy"
    },
    "harness|boolq|0": {
      "acc,none": 0.7825688073394496,
      "acc_stderr,none": 0.007214641080602781,
      "alias": "boolq"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 6.578457255981092,
      "perplexity_stderr,none": 0.19631273674006466,
      "acc,none": 0.6089656510770425,
      "acc_stderr,none": 0.006798544197091019,
      "alias": "lambada_openai"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.45563139931740615,
      "acc_stderr,none": 0.014553749939306864,
      "acc_norm,none": 0.48976109215017066,
      "acc_norm_stderr,none": 0.014608326906285015,
      "alias": "arc_challenge"
    },
    "harness|piqa|0": {
      "acc,none": 0.7742110990206746,
      "acc_stderr,none": 0.00975498067091733,
      "acc_norm,none": 0.7758433079434167,
      "acc_norm_stderr,none": 0.009729897956410027,
      "alias": "piqa"
    }
  },
  "task_info": {
    "model": "ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8",
    "revision": "main",
    "private": false,
    "params": 7,
    "architectures": "MistralForCausalLM",
    "quant_type": "AQLM",
    "precision": "2bit",
    "model_params": 7,
    "model_size": 2.27,
    "weight_dtype": "int2",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Waiting",
    "submitted_time": "2024-05-13T11:54:45Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "in_group_size": 8,
    "linear_weights_not_to_quantize": [
      "model.layers.0.input_layernorm.weight",
      "model.layers.0.post_attention_layernorm.weight",
      "model.layers.1.input_layernorm.weight",
      "model.layers.1.post_attention_layernorm.weight",
      "model.layers.2.input_layernorm.weight",
      "model.layers.2.post_attention_layernorm.weight",
      "model.layers.3.input_layernorm.weight",
      "model.layers.3.post_attention_layernorm.weight",
      "model.layers.4.input_layernorm.weight",
      "model.layers.4.post_attention_layernorm.weight",
      "model.layers.5.input_layernorm.weight",
      "model.layers.5.post_attention_layernorm.weight",
      "model.layers.6.input_layernorm.weight",
      "model.layers.6.post_attention_layernorm.weight",
      "model.layers.7.input_layernorm.weight",
      "model.layers.7.post_attention_layernorm.weight",
      "model.layers.8.input_layernorm.weight",
      "model.layers.8.post_attention_layernorm.weight",
      "model.layers.9.input_layernorm.weight",
      "model.layers.9.post_attention_layernorm.weight",
      "model.layers.10.input_layernorm.weight",
      "model.layers.10.post_attention_layernorm.weight",
      "model.layers.11.input_layernorm.weight",
      "model.layers.11.post_attention_layernorm.weight",
      "model.layers.12.input_layernorm.weight",
      "model.layers.12.post_attention_layernorm.weight",
      "model.layers.13.input_layernorm.weight",
      "model.layers.13.post_attention_layernorm.weight",
      "model.layers.14.input_layernorm.weight",
      "model.layers.14.post_attention_layernorm.weight",
      "model.layers.15.input_layernorm.weight",
      "model.layers.15.post_attention_layernorm.weight",
      "model.layers.16.input_layernorm.weight",
      "model.layers.16.post_attention_layernorm.weight",
      "model.layers.17.input_layernorm.weight",
      "model.layers.17.post_attention_layernorm.weight",
      "model.layers.18.input_layernorm.weight",
      "model.layers.18.post_attention_layernorm.weight",
      "model.layers.19.input_layernorm.weight",
      "model.layers.19.post_attention_layernorm.weight",
      "model.layers.20.input_layernorm.weight",
      "model.layers.20.post_attention_layernorm.weight",
      "model.layers.21.input_layernorm.weight",
      "model.layers.21.post_attention_layernorm.weight",
      "model.layers.22.input_layernorm.weight",
      "model.layers.22.post_attention_layernorm.weight",
      "model.layers.23.input_layernorm.weight",
      "model.layers.23.post_attention_layernorm.weight",
      "model.layers.24.input_layernorm.weight",
      "model.layers.24.post_attention_layernorm.weight",
      "model.layers.25.input_layernorm.weight",
      "model.layers.25.post_attention_layernorm.weight",
      "model.layers.26.input_layernorm.weight",
      "model.layers.26.post_attention_layernorm.weight",
      "model.layers.27.input_layernorm.weight",
      "model.layers.27.post_attention_layernorm.weight",
      "model.layers.28.input_layernorm.weight",
      "model.layers.28.post_attention_layernorm.weight",
      "model.layers.29.input_layernorm.weight",
      "model.layers.29.post_attention_layernorm.weight",
      "model.layers.30.input_layernorm.weight",
      "model.layers.30.post_attention_layernorm.weight",
      "model.layers.31.input_layernorm.weight",
      "model.layers.31.post_attention_layernorm.weight",
      "model.embed_tokens.weight",
      "model.norm.weight",
      "lm_head.weight"
    ],
    "nbits_per_codebook": 8,
    "num_codebooks": 2,
    "out_group_size": 1,
    "quant_method": "aqlm"
  },
  "versions": {
    "harness|winogrande|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|arc:easy|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|arc:challenge|0": 1.0,
    "harness|piqa|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1715687560.163722,
  "config": {
    "model": "hf",
    "model_args": "pretrained=ISTA-DASLab/Mistral-7B-Instruct-v0.2-AQLM-2Bit-2x8,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
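Each results file in this commit shares the same layout: a "config_general" header describing the model and its quantization scheme, a "results" map keyed by "harness|<task>|<n_shot>", then "task_info", "quantization_config", "versions", "n-shot", and the lm-evaluation-harness run "config". A minimal sketch of reading the headline metrics out of one file with only the Python standard library; the local path is an assumption for illustration:

import json

# Assumption: a local checkout of this dataset repo.
path = "ISTA-DASLab/results_2024-05-14-21-00-56.json"

with open(path) as f:
    report = json.load(f)

print(report["config_general"]["model_name"])

# Keys look like "harness|winogrande|0"; values carry "acc,none" and,
# for some tasks, "acc_norm,none" or "perplexity,none".
for task, metrics in report["results"].items():
    name = task.split("|")[1]
    acc = metrics.get("acc,none")
    if acc is not None and not name.startswith("mmlu_"):
        print(f"{name:>20} acc={acc:.4f}")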
ISTA-DASLab/results_2024-05-15-05-17-12.json
ADDED
@@ -0,0 +1,651 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-05-15-05-17-12",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "2bit",
|
15 |
+
"model_size": 4.08,
|
16 |
+
"model_params": 7,
|
17 |
+
"quant_type": "AQLM",
|
18 |
+
"precision": "2bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|arc:easy|0": {
|
22 |
+
"acc,none": 0.7693602693602694,
|
23 |
+
"acc_stderr,none": 0.008643708884504999,
|
24 |
+
"acc_norm,none": 0.742003367003367,
|
25 |
+
"acc_norm_stderr,none": 0.008977970005203405,
|
26 |
+
"alias": "arc_easy"
|
27 |
+
},
|
28 |
+
"harness|openbookqa|0": {
|
29 |
+
"acc,none": 0.322,
|
30 |
+
"acc_stderr,none": 0.020916668330019882,
|
31 |
+
"acc_norm,none": 0.396,
|
32 |
+
"acc_norm_stderr,none": 0.021893529941665817,
|
33 |
+
"alias": "openbookqa"
|
34 |
+
},
|
35 |
+
"harness|truthfulqa:mc2|0": {
|
36 |
+
"acc,none": 0.501517122289186,
|
37 |
+
"acc_stderr,none": 0.015111724525572797,
|
38 |
+
"alias": "truthfulqa_mc2"
|
39 |
+
},
|
40 |
+
"harness|mmlu|0": {
|
41 |
+
"acc,none": 0.5641646489104116,
|
42 |
+
"acc_stderr,none": 0.0039754730963606965,
|
43 |
+
"alias": "mmlu"
|
44 |
+
},
|
45 |
+
"harness|mmlu_humanities|0": {
|
46 |
+
"alias": " - humanities",
|
47 |
+
"acc,none": 0.5175345377258236,
|
48 |
+
"acc_stderr,none": 0.006842487140618199
|
49 |
+
},
|
50 |
+
"harness|mmlu_formal_logic|0": {
|
51 |
+
"alias": " - formal_logic",
|
52 |
+
"acc,none": 0.47619047619047616,
|
53 |
+
"acc_stderr,none": 0.04467062628403273
|
54 |
+
},
|
55 |
+
"harness|mmlu_high_school_european_history|0": {
|
56 |
+
"alias": " - high_school_european_history",
|
57 |
+
"acc,none": 0.7272727272727273,
|
58 |
+
"acc_stderr,none": 0.03477691162163659
|
59 |
+
},
|
60 |
+
"harness|mmlu_high_school_us_history|0": {
|
61 |
+
"alias": " - high_school_us_history",
|
62 |
+
"acc,none": 0.7450980392156863,
|
63 |
+
"acc_stderr,none": 0.030587591351604246
|
64 |
+
},
|
65 |
+
"harness|mmlu_high_school_world_history|0": {
|
66 |
+
"alias": " - high_school_world_history",
|
67 |
+
"acc,none": 0.7468354430379747,
|
68 |
+
"acc_stderr,none": 0.028304657943035293
|
69 |
+
},
|
70 |
+
"harness|mmlu_international_law|0": {
|
71 |
+
"alias": " - international_law",
|
72 |
+
"acc,none": 0.7024793388429752,
|
73 |
+
"acc_stderr,none": 0.04173349148083499
|
74 |
+
},
|
75 |
+
"harness|mmlu_jurisprudence|0": {
|
76 |
+
"alias": " - jurisprudence",
|
77 |
+
"acc,none": 0.7037037037037037,
|
78 |
+
"acc_stderr,none": 0.04414343666854933
|
79 |
+
},
|
80 |
+
"harness|mmlu_logical_fallacies|0": {
|
81 |
+
"alias": " - logical_fallacies",
|
82 |
+
"acc,none": 0.6871165644171779,
|
83 |
+
"acc_stderr,none": 0.03642914578292405
|
84 |
+
},
|
85 |
+
"harness|mmlu_moral_disputes|0": {
|
86 |
+
"alias": " - moral_disputes",
|
87 |
+
"acc,none": 0.6416184971098265,
|
88 |
+
"acc_stderr,none": 0.025816756791584204
|
89 |
+
},
|
90 |
+
"harness|mmlu_moral_scenarios|0": {
|
91 |
+
"alias": " - moral_scenarios",
|
92 |
+
"acc,none": 0.25251396648044694,
|
93 |
+
"acc_stderr,none": 0.01453033020146865
|
94 |
+
},
|
95 |
+
"harness|mmlu_philosophy|0": {
|
96 |
+
"alias": " - philosophy",
|
97 |
+
"acc,none": 0.6591639871382636,
|
98 |
+
"acc_stderr,none": 0.026920841260776155
|
99 |
+
},
|
100 |
+
"harness|mmlu_prehistory|0": {
|
101 |
+
"alias": " - prehistory",
|
102 |
+
"acc,none": 0.6450617283950617,
|
103 |
+
"acc_stderr,none": 0.02662415247884585
|
104 |
+
},
|
105 |
+
"harness|mmlu_professional_law|0": {
|
106 |
+
"alias": " - professional_law",
|
107 |
+
"acc,none": 0.43415906127770537,
|
108 |
+
"acc_stderr,none": 0.01265903323706725
|
109 |
+
},
|
110 |
+
"harness|mmlu_world_religions|0": {
|
111 |
+
"alias": " - world_religions",
|
112 |
+
"acc,none": 0.7309941520467836,
|
113 |
+
"acc_stderr,none": 0.0340105262010409
|
114 |
+
},
|
115 |
+
"harness|mmlu_other|0": {
|
116 |
+
"alias": " - other",
|
117 |
+
"acc,none": 0.6501448342452526,
|
118 |
+
"acc_stderr,none": 0.008324635072589546
|
119 |
+
},
|
120 |
+
"harness|mmlu_business_ethics|0": {
|
121 |
+
"alias": " - business_ethics",
|
122 |
+
"acc,none": 0.64,
|
123 |
+
"acc_stderr,none": 0.04824181513244218
|
124 |
+
},
|
125 |
+
"harness|mmlu_clinical_knowledge|0": {
|
126 |
+
"alias": " - clinical_knowledge",
|
127 |
+
"acc,none": 0.6792452830188679,
|
128 |
+
"acc_stderr,none": 0.02872750295788026
|
129 |
+
},
|
130 |
+
"harness|mmlu_college_medicine|0": {
|
131 |
+
"alias": " - college_medicine",
|
132 |
+
"acc,none": 0.5780346820809249,
|
133 |
+
"acc_stderr,none": 0.0376574669386515
|
134 |
+
},
|
135 |
+
"harness|mmlu_global_facts|0": {
|
136 |
+
"alias": " - global_facts",
|
137 |
+
"acc,none": 0.38,
|
138 |
+
"acc_stderr,none": 0.048783173121456316
|
139 |
+
},
|
140 |
+
"harness|mmlu_human_aging|0": {
|
141 |
+
"alias": " - human_aging",
|
142 |
+
"acc,none": 0.6457399103139013,
|
143 |
+
"acc_stderr,none": 0.03210062154134987
|
144 |
+
},
|
145 |
+
"harness|mmlu_management|0": {
|
146 |
+
"alias": " - management",
|
147 |
+
"acc,none": 0.7669902912621359,
|
148 |
+
"acc_stderr,none": 0.04185832598928315
|
149 |
+
},
|
150 |
+
"harness|mmlu_marketing|0": {
|
151 |
+
"alias": " - marketing",
|
152 |
+
"acc,none": 0.8247863247863247,
|
153 |
+
"acc_stderr,none": 0.02490443909891822
|
154 |
+
},
|
155 |
+
"harness|mmlu_medical_genetics|0": {
|
156 |
+
"alias": " - medical_genetics",
|
157 |
+
"acc,none": 0.68,
|
158 |
+
"acc_stderr,none": 0.046882617226215034
|
159 |
+
},
|
160 |
+
"harness|mmlu_miscellaneous|0": {
|
161 |
+
"alias": " - miscellaneous",
|
162 |
+
"acc,none": 0.7522349936143039,
|
163 |
+
"acc_stderr,none": 0.015438083080568963
|
164 |
+
},
|
165 |
+
"harness|mmlu_nutrition|0": {
|
166 |
+
"alias": " - nutrition",
|
167 |
+
"acc,none": 0.6339869281045751,
|
168 |
+
"acc_stderr,none": 0.027582811415159617
|
169 |
+
},
|
170 |
+
"harness|mmlu_professional_accounting|0": {
|
171 |
+
"alias": " - professional_accounting",
|
172 |
+
"acc,none": 0.4787234042553192,
|
173 |
+
"acc_stderr,none": 0.029800481645628693
|
174 |
+
},
|
175 |
+
"harness|mmlu_professional_medicine|0": {
|
176 |
+
"alias": " - professional_medicine",
|
177 |
+
"acc,none": 0.5845588235294118,
|
178 |
+
"acc_stderr,none": 0.029935342707877746
|
179 |
+
},
|
180 |
+
"harness|mmlu_virology|0": {
|
181 |
+
"alias": " - virology",
|
182 |
+
"acc,none": 0.463855421686747,
|
183 |
+
"acc_stderr,none": 0.038823108508905954
|
184 |
+
},
|
185 |
+
"harness|mmlu_social_sciences|0": {
|
186 |
+
"alias": " - social_sciences",
|
187 |
+
"acc,none": 0.6499837504062398,
|
188 |
+
"acc_stderr,none": 0.00839741312665264
|
189 |
+
},
|
190 |
+
"harness|mmlu_econometrics|0": {
|
191 |
+
"alias": " - econometrics",
|
192 |
+
"acc,none": 0.37719298245614036,
|
193 |
+
"acc_stderr,none": 0.04559522141958216
|
194 |
+
},
|
195 |
+
"harness|mmlu_high_school_geography|0": {
|
196 |
+
"alias": " - high_school_geography",
|
197 |
+
"acc,none": 0.7222222222222222,
|
198 |
+
"acc_stderr,none": 0.03191178226713547
|
199 |
+
},
|
200 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
201 |
+
"alias": " - high_school_government_and_politics",
|
202 |
+
"acc,none": 0.7772020725388601,
|
203 |
+
"acc_stderr,none": 0.030031147977641538
|
204 |
+
},
|
205 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
206 |
+
"alias": " - high_school_macroeconomics",
|
207 |
+
"acc,none": 0.541025641025641,
|
208 |
+
"acc_stderr,none": 0.025265525491284295
|
209 |
+
},
|
210 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
211 |
+
"alias": " - high_school_microeconomics",
|
212 |
+
"acc,none": 0.5882352941176471,
|
213 |
+
"acc_stderr,none": 0.031968769891957786
|
214 |
+
},
|
215 |
+
"harness|mmlu_high_school_psychology|0": {
|
216 |
+
"alias": " - high_school_psychology",
|
217 |
+
"acc,none": 0.7577981651376147,
|
218 |
+
"acc_stderr,none": 0.01836817630659862
|
219 |
+
},
|
220 |
+
"harness|mmlu_human_sexuality|0": {
|
221 |
+
"alias": " - human_sexuality",
|
222 |
+
"acc,none": 0.7175572519083969,
|
223 |
+
"acc_stderr,none": 0.03948406125768361
|
224 |
+
},
|
225 |
+
"harness|mmlu_professional_psychology|0": {
|
226 |
+
"alias": " - professional_psychology",
|
227 |
+
"acc,none": 0.5686274509803921,
|
228 |
+
"acc_stderr,none": 0.020036393768352635
|
229 |
+
},
|
230 |
+
"harness|mmlu_public_relations|0": {
|
231 |
+
"alias": " - public_relations",
|
232 |
+
"acc,none": 0.6,
|
233 |
+
"acc_stderr,none": 0.0469237132203465
|
234 |
+
},
|
235 |
+
"harness|mmlu_security_studies|0": {
|
236 |
+
"alias": " - security_studies",
|
237 |
+
"acc,none": 0.636734693877551,
|
238 |
+
"acc_stderr,none": 0.030789051139030806
|
239 |
+
},
|
240 |
+
"harness|mmlu_sociology|0": {
|
241 |
+
"alias": " - sociology",
|
242 |
+
"acc,none": 0.7711442786069652,
|
243 |
+
"acc_stderr,none": 0.029705284056772436
|
244 |
+
},
|
245 |
+
"harness|mmlu_us_foreign_policy|0": {
|
246 |
+
"alias": " - us_foreign_policy",
|
247 |
+
"acc,none": 0.81,
|
248 |
+
"acc_stderr,none": 0.03942772444036624
|
249 |
+
},
|
250 |
+
"harness|mmlu_stem|0": {
|
251 |
+
"alias": " - stem",
|
252 |
+
"acc,none": 0.4652711703139867,
|
253 |
+
"acc_stderr,none": 0.008646280447225355
|
254 |
+
},
|
255 |
+
"harness|mmlu_abstract_algebra|0": {
|
256 |
+
"alias": " - abstract_algebra",
|
257 |
+
"acc,none": 0.34,
|
258 |
+
"acc_stderr,none": 0.04760952285695235
|
259 |
+
},
|
260 |
+
"harness|mmlu_anatomy|0": {
|
261 |
+
"alias": " - anatomy",
|
262 |
+
"acc,none": 0.5703703703703704,
|
263 |
+
"acc_stderr,none": 0.04276349494376599
|
264 |
+
},
|
265 |
+
"harness|mmlu_astronomy|0": {
|
266 |
+
"alias": " - astronomy",
|
267 |
+
"acc,none": 0.5526315789473685,
|
268 |
+
"acc_stderr,none": 0.04046336883978251
|
269 |
+
},
|
270 |
+
"harness|mmlu_college_biology|0": {
|
271 |
+
"alias": " - college_biology",
|
272 |
+
"acc,none": 0.6388888888888888,
|
273 |
+
"acc_stderr,none": 0.04016660030451233
|
274 |
+
},
|
275 |
+
"harness|mmlu_college_chemistry|0": {
|
276 |
+
"alias": " - college_chemistry",
|
277 |
+
"acc,none": 0.36,
|
278 |
+
"acc_stderr,none": 0.04824181513244218
|
279 |
+
},
|
280 |
+
"harness|mmlu_college_computer_science|0": {
|
281 |
+
"alias": " - college_computer_science",
|
282 |
+
"acc,none": 0.45,
|
283 |
+
"acc_stderr,none": 0.05
|
284 |
+
},
|
285 |
+
"harness|mmlu_college_mathematics|0": {
|
286 |
+
"alias": " - college_mathematics",
|
287 |
+
"acc,none": 0.34,
|
288 |
+
"acc_stderr,none": 0.04760952285695235
|
289 |
+
},
|
290 |
+
"harness|mmlu_college_physics|0": {
|
291 |
+
"alias": " - college_physics",
|
292 |
+
"acc,none": 0.3431372549019608,
|
293 |
+
"acc_stderr,none": 0.04724007352383889
|
294 |
+
},
|
295 |
+
"harness|mmlu_computer_security|0": {
|
296 |
+
"alias": " - computer_security",
|
297 |
+
"acc,none": 0.71,
|
298 |
+
"acc_stderr,none": 0.045604802157206845
|
299 |
+
},
|
300 |
+
"harness|mmlu_conceptual_physics|0": {
|
301 |
+
"alias": " - conceptual_physics",
|
302 |
+
"acc,none": 0.4425531914893617,
|
303 |
+
"acc_stderr,none": 0.03246956919789958
|
304 |
+
},
|
305 |
+
"harness|mmlu_electrical_engineering|0": {
|
306 |
+
"alias": " - electrical_engineering",
|
307 |
+
"acc,none": 0.5241379310344828,
|
308 |
+
"acc_stderr,none": 0.0416180850350153
|
309 |
+
},
|
310 |
+
"harness|mmlu_elementary_mathematics|0": {
|
311 |
+
"alias": " - elementary_mathematics",
|
312 |
+
"acc,none": 0.3941798941798942,
|
313 |
+
"acc_stderr,none": 0.02516798233389414
|
314 |
+
},
|
315 |
+
"harness|mmlu_high_school_biology|0": {
|
316 |
+
"alias": " - high_school_biology",
|
317 |
+
"acc,none": 0.6741935483870968,
|
318 |
+
"acc_stderr,none": 0.0266620105785671
|
319 |
+
},
|
320 |
+
"harness|mmlu_high_school_chemistry|0": {
|
321 |
+
"alias": " - high_school_chemistry",
|
322 |
+
"acc,none": 0.41379310344827586,
|
323 |
+
"acc_stderr,none": 0.03465304488406796
|
324 |
+
},
|
325 |
+
"harness|mmlu_high_school_computer_science|0": {
|
326 |
+
"alias": " - high_school_computer_science",
|
327 |
+
"acc,none": 0.6,
|
328 |
+
"acc_stderr,none": 0.049236596391733084
|
329 |
+
},
|
330 |
+
"harness|mmlu_high_school_mathematics|0": {
|
331 |
+
"alias": " - high_school_mathematics",
|
332 |
+
"acc,none": 0.3074074074074074,
|
333 |
+
"acc_stderr,none": 0.028133252578815642
|
334 |
+
},
|
335 |
+
"harness|mmlu_high_school_physics|0": {
|
336 |
+
"alias": " - high_school_physics",
|
337 |
+
"acc,none": 0.4105960264900662,
|
338 |
+
"acc_stderr,none": 0.04016689594849927
|
339 |
+
},
|
340 |
+
"harness|mmlu_high_school_statistics|0": {
|
341 |
+
"alias": " - high_school_statistics",
|
342 |
+
"acc,none": 0.41203703703703703,
|
343 |
+
"acc_stderr,none": 0.03356787758160835
|
344 |
+
},
|
345 |
+
"harness|mmlu_machine_learning|0": {
|
346 |
+
"alias": " - machine_learning",
|
347 |
+
"acc,none": 0.38392857142857145,
|
348 |
+
"acc_stderr,none": 0.04616143075028547
|
349 |
+
},
|
350 |
+
"harness|truthfulqa:mc1|0": {
|
351 |
+
"acc,none": 0.33047735618115054,
|
352 |
+
"acc_stderr,none": 0.016466769613698314,
|
353 |
+
"alias": "truthfulqa_mc1"
|
354 |
+
},
|
355 |
+
"harness|boolq|0": {
|
356 |
+
"acc,none": 0.8033639143730887,
|
357 |
+
"acc_stderr,none": 0.006951528536402847,
|
358 |
+
"alias": "boolq"
|
359 |
+
},
|
360 |
+
"harness|piqa|0": {
|
361 |
+
"acc,none": 0.7742110990206746,
|
362 |
+
"acc_stderr,none": 0.00975498067091734,
|
363 |
+
"acc_norm,none": 0.7731229597388466,
|
364 |
+
"acc_norm_stderr,none": 0.009771584259215153,
|
365 |
+
"alias": "piqa"
|
366 |
+
},
|
367 |
+
"harness|arc:challenge|0": {
|
368 |
+
"acc,none": 0.45819112627986347,
|
369 |
+
"acc_stderr,none": 0.014560220308714697,
|
370 |
+
"acc_norm,none": 0.49146757679180886,
|
371 |
+
"acc_norm_stderr,none": 0.014609263165632191,
|
372 |
+
"alias": "arc_challenge"
|
373 |
+
},
|
374 |
+
"harness|hellaswag|0": {
|
375 |
+
"acc,none": 0.5487950607448715,
|
376 |
+
"acc_stderr,none": 0.004965963647210322,
|
377 |
+
"acc_norm,none": 0.7263493328022307,
|
378 |
+
"acc_norm_stderr,none": 0.0044492062959223195,
|
379 |
+
"alias": "hellaswag"
|
380 |
+
},
|
381 |
+
"harness|winogrande|0": {
|
382 |
+
"acc,none": 0.696921862667719,
|
383 |
+
"acc_stderr,none": 0.01291672746263447,
|
384 |
+
"alias": "winogrande"
|
385 |
+
},
|
386 |
+
"harness|lambada:openai|0": {
|
387 |
+
"perplexity,none": 3.721237013130463,
|
388 |
+
"perplexity_stderr,none": 0.0953372165080242,
|
389 |
+
"acc,none": 0.6904715699592471,
|
390 |
+
"acc_stderr,none": 0.006440732259116663,
|
391 |
+
"alias": "lambada_openai"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 7,
|
399 |
+
"architectures": "LlamaForCausalLM",
|
400 |
+
"quant_type": "AQLM",
|
401 |
+
"precision": "2bit",
|
402 |
+
"model_params": 7,
|
403 |
+
"model_size": 4.08,
|
404 |
+
"weight_dtype": "int2",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Waiting",
|
409 |
+
"submitted_time": "2024-05-13T11:54:45Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"in_group_size": 8,
|
417 |
+
"linear_weights_not_to_quantize": [
|
418 |
+
"model.layers.0.input_layernorm.weight",
|
419 |
+
"model.layers.0.post_attention_layernorm.weight",
|
420 |
+
"model.layers.1.input_layernorm.weight",
|
421 |
+
"model.layers.1.post_attention_layernorm.weight",
|
422 |
+
"model.layers.2.input_layernorm.weight",
|
423 |
+
"model.layers.2.post_attention_layernorm.weight",
|
424 |
+
"model.layers.3.input_layernorm.weight",
|
425 |
+
"model.layers.3.post_attention_layernorm.weight",
|
426 |
+
"model.layers.4.input_layernorm.weight",
|
427 |
+
"model.layers.4.post_attention_layernorm.weight",
|
428 |
+
"model.layers.5.input_layernorm.weight",
|
429 |
+
"model.layers.5.post_attention_layernorm.weight",
|
430 |
+
"model.layers.6.input_layernorm.weight",
|
431 |
+
"model.layers.6.post_attention_layernorm.weight",
|
432 |
+
"model.layers.7.input_layernorm.weight",
|
433 |
+
"model.layers.7.post_attention_layernorm.weight",
|
434 |
+
"model.layers.8.input_layernorm.weight",
|
435 |
+
"model.layers.8.post_attention_layernorm.weight",
|
436 |
+
"model.layers.9.input_layernorm.weight",
|
437 |
+
"model.layers.9.post_attention_layernorm.weight",
|
438 |
+
"model.layers.10.input_layernorm.weight",
|
439 |
+
"model.layers.10.post_attention_layernorm.weight",
|
440 |
+
"model.layers.11.input_layernorm.weight",
|
441 |
+
"model.layers.11.post_attention_layernorm.weight",
|
442 |
+
"model.layers.12.input_layernorm.weight",
|
443 |
+
"model.layers.12.post_attention_layernorm.weight",
|
444 |
+
"model.layers.13.input_layernorm.weight",
|
445 |
+
"model.layers.13.post_attention_layernorm.weight",
|
446 |
+
"model.layers.14.input_layernorm.weight",
|
447 |
+
"model.layers.14.post_attention_layernorm.weight",
|
448 |
+
"model.layers.15.input_layernorm.weight",
|
449 |
+
"model.layers.15.post_attention_layernorm.weight",
|
450 |
+
"model.layers.16.input_layernorm.weight",
|
451 |
+
"model.layers.16.post_attention_layernorm.weight",
|
452 |
+
"model.layers.17.input_layernorm.weight",
|
453 |
+
"model.layers.17.post_attention_layernorm.weight",
|
454 |
+
"model.layers.18.input_layernorm.weight",
|
455 |
+
"model.layers.18.post_attention_layernorm.weight",
|
456 |
+
"model.layers.19.input_layernorm.weight",
|
457 |
+
"model.layers.19.post_attention_layernorm.weight",
|
458 |
+
"model.layers.20.input_layernorm.weight",
|
459 |
+
"model.layers.20.post_attention_layernorm.weight",
|
460 |
+
"model.layers.21.input_layernorm.weight",
|
461 |
+
"model.layers.21.post_attention_layernorm.weight",
|
462 |
+
"model.layers.22.input_layernorm.weight",
|
463 |
+
"model.layers.22.post_attention_layernorm.weight",
|
464 |
+
"model.layers.23.input_layernorm.weight",
|
465 |
+
"model.layers.23.post_attention_layernorm.weight",
|
466 |
+
"model.layers.24.input_layernorm.weight",
|
467 |
+
"model.layers.24.post_attention_layernorm.weight",
|
468 |
+
"model.layers.25.input_layernorm.weight",
|
469 |
+
"model.layers.25.post_attention_layernorm.weight",
|
470 |
+
"model.layers.26.input_layernorm.weight",
|
471 |
+
"model.layers.26.post_attention_layernorm.weight",
|
472 |
+
"model.layers.27.input_layernorm.weight",
|
473 |
+
"model.layers.27.post_attention_layernorm.weight",
|
474 |
+
"model.layers.28.input_layernorm.weight",
|
475 |
+
"model.layers.28.post_attention_layernorm.weight",
|
476 |
+
"model.layers.29.input_layernorm.weight",
|
477 |
+
"model.layers.29.post_attention_layernorm.weight",
|
478 |
+
"model.layers.30.input_layernorm.weight",
|
479 |
+
"model.layers.30.post_attention_layernorm.weight",
|
480 |
+
"model.layers.31.input_layernorm.weight",
|
481 |
+
"model.layers.31.post_attention_layernorm.weight",
|
482 |
+
"model.embed_tokens.weight",
|
483 |
+
"model.norm.weight",
|
484 |
+
"lm_head.weight"
|
485 |
+
],
|
486 |
+
"nbits_per_codebook": 16,
|
487 |
+
"num_codebooks": 1,
|
488 |
+
"out_group_size": 1,
|
489 |
+
"quant_method": "aqlm"
|
490 |
+
},
|
491 |
+
"versions": {
|
492 |
+
"harness|arc:easy|0": 1.0,
|
493 |
+
"harness|openbookqa|0": 1.0,
|
494 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
495 |
+
"harness|mmlu|0": null,
|
496 |
+
"harness|mmlu_humanities|0": null,
|
497 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
501 |
+
"harness|mmlu_international_law|0": 0.0,
|
502 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
503 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
504 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
505 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
506 |
+
"harness|mmlu_philosophy|0": 0.0,
|
507 |
+
"harness|mmlu_prehistory|0": 0.0,
|
508 |
+
"harness|mmlu_professional_law|0": 0.0,
|
509 |
+
"harness|mmlu_world_religions|0": 0.0,
|
510 |
+
"harness|mmlu_other|0": null,
|
511 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
512 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
513 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
514 |
+
"harness|mmlu_global_facts|0": 0.0,
|
515 |
+
"harness|mmlu_human_aging|0": 0.0,
|
516 |
+
"harness|mmlu_management|0": 0.0,
|
517 |
+
"harness|mmlu_marketing|0": 0.0,
|
518 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
519 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
520 |
+
"harness|mmlu_nutrition|0": 0.0,
|
521 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
522 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
523 |
+
"harness|mmlu_virology|0": 0.0,
|
524 |
+
"harness|mmlu_social_sciences|0": null,
|
525 |
+
"harness|mmlu_econometrics|0": 0.0,
|
526 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
527 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
528 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
529 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
530 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
531 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
532 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
533 |
+
"harness|mmlu_public_relations|0": 0.0,
|
534 |
+
"harness|mmlu_security_studies|0": 0.0,
|
535 |
+
"harness|mmlu_sociology|0": 0.0,
|
536 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
537 |
+
"harness|mmlu_stem|0": null,
|
538 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
539 |
+
"harness|mmlu_anatomy|0": 0.0,
|
540 |
+
"harness|mmlu_astronomy|0": 0.0,
|
541 |
+
"harness|mmlu_college_biology|0": 0.0,
|
542 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
543 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
544 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
545 |
+
"harness|mmlu_college_physics|0": 0.0,
|
546 |
+
"harness|mmlu_computer_security|0": 0.0,
|
547 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
548 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
549 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
550 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
551 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
552 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
553 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
554 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
555 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
556 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
557 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
558 |
+
"harness|boolq|0": 2.0,
|
559 |
+
"harness|piqa|0": 1.0,
|
560 |
+
"harness|arc:challenge|0": 1.0,
|
561 |
+
"harness|hellaswag|0": 1.0,
|
562 |
+
"harness|winogrande|0": 1.0,
|
563 |
+
"harness|lambada:openai|0": 1.0
|
564 |
+
},
|
565 |
+
"n-shot": {
|
566 |
+
"arc_challenge": 0,
|
567 |
+
"arc_easy": 0,
|
568 |
+
"boolq": 0,
|
569 |
+
"hellaswag": 0,
|
570 |
+
"lambada_openai": 0,
|
571 |
+
"mmlu": 0,
|
572 |
+
"mmlu_abstract_algebra": 0,
|
573 |
+
"mmlu_anatomy": 0,
|
574 |
+
"mmlu_astronomy": 0,
|
575 |
+
"mmlu_business_ethics": 0,
|
576 |
+
"mmlu_clinical_knowledge": 0,
|
577 |
+
"mmlu_college_biology": 0,
|
578 |
+
"mmlu_college_chemistry": 0,
|
579 |
+
"mmlu_college_computer_science": 0,
|
580 |
+
"mmlu_college_mathematics": 0,
|
581 |
+
"mmlu_college_medicine": 0,
|
582 |
+
"mmlu_college_physics": 0,
|
583 |
+
"mmlu_computer_security": 0,
|
584 |
+
"mmlu_conceptual_physics": 0,
|
585 |
+
"mmlu_econometrics": 0,
|
586 |
+
"mmlu_electrical_engineering": 0,
|
587 |
+
"mmlu_elementary_mathematics": 0,
|
588 |
+
"mmlu_formal_logic": 0,
|
589 |
+
"mmlu_global_facts": 0,
|
590 |
+
"mmlu_high_school_biology": 0,
|
591 |
+
"mmlu_high_school_chemistry": 0,
|
592 |
+
"mmlu_high_school_computer_science": 0,
|
593 |
+
"mmlu_high_school_european_history": 0,
|
594 |
+
"mmlu_high_school_geography": 0,
|
595 |
+
"mmlu_high_school_government_and_politics": 0,
|
596 |
+
"mmlu_high_school_macroeconomics": 0,
|
597 |
+
"mmlu_high_school_mathematics": 0,
|
598 |
+
"mmlu_high_school_microeconomics": 0,
|
599 |
+
"mmlu_high_school_physics": 0,
|
600 |
+
"mmlu_high_school_psychology": 0,
|
601 |
+
"mmlu_high_school_statistics": 0,
|
602 |
+
"mmlu_high_school_us_history": 0,
|
603 |
+
"mmlu_high_school_world_history": 0,
|
604 |
+
"mmlu_human_aging": 0,
|
605 |
+
"mmlu_human_sexuality": 0,
|
606 |
+
"mmlu_humanities": 0,
|
607 |
+
"mmlu_international_law": 0,
|
608 |
+
"mmlu_jurisprudence": 0,
|
609 |
+
"mmlu_logical_fallacies": 0,
|
610 |
+
"mmlu_machine_learning": 0,
|
611 |
+
"mmlu_management": 0,
|
612 |
+
"mmlu_marketing": 0,
|
613 |
+
"mmlu_medical_genetics": 0,
|
614 |
+
"mmlu_miscellaneous": 0,
|
615 |
+
"mmlu_moral_disputes": 0,
|
616 |
+
"mmlu_moral_scenarios": 0,
|
617 |
+
"mmlu_nutrition": 0,
|
618 |
+
"mmlu_other": 0,
|
619 |
+
"mmlu_philosophy": 0,
|
620 |
+
"mmlu_prehistory": 0,
|
621 |
+
"mmlu_professional_accounting": 0,
|
622 |
+
"mmlu_professional_law": 0,
|
623 |
+
"mmlu_professional_medicine": 0,
|
624 |
+
"mmlu_professional_psychology": 0,
|
625 |
+
"mmlu_public_relations": 0,
|
626 |
+
"mmlu_security_studies": 0,
|
627 |
+
"mmlu_social_sciences": 0,
|
628 |
+
"mmlu_sociology": 0,
|
629 |
+
"mmlu_stem": 0,
|
630 |
+
"mmlu_us_foreign_policy": 0,
|
631 |
+
"mmlu_virology": 0,
|
632 |
+
"mmlu_world_religions": 0,
|
633 |
+
"openbookqa": 0,
|
634 |
+
"piqa": 0,
|
635 |
+
"truthfulqa_mc1": 0,
|
636 |
+
"truthfulqa_mc2": 0,
|
637 |
+
"winogrande": 0
|
638 |
+
},
|
639 |
+
"date": 1715715118.262416,
|
640 |
+
"config": {
|
641 |
+
"model": "hf",
|
642 |
+
"model_args": "pretrained=ISTA-DASLab/Meta-Llama-3-8B-Instruct-AQLM-2Bit-1x16,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
643 |
+
"batch_size": 2,
|
644 |
+
"batch_sizes": [],
|
645 |
+
"device": "cuda",
|
646 |
+
"use_cache": null,
|
647 |
+
"limit": null,
|
648 |
+
"bootstrap_iters": 100000,
|
649 |
+
"gen_kwargs": null
|
650 |
+
}
|
651 |
+
}
|
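Note: the "2Bit" in these AQLM model names can be cross-checked against each file's "quantization_config" block. In AQLM, every group of in_group_size * out_group_size weights is encoded by num_codebooks codes of nbits_per_codebook bits each. A minimal Python sketch of that arithmetic follows; the helper name is ours, the file path is the repo-relative path of the 1x16 Llama-2 result file below (whose config shows in_group_size 8), and only the standard library is used:

import json

def aqlm_bits_per_weight(qcfg):
    # Bits spent on codes per group, divided by the number of weights per group.
    code_bits = qcfg["num_codebooks"] * qcfg["nbits_per_codebook"]
    group_size = qcfg["in_group_size"] * qcfg["out_group_size"]
    return code_bits / group_size

with open("ISTA-DASLab/results_2024-05-19-14-09-41.json") as f:
    qcfg = json.load(f)["quantization_config"]

print(aqlm_bits_per_weight(qcfg))  # 1 codebook * 16 bits / (8 * 1) = 2.0

For a 1x16 config this gives 16 bits per group of 8 weights, i.e. exactly 2 bits per weight, matching the "model_dtype": "2bit" recorded above.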
ISTA-DASLab/results_2024-05-19-14-09-41.json
ADDED
@@ -0,0 +1,586 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-19-14-09-41",
        "total_evaluation_time_secondes": "",
        "model_name": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf",
        "model_sha": "",
        "model_dtype": "2bit",
        "model_size": 2.38,
        "model_params": 6.48,
        "quant_type": "AQLM",
        "precision": "2bit"
    },
    "results": {
        "harness|hellaswag|0": {
            "acc,none": 0.5342561242780323,
            "acc_stderr,none": 0.004978056798794863,
            "acc_norm,none": 0.7136028679545907,
            "acc_norm_stderr,none": 0.004511533039406169,
            "alias": "hellaswag"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.36398040864303677,
            "acc_stderr,none": 0.013782783819030715,
            "alias": "truthfulqa_mc2"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.306,
            "acc_stderr,none": 0.02062956999834541,
            "acc_norm,none": 0.412,
            "acc_norm_stderr,none": 0.02203367799374086,
            "alias": "openbookqa"
        },
        "harness|winogrande|0": {
            "acc,none": 0.654301499605367,
            "acc_stderr,none": 0.01336659695193438,
            "alias": "winogrande"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 4.091725727763611,
            "perplexity_stderr,none": 0.08814292215669535,
            "acc,none": 0.7083252474286823,
            "acc_stderr,none": 0.006332538704566833,
            "alias": "lambada_openai"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2350061199510404,
            "acc_stderr,none": 0.014843061507731608,
            "alias": "truthfulqa_mc1"
        },
        "harness|mmlu|0": {
            "acc,none": 0.3503774391112377,
            "acc_stderr,none": 0.003987221774638447,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.3477151965993624,
            "acc_stderr,none": 0.006839503265838798
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.30158730158730157,
            "acc_stderr,none": 0.04104947269903394
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.45454545454545453,
            "acc_stderr,none": 0.038881769216741004
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.4166666666666667,
            "acc_stderr,none": 0.034602283272391704
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.4008438818565401,
            "acc_stderr,none": 0.031900803894732356
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.5454545454545454,
            "acc_stderr,none": 0.04545454545454548
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.37037037037037035,
            "acc_stderr,none": 0.04668408033024931
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.4294478527607362,
            "acc_stderr,none": 0.03889066619112722
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.407514450867052,
            "acc_stderr,none": 0.026454578146931505
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.23687150837988827,
            "acc_stderr,none": 0.014219570788103982
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.41479099678456594,
            "acc_stderr,none": 0.02798268045975956
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.4444444444444444,
            "acc_stderr,none": 0.027648477877413324
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.2966101694915254,
            "acc_stderr,none": 0.011665946586082844
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.5029239766081871,
            "acc_stderr,none": 0.03834759370936839
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.35854522046990667,
            "acc_stderr,none": 0.008558316270165067
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.34,
            "acc_stderr,none": 0.04760952285695236
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.35471698113207545,
            "acc_stderr,none": 0.02944517532819959
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.31213872832369943,
            "acc_stderr,none": 0.035331333893236574
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.28,
            "acc_stderr,none": 0.045126085985421276
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.3183856502242152,
            "acc_stderr,none": 0.03126580522513713
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.36893203883495146,
            "acc_stderr,none": 0.04777615181156739
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.44017094017094016,
            "acc_stderr,none": 0.032520741720630506
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.4,
            "acc_stderr,none": 0.049236596391733084
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.4061302681992337,
            "acc_stderr,none": 0.017562037406478912
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.4215686274509804,
            "acc_stderr,none": 0.028275490156791434
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.26595744680851063,
            "acc_stderr,none": 0.026358065698880592
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.3161764705882353,
            "acc_stderr,none": 0.028245687391462916
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.26506024096385544,
            "acc_stderr,none": 0.03436024037944967
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.37146571335716605,
            "acc_stderr,none": 0.008625018116535118
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.24561403508771928,
            "acc_stderr,none": 0.040493392977481425
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.3888888888888889,
            "acc_stderr,none": 0.0347327959083696
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.47668393782383417,
            "acc_stderr,none": 0.03604513672442206
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.3,
            "acc_stderr,none": 0.023234581088428494
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.3235294117647059,
            "acc_stderr,none": 0.030388353551886845
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.3926605504587156,
            "acc_stderr,none": 0.020937505161201093
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.3893129770992366,
            "acc_stderr,none": 0.04276486542814591
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.3349673202614379,
            "acc_stderr,none": 0.019094228167000307
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.2909090909090909,
            "acc_stderr,none": 0.04350271442923243
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.35918367346938773,
            "acc_stderr,none": 0.03071356045510849
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.527363184079602,
            "acc_stderr,none": 0.035302355173346824
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.56,
            "acc_stderr,none": 0.04988876515698589
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.32572153504598794,
            "acc_stderr,none": 0.00831768388964144
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.24,
            "acc_stderr,none": 0.04292346959909284
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.3851851851851852,
            "acc_stderr,none": 0.042039210401562783
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.3684210526315789,
            "acc_stderr,none": 0.03925523381052932
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.3263888888888889,
            "acc_stderr,none": 0.03921067198982266
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.28,
            "acc_stderr,none": 0.045126085985421255
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.28,
            "acc_stderr,none": 0.04512608598542127
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.39,
            "acc_stderr,none": 0.04902071300001974
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.20588235294117646,
            "acc_stderr,none": 0.04023382273617748
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.44,
            "acc_stderr,none": 0.04988876515698589
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.31063829787234043,
            "acc_stderr,none": 0.03025123757921317
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.41379310344827586,
            "acc_stderr,none": 0.041042692118062316
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.2857142857142857,
            "acc_stderr,none": 0.02326651221373056
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.36451612903225805,
            "acc_stderr,none": 0.027379871229943252
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.35960591133004927,
            "acc_stderr,none": 0.03376458246509567
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.29259259259259257,
            "acc_stderr,none": 0.027738969632176088
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2913907284768212,
            "acc_stderr,none": 0.03710185726119994
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.3611111111111111,
            "acc_stderr,none": 0.03275773486101
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.25892857142857145,
            "acc_stderr,none": 0.04157751539865629
        },
        "harness|arc:easy|0": {
            "acc,none": 0.7403198653198653,
            "acc_stderr,none": 0.008996990428562219,
            "acc_norm,none": 0.7133838383838383,
            "acc_norm_stderr,none": 0.009278551100969293,
            "alias": "arc_easy"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.39590443686006827,
            "acc_stderr,none": 0.014291228393536587,
            "acc_norm,none": 0.41467576791808874,
            "acc_norm_stderr,none": 0.014397070564409172,
            "alias": "arc_challenge"
        },
        "harness|piqa|0": {
            "acc,none": 0.7687704026115343,
            "acc_stderr,none": 0.009837063180625324,
            "acc_norm,none": 0.7704026115342764,
            "acc_norm_stderr,none": 0.009812682950815199,
            "alias": "piqa"
        },
        "harness|boolq|0": {
            "acc,none": 0.7155963302752294,
            "acc_stderr,none": 0.00789031224598877,
            "alias": "boolq"
        }
    },
    "task_info": {
        "model": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf",
        "revision": "main",
        "private": false,
        "params": 2.38,
        "architectures": "LlamaForCausalLM",
        "quant_type": "AQLM",
        "precision": "2bit",
        "model_params": 6.48,
        "model_size": 2.38,
        "weight_dtype": "int2",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Waiting",
        "submitted_time": "2024-05-15T03:44:59Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "in_group_size": 8,
        "linear_weights_not_to_quantize": [
            "model.embed_tokens.weight",
            "lm_head.weight"
        ],
        "nbits_per_codebook": 16,
        "num_codebooks": 1,
        "out_group_size": 1,
        "quant_method": "aqlm"
    },
    "versions": {
        "harness|hellaswag|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|openbookqa|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|arc:easy|0": 1.0,
        "harness|arc:challenge|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|boolq|0": 2.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1716092125.7070472,
    "config": {
        "model": "hf",
        "model_args": "pretrained=ISTA-DASLab/Llama-2-7b-AQLM-2Bit-1x16-hf,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
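The layout above is uniform across the result files in this commit: "config_general" carries the model metadata, "results" maps harness task names to metric dictionaries, and "versions"/"n-shot" record the task versions and shot counts. A small standard-library sketch for consuming one file programmatically (the path is the repo-relative name from the header above; the chosen metrics are just examples):

import json

path = "ISTA-DASLab/results_2024-05-19-14-09-41.json"
with open(path) as f:
    data = json.load(f)

# Model metadata lives under "config_general".
meta = data["config_general"]
print(meta["model_name"], meta["quant_type"], meta["precision"])

# Per-task scores live under "results", keyed "harness|<task>|<n-shot>".
results = data["results"]
print("MMLU acc:", results["harness|mmlu|0"]["acc,none"])
print("HellaSwag acc_norm:", results["harness|hellaswag|0"]["acc_norm,none"])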
ISTA-DASLab/results_2024-05-20-03-47-37.json
ADDED
@@ -0,0 +1,586 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-20-03-47-37",
        "total_evaluation_time_secondes": "",
        "model_name": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf",
        "model_sha": "",
        "model_dtype": "2bit",
        "model_size": 2.73,
        "model_params": 6.48,
        "quant_type": "AQLM",
        "precision": "2bit"
    },
    "results": {
        "harness|openbookqa|0": {
            "acc,none": 0.284,
            "acc_stderr,none": 0.02018670369357085,
            "acc_norm,none": 0.4,
            "acc_norm_stderr,none": 0.021930844120728505,
            "alias": "openbookqa"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 4.623725999863417,
            "perplexity_stderr,none": 0.10657475064064027,
            "acc,none": 0.6811566078012808,
            "acc_stderr,none": 0.006492684061449838,
            "alias": "lambada_openai"
        },
        "harness|mmlu|0": {
            "acc,none": 0.3007406352371457,
            "acc_stderr,none": 0.003854246733008758,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.3026567481402763,
            "acc_stderr,none": 0.006671499990771424
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.21428571428571427,
            "acc_stderr,none": 0.03670066451047182
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.32727272727272727,
            "acc_stderr,none": 0.03663974994391241
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.3627450980392157,
            "acc_stderr,none": 0.03374499356319355
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.4008438818565401,
            "acc_stderr,none": 0.031900803894732356
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.371900826446281,
            "acc_stderr,none": 0.04412015806624503
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.35185185185185186,
            "acc_stderr,none": 0.04616631111801715
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.3006134969325153,
            "acc_stderr,none": 0.0360251131880677
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.3352601156069364,
            "acc_stderr,none": 0.025416003773165555
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.23575418994413408,
            "acc_stderr,none": 0.014196375686290804
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.3215434083601286,
            "acc_stderr,none": 0.026527724079528872
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.3487654320987654,
            "acc_stderr,none": 0.02651759772446501
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.2907431551499348,
            "acc_stderr,none": 0.011598062372851981
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.32748538011695905,
            "acc_stderr,none": 0.035993357714560276
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.3270035403926617,
            "acc_stderr,none": 0.008393111920442035
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.27,
            "acc_stderr,none": 0.044619604333847415
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.33962264150943394,
            "acc_stderr,none": 0.02914690474779833
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.2832369942196532,
            "acc_stderr,none": 0.03435568056047873
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.35,
            "acc_stderr,none": 0.047937248544110196
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.26905829596412556,
            "acc_stderr,none": 0.029763779406874975
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.2815533980582524,
            "acc_stderr,none": 0.04453254836326469
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.3717948717948718,
            "acc_stderr,none": 0.03166098891888078
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.41,
            "acc_stderr,none": 0.049431107042371025
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.3550446998722861,
            "acc_stderr,none": 0.017112085772772994
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.3562091503267974,
            "acc_stderr,none": 0.027420477662629235
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.30141843971631205,
            "acc_stderr,none": 0.02737412888263115
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.23529411764705882,
            "acc_stderr,none": 0.02576725201085597
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.37349397590361444,
            "acc_stderr,none": 0.03765845117168862
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.30744231394215144,
            "acc_stderr,none": 0.00829988495163527
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.2631578947368421,
            "acc_stderr,none": 0.04142439719489362
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.29797979797979796,
            "acc_stderr,none": 0.032586303838365555
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.38341968911917096,
            "acc_stderr,none": 0.03508984236295342
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.2564102564102564,
            "acc_stderr,none": 0.022139081103971527
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.28991596638655465,
            "acc_stderr,none": 0.029472485833136077
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.344954128440367,
            "acc_stderr,none": 0.020380605405066966
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.3511450381679389,
            "acc_stderr,none": 0.04186445163013751
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.30392156862745096,
            "acc_stderr,none": 0.01860755213127983
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.36363636363636365,
            "acc_stderr,none": 0.04607582090719976
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.22857142857142856,
            "acc_stderr,none": 0.026882144922307744
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.3333333333333333,
            "acc_stderr,none": 0.03333333333333334
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.2654614652711703,
            "acc_stderr,none": 0.007843275093064069
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.3,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.2814814814814815,
            "acc_stderr,none": 0.03885004245800254
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.28289473684210525,
            "acc_stderr,none": 0.03665349695640767
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.2569444444444444,
            "acc_stderr,none": 0.03653946969442099
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.26,
            "acc_stderr,none": 0.0440844002276808
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.25,
            "acc_stderr,none": 0.04351941398892446
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.20588235294117646,
            "acc_stderr,none": 0.04023382273617747
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.4,
            "acc_stderr,none": 0.049236596391733084
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.3276595744680851,
            "acc_stderr,none": 0.030683020843230997
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.2620689655172414,
            "acc_stderr,none": 0.036646663372252565
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.23809523809523808,
            "acc_stderr,none": 0.02193587808118476
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.3064516129032258,
            "acc_stderr,none": 0.026226485652553873
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.2315270935960591,
            "acc_stderr,none": 0.029678333141444455
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.33,
            "acc_stderr,none": 0.047258156262526045
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.25555555555555554,
            "acc_stderr,none": 0.02659393910184408
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2052980132450331,
            "acc_stderr,none": 0.03297986648473835
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.19444444444444445,
            "acc_stderr,none": 0.026991454502036737
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.21428571428571427,
            "acc_stderr,none": 0.03894641120044792
        },
        "harness|boolq|0": {
            "acc,none": 0.7003058103975535,
            "acc_stderr,none": 0.00801263880645437,
            "alias": "boolq"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.4019049189158086,
            "acc_stderr,none": 0.014095106904013666,
            "alias": "truthfulqa_mc2"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.6641414141414141,
            "acc_stderr,none": 0.009691180932083496,
            "acc_norm,none": 0.6380471380471381,
            "acc_norm_stderr,none": 0.009860991466688476,
            "alias": "arc_easy"
        },
        "harness|winogrande|0": {
            "acc,none": 0.6471981057616417,
            "acc_stderr,none": 0.01342972810178896,
            "alias": "winogrande"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.4977096195976897,
            "acc_stderr,none": 0.004989729059957427,
            "acc_norm,none": 0.6783509261103365,
            "acc_norm_stderr,none": 0.004661544991583015,
            "alias": "hellaswag"
        },
        "harness|piqa|0": {
            "acc,none": 0.7377584330794341,
            "acc_stderr,none": 0.01026250256517245,
            "acc_norm,none": 0.7480957562568009,
            "acc_norm_stderr,none": 0.010128421335088683,
            "alias": "piqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2631578947368421,
            "acc_stderr,none": 0.015415241740237014,
            "alias": "truthfulqa_mc1"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.3583617747440273,
            "acc_stderr,none": 0.014012883334859864,
            "acc_norm,none": 0.38054607508532423,
            "acc_norm_stderr,none": 0.01418827771234983,
            "alias": "arc_challenge"
        }
    },
    "task_info": {
        "model": "ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf",
        "revision": "main",
        "private": false,
        "params": 2.73,
        "architectures": "LlamaForCausalLM",
        "quant_type": "AQLM",
        "precision": "2bit",
        "model_params": 6.48,
        "model_size": 2.73,
        "weight_dtype": "int2",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Waiting",
        "submitted_time": "2024-05-15T03:43:56Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "in_group_size": 32,
        "linear_weights_not_to_quantize": [
            "model.embed_tokens.weight",
            "lm_head.weight"
        ],
        "nbits_per_codebook": 8,
        "num_codebooks": 8,
        "out_group_size": 1,
        "quant_method": "aqlm"
    },
    "versions": {
        "harness|openbookqa|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|boolq|0": 2.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|arc:easy|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|arc:challenge|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1716130571.6830513,
    "config": {
        "model": "hf",
        "model_args": "pretrained=ISTA-DASLab/Llama-2-7b-AQLM-2Bit-8x8-hf,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
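The two Llama-2-7b files above make a useful pair: both variants encode exactly 2 bits per weight (1 codebook of 16 bits over groups of 8, versus 8 codebooks of 8 bits over groups of 32), yet the reported model_size differs (2.38 for 1x16, 2.73 for 8x8), presumably due to per-variant overhead outside the code payload such as codebooks and scales; the files themselves do not state the reason. A quick sketch of the arithmetic, with all numbers taken from the two quantization_config blocks above:

# Bits per weight for the two AQLM variants evaluated above.
configs = {
    "1x16": {"num_codebooks": 1, "nbits_per_codebook": 16, "in_group_size": 8, "out_group_size": 1},
    "8x8": {"num_codebooks": 8, "nbits_per_codebook": 8, "in_group_size": 32, "out_group_size": 1},
}
for name, c in configs.items():
    bpw = c["num_codebooks"] * c["nbits_per_codebook"] / (c["in_group_size"] * c["out_group_size"])
    print(name, bpw)  # both variants print 2.0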
ISTA-DASLab/results_2024-05-20-10-51-30.json
ADDED
@@ -0,0 +1,592 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-20-10-51-30",
+        "total_evaluation_time_secondes": "",
+        "model_name": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.74,
+        "model_params": 7.04,
+        "quant_type": "GPTQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|piqa|0": {
+            "acc,none": 0.76550598476605,
+            "acc_stderr,none": 0.009885203143240548,
+            "acc_norm,none": 0.7742110990206746,
+            "acc_norm_stderr,none": 0.009754980670917334,
+            "alias": "piqa"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5633339972117108,
+            "acc_stderr,none": 0.00494958956767891,
+            "acc_norm,none": 0.7466640111531567,
+            "acc_norm_stderr,none": 0.004340328204135108,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7929292929292929,
+            "acc_stderr,none": 0.008314665023956551,
+            "acc_norm,none": 0.7643097643097643,
+            "acc_norm_stderr,none": 0.008709108323214466,
+            "alias": "arc_easy"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7348066298342542,
+            "acc_stderr,none": 0.01240654946619286,
+            "alias": "winogrande"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.334,
+            "acc_stderr,none": 0.021113492347743738,
+            "acc_norm,none": 0.428,
+            "acc_norm_stderr,none": 0.02214979066386193,
+            "alias": "openbookqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8269113149847095,
+            "acc_stderr,none": 0.006616927043886648,
+            "alias": "boolq"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.5061154188638777,
+            "perplexity_stderr,none": 0.10109082998573839,
+            "acc,none": 0.708131185717058,
+            "acc_stderr,none": 0.006333777168216385,
+            "alias": "lambada_openai"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5034129692832765,
+            "acc_stderr,none": 0.014611050403244084,
+            "acc_norm,none": 0.5238907849829352,
+            "acc_norm_stderr,none": 0.014594701798071654,
+            "alias": "arc_challenge"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3659730722154223,
+            "acc_stderr,none": 0.016862941684088383,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5191254909055326,
+            "acc_stderr,none": 0.015156374303657972,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6071072496795328,
+            "acc_stderr,none": 0.003928160330453562,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5604675876726887,
+            "acc_stderr,none": 0.006861969378650451
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.4444444444444444,
+            "acc_stderr,none": 0.04444444444444449
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7090909090909091,
+            "acc_stderr,none": 0.03546563019624336
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8137254901960784,
+            "acc_stderr,none": 0.027325470966716333
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8185654008438819,
+            "acc_stderr,none": 0.025085961144579658
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7520661157024794,
+            "acc_stderr,none": 0.03941897526516301
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.04186091791394607
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6932515337423313,
+            "acc_stderr,none": 0.036230899157241474
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6676300578034682,
+            "acc_stderr,none": 0.025361168749688235
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.33854748603351953,
+            "acc_stderr,none": 0.01582670009648135
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6688102893890675,
+            "acc_stderr,none": 0.026730620728004924
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6882716049382716,
+            "acc_stderr,none": 0.025773111169630433
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.47392438070404175,
+            "acc_stderr,none": 0.012752858346533133
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7426900584795322,
+            "acc_stderr,none": 0.03352799844161865
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6810428065658192,
+            "acc_stderr,none": 0.00813434488979903
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252607
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6754716981132075,
+            "acc_stderr,none": 0.028815615713432118
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.630057803468208,
+            "acc_stderr,none": 0.0368122963339432
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.42,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6367713004484304,
+            "acc_stderr,none": 0.032277904428505
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7766990291262136,
+            "acc_stderr,none": 0.04123553189891431
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8675213675213675,
+            "acc_stderr,none": 0.022209309073165606
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.76,
+            "acc_stderr,none": 0.04292346959909283
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7701149425287356,
+            "acc_stderr,none": 0.015046301846691826
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6862745098039216,
+            "acc_stderr,none": 0.026568921015457166
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.4858156028368794,
+            "acc_stderr,none": 0.02981549448368206
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6691176470588235,
+            "acc_stderr,none": 0.028582709753898428
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5180722891566265,
+            "acc_stderr,none": 0.03889951252827217
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7000324991875203,
+            "acc_stderr,none": 0.00808605616864146
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.4298245614035088,
+            "acc_stderr,none": 0.04657047260594963
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7474747474747475,
+            "acc_stderr,none": 0.030954055470365914
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8341968911917098,
+            "acc_stderr,none": 0.026839845022314415
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5871794871794872,
+            "acc_stderr,none": 0.024962683564331803
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6386554621848739,
+            "acc_stderr,none": 0.031204691225150016
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7871559633027523,
+            "acc_stderr,none": 0.017549376389313694
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7404580152671756,
+            "acc_stderr,none": 0.03844876139785271
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6388888888888888,
+            "acc_stderr,none": 0.019431775677037317
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6818181818181818,
+            "acc_stderr,none": 0.044612721759105085
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7142857142857143,
+            "acc_stderr,none": 0.02892058322067558
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8159203980099502,
+            "acc_stderr,none": 0.027403859410786838
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.84,
+            "acc_stderr,none": 0.03684529491774709
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.513162067871868,
+            "acc_stderr,none": 0.008641503326837477
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695236
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6222222222222222,
+            "acc_stderr,none": 0.04188307537595853
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6447368421052632,
+            "acc_stderr,none": 0.03894734487013316
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6944444444444444,
+            "acc_stderr,none": 0.03852084696008534
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.47,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.31,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.43137254901960786,
+            "acc_stderr,none": 0.04928099597287534
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.04512608598542128
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.48936170212765956,
+            "acc_stderr,none": 0.03267862331014063
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.6344827586206897,
+            "acc_stderr,none": 0.04013124195424385
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.42328042328042326,
+            "acc_stderr,none": 0.025446365634406783
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7129032258064516,
+            "acc_stderr,none": 0.02573654274559452
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4827586206896552,
+            "acc_stderr,none": 0.035158955511657
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.63,
+            "acc_stderr,none": 0.04852365870939099
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3814814814814815,
+            "acc_stderr,none": 0.029616718927497593
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.45695364238410596,
+            "acc_stderr,none": 0.04067325174247443
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.4861111111111111,
+            "acc_stderr,none": 0.03408655867977749
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.4375,
+            "acc_stderr,none": 0.04708567521880525
+        }
+    },
+    "task_info": {
+        "model": "ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit",
+        "revision": "main",
+        "private": false,
+        "params": 5.74,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "GPTQ",
+        "precision": "4bit",
+        "model_params": 7.04,
+        "model_size": 5.74,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Waiting",
+        "submitted_time": "2024-05-16T08:11:55Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "checkpoint_format": "gptq",
+        "damp_percent": 0.01,
+        "desc_act": true,
+        "exllama_config": {
+            "version": 2
+        },
+        "group_size": 128,
+        "model_file_base_name": null,
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "static_groups": false,
+        "sym": true,
+        "true_sequential": true,
+        "use_exllama": true
+    },
+    "versions": {
+        "harness|piqa|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716167855.9616175,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=ISTA-DASLab/Llama-3-8B-Instruct-GPTQ-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
Intel/results_2024-04-30-16-11-38.json
ADDED
@@ -0,0 +1,596 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-30-16-11-38",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Intel/Mistral-7B-Instruct-v0.2-int4-inc",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 4.16,
+        "model_params": 7.04,
+        "quant_type": "AutoRound",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.6594502792868876,
+            "acc_stderr,none": 0.015342480583463202,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5116279069767442,
+            "acc_stderr,none": 0.017498767175740084,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6567416849233221,
+            "acc_stderr,none": 0.004738264944737176,
+            "acc_norm,none": 0.8312089225253934,
+            "acc_norm_stderr,none": 0.003738017734037969,
+            "alias": "hellaswag"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.434887262380849,
+            "perplexity_stderr,none": 0.07410456478433652,
+            "acc,none": 0.7089074325635553,
+            "acc_stderr,none": 0.0063288149295274675,
+            "alias": "lambada_openai"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5537542662116041,
+            "acc_stderr,none": 0.014526705548539982,
+            "acc_norm,none": 0.5656996587030717,
+            "acc_norm_stderr,none": 0.014484703048857355,
+            "alias": "arc_challenge"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.342,
+            "acc_stderr,none": 0.02123614719989926,
+            "acc_norm,none": 0.458,
+            "acc_norm_stderr,none": 0.02230396677426996,
+            "alias": "openbookqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8525993883792049,
+            "acc_stderr,none": 0.006200328377083518,
+            "alias": "boolq"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8143939393939394,
+            "acc_stderr,none": 0.007977770454202353,
+            "acc_norm,none": 0.7655723905723906,
+            "acc_norm_stderr,none": 0.008692920419348174,
+            "alias": "arc_easy"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.739542225730071,
+            "acc_stderr,none": 0.012334833671998292,
+            "alias": "winogrande"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8073993471164309,
+            "acc_stderr,none": 0.009200649707017573,
+            "acc_norm,none": 0.8106637649619152,
+            "acc_norm_stderr,none": 0.009140767676615017,
+            "alias": "piqa"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5865973508047286,
+            "acc_stderr,none": 0.003952459169410318,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5385759829968119,
+            "acc_stderr,none": 0.006893694786566793
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.38095238095238093,
+            "acc_stderr,none": 0.043435254289490965
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7151515151515152,
+            "acc_stderr,none": 0.03524390844511781
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.03039153369274154
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7637130801687764,
+            "acc_stderr,none": 0.027652153144159256
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7355371900826446,
+            "acc_stderr,none": 0.040261875275912046
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6944444444444444,
+            "acc_stderr,none": 0.044531975073749834
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.754601226993865,
+            "acc_stderr,none": 0.03380939813943354
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6445086705202312,
+            "acc_stderr,none": 0.025770292082977243
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.3675977653631285,
+            "acc_stderr,none": 0.01612554382355294
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6559485530546624,
+            "acc_stderr,none": 0.02698147804364803
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6697530864197531,
+            "acc_stderr,none": 0.026168298456732842
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.41460234680573665,
+            "acc_stderr,none": 0.012582597058908284
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.8070175438596491,
+            "acc_stderr,none": 0.030267457554898465
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6598004505954297,
+            "acc_stderr,none": 0.008186771432404356
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6716981132075471,
+            "acc_stderr,none": 0.02890159361241178
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5664739884393064,
+            "acc_stderr,none": 0.03778621079092056
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6053811659192825,
+            "acc_stderr,none": 0.03280400504755291
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7281553398058253,
+            "acc_stderr,none": 0.044052680241409216
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8589743589743589,
+            "acc_stderr,none": 0.02280138253459753
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.047258156262526066
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7841634738186463,
+            "acc_stderr,none": 0.014711684386139956
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6535947712418301,
+            "acc_stderr,none": 0.027245613047215355
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.450354609929078,
+            "acc_stderr,none": 0.029680105565029036
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6654411764705882,
+            "acc_stderr,none": 0.02866199620233531
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4457831325301205,
+            "acc_stderr,none": 0.03869543323472101
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.68020799480013,
+            "acc_stderr,none": 0.00818336991166656
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.42105263157894735,
+            "acc_stderr,none": 0.046446020912223177
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7626262626262627,
+            "acc_stderr,none": 0.03031371053819889
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7927461139896373,
+            "acc_stderr,none": 0.029252823291803638
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.558974358974359,
+            "acc_stderr,none": 0.025174048384000752
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6428571428571429,
+            "acc_stderr,none": 0.031124619309328177
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7944954128440367,
+            "acc_stderr,none": 0.017324352325016015
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6793893129770993,
+            "acc_stderr,none": 0.04093329229834278
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5816993464052288,
+            "acc_stderr,none": 0.019955975145835546
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.7,
+            "acc_stderr,none": 0.04389311454644287
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6693877551020408,
+            "acc_stderr,none": 0.0301164262965406
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8308457711442786,
+            "acc_stderr,none": 0.02650859065623325
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.84,
+            "acc_stderr,none": 0.03684529491774709
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.49476688867745006,
+            "acc_stderr,none": 0.008669668588404067
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.0446196043338474
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5925925925925926,
+            "acc_stderr,none": 0.04244633238353228
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6118421052631579,
+            "acc_stderr,none": 0.03965842097512744
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6388888888888888,
+            "acc_stderr,none": 0.04016660030451233
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.53,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709391
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.4411764705882353,
+            "acc_stderr,none": 0.049406356306056595
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252609
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.4978723404255319,
+            "acc_stderr,none": 0.03268572658667492
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5793103448275863,
+            "acc_stderr,none": 0.04113914981189261
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.3994708994708995,
+            "acc_stderr,none": 0.02522545028406788
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.6806451612903226,
+            "acc_stderr,none": 0.026522709674667768
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4729064039408867,
+            "acc_stderr,none": 0.035128190778761066
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.048783173121456316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.31851851851851853,
+            "acc_stderr,none": 0.02840653309060846
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.3443708609271523,
+            "acc_stderr,none": 0.03879687024073327
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.49074074074074076,
+            "acc_stderr,none": 0.034093869469927006
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5089285714285714,
+            "acc_stderr,none": 0.04745033255489123
+        }
+    },
+    "task_info": {
+        "model": "Intel/Mistral-7B-Instruct-v0.2-int4-inc",
+        "local": true,
+        "revision": "main",
+        "private": false,
+        "params": 7,
+        "architectures": "MistralForCausalLM",
+        "quant_type": "AutoRound",
+        "precision": "4bit",
+        "model_params": 7,
+        "model_size": 4.524,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-23T15:44:22Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "autoround_version": "0.1",
+        "bits": 4,
+        "damp_percent": 0.01,
+        "desc_act": false,
+        "enable_minmax_tuning": true,
+        "group_size": 128,
+        "is_marlin_format": false,
+        "iters": 1000,
+        "lr": 0.001,
+        "minmax_lr": 0.001,
+        "model_file_base_name": "model",
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "scale_dtype": "torch.float32",
+        "static_groups": false,
+        "sym": false,
+        "true_sequential": false,
+        "use_quant_input": true
+    },
+    "versions": {
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714460111.0164344,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Intel/Mistral-7B-Instruct-v0.2-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
Intel/results_2024-04-30-18-06-33.json
ADDED
@@ -0,0 +1,596 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-30-18-06-33",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Intel/opt-13b-int4-inc",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 7.6,
+        "model_params": 12.7,
+        "quant_type": "AutoRound",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.3409183456381729,
+            "acc_stderr,none": 0.013323058554365342,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.20563035495716034,
+            "acc_stderr,none": 0.014148482219460969,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.6801223241590214,
+            "acc_stderr,none": 0.00815789330083753,
+            "alias": "boolq"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5177255526787492,
+            "acc_stderr,none": 0.004986644894743126,
+            "acc_norm,none": 0.6910973909579765,
+            "acc_norm_stderr,none": 0.004610966122378296,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.6448303078137332,
+            "acc_stderr,none": 0.013450047479569256,
+            "alias": "winogrande"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.24562028201110953,
+            "acc_stderr,none": 0.0036317764545446356,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.24208289054197663,
+            "acc_stderr,none": 0.0062440898985705465
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.30952380952380953,
+            "acc_stderr,none": 0.04134913018303316
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.23030303030303031,
+            "acc_stderr,none": 0.03287666758603488
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.23039215686274508,
+            "acc_stderr,none": 0.02955429260569507
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.2109704641350211,
+            "acc_stderr,none": 0.02655837250266192
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.1652892561983471,
+            "acc_stderr,none": 0.03390780612972776
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.24074074074074073,
+            "acc_stderr,none": 0.041331194402438376
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.22699386503067484,
+            "acc_stderr,none": 0.03291099578615769
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.2832369942196532,
+            "acc_stderr,none": 0.024257901705323374
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23798882681564246,
+            "acc_stderr,none": 0.014242630070574885
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.22508038585209003,
+            "acc_stderr,none": 0.023720088516179034
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.24691358024691357,
+            "acc_stderr,none": 0.023993501709042117
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.24315514993481094,
+            "acc_stderr,none": 0.010956556654417362
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.2807017543859649,
+            "acc_stderr,none": 0.034462962170884265
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.2378500160926939,
+            "acc_stderr,none": 0.007637855403720031
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.31,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.2188679245283019,
+            "acc_stderr,none": 0.025447863825108614
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.24277456647398843,
+            "acc_stderr,none": 0.0326926380614177
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.19,
+            "acc_stderr,none": 0.039427724440366234
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.273542600896861,
+            "acc_stderr,none": 0.029918586707798827
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.22330097087378642,
+            "acc_stderr,none": 0.04123553189891431
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2264957264957265,
+            "acc_stderr,none": 0.027421007295392912
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.24521072796934865,
+            "acc_stderr,none": 0.015384352284543932
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.24836601307189543,
+            "acc_stderr,none": 0.02473998135511359
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.2198581560283688,
+            "acc_stderr,none": 0.024706141070705477
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.18382352941176472,
+            "acc_stderr,none": 0.02352924218519311
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.2710843373493976,
+            "acc_stderr,none": 0.034605799075530276
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.2551186220344491,
+            "acc_stderr,none": 0.007849877136827197
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.22807017543859648,
+            "acc_stderr,none": 0.03947152782669415
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.26262626262626265,
+            "acc_stderr,none": 0.03135305009533084
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.25906735751295334,
+            "acc_stderr,none": 0.0316187791793541
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.3128205128205128,
+            "acc_stderr,none": 0.023507579020645368
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.2184873949579832,
+            "acc_stderr,none": 0.026841514322958934
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.23669724770642203,
+            "acc_stderr,none": 0.018224078117299074
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.2824427480916031,
+            "acc_stderr,none": 0.03948406125768362
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.22712418300653595,
+            "acc_stderr,none": 0.016949853279212383
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.24545454545454545,
+            "acc_stderr,none": 0.041220665028782834
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.23673469387755103,
+            "acc_stderr,none": 0.02721283588407315
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.2935323383084577,
+            "acc_stderr,none": 0.03220024104534205
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695236
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.24928639391056137,
+            "acc_stderr,none": 0.007710745946445535
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.04093601807403326
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.2074074074074074,
+            "acc_stderr,none": 0.03502553170678318
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.26973684210526316,
+            "acc_stderr,none": 0.036117805602848975
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2638888888888889,
+            "acc_stderr,none": 0.03685651095897532
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.23,
+            "acc_stderr,none": 0.04229525846816505
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.23,
+            "acc_stderr,none": 0.04229525846816506
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.2549019607843137,
+            "acc_stderr,none": 0.04336432707993177
|
302 |
+
},
|
303 |
+
"harness|mmlu_computer_security|0": {
|
304 |
+
"alias": " - computer_security",
|
305 |
+
"acc,none": 0.29,
|
306 |
+
"acc_stderr,none": 0.045604802157206845
|
307 |
+
},
|
308 |
+
"harness|mmlu_conceptual_physics|0": {
|
309 |
+
"alias": " - conceptual_physics",
|
310 |
+
"acc,none": 0.23404255319148937,
|
311 |
+
"acc_stderr,none": 0.02767845257821239
|
312 |
+
},
|
313 |
+
"harness|mmlu_electrical_engineering|0": {
|
314 |
+
"alias": " - electrical_engineering",
|
315 |
+
"acc,none": 0.2482758620689655,
|
316 |
+
"acc_stderr,none": 0.03600105692727771
|
317 |
+
},
|
318 |
+
"harness|mmlu_elementary_mathematics|0": {
|
319 |
+
"alias": " - elementary_mathematics",
|
320 |
+
"acc,none": 0.2328042328042328,
|
321 |
+
"acc_stderr,none": 0.02176596167215453
|
322 |
+
},
|
323 |
+
"harness|mmlu_high_school_biology|0": {
|
324 |
+
"alias": " - high_school_biology",
|
325 |
+
"acc,none": 0.2838709677419355,
|
326 |
+
"acc_stderr,none": 0.025649381063029244
|
327 |
+
},
|
328 |
+
"harness|mmlu_high_school_chemistry|0": {
|
329 |
+
"alias": " - high_school_chemistry",
|
330 |
+
"acc,none": 0.21182266009852216,
|
331 |
+
"acc_stderr,none": 0.02874898368994107
|
332 |
+
},
|
333 |
+
"harness|mmlu_high_school_computer_science|0": {
|
334 |
+
"alias": " - high_school_computer_science",
|
335 |
+
"acc,none": 0.23,
|
336 |
+
"acc_stderr,none": 0.042295258468165065
|
337 |
+
},
|
338 |
+
"harness|mmlu_high_school_mathematics|0": {
|
339 |
+
"alias": " - high_school_mathematics",
|
340 |
+
"acc,none": 0.25555555555555554,
|
341 |
+
"acc_stderr,none": 0.026593939101844065
|
342 |
+
},
|
343 |
+
"harness|mmlu_high_school_physics|0": {
|
344 |
+
"alias": " - high_school_physics",
|
345 |
+
"acc,none": 0.2980132450331126,
|
346 |
+
"acc_stderr,none": 0.03734535676787198
|
347 |
+
},
|
348 |
+
"harness|mmlu_high_school_statistics|0": {
|
349 |
+
"alias": " - high_school_statistics",
|
350 |
+
"acc,none": 0.25,
|
351 |
+
"acc_stderr,none": 0.029531221160930918
|
352 |
+
},
|
353 |
+
"harness|mmlu_machine_learning|0": {
|
354 |
+
"alias": " - machine_learning",
|
355 |
+
"acc,none": 0.3125,
|
356 |
+
"acc_stderr,none": 0.043994650575715215
|
357 |
+
},
|
358 |
+
"harness|arc:easy|0": {
|
359 |
+
"acc,none": 0.6717171717171717,
|
360 |
+
"acc_stderr,none": 0.009635749509262163,
|
361 |
+
"acc_norm,none": 0.6119528619528619,
|
362 |
+
"acc_norm_stderr,none": 0.009999295905750659,
|
363 |
+
"alias": "arc_easy"
|
364 |
+
},
|
365 |
+
"harness|openbookqa|0": {
|
366 |
+
"acc,none": 0.278,
|
367 |
+
"acc_stderr,none": 0.02005583388807089,
|
368 |
+
"acc_norm,none": 0.382,
|
369 |
+
"acc_norm_stderr,none": 0.021750820591250834,
|
370 |
+
"alias": "openbookqa"
|
371 |
+
},
|
372 |
+
"harness|piqa|0": {
|
373 |
+
"acc,none": 0.7573449401523396,
|
374 |
+
"acc_stderr,none": 0.01000200256970869,
|
375 |
+
"acc_norm,none": 0.764961915125136,
|
376 |
+
"acc_norm_stderr,none": 0.009893146688805345,
|
377 |
+
"alias": "piqa"
|
378 |
+
},
|
379 |
+
"harness|lambada:openai|0": {
|
380 |
+
"perplexity,none": 3.8881756480152463,
|
381 |
+
"perplexity_stderr,none": 0.08322629498973114,
|
382 |
+
"acc,none": 0.6949349893266059,
|
383 |
+
"acc_stderr,none": 0.0064147592507735746,
|
384 |
+
"alias": "lambada_openai"
|
385 |
+
},
|
386 |
+
"harness|arc:challenge|0": {
|
387 |
+
"acc,none": 0.3250853242320819,
|
388 |
+
"acc_stderr,none": 0.013688147309729117,
|
389 |
+
"acc_norm,none": 0.3532423208191126,
|
390 |
+
"acc_norm_stderr,none": 0.013967822714840053,
|
391 |
+
"alias": "arc_challenge"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Intel/opt-13b-int4-inc",
|
396 |
+
"local": true,
|
397 |
+
"revision": "main",
|
398 |
+
"private": false,
|
399 |
+
"params": 13,
|
400 |
+
"architectures": "OptForCausalLM",
|
401 |
+
"quant_type": "AutoRound",
|
402 |
+
"precision": "4bit",
|
403 |
+
"model_params": 13,
|
404 |
+
"model_size": 8,
|
405 |
+
"weight_dtype": "int4",
|
406 |
+
"compute_dtype": "float16",
|
407 |
+
"gguf_ftype": "*Q4_0.gguf",
|
408 |
+
"hardware": "gpu",
|
409 |
+
"status": "Pending",
|
410 |
+
"submitted_time": "2024-04-23T15:44:22Z",
|
411 |
+
"model_type": "quantization",
|
412 |
+
"job_id": -1,
|
413 |
+
"job_start_time": null,
|
414 |
+
"scripts": "ITREX"
|
415 |
+
},
|
416 |
+
"quantization_config": {
|
417 |
+
"autoround_version": "0.1",
|
418 |
+
"bits": 4,
|
419 |
+
"damp_percent": 0.01,
|
420 |
+
"desc_act": false,
|
421 |
+
"enable_minmax_tuning": true,
|
422 |
+
"group_size": 128,
|
423 |
+
"is_marlin_format": false,
|
424 |
+
"iters": 1000,
|
425 |
+
"lr": 0.001,
|
426 |
+
"minmax_lr": 0.002,
|
427 |
+
"model_file_base_name": "model",
|
428 |
+
"model_name_or_path": null,
|
429 |
+
"quant_method": "gptq",
|
430 |
+
"scale_dtype": "torch.float32",
|
431 |
+
"static_groups": false,
|
432 |
+
"sym": false,
|
433 |
+
"true_sequential": false,
|
434 |
+
"use_quant_input": false
|
435 |
+
},
|
436 |
+
"versions": {
|
437 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
438 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
439 |
+
"harness|boolq|0": 2.0,
|
440 |
+
"harness|hellaswag|0": 1.0,
|
441 |
+
"harness|winogrande|0": 1.0,
|
442 |
+
"harness|mmlu|0": null,
|
443 |
+
"harness|mmlu_humanities|0": null,
|
444 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
445 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
446 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
447 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
448 |
+
"harness|mmlu_international_law|0": 0.0,
|
449 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
450 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
451 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
452 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
453 |
+
"harness|mmlu_philosophy|0": 0.0,
|
454 |
+
"harness|mmlu_prehistory|0": 0.0,
|
455 |
+
"harness|mmlu_professional_law|0": 0.0,
|
456 |
+
"harness|mmlu_world_religions|0": 0.0,
|
457 |
+
"harness|mmlu_other|0": null,
|
458 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
459 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
460 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
461 |
+
"harness|mmlu_global_facts|0": 0.0,
|
462 |
+
"harness|mmlu_human_aging|0": 0.0,
|
463 |
+
"harness|mmlu_management|0": 0.0,
|
464 |
+
"harness|mmlu_marketing|0": 0.0,
|
465 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
466 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
467 |
+
"harness|mmlu_nutrition|0": 0.0,
|
468 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
469 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
470 |
+
"harness|mmlu_virology|0": 0.0,
|
471 |
+
"harness|mmlu_social_sciences|0": null,
|
472 |
+
"harness|mmlu_econometrics|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
478 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
479 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
480 |
+
"harness|mmlu_public_relations|0": 0.0,
|
481 |
+
"harness|mmlu_security_studies|0": 0.0,
|
482 |
+
"harness|mmlu_sociology|0": 0.0,
|
483 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
484 |
+
"harness|mmlu_stem|0": null,
|
485 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
486 |
+
"harness|mmlu_anatomy|0": 0.0,
|
487 |
+
"harness|mmlu_astronomy|0": 0.0,
|
488 |
+
"harness|mmlu_college_biology|0": 0.0,
|
489 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
490 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
491 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
492 |
+
"harness|mmlu_college_physics|0": 0.0,
|
493 |
+
"harness|mmlu_computer_security|0": 0.0,
|
494 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
495 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
496 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
497 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
501 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
502 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
503 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
504 |
+
"harness|arc:easy|0": 1.0,
|
505 |
+
"harness|openbookqa|0": 1.0,
|
506 |
+
"harness|piqa|0": 1.0,
|
507 |
+
"harness|lambada:openai|0": 1.0,
|
508 |
+
"harness|arc:challenge|0": 1.0
|
509 |
+
},
|
510 |
+
"n-shot": {
|
511 |
+
"arc_challenge": 0,
|
512 |
+
"arc_easy": 0,
|
513 |
+
"boolq": 0,
|
514 |
+
"hellaswag": 0,
|
515 |
+
"lambada_openai": 0,
|
516 |
+
"mmlu": 0,
|
517 |
+
"mmlu_abstract_algebra": 0,
|
518 |
+
"mmlu_anatomy": 0,
|
519 |
+
"mmlu_astronomy": 0,
|
520 |
+
"mmlu_business_ethics": 0,
|
521 |
+
"mmlu_clinical_knowledge": 0,
|
522 |
+
"mmlu_college_biology": 0,
|
523 |
+
"mmlu_college_chemistry": 0,
|
524 |
+
"mmlu_college_computer_science": 0,
|
525 |
+
"mmlu_college_mathematics": 0,
|
526 |
+
"mmlu_college_medicine": 0,
|
527 |
+
"mmlu_college_physics": 0,
|
528 |
+
"mmlu_computer_security": 0,
|
529 |
+
"mmlu_conceptual_physics": 0,
|
530 |
+
"mmlu_econometrics": 0,
|
531 |
+
"mmlu_electrical_engineering": 0,
|
532 |
+
"mmlu_elementary_mathematics": 0,
|
533 |
+
"mmlu_formal_logic": 0,
|
534 |
+
"mmlu_global_facts": 0,
|
535 |
+
"mmlu_high_school_biology": 0,
|
536 |
+
"mmlu_high_school_chemistry": 0,
|
537 |
+
"mmlu_high_school_computer_science": 0,
|
538 |
+
"mmlu_high_school_european_history": 0,
|
539 |
+
"mmlu_high_school_geography": 0,
|
540 |
+
"mmlu_high_school_government_and_politics": 0,
|
541 |
+
"mmlu_high_school_macroeconomics": 0,
|
542 |
+
"mmlu_high_school_mathematics": 0,
|
543 |
+
"mmlu_high_school_microeconomics": 0,
|
544 |
+
"mmlu_high_school_physics": 0,
|
545 |
+
"mmlu_high_school_psychology": 0,
|
546 |
+
"mmlu_high_school_statistics": 0,
|
547 |
+
"mmlu_high_school_us_history": 0,
|
548 |
+
"mmlu_high_school_world_history": 0,
|
549 |
+
"mmlu_human_aging": 0,
|
550 |
+
"mmlu_human_sexuality": 0,
|
551 |
+
"mmlu_humanities": 0,
|
552 |
+
"mmlu_international_law": 0,
|
553 |
+
"mmlu_jurisprudence": 0,
|
554 |
+
"mmlu_logical_fallacies": 0,
|
555 |
+
"mmlu_machine_learning": 0,
|
556 |
+
"mmlu_management": 0,
|
557 |
+
"mmlu_marketing": 0,
|
558 |
+
"mmlu_medical_genetics": 0,
|
559 |
+
"mmlu_miscellaneous": 0,
|
560 |
+
"mmlu_moral_disputes": 0,
|
561 |
+
"mmlu_moral_scenarios": 0,
|
562 |
+
"mmlu_nutrition": 0,
|
563 |
+
"mmlu_other": 0,
|
564 |
+
"mmlu_philosophy": 0,
|
565 |
+
"mmlu_prehistory": 0,
|
566 |
+
"mmlu_professional_accounting": 0,
|
567 |
+
"mmlu_professional_law": 0,
|
568 |
+
"mmlu_professional_medicine": 0,
|
569 |
+
"mmlu_professional_psychology": 0,
|
570 |
+
"mmlu_public_relations": 0,
|
571 |
+
"mmlu_security_studies": 0,
|
572 |
+
"mmlu_social_sciences": 0,
|
573 |
+
"mmlu_sociology": 0,
|
574 |
+
"mmlu_stem": 0,
|
575 |
+
"mmlu_us_foreign_policy": 0,
|
576 |
+
"mmlu_virology": 0,
|
577 |
+
"mmlu_world_religions": 0,
|
578 |
+
"openbookqa": 0,
|
579 |
+
"piqa": 0,
|
580 |
+
"truthfulqa_mc1": 0,
|
581 |
+
"truthfulqa_mc2": 0,
|
582 |
+
"winogrande": 0
|
583 |
+
},
|
584 |
+
"date": 1714465126.9301486,
|
585 |
+
"config": {
|
586 |
+
"model": "hf",
|
587 |
+
"model_args": "pretrained=Intel/opt-13b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
588 |
+
"batch_size": 2,
|
589 |
+
"batch_sizes": [],
|
590 |
+
"device": "cuda",
|
591 |
+
"use_cache": null,
|
592 |
+
"limit": null,
|
593 |
+
"bootstrap_iters": 100000,
|
594 |
+
"gen_kwargs": null
|
595 |
+
}
|
596 |
+
}
|
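The "config" block above records how this result was produced: the lm-evaluation-harness "hf" backend with float16 compute, batch size 2, on CUDA, with every task run zero-shot. Below is a minimal reproduction sketch using the harness's Python API; it assumes lm-evaluation-harness (v0.4+) is installed, and the task list is illustrative rather than the exact set used for this file.

# Reproduction sketch (assumes lm-evaluation-harness v0.4+; task list illustrative).
# Mirrors the "config" block above: hf backend, float16, batch_size 2, zero-shot.
import json

import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=Intel/opt-13b-int4-inc,trust_remote_code=True,dtype=float16",
    tasks=["arc_easy", "arc_challenge", "piqa", "openbookqa", "lambada_openai", "mmlu"],
    num_fewshot=0,  # matches the all-zero "n-shot" table above
    batch_size=2,
    device="cuda",
)

# The returned "results" dictionary corresponds to the "results" section of the JSON above.
print(json.dumps(results["results"], indent=4, default=str))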
Intel/results_2024-04-30-18-42-01.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-04-30-18-42-01",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/opt-1.3b-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 1.05,
        "model_params": 1.22,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|lambada:openai|0": {
            "perplexity,none": 8.27607614433492,
            "perplexity_stderr,none": 0.22453245796912197,
            "acc,none": 0.5389093731806714,
            "acc_stderr,none": 0.006944853492951909,
            "alias": "lambada_openai"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.214,
            "acc_stderr,none": 0.018359797502387035,
            "acc_norm,none": 0.326,
            "acc_norm_stderr,none": 0.020984009562393557,
            "alias": "openbookqa"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.5673400673400674,
            "acc_stderr,none": 0.01016630793264287,
            "acc_norm,none": 0.5050505050505051,
            "acc_norm_stderr,none": 0.010259260102565879,
            "alias": "arc_easy"
        },
        "harness|winogrande|0": {
            "acc,none": 0.584846093133386,
            "acc_stderr,none": 0.013848684086658587,
            "alias": "winogrande"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.23378212974296206,
            "acc_stderr,none": 0.01481619599193159,
            "alias": "truthfulqa_mc1"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.23208191126279865,
            "acc_stderr,none": 0.012336718284948856,
            "acc_norm,none": 0.28668941979522183,
            "acc_norm_stderr,none": 0.013214986329274777,
            "alias": "arc_challenge"
        },
        "harness|piqa|0": {
            "acc,none": 0.7067464635473341,
            "acc_stderr,none": 0.010621818421101924,
            "acc_norm,none": 0.7100108813928183,
            "acc_norm_stderr,none": 0.010586899128169326,
            "alias": "piqa"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.4075881298546106,
            "acc_stderr,none": 0.004903815885983278,
            "acc_norm,none": 0.5202150965943039,
            "acc_norm_stderr,none": 0.004985701593898005,
            "alias": "hellaswag"
        },
        "harness|mmlu|0": {
            "acc,none": 0.25096140150975643,
            "acc_stderr,none": 0.003654540601790187,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.2503719447396387,
            "acc_stderr,none": 0.006315777980926855
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.2777777777777778,
            "acc_stderr,none": 0.04006168083848877
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.21212121212121213,
            "acc_stderr,none": 0.03192271569548299
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.25980392156862747,
            "acc_stderr,none": 0.03077855467869326
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.24472573839662448,
            "acc_stderr,none": 0.02798569938703642
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.2809917355371901,
            "acc_stderr,none": 0.04103203830514511
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.35185185185185186,
            "acc_stderr,none": 0.046166311118017125
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.25766871165644173,
            "acc_stderr,none": 0.03436150827846917
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.2630057803468208,
            "acc_stderr,none": 0.023703099525258165
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2435754189944134,
            "acc_stderr,none": 0.01435591196476786
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.21543408360128619,
            "acc_stderr,none": 0.02335022547547143
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.2808641975308642,
            "acc_stderr,none": 0.02500646975579922
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.242503259452412,
            "acc_stderr,none": 0.01094657096634879
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.2573099415204678,
            "acc_stderr,none": 0.03352799844161865
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.2520115867396202,
            "acc_stderr,none": 0.007772842543145439
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.25,
            "acc_stderr,none": 0.04351941398892446
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.23773584905660378,
            "acc_stderr,none": 0.02619980880756192
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.23121387283236994,
            "acc_stderr,none": 0.03214737302029468
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.27,
            "acc_stderr,none": 0.044619604333847394
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.34977578475336324,
            "acc_stderr,none": 0.03200736719484503
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.2524271844660194,
            "acc_stderr,none": 0.043012503996908764
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.24358974358974358,
            "acc_stderr,none": 0.028120966503914404
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.28,
            "acc_stderr,none": 0.04512608598542127
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.25287356321839083,
            "acc_stderr,none": 0.01554337731371968
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.21895424836601307,
            "acc_stderr,none": 0.02367908986180772
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.2801418439716312,
            "acc_stderr,none": 0.026789172351140228
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.17279411764705882,
            "acc_stderr,none": 0.02296606758558177
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.2891566265060241,
            "acc_stderr,none": 0.03529486801511115
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.2365940851478713,
            "acc_stderr,none": 0.007656528011004042
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.22807017543859648,
            "acc_stderr,none": 0.03947152782669415
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.23737373737373738,
            "acc_stderr,none": 0.030313710538198896
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.20207253886010362,
            "acc_stderr,none": 0.02897908979429673
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.24358974358974358,
            "acc_stderr,none": 0.021763733684173912
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.2184873949579832,
            "acc_stderr,none": 0.02684151432295894
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.24036697247706423,
            "acc_stderr,none": 0.01832060732096407
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.183206106870229,
            "acc_stderr,none": 0.033927709264947335
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.28104575163398693,
            "acc_stderr,none": 0.018185218954318082
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.2727272727272727,
            "acc_stderr,none": 0.04265792110940589
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.1836734693877551,
            "acc_stderr,none": 0.024789071332007636
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.23383084577114427,
            "acc_stderr,none": 0.029929415408348387
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.2,
            "acc_stderr,none": 0.04020151261036845
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.26482714874722485,
            "acc_stderr,none": 0.007847018783828765
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.29,
            "acc_stderr,none": 0.045604802157206824
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.34074074074074073,
            "acc_stderr,none": 0.040943762699967946
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.2631578947368421,
            "acc_stderr,none": 0.03583496176361062
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.25,
            "acc_stderr,none": 0.03621034121889507
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.11,
            "acc_stderr,none": 0.031446603773522035
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.27,
            "acc_stderr,none": 0.04461960433384741
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.24,
            "acc_stderr,none": 0.042923469599092816
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.21568627450980393,
            "acc_stderr,none": 0.04092563958237655
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.29,
            "acc_stderr,none": 0.045604802157206845
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.28085106382978725,
            "acc_stderr,none": 0.02937917046412482
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.22758620689655173,
            "acc_stderr,none": 0.03493950380131184
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.29894179894179895,
            "acc_stderr,none": 0.023577604791655805
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.2645161290322581,
            "acc_stderr,none": 0.02509189237885928
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.3054187192118227,
            "acc_stderr,none": 0.03240661565868408
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.32,
            "acc_stderr,none": 0.04688261722621504
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.24074074074074073,
            "acc_stderr,none": 0.02606715922227579
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.25165562913907286,
            "acc_stderr,none": 0.035433042343899844
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.23148148148148148,
            "acc_stderr,none": 0.028765111718046955
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.26785714285714285,
            "acc_stderr,none": 0.04203277291467764
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.3799455888517004,
            "acc_stderr,none": 0.014185571946003417,
            "alias": "truthfulqa_mc2"
        },
        "harness|boolq|0": {
            "acc,none": 0.5785932721712538,
            "acc_stderr,none": 0.008636344580414675,
            "alias": "boolq"
        }
    },
    "task_info": {
        "model": "Intel/opt-1.3b-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 1.3,
        "architectures": "OptForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 1.3,
        "model_size": 1.05,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.002,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float32",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": false
    },
    "versions": {
        "harness|lambada:openai|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|arc:easy|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|boolq|0": 2.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714471925.0680776,
    "config": {
        "model": "hf",
        "model_args": "pretrained=Intel/opt-1.3b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
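The "quantization_config" above is serialized in GPTQ format ("quant_method": "gptq", 4-bit weights, group size 128), so a checkpoint like this can typically be loaded through the standard transformers path. A minimal loading sketch follows, assuming transformers plus a GPTQ kernel backend (e.g. auto-gptq) are installed; the prompt and generation settings are illustrative, and this is not the evaluation code used for these results.

# Loading sketch for an AutoRound-produced, GPTQ-format int4 checkpoint.
# Assumes transformers + a GPTQ backend such as auto-gptq are installed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Intel/opt-1.3b-int4-inc"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",          # int4 kernels run on GPU, per "hardware": "gpu"
    torch_dtype=torch.float16,  # matches "compute_dtype": "float16"
    trust_remote_code=True,
)

inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=8)[0]))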
Intel/results_2024-04-30-20-44-29.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-04-30-20-44-29",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/bloom-7b1-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 6.8,
        "model_params": 6.09,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|winogrande|0": {
            "acc,none": 0.6369376479873717,
            "acc_stderr,none": 0.013515191866479221,
            "alias": "winogrande"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2350061199510404,
            "acc_stderr,none": 0.014843061507731606,
            "alias": "truthfulqa_mc1"
        },
        "harness|boolq|0": {
            "acc,none": 0.6293577981651376,
            "acc_stderr,none": 0.008447316806409933,
            "alias": "boolq"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.3054607508532423,
            "acc_stderr,none": 0.013460080478002514,
            "acc_norm,none": 0.3361774744027304,
            "acc_norm_stderr,none": 0.01380485502620576,
            "alias": "arc_challenge"
        },
        "harness|piqa|0": {
            "acc,none": 0.7263329706202394,
            "acc_stderr,none": 0.010402184206229204,
            "acc_norm,none": 0.735582154515778,
            "acc_norm_stderr,none": 0.010289787244767168,
            "alias": "piqa"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.4618601872137024,
            "acc_stderr,none": 0.004975243508752004,
            "acc_norm,none": 0.619398526190002,
            "acc_norm_stderr,none": 0.004845424524764082,
            "alias": "hellaswag"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 6.685916235858979,
            "perplexity_stderr,none": 0.17913874719457962,
            "acc,none": 0.5728701727149234,
            "acc_stderr,none": 0.006891601045518706,
            "alias": "lambada_openai"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.244,
            "acc_stderr,none": 0.01922673489361459,
            "acc_norm,none": 0.356,
            "acc_norm_stderr,none": 0.021434712356072645,
            "alias": "openbookqa"
        },
        "harness|mmlu|0": {
            "acc,none": 0.259792052414186,
            "acc_stderr,none": 0.0036992967525030333,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.25696068012752393,
            "acc_stderr,none": 0.006371837439598881
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.31746031746031744,
            "acc_stderr,none": 0.04163453031302859
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.24242424242424243,
            "acc_stderr,none": 0.03346409881055953
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.24509803921568626,
            "acc_stderr,none": 0.03019028245350194
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.26582278481012656,
            "acc_stderr,none": 0.028756799629658335
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.2727272727272727,
            "acc_stderr,none": 0.04065578140908705
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.23148148148148148,
            "acc_stderr,none": 0.04077494709252628
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.2147239263803681,
            "acc_stderr,none": 0.03226219377286774
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.2745664739884393,
            "acc_stderr,none": 0.02402774515526502
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2435754189944134,
            "acc_stderr,none": 0.014355911964767864
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.22186495176848875,
            "acc_stderr,none": 0.023598858292863047
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.24382716049382716,
            "acc_stderr,none": 0.023891879541959586
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.273142112125163,
            "acc_stderr,none": 0.011380150567830398
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.25146198830409355,
            "acc_stderr,none": 0.033275044238468436
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.24557450917283552,
            "acc_stderr,none": 0.00770723810131652
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.28,
            "acc_stderr,none": 0.04512608598542127
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.27547169811320754,
            "acc_stderr,none": 0.027495663683724067
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.24855491329479767,
            "acc_stderr,none": 0.03295304696818318
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.21,
            "acc_stderr,none": 0.040936018074033256
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.17937219730941703,
            "acc_stderr,none": 0.0257498195691928
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.30097087378640774,
            "acc_stderr,none": 0.045416094465039476
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.2777777777777778,
            "acc_stderr,none": 0.02934311479809447
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.32,
            "acc_stderr,none": 0.046882617226215034
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.227330779054917,
            "acc_stderr,none": 0.014987270640946024
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.2875816993464052,
            "acc_stderr,none": 0.02591780611714716
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.26595744680851063,
            "acc_stderr,none": 0.026358065698880592
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.22426470588235295,
            "acc_stderr,none": 0.025336848563332355
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.1686746987951807,
            "acc_stderr,none": 0.029152009627856544
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.27494312642183943,
            "acc_stderr,none": 0.008053261916222852
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.2543859649122807,
            "acc_stderr,none": 0.040969851398436716
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.30808080808080807,
            "acc_stderr,none": 0.03289477330098615
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.32124352331606215,
            "acc_stderr,none": 0.033699508685490674
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.2564102564102564,
            "acc_stderr,none": 0.022139081103971545
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.27310924369747897,
            "acc_stderr,none": 0.028942004040998164
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.29541284403669726,
            "acc_stderr,none": 0.019560619182976
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.2366412213740458,
            "acc_stderr,none": 0.03727673575596917
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.25326797385620914,
            "acc_stderr,none": 0.01759348689536683
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.2818181818181818,
            "acc_stderr,none": 0.04309118709946459
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.2979591836734694,
            "acc_stderr,none": 0.02927956741106568
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.2537313432835821,
            "acc_stderr,none": 0.030769444967296024
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.27,
            "acc_stderr,none": 0.0446196043338474
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.26324135743736127,
            "acc_stderr,none": 0.007846675733198834
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.25,
            "acc_stderr,none": 0.04351941398892446
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.21481481481481482,
            "acc_stderr,none": 0.035478541985608236
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.29605263157894735,
            "acc_stderr,none": 0.037150621549989056
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.24305555555555555,
            "acc_stderr,none": 0.03586879280080341
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.3,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.27,
            "acc_stderr,none": 0.044619604333847394
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.22549019607843138,
            "acc_stderr,none": 0.041583075330832865
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.24,
            "acc_stderr,none": 0.04292346959909283
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.32340425531914896,
            "acc_stderr,none": 0.030579442773610334
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.25517241379310346,
            "acc_stderr,none": 0.03632984052707842
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.2566137566137566,
            "acc_stderr,none": 0.022494510767503154
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.22580645161290322,
            "acc_stderr,none": 0.023785577884181012
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.2660098522167488,
            "acc_stderr,none": 0.031089826002937523
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.29,
            "acc_stderr,none": 0.04560480215720684
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.22962962962962963,
            "acc_stderr,none": 0.025644108639267624
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2980132450331126,
            "acc_stderr,none": 0.03734535676787198
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.26851851851851855,
            "acc_stderr,none": 0.030225226160012386
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.29464285714285715,
            "acc_stderr,none": 0.043270409325787296
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.3900684489421272,
            "acc_stderr,none": 0.014022176384445112,
            "alias": "truthfulqa_mc2"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.6443602693602694,
            "acc_stderr,none": 0.009822854395535487,
            "acc_norm,none": 0.5732323232323232,
            "acc_norm_stderr,none": 0.010149141043955643,
            "alias": "arc_easy"
        }
    },
    "task_info": {
        "model": "Intel/bloom-7b1-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 7,
        "architectures": "BloomForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 7,
        "model_size": 6.8,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.001,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float32",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|winogrande|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
|
467 |
+
"harness|mmlu_marketing|0": 0.0,
|
468 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
469 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
470 |
+
"harness|mmlu_nutrition|0": 0.0,
|
471 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
472 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
473 |
+
"harness|mmlu_virology|0": 0.0,
|
474 |
+
"harness|mmlu_social_sciences|0": null,
|
475 |
+
"harness|mmlu_econometrics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
477 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
478 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
479 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
480 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
481 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
482 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
483 |
+
"harness|mmlu_public_relations|0": 0.0,
|
484 |
+
"harness|mmlu_security_studies|0": 0.0,
|
485 |
+
"harness|mmlu_sociology|0": 0.0,
|
486 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
487 |
+
"harness|mmlu_stem|0": null,
|
488 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
489 |
+
"harness|mmlu_anatomy|0": 0.0,
|
490 |
+
"harness|mmlu_astronomy|0": 0.0,
|
491 |
+
"harness|mmlu_college_biology|0": 0.0,
|
492 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
493 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
494 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
495 |
+
"harness|mmlu_college_physics|0": 0.0,
|
496 |
+
"harness|mmlu_computer_security|0": 0.0,
|
497 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
498 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
499 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
501 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
502 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
503 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
504 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
505 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
506 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
507 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
508 |
+
"harness|arc:easy|0": 1.0
|
509 |
+
},
|
510 |
+
"n-shot": {
|
511 |
+
"arc_challenge": 0,
|
512 |
+
"arc_easy": 0,
|
513 |
+
"boolq": 0,
|
514 |
+
"hellaswag": 0,
|
515 |
+
"lambada_openai": 0,
|
516 |
+
"mmlu": 0,
|
517 |
+
"mmlu_abstract_algebra": 0,
|
518 |
+
"mmlu_anatomy": 0,
|
519 |
+
"mmlu_astronomy": 0,
|
520 |
+
"mmlu_business_ethics": 0,
|
521 |
+
"mmlu_clinical_knowledge": 0,
|
522 |
+
"mmlu_college_biology": 0,
|
523 |
+
"mmlu_college_chemistry": 0,
|
524 |
+
"mmlu_college_computer_science": 0,
|
525 |
+
"mmlu_college_mathematics": 0,
|
526 |
+
"mmlu_college_medicine": 0,
|
527 |
+
"mmlu_college_physics": 0,
|
528 |
+
"mmlu_computer_security": 0,
|
529 |
+
"mmlu_conceptual_physics": 0,
|
530 |
+
"mmlu_econometrics": 0,
|
531 |
+
"mmlu_electrical_engineering": 0,
|
532 |
+
"mmlu_elementary_mathematics": 0,
|
533 |
+
"mmlu_formal_logic": 0,
|
534 |
+
"mmlu_global_facts": 0,
|
535 |
+
"mmlu_high_school_biology": 0,
|
536 |
+
"mmlu_high_school_chemistry": 0,
|
537 |
+
"mmlu_high_school_computer_science": 0,
|
538 |
+
"mmlu_high_school_european_history": 0,
|
539 |
+
"mmlu_high_school_geography": 0,
|
540 |
+
"mmlu_high_school_government_and_politics": 0,
|
541 |
+
"mmlu_high_school_macroeconomics": 0,
|
542 |
+
"mmlu_high_school_mathematics": 0,
|
543 |
+
"mmlu_high_school_microeconomics": 0,
|
544 |
+
"mmlu_high_school_physics": 0,
|
545 |
+
"mmlu_high_school_psychology": 0,
|
546 |
+
"mmlu_high_school_statistics": 0,
|
547 |
+
"mmlu_high_school_us_history": 0,
|
548 |
+
"mmlu_high_school_world_history": 0,
|
549 |
+
"mmlu_human_aging": 0,
|
550 |
+
"mmlu_human_sexuality": 0,
|
551 |
+
"mmlu_humanities": 0,
|
552 |
+
"mmlu_international_law": 0,
|
553 |
+
"mmlu_jurisprudence": 0,
|
554 |
+
"mmlu_logical_fallacies": 0,
|
555 |
+
"mmlu_machine_learning": 0,
|
556 |
+
"mmlu_management": 0,
|
557 |
+
"mmlu_marketing": 0,
|
558 |
+
"mmlu_medical_genetics": 0,
|
559 |
+
"mmlu_miscellaneous": 0,
|
560 |
+
"mmlu_moral_disputes": 0,
|
561 |
+
"mmlu_moral_scenarios": 0,
|
562 |
+
"mmlu_nutrition": 0,
|
563 |
+
"mmlu_other": 0,
|
564 |
+
"mmlu_philosophy": 0,
|
565 |
+
"mmlu_prehistory": 0,
|
566 |
+
"mmlu_professional_accounting": 0,
|
567 |
+
"mmlu_professional_law": 0,
|
568 |
+
"mmlu_professional_medicine": 0,
|
569 |
+
"mmlu_professional_psychology": 0,
|
570 |
+
"mmlu_public_relations": 0,
|
571 |
+
"mmlu_security_studies": 0,
|
572 |
+
"mmlu_social_sciences": 0,
|
573 |
+
"mmlu_sociology": 0,
|
574 |
+
"mmlu_stem": 0,
|
575 |
+
"mmlu_us_foreign_policy": 0,
|
576 |
+
"mmlu_virology": 0,
|
577 |
+
"mmlu_world_religions": 0,
|
578 |
+
"openbookqa": 0,
|
579 |
+
"piqa": 0,
|
580 |
+
"truthfulqa_mc1": 0,
|
581 |
+
"truthfulqa_mc2": 0,
|
582 |
+
"winogrande": 0
|
583 |
+
},
|
584 |
+
"date": 1714476866.6397374,
|
585 |
+
"config": {
|
586 |
+
"model": "hf",
|
587 |
+
"model_args": "pretrained=Intel/bloom-7b1-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
588 |
+
"batch_size": 2,
|
589 |
+
"batch_sizes": [],
|
590 |
+
"device": "cuda",
|
591 |
+
"use_cache": null,
|
592 |
+
"limit": null,
|
593 |
+
"bootstrap_iters": 100000,
|
594 |
+
"gen_kwargs": null
|
595 |
+
}
|
596 |
+
}
|
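
Every results_*.json added in this commit shares the schema visible above: config_general, per-task results, task_info, quantization_config, versions, n-shot, and the lm-eval config. A minimal Python sketch for pulling the headline accuracies out of one of these files; the local path is illustrative and assumes the file has already been downloaded from this repo:

    import json

    # Illustrative path: any of the results_*.json files in this dataset works.
    with open("results_2024-04-30-21-43-07.json") as f:
        report = json.load(f)

    # Headline metadata, then one accuracy line per harness task.
    print(report["config_general"]["model_name"], report["config_general"]["precision"])
    for task, metrics in sorted(report["results"].items()):
        acc = metrics.get("acc,none")
        if acc is not None:
            stderr = metrics.get("acc_stderr,none", 0.0)
            print(f"{task}: {acc:.4f} +/- {stderr:.4f}")

Tasks that report perplexity instead of accuracy (lambada_openai) simply skip the accuracy line in this sketch.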
Intel/results_2024-04-30-21-43-07.json
ADDED
@@ -0,0 +1,596 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-30-21-43-07",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Intel/gpt-j-6b-int4-inc",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 4.59,
+        "model_params": 5.69,
+        "quant_type": "AutoRound",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|arc:easy|0": {
+            "acc,none": 0.6708754208754208,
+            "acc_stderr,none": 0.009642048058060989,
+            "acc_norm,none": 0.61489898989899,
+            "acc_norm_stderr,none": 0.009985214798737251,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.3567771778055868,
+            "acc_stderr,none": 0.01353660390992912,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.6510703363914373,
+            "acc_stderr,none": 0.008336340399970096,
+            "alias": "boolq"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.3430034129692833,
+            "acc_stderr,none": 0.013872423223718166,
+            "acc_norm,none": 0.36006825938566556,
+            "acc_norm_stderr,none": 0.014027516814585186,
+            "alias": "arc_challenge"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.48974307906791475,
+            "acc_stderr,none": 0.004988731406780659,
+            "acc_norm,none": 0.6563433578968333,
+            "acc_norm_stderr,none": 0.004739575380508871,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.6424625098658248,
+            "acc_stderr,none": 0.013470007443920691,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 4.081184717841156,
+            "perplexity_stderr,none": 0.08880031107310538,
+            "acc,none": 0.6844556568988939,
+            "acc_stderr,none": 0.006474629636371581,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.282,
+            "acc_stderr,none": 0.020143572847290788,
+            "acc_norm,none": 0.388,
+            "acc_norm_stderr,none": 0.021814300984787635,
+            "alias": "openbookqa"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7448313384113167,
+            "acc_stderr,none": 0.010171571592521822,
+            "acc_norm,none": 0.7568008705114254,
+            "acc_norm_stderr,none": 0.010009611953858948,
+            "alias": "piqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.20930232558139536,
+            "acc_stderr,none": 0.01424121943478583,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.2750320467169919,
+            "acc_stderr,none": 0.0037576659952161065,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.24654622741764082,
+            "acc_stderr,none": 0.0062818343189083165
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.04216370213557835
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.23636363636363636,
+            "acc_stderr,none": 0.03317505930009179
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.03039153369274154
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.22362869198312235,
+            "acc_stderr,none": 0.027123298205229976
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.17355371900826447,
+            "acc_stderr,none": 0.03457272836917669
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04186091791394607
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.22699386503067484,
+            "acc_stderr,none": 0.03291099578615769
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.2398843930635838,
+            "acc_stderr,none": 0.022989592543123567
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.24022346368715083,
+            "acc_stderr,none": 0.014288343803925319
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.2508038585209003,
+            "acc_stderr,none": 0.024619771956697168
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.27469135802469136,
+            "acc_stderr,none": 0.024836057868294677
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.2561929595827901,
+            "acc_stderr,none": 0.011149173153110582
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.1871345029239766,
+            "acc_stderr,none": 0.029913127232368053
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.276472481493402,
+            "acc_stderr,none": 0.008009070391301637
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.2943396226415094,
+            "acc_stderr,none": 0.028049186315695248
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.26011560693641617,
+            "acc_stderr,none": 0.033450369167889904
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.29596412556053814,
+            "acc_stderr,none": 0.030636591348699813
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.34951456310679613,
+            "acc_stderr,none": 0.047211885060971716
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2863247863247863,
+            "acc_stderr,none": 0.02961432369045665
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.23116219667943805,
+            "acc_stderr,none": 0.01507552323810108
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.02699254433929724
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.2695035460992908,
+            "acc_stderr,none": 0.026469036818590627
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.23161764705882354,
+            "acc_stderr,none": 0.025626533803777562
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.30120481927710846,
+            "acc_stderr,none": 0.0357160923005348
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.3084172895677608,
+            "acc_stderr,none": 0.008289452693955061
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.24561403508771928,
+            "acc_stderr,none": 0.04049339297748142
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.2828282828282828,
+            "acc_stderr,none": 0.03208779558786752
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.3471502590673575,
+            "acc_stderr,none": 0.03435696168361355
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.34615384615384615,
+            "acc_stderr,none": 0.024121125416941183
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.35294117647058826,
+            "acc_stderr,none": 0.031041941304059274
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.3100917431192661,
+            "acc_stderr,none": 0.019830849684439752
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.3816793893129771,
+            "acc_stderr,none": 0.042607351576445594
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.22712418300653595,
+            "acc_stderr,none": 0.01694985327921238
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.3090909090909091,
+            "acc_stderr,none": 0.044262946482000985
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.3836734693877551,
+            "acc_stderr,none": 0.031130880396235922
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.3283582089552239,
+            "acc_stderr,none": 0.033206858897443244
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.28353948620361563,
+            "acc_stderr,none": 0.008028532611263519
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.045126085985421276
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.28888888888888886,
+            "acc_stderr,none": 0.0391545063041425
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.34210526315789475,
+            "acc_stderr,none": 0.038607315993160904
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2152777777777778,
+            "acc_stderr,none": 0.03437079344106134
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.04512608598542127
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695235
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.27450980392156865,
+            "acc_stderr,none": 0.04440521906179325
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.04461960433384739
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.3191489361702128,
+            "acc_stderr,none": 0.03047297336338007
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.296551724137931,
+            "acc_stderr,none": 0.03806142687309994
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.2619047619047619,
+            "acc_stderr,none": 0.022644212615525214
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.25483870967741934,
+            "acc_stderr,none": 0.024790118459332208
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.31527093596059114,
+            "acc_stderr,none": 0.03269080871970186
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.24,
+            "acc_stderr,none": 0.04292346959909283
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.2740740740740741,
+            "acc_stderr,none": 0.02719593480408563
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.31788079470198677,
+            "acc_stderr,none": 0.03802039760107903
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.029531221160930918
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.2857142857142857,
+            "acc_stderr,none": 0.04287858751340456
+        }
+    },
+    "task_info": {
+        "model": "Intel/gpt-j-6b-int4-inc",
+        "local": true,
+        "revision": "main",
+        "private": false,
+        "params": 6,
+        "architectures": "GPTJForCausalLM",
+        "quant_type": "AutoRound",
+        "precision": "4bit",
+        "model_params": 6,
+        "model_size": 4,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-23T15:44:22Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "autoround_version": "0.1",
+        "bits": 4,
+        "damp_percent": 0.01,
+        "desc_act": false,
+        "enable_minmax_tuning": true,
+        "group_size": 128,
+        "is_marlin_format": false,
+        "iters": 1000,
+        "lr": 0.001,
+        "minmax_lr": 0.002,
+        "model_file_base_name": "model",
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "scale_dtype": "torch.float16",
+        "static_groups": false,
+        "sym": false,
+        "true_sequential": false,
+        "use_quant_input": false
+    },
+    "versions": {
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|boolq|0": 2.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714481307.9563165,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Intel/gpt-j-6b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
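
The quantization_config in this file is the AutoRound recipe serialized in GPTQ-compatible form (quant_method "gptq", 4-bit weights, group size 128, asymmetric), which is why the harness can load the checkpoint through the plain transformers path recorded in model_args. A rough sketch of that load, assuming a transformers install with GPTQ kernel support (e.g. auto-gptq) is available; the prompt and generation settings are illustrative:

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    # Mirrors the model_args string recorded in "config":
    # pretrained=Intel/gpt-j-6b-int4-inc, trust_remote_code=True, dtype=float16
    model = AutoModelForCausalLM.from_pretrained(
        "Intel/gpt-j-6b-int4-inc",
        torch_dtype=torch.float16,
        trust_remote_code=True,
        device_map="auto",
    )
    tokenizer = AutoTokenizer.from_pretrained(
        "Intel/gpt-j-6b-int4-inc", trust_remote_code=True
    )

    inputs = tokenizer("The theory of relativity states that", return_tensors="pt").to(model.device)
    print(tokenizer.decode(model.generate(**inputs, max_new_tokens=32)[0]))

The quantized weights are dequantized on the fly at matmul time, so generation runs in float16 compute (the "compute_dtype" recorded in task_info) over int4 storage.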
Intel/results_2024-04-30-23-05-41.json
ADDED
@@ -0,0 +1,580 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-04-30-23-05-41",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "Intel/falcon-7b-instruct-int4-inc",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "4bit",
|
15 |
+
"model_size": 4,
|
16 |
+
"model_params": 7,
|
17 |
+
"quant_type": "AutoRound",
|
18 |
+
"precision": "4bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|truthfulqa:mc2|0": {
|
22 |
+
"acc,none": 0.43856164323940094,
|
23 |
+
"acc_stderr,none": 0.014798984792732898,
|
24 |
+
"alias": "truthfulqa_mc2"
|
25 |
+
},
|
26 |
+
"harness|winogrande|0": {
|
27 |
+
"acc,none": 0.65982636148382,
|
28 |
+
"acc_stderr,none": 0.013315218762417395,
|
29 |
+
"alias": "winogrande"
|
30 |
+
},
|
31 |
+
"harness|arc:easy|0": {
|
32 |
+
"acc,none": 0.7201178451178452,
|
33 |
+
"acc_stderr,none": 0.00921207752465653,
|
34 |
+
"acc_norm,none": 0.6776094276094277,
|
35 |
+
"acc_norm_stderr,none": 0.009590672908157436,
|
36 |
+
"alias": "arc_easy"
|
37 |
+
},
|
38 |
+
"harness|hellaswag|0": {
|
39 |
+
"acc,none": 0.5128460466042621,
|
40 |
+
"acc_stderr,none": 0.004988134303021786,
|
41 |
+
"acc_norm,none": 0.691894045010954,
|
42 |
+
"acc_norm_stderr,none": 0.004607669909914966,
|
43 |
+
"alias": "hellaswag"
|
44 |
+
},
|
45 |
+
"harness|lambada:openai|0": {
|
46 |
+
"perplexity,none": 5.293340874409289,
|
47 |
+
"perplexity_stderr,none": 0.12325842419162542,
|
48 |
+
"acc,none": 0.6404036483601785,
|
49 |
+
"acc_stderr,none": 0.006685695764730398,
|
50 |
+
"alias": "lambada_openai"
|
51 |
+
},
|
52 |
+
"harness|piqa|0": {
|
53 |
+
"acc,none": 0.7758433079434167,
|
54 |
+
"acc_stderr,none": 0.009729897956410032,
|
55 |
+
"acc_norm,none": 0.7818280739934712,
|
56 |
+
"acc_norm_stderr,none": 0.009636081958374381,
|
57 |
+
"alias": "piqa"
|
58 |
+
},
|
59 |
+
"harness|truthfulqa:mc1|0": {
|
60 |
+
"acc,none": 0.2864137086903305,
|
61 |
+
"acc_stderr,none": 0.01582614243950237,
|
62 |
+
"alias": "truthfulqa_mc1"
|
63 |
+
},
|
64 |
+
"harness|boolq|0": {
|
65 |
+
"acc,none": 0.7033639143730887,
|
66 |
+
"acc_stderr,none": 0.007989039569104798,
|
67 |
+
"alias": "boolq"
|
68 |
+
},
|
69 |
+
"harness|mmlu|0": {
|
70 |
+
"acc,none": 0.24818401937046006,
|
71 |
+
"acc_stderr,none": 0.0036400372036635735,
|
72 |
+
"alias": "mmlu"
|
73 |
+
},
|
74 |
+
"harness|mmlu_humanities|0": {
|
75 |
+
"alias": " - humanities",
|
76 |
+
"acc,none": 0.24654622741764082,
|
77 |
+
"acc_stderr,none": 0.0062797290603445926
|
78 |
+
},
|
79 |
+
"harness|mmlu_formal_logic|0": {
|
80 |
+
"alias": " - formal_logic",
|
81 |
+
"acc,none": 0.2698412698412698,
|
82 |
+
"acc_stderr,none": 0.03970158273235172
|
83 |
+
},
|
84 |
+
"harness|mmlu_high_school_european_history|0": {
|
85 |
+
"alias": " - high_school_european_history",
|
86 |
+
"acc,none": 0.24242424242424243,
|
87 |
+
"acc_stderr,none": 0.03346409881055953
|
88 |
+
},
|
89 |
+
"harness|mmlu_high_school_us_history|0": {
|
90 |
+
"alias": " - high_school_us_history",
|
91 |
+
"acc,none": 0.27450980392156865,
|
92 |
+
"acc_stderr,none": 0.03132179803083292
|
93 |
+
},
|
94 |
+
"harness|mmlu_high_school_world_history|0": {
|
95 |
+
"alias": " - high_school_world_history",
|
96 |
+
"acc,none": 0.29535864978902954,
|
97 |
+
"acc_stderr,none": 0.029696338713422893
|
98 |
+
},
|
99 |
+
"harness|mmlu_international_law|0": {
|
100 |
+
"alias": " - international_law",
|
101 |
+
"acc,none": 0.30578512396694213,
|
102 |
+
"acc_stderr,none": 0.04205953933884124
|
103 |
+
},
|
104 |
+
"harness|mmlu_jurisprudence|0": {
|
105 |
+
"alias": " - jurisprudence",
|
106 |
+
"acc,none": 0.3148148148148148,
|
107 |
+
"acc_stderr,none": 0.04489931073591312
|
108 |
+
},
|
109 |
+
"harness|mmlu_logical_fallacies|0": {
|
110 |
+
"alias": " - logical_fallacies",
|
111 |
+
"acc,none": 0.2147239263803681,
|
112 |
+
"acc_stderr,none": 0.032262193772867744
|
113 |
+
},
|
114 |
+
"harness|mmlu_moral_disputes|0": {
|
115 |
+
"alias": " - moral_disputes",
|
116 |
+
"acc,none": 0.26878612716763006,
|
117 |
+
"acc_stderr,none": 0.023868003262500114
|
118 |
+
},
|
119 |
+
"harness|mmlu_moral_scenarios|0": {
|
120 |
+
"alias": " - moral_scenarios",
|
121 |
+
"acc,none": 0.23798882681564246,
|
122 |
+
"acc_stderr,none": 0.014242630070574882
|
123 |
+
},
|
124 |
+
"harness|mmlu_philosophy|0": {
|
125 |
+
"alias": " - philosophy",
|
126 |
+
"acc,none": 0.1832797427652733,
|
127 |
+
"acc_stderr,none": 0.021974198848265795
|
128 |
+
},
|
129 |
+
"harness|mmlu_prehistory|0": {
|
130 |
+
"alias": " - prehistory",
|
131 |
+
"acc,none": 0.25,
|
132 |
+
"acc_stderr,none": 0.02409347123262133
|
133 |
+
},
|
134 |
+
"harness|mmlu_professional_law|0": {
|
135 |
+
"alias": " - professional_law",
|
136 |
+
"acc,none": 0.24119947848761408,
|
137 |
+
"acc_stderr,none": 0.010926496102034961
|
138 |
+
},
|
139 |
+
"harness|mmlu_world_religions|0": {
|
140 |
+
"alias": " - world_religions",
|
141 |
+
"acc,none": 0.23391812865497075,
|
142 |
+
"acc_stderr,none": 0.03246721765117824
|
143 |
+
},
|
144 |
+
"harness|mmlu_other|0": {
|
145 |
+
"alias": " - other",
|
146 |
+
"acc,none": 0.272288381074992,
|
147 |
+
"acc_stderr,none": 0.00796038235295151
|
148 |
+
},
|
149 |
+
"harness|mmlu_business_ethics|0": {
|
150 |
+
"alias": " - business_ethics",
|
151 |
+
"acc,none": 0.36,
|
152 |
+
"acc_stderr,none": 0.048241815132442176
|
153 |
+
},
|
154 |
+
"harness|mmlu_clinical_knowledge|0": {
|
155 |
+
"alias": " - clinical_knowledge",
|
156 |
+
"acc,none": 0.22641509433962265,
|
157 |
+
"acc_stderr,none": 0.025757559893106723
|
158 |
+
},
|
159 |
+
"harness|mmlu_college_medicine|0": {
|
160 |
+
"alias": " - college_medicine",
|
161 |
+
"acc,none": 0.21965317919075145,
|
162 |
+
"acc_stderr,none": 0.031568093627031744
|
163 |
+
},
|
164 |
+
"harness|mmlu_global_facts|0": {
|
165 |
+
"alias": " - global_facts",
|
166 |
+
"acc,none": 0.25,
|
167 |
+
"acc_stderr,none": 0.04351941398892446
|
168 |
+
},
|
169 |
+
"harness|mmlu_human_aging|0": {
|
170 |
+
"alias": " - human_aging",
|
171 |
+
"acc,none": 0.36771300448430494,
|
172 |
+
"acc_stderr,none": 0.03236198350928276
|
173 |
+
},
|
174 |
+
"harness|mmlu_management|0": {
|
175 |
+
"alias": " - management",
|
176 |
+
"acc,none": 0.2912621359223301,
|
177 |
+
"acc_stderr,none": 0.04498676320572922
|
178 |
+
},
|
179 |
+
"harness|mmlu_marketing|0": {
|
180 |
+
"alias": " - marketing",
|
181 |
+
"acc,none": 0.27350427350427353,
|
182 |
+
"acc_stderr,none": 0.029202540153431173
|
183 |
+
},
|
184 |
+
"harness|mmlu_medical_genetics|0": {
|
185 |
+
"alias": " - medical_genetics",
|
186 |
+
"acc,none": 0.34,
|
187 |
+
"acc_stderr,none": 0.04760952285695236
|
188 |
+
},
|
189 |
+
"harness|mmlu_miscellaneous|0": {
|
190 |
+
"alias": " - miscellaneous",
|
191 |
+
"acc,none": 0.2784163473818646,
|
192 |
+
"acc_stderr,none": 0.016028295188992462
|
193 |
+
},
|
194 |
+
"harness|mmlu_nutrition|0": {
|
195 |
+
"alias": " - nutrition",
|
196 |
+
"acc,none": 0.27450980392156865,
|
197 |
+
"acc_stderr,none": 0.025553169991826517
|
198 |
+
},
|
199 |
+
"harness|mmlu_professional_accounting|0": {
|
200 |
+
"alias": " - professional_accounting",
|
201 |
+
"acc,none": 0.2553191489361702,
|
202 |
+
"acc_stderr,none": 0.026011992930901992
|
203 |
+
},
|
204 |
+
"harness|mmlu_professional_medicine|0": {
|
205 |
+
"alias": " - professional_medicine",
|
206 |
+
"acc,none": 0.19117647058823528,
|
207 |
+
"acc_stderr,none": 0.023886881922440335
|
208 |
+
},
|
209 |
+
"harness|mmlu_virology|0": {
|
210 |
+
"alias": " - virology",
|
211 |
+
"acc,none": 0.3072289156626506,
|
212 |
+
"acc_stderr,none": 0.03591566797824664
|
213 |
+
},
|
214 |
+
"harness|mmlu_social_sciences|0": {
|
215 |
+
"alias": " - social_sciences",
|
216 |
+
"acc,none": 0.23756906077348067,
|
217 |
+
"acc_stderr,none": 0.007677188667986144
|
218 |
+
},
|
219 |
+
"harness|mmlu_econometrics|0": {
|
220 |
+
"alias": " - econometrics",
|
221 |
+
"acc,none": 0.2543859649122807,
|
222 |
+
"acc_stderr,none": 0.040969851398436695
|
223 |
+
},
|
224 |
+
"harness|mmlu_high_school_geography|0": {
|
225 |
+
"alias": " - high_school_geography",
|
226 |
+
"acc,none": 0.23737373737373738,
|
227 |
+
"acc_stderr,none": 0.03031371053819888
|
228 |
+
},
|
229 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
230 |
+
"alias": " - high_school_government_and_politics",
|
231 |
+
"acc,none": 0.21243523316062177,
|
232 |
+
"acc_stderr,none": 0.02951928261681726
|
233 |
+
},
|
234 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
235 |
+
"alias": " - high_school_macroeconomics",
|
236 |
+
"acc,none": 0.21794871794871795,
|
237 |
+
"acc_stderr,none": 0.02093244577446319
|
238 |
+
},
|
239 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
240 |
+
"alias": " - high_school_microeconomics",
|
241 |
+
"acc,none": 0.23109243697478993,
|
242 |
+
"acc_stderr,none": 0.02738140692786897
|
243 |
+
},
|
244 |
+
"harness|mmlu_high_school_psychology|0": {
|
245 |
+
"alias": " - high_school_psychology",
|
246 |
+
"acc,none": 0.23853211009174313,
|
247 |
+
"acc_stderr,none": 0.01827257581023186
|
248 |
+
},
|
249 |
+
"harness|mmlu_human_sexuality|0": {
|
250 |
+
"alias": " - human_sexuality",
|
251 |
+
"acc,none": 0.2824427480916031,
|
252 |
+
"acc_stderr,none": 0.03948406125768361
|
253 |
+
},
|
254 |
+
"harness|mmlu_professional_psychology|0": {
|
255 |
+
"alias": " - professional_psychology",
|
256 |
+
"acc,none": 0.24673202614379086,
|
257 |
+
"acc_stderr,none": 0.017440820367402497
|
258 |
+
},
|
259 |
+
"harness|mmlu_public_relations|0": {
|
260 |
+
"alias": " - public_relations",
|
261 |
+
"acc,none": 0.2909090909090909,
|
262 |
+
"acc_stderr,none": 0.04350271442923243
|
263 |
+
},
|
264 |
+
"harness|mmlu_security_studies|0": {
|
265 |
+
"alias": " - security_studies",
|
266 |
+
"acc,none": 0.19591836734693877,
|
267 |
+
"acc_stderr,none": 0.025409301953225678
|
268 |
+
},
|
269 |
+
"harness|mmlu_sociology|0": {
|
270 |
+
"alias": " - sociology",
|
271 |
+
"acc,none": 0.24378109452736318,
|
272 |
+
"acc_stderr,none": 0.03036049015401465
|
273 |
+
},
|
274 |
+
"harness|mmlu_us_foreign_policy|0": {
|
275 |
+
"alias": " - us_foreign_policy",
|
276 |
+
"acc,none": 0.27,
|
277 |
+
"acc_stderr,none": 0.0446196043338474
|
278 |
+
},
|
279 |
+
"harness|mmlu_stem|0": {
|
280 |
+
"alias": " - stem",
|
281 |
+
"acc,none": 0.23723437995559785,
|
282 |
+
"acc_stderr,none": 0.007571103457514372
|
283 |
+
},
|
284 |
+
"harness|mmlu_abstract_algebra|0": {
|
285 |
+
"alias": " - abstract_algebra",
|
286 |
+
"acc,none": 0.26,
|
287 |
+
"acc_stderr,none": 0.04408440022768078
|
288 |
+
},
|
289 |
+
"harness|mmlu_anatomy|0": {
|
290 |
+
"alias": " - anatomy",
|
291 |
+
"acc,none": 0.24444444444444444,
|
292 |
+
"acc_stderr,none": 0.03712537833614866
|
293 |
+
},
|
294 |
+
"harness|mmlu_astronomy|0": {
|
295 |
+
"alias": " - astronomy",
|
296 |
+
"acc,none": 0.18421052631578946,
|
297 |
+
"acc_stderr,none": 0.0315469804508223
|
298 |
+
},
|
299 |
+
"harness|mmlu_college_biology|0": {
|
300 |
+
"alias": " - college_biology",
|
301 |
+
"acc,none": 0.2708333333333333,
|
302 |
+
"acc_stderr,none": 0.03716177437566017
|
303 |
+
},
|
304 |
+
"harness|mmlu_college_chemistry|0": {
|
305 |
+
"alias": " - college_chemistry",
|
306 |
+
"acc,none": 0.21,
|
307 |
+
"acc_stderr,none": 0.040936018074033256
|
308 |
+
},
|
309 |
+
"harness|mmlu_college_computer_science|0": {
|
310 |
+
"alias": " - college_computer_science",
|
311 |
+
"acc,none": 0.25,
|
312 |
+
"acc_stderr,none": 0.04351941398892446
|
313 |
+
},
|
314 |
+
"harness|mmlu_college_mathematics|0": {
|
315 |
+
"alias": " - college_mathematics",
|
316 |
+
"acc,none": 0.21,
|
317 |
+
"acc_stderr,none": 0.040936018074033256
|
318 |
+
},
|
319 |
+
"harness|mmlu_college_physics|0": {
|
320 |
+
"alias": " - college_physics",
|
321 |
+
"acc,none": 0.24509803921568626,
|
322 |
+
"acc_stderr,none": 0.042801058373643966
|
323 |
+
},
|
324 |
+
"harness|mmlu_computer_security|0": {
|
325 |
+
"alias": " - computer_security",
|
326 |
+
"acc,none": 0.28,
|
327 |
+
"acc_stderr,none": 0.04512608598542127
|
328 |
+
},
|
329 |
+
"harness|mmlu_conceptual_physics|0": {
|
330 |
+
"alias": " - conceptual_physics",
|
331 |
+
"acc,none": 0.3021276595744681,
|
332 |
+
"acc_stderr,none": 0.030017554471880554
|
333 |
+
},
|
334 |
+
"harness|mmlu_electrical_engineering|0": {
|
335 |
+
"alias": " - electrical_engineering",
|
336 |
+
"acc,none": 0.2206896551724138,
|
337 |
+
"acc_stderr,none": 0.03455930201924812
|
338 |
+
},
|
339 |
+
"harness|mmlu_elementary_mathematics|0": {
|
340 |
+
"alias": " - elementary_mathematics",
|
341 |
+
"acc,none": 0.23544973544973544,
|
342 |
+
"acc_stderr,none": 0.02185150982203171
|
343 |
+
},
|
344 |
+
"harness|mmlu_high_school_biology|0": {
|
345 |
+
"alias": " - high_school_biology",
|
346 |
+
"acc,none": 0.23225806451612904,
|
347 |
+
"acc_stderr,none": 0.024022256130308235
|
348 |
+
},
|
349 |
+
"harness|mmlu_high_school_chemistry|0": {
|
350 |
+
"alias": " - high_school_chemistry",
|
351 |
+
"acc,none": 0.21182266009852216,
|
352 |
+
"acc_stderr,none": 0.028748983689941075
|
353 |
+
},
|
354 |
+
"harness|mmlu_high_school_computer_science|0": {
|
355 |
+
"alias": " - high_school_computer_science",
|
356 |
+
"acc,none": 0.28,
|
357 |
+
"acc_stderr,none": 0.04512608598542127
|
358 |
+
},
|
359 |
+
"harness|mmlu_high_school_mathematics|0": {
|
360 |
+
"alias": " - high_school_mathematics",
|
361 |
+
"acc,none": 0.24074074074074073,
|
362 |
+
"acc_stderr,none": 0.026067159222275794
|
363 |
+
},
|
364 |
+
"harness|mmlu_high_school_physics|0": {
|
365 |
+
"alias": " - high_school_physics",
|
366 |
+
"acc,none": 0.2185430463576159,
|
367 |
+
"acc_stderr,none": 0.03374235550425694
|
368 |
+
},
|
369 |
+
"harness|mmlu_high_school_statistics|0": {
|
370 |
+
"alias": " - high_school_statistics",
|
371 |
+
"acc,none": 0.16203703703703703,
|
372 |
+
"acc_stderr,none": 0.02513045365226846
|
373 |
+
},
|
374 |
+
"harness|mmlu_machine_learning|0": {
|
375 |
+
"alias": " - machine_learning",
|
376 |
+
"acc,none": 0.30357142857142855,
|
377 |
+
"acc_stderr,none": 0.04364226155841044
|
378 |
+
},
|
379 |
+
"harness|openbookqa|0": {
|
380 |
+
"acc,none": 0.31,
|
381 |
+
"acc_stderr,none": 0.0207040410217248,
|
382 |
+
"acc_norm,none": 0.412,
|
383 |
+
"acc_norm_stderr,none": 0.02203367799374086,
|
384 |
+
"alias": "openbookqa"
|
385 |
+
},
|
386 |
+
"harness|arc:challenge|0": {
|
387 |
+
"acc,none": 0.40187713310580203,
|
388 |
+
"acc_stderr,none": 0.014327268614578278,
|
389 |
+
"acc_norm,none": 0.4274744027303754,
|
390 |
+
"acc_norm_stderr,none": 0.014456862944650652,
|
391 |
+
"alias": "arc_challenge"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Intel/falcon-7b-instruct-int4-inc",
|
396 |
+
"local": true,
|
397 |
+
"revision": "main",
|
398 |
+
"private": false,
|
399 |
+
"params": 7,
|
400 |
+
"architectures": "FalconForCausalLM",
|
401 |
+
"quant_type": "AutoRound",
|
402 |
+
"precision": "4bit",
|
403 |
+
"model_params": 7,
|
404 |
+
"model_size": 4,
|
405 |
+
"weight_dtype": "int4",
|
406 |
+
"compute_dtype": "float16",
|
407 |
+
"gguf_ftype": "*Q4_0.gguf",
|
408 |
+
"hardware": "gpu",
|
409 |
+
"status": "Pending",
|
410 |
+
"submitted_time": "2024-04-23T15:44:22Z",
|
411 |
+
"model_type": "quantization",
|
412 |
+
"job_id": -1,
|
413 |
+
"job_start_time": null,
|
414 |
+
"scripts": "ITREX"
|
415 |
+
},
|
416 |
+
"quantization_config": {
|
417 |
+
"quant_method": "GPTQ",
|
418 |
+
"ftype": "*Q4_0.gguf"
|
419 |
+
},
|
420 |
+
"versions": {
|
421 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
422 |
+
"harness|winogrande|0": 1.0,
|
423 |
+
"harness|arc:easy|0": 1.0,
|
424 |
+
"harness|hellaswag|0": 1.0,
|
425 |
+
"harness|lambada:openai|0": 1.0,
|
426 |
+
"harness|piqa|0": 1.0,
|
427 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
428 |
+
"harness|boolq|0": 2.0,
|
429 |
+
"harness|mmlu|0": null,
|
430 |
+
"harness|mmlu_humanities|0": null,
|
431 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
432 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
433 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
434 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
435 |
+
"harness|mmlu_international_law|0": 0.0,
|
436 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
437 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
438 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
439 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
440 |
+
"harness|mmlu_philosophy|0": 0.0,
|
441 |
+
"harness|mmlu_prehistory|0": 0.0,
|
442 |
+
"harness|mmlu_professional_law|0": 0.0,
|
443 |
+
"harness|mmlu_world_religions|0": 0.0,
|
444 |
+
"harness|mmlu_other|0": null,
|
445 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
446 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
447 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
448 |
+
"harness|mmlu_global_facts|0": 0.0,
|
449 |
+
"harness|mmlu_human_aging|0": 0.0,
|
450 |
+
"harness|mmlu_management|0": 0.0,
|
451 |
+
"harness|mmlu_marketing|0": 0.0,
|
452 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
453 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
454 |
+
"harness|mmlu_nutrition|0": 0.0,
|
455 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
456 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
457 |
+
"harness|mmlu_virology|0": 0.0,
|
458 |
+
"harness|mmlu_social_sciences|0": null,
|
459 |
+
"harness|mmlu_econometrics|0": 0.0,
|
460 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
461 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
462 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
463 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
464 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
465 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
466 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
467 |
+
"harness|mmlu_public_relations|0": 0.0,
|
468 |
+
"harness|mmlu_security_studies|0": 0.0,
|
469 |
+
"harness|mmlu_sociology|0": 0.0,
|
470 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
471 |
+
"harness|mmlu_stem|0": null,
|
472 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
473 |
+
"harness|mmlu_anatomy|0": 0.0,
|
474 |
+
"harness|mmlu_astronomy|0": 0.0,
|
475 |
+
"harness|mmlu_college_biology|0": 0.0,
|
476 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
477 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
478 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
479 |
+
"harness|mmlu_college_physics|0": 0.0,
|
480 |
+
"harness|mmlu_computer_security|0": 0.0,
|
481 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
482 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
483 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
484 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
485 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
486 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
488 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
489 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
490 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
491 |
+
"harness|openbookqa|0": 1.0,
|
492 |
+
"harness|arc:challenge|0": 1.0
|
493 |
+
},
|
494 |
+
"n-shot": {
|
495 |
+
"arc_challenge": 0,
|
496 |
+
"arc_easy": 0,
|
497 |
+
"boolq": 0,
|
498 |
+
"hellaswag": 0,
|
499 |
+
"lambada_openai": 0,
|
500 |
+
"mmlu": 0,
|
501 |
+
"mmlu_abstract_algebra": 0,
|
502 |
+
"mmlu_anatomy": 0,
|
503 |
+
"mmlu_astronomy": 0,
|
504 |
+
"mmlu_business_ethics": 0,
|
505 |
+
"mmlu_clinical_knowledge": 0,
|
506 |
+
"mmlu_college_biology": 0,
|
507 |
+
"mmlu_college_chemistry": 0,
|
508 |
+
"mmlu_college_computer_science": 0,
|
509 |
+
"mmlu_college_mathematics": 0,
|
510 |
+
"mmlu_college_medicine": 0,
|
511 |
+
"mmlu_college_physics": 0,
|
512 |
+
"mmlu_computer_security": 0,
|
513 |
+
"mmlu_conceptual_physics": 0,
|
514 |
+
"mmlu_econometrics": 0,
|
515 |
+
"mmlu_electrical_engineering": 0,
|
516 |
+
"mmlu_elementary_mathematics": 0,
|
517 |
+
"mmlu_formal_logic": 0,
|
518 |
+
"mmlu_global_facts": 0,
|
519 |
+
"mmlu_high_school_biology": 0,
|
520 |
+
"mmlu_high_school_chemistry": 0,
|
521 |
+
"mmlu_high_school_computer_science": 0,
|
522 |
+
"mmlu_high_school_european_history": 0,
|
523 |
+
"mmlu_high_school_geography": 0,
|
524 |
+
"mmlu_high_school_government_and_politics": 0,
|
525 |
+
"mmlu_high_school_macroeconomics": 0,
|
526 |
+
"mmlu_high_school_mathematics": 0,
|
527 |
+
"mmlu_high_school_microeconomics": 0,
|
528 |
+
"mmlu_high_school_physics": 0,
|
529 |
+
"mmlu_high_school_psychology": 0,
|
530 |
+
"mmlu_high_school_statistics": 0,
|
531 |
+
"mmlu_high_school_us_history": 0,
|
532 |
+
"mmlu_high_school_world_history": 0,
|
533 |
+
"mmlu_human_aging": 0,
|
534 |
+
"mmlu_human_sexuality": 0,
|
535 |
+
"mmlu_humanities": 0,
|
536 |
+
"mmlu_international_law": 0,
|
537 |
+
"mmlu_jurisprudence": 0,
|
538 |
+
"mmlu_logical_fallacies": 0,
|
539 |
+
"mmlu_machine_learning": 0,
|
540 |
+
"mmlu_management": 0,
|
541 |
+
"mmlu_marketing": 0,
|
542 |
+
"mmlu_medical_genetics": 0,
|
543 |
+
"mmlu_miscellaneous": 0,
|
544 |
+
"mmlu_moral_disputes": 0,
|
545 |
+
"mmlu_moral_scenarios": 0,
|
546 |
+
"mmlu_nutrition": 0,
|
547 |
+
"mmlu_other": 0,
|
548 |
+
"mmlu_philosophy": 0,
|
549 |
+
"mmlu_prehistory": 0,
|
550 |
+
"mmlu_professional_accounting": 0,
|
551 |
+
"mmlu_professional_law": 0,
|
552 |
+
"mmlu_professional_medicine": 0,
|
553 |
+
"mmlu_professional_psychology": 0,
|
554 |
+
"mmlu_public_relations": 0,
|
555 |
+
"mmlu_security_studies": 0,
|
556 |
+
"mmlu_social_sciences": 0,
|
557 |
+
"mmlu_sociology": 0,
|
558 |
+
"mmlu_stem": 0,
|
559 |
+
"mmlu_us_foreign_policy": 0,
|
560 |
+
"mmlu_virology": 0,
|
561 |
+
"mmlu_world_religions": 0,
|
562 |
+
"openbookqa": 0,
|
563 |
+
"piqa": 0,
|
564 |
+
"truthfulqa_mc1": 0,
|
565 |
+
"truthfulqa_mc2": 0,
|
566 |
+
"winogrande": 0
|
567 |
+
},
|
568 |
+
"date": 1714484834.062388,
|
569 |
+
"config": {
|
570 |
+
"model": "hf",
|
571 |
+
"model_args": "pretrained=Intel/falcon-7b-instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
572 |
+
"batch_size": 2,
|
573 |
+
"batch_sizes": [],
|
574 |
+
"device": "cuda",
|
575 |
+
"use_cache": null,
|
576 |
+
"limit": null,
|
577 |
+
"bootstrap_iters": 100000,
|
578 |
+
"gen_kwargs": null
|
579 |
+
}
|
580 |
+
}
|
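Note: the "config" block at the end of each result file records the exact lm-evaluation-harness invocation. A minimal reproduction sketch, assuming the harness's v0.4-style Python API (lm_eval.simple_evaluate) and inferring the task list from the result keys; this is illustrative, not the leaderboard's own submission pipeline:

import lm_eval

# All tasks are zero-shot, per the all-zero "n-shot" table above.
results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=Intel/falcon-7b-instruct-int4-inc,"
               "trust_remote_code=True,dtype=float16",
    tasks=["arc_challenge", "arc_easy", "boolq", "hellaswag",
           "lambada_openai", "mmlu", "openbookqa", "piqa",
           "truthfulqa_mc1", "truthfulqa_mc2", "winogrande"],
    num_fewshot=0,
    batch_size=2,             # "batch_size": 2 in the config above
    device="cuda",
    bootstrap_iters=100000,   # "bootstrap_iters": 100000
)
# The harness returns plain task names, not the "harness|...|0" aliases
# used in these dumps.
print(results["results"]["mmlu"]["acc,none"])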
Intel/results_2024-05-01-00-22-37.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-01-00-22-37",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/Qwen1.5-7B-Chat-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 5.86,
        "model_params": 6.54,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.5173507158944602,
            "acc_stderr,none": 0.01586170239259832,
            "alias": "truthfulqa_mc2"
        },
        "harness|mmlu|0": {
            "acc,none": 0.594715852442672,
            "acc_stderr,none": 0.0039619613793425955,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.5483528161530287,
            "acc_stderr,none": 0.006915577304368975
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.3888888888888889,
            "acc_stderr,none": 0.04360314860077459
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.7575757575757576,
            "acc_stderr,none": 0.03346409881055953
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.7647058823529411,
            "acc_stderr,none": 0.02977177522814565
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.7679324894514767,
            "acc_stderr,none": 0.027479744550808514
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.7024793388429752,
            "acc_stderr,none": 0.04173349148083499
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.7870370370370371,
            "acc_stderr,none": 0.039578354719809784
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.7116564417177914,
            "acc_stderr,none": 0.03559039531617342
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.653179190751445,
            "acc_stderr,none": 0.02562472399403046
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.3675977653631285,
            "acc_stderr,none": 0.016125543823552944
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.6752411575562701,
            "acc_stderr,none": 0.026596782287697046
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.6388888888888888,
            "acc_stderr,none": 0.02672586880910079
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.4439374185136897,
            "acc_stderr,none": 0.012689708167787686
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.7543859649122807,
            "acc_stderr,none": 0.0330140594698725
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.6585130350820727,
            "acc_stderr,none": 0.008229134682918844
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.65,
            "acc_stderr,none": 0.047937248544110196
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.6641509433962264,
            "acc_stderr,none": 0.029067220146644823
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.5491329479768786,
            "acc_stderr,none": 0.03794012674697029
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.4,
            "acc_stderr,none": 0.049236596391733084
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.6322869955156951,
            "acc_stderr,none": 0.03236198350928276
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.7378640776699029,
            "acc_stderr,none": 0.04354631077260595
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.8547008547008547,
            "acc_stderr,none": 0.023086635086841403
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.7,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.7637292464878672,
            "acc_stderr,none": 0.015190473717037484
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6895424836601307,
            "acc_stderr,none": 0.026493033225145905
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.425531914893617,
            "acc_stderr,none": 0.029494827600144363
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.6323529411764706,
            "acc_stderr,none": 0.029289413409403192
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.4939759036144578,
            "acc_stderr,none": 0.03892212195333047
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.6746831329216769,
            "acc_stderr,none": 0.00823386269401441
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.4298245614035088,
            "acc_stderr,none": 0.046570472605949625
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.7828282828282829,
            "acc_stderr,none": 0.02937661648494564
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.772020725388601,
            "acc_stderr,none": 0.030276909945178277
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.5948717948717949,
            "acc_stderr,none": 0.024890471769938142
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.6470588235294118,
            "acc_stderr,none": 0.031041941304059288
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.7963302752293578,
            "acc_stderr,none": 0.017266742087630807
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.7175572519083969,
            "acc_stderr,none": 0.03948406125768361
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.5473856209150327,
            "acc_stderr,none": 0.020136790918492523
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.6272727272727273,
            "acc_stderr,none": 0.04631381319425464
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.6857142857142857,
            "acc_stderr,none": 0.02971932942241747
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.7860696517412935,
            "acc_stderr,none": 0.028996909693328906
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.79,
            "acc_stderr,none": 0.040936018074033256
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.5229939739930225,
            "acc_stderr,none": 0.008632328990636458
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.39,
            "acc_stderr,none": 0.04902071300001974
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.5259259259259259,
            "acc_stderr,none": 0.04313531696750575
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.6513157894736842,
            "acc_stderr,none": 0.038781398887976104
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.6527777777777778,
            "acc_stderr,none": 0.039812405437178615
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.43,
            "acc_stderr,none": 0.049756985195624284
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.57,
            "acc_stderr,none": 0.049756985195624284
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.32,
            "acc_stderr,none": 0.046882617226215034
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.37254901960784315,
            "acc_stderr,none": 0.04810840148082633
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.77,
            "acc_stderr,none": 0.04229525846816505
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.5531914893617021,
            "acc_stderr,none": 0.0325005368436584
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.5655172413793104,
            "acc_stderr,none": 0.04130740879555498
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.4656084656084656,
            "acc_stderr,none": 0.02569032176249384
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.7129032258064516,
            "acc_stderr,none": 0.025736542745594525
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.5369458128078818,
            "acc_stderr,none": 0.035083705204426656
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.7,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.34074074074074073,
            "acc_stderr,none": 0.02889774874113114
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.3841059602649007,
            "acc_stderr,none": 0.03971301814719197
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.5416666666666666,
            "acc_stderr,none": 0.03398110890294636
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.39285714285714285,
            "acc_stderr,none": 0.046355501356099754
        },
        "harness|winogrande|0": {
            "acc,none": 0.6535122336227308,
            "acc_stderr,none": 0.013373773411685651,
            "alias": "winogrande"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.6877104377104377,
            "acc_stderr,none": 0.009509325983631444,
            "acc_norm,none": 0.6224747474747475,
            "acc_norm_stderr,none": 0.009947227833469428,
            "alias": "arc_easy"
        },
        "harness|boolq|0": {
            "acc,none": 0.8388379204892966,
            "acc_stderr,none": 0.006430770316534764,
            "alias": "boolq"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5849432383987253,
            "acc_stderr,none": 0.004917248150601868,
            "acc_norm,none": 0.7642899820752838,
            "acc_norm_stderr,none": 0.004235743182042662,
            "alias": "hellaswag"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.431740614334471,
            "acc_stderr,none": 0.014474591427196204,
            "acc_norm,none": 0.4462457337883959,
            "acc_norm_stderr,none": 0.014526705548539982,
            "alias": "arc_challenge"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 5.570175233579023,
            "perplexity_stderr,none": 0.18749563803186947,
            "acc,none": 0.6111003299049098,
            "acc_stderr,none": 0.0067918348844501425,
            "alias": "lambada_openai"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.33,
            "acc_stderr,none": 0.021049612166134796,
            "acc_norm,none": 0.43,
            "acc_norm_stderr,none": 0.02216263442665284,
            "alias": "openbookqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.3525091799265606,
            "acc_stderr,none": 0.01672464638075655,
            "alias": "truthfulqa_mc1"
        },
        "harness|piqa|0": {
            "acc,none": 0.7480957562568009,
            "acc_stderr,none": 0.010128421335088678,
            "acc_norm,none": 0.7546245919477693,
            "acc_norm_stderr,none": 0.010039831320422401,
            "alias": "piqa"
        }
    },
    "task_info": {
        "model": "Intel/Qwen1.5-7B-Chat-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 7,
        "architectures": "QwenForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 7,
        "model_size": 4,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.002,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float16",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|winogrande|0": 1.0,
        "harness|arc:easy|0": 1.0,
        "harness|boolq|0": 2.0,
        "harness|hellaswag|0": 1.0,
        "harness|arc:challenge|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|piqa|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714489707.5071816,
    "config": {
        "model": "hf",
        "model_args": "pretrained=Intel/Qwen1.5-7B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 2,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
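Note: the "quantization_config" above is serialized in GPTQ format ("quant_method": "gptq", 4 bits, group size 128), as produced by AutoRound. A minimal loading sketch, assuming a GPTQ-capable transformers stack (e.g. with auto-gptq/optimum installed); it mirrors the recorded model_args rather than documenting the model repo itself:

from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Intel/Qwen1.5-7B-Chat-int4-inc"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
# transformers picks up the GPTQ quantization_config from the checkpoint.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    torch_dtype="auto",       # compute_dtype is float16 per the config
    trust_remote_code=True,   # matches the recorded model_args
)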
Intel/results_2024-05-01-00-56-14.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-01-00-56-14",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/Qwen1.5-0.5B-Chat-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 0.78,
        "model_params": 0.31,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|hellaswag|0": {
            "acc,none": 0.36008763194582755,
            "acc_stderr,none": 0.004790445139186363,
            "acc_norm,none": 0.4422425811591316,
            "acc_norm_stderr,none": 0.004956378590571537,
            "alias": "hellaswag"
        },
        "harness|mmlu|0": {
            "acc,none": 0.2910554052129326,
            "acc_stderr,none": 0.0037995259748246695,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.29861849096705634,
            "acc_stderr,none": 0.006616169165331545
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.30158730158730157,
            "acc_stderr,none": 0.04104947269903394
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.4303030303030303,
            "acc_stderr,none": 0.038662259628790774
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.35784313725490197,
            "acc_stderr,none": 0.033644872860882975
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.45569620253164556,
            "acc_stderr,none": 0.032419206846933335
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.4297520661157025,
            "acc_stderr,none": 0.04519082021319773
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.39814814814814814,
            "acc_stderr,none": 0.047323326159788154
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.3374233128834356,
            "acc_stderr,none": 0.03714908409935575
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.315028901734104,
            "acc_stderr,none": 0.0250093137900697
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.23798882681564246,
            "acc_stderr,none": 0.014242630070574885
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.2958199356913183,
            "acc_stderr,none": 0.025922371788818788
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.32407407407407407,
            "acc_stderr,none": 0.026041766202717156
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.25684485006518903,
            "acc_stderr,none": 0.011158455853098838
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.30409356725146197,
            "acc_stderr,none": 0.035282112582452306
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.3202446089475378,
            "acc_stderr,none": 0.008268989007731709
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.38,
            "acc_stderr,none": 0.04878317312145633
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.2528301886792453,
            "acc_stderr,none": 0.026749899771241235
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.2658959537572254,
            "acc_stderr,none": 0.033687629322594316
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.17,
            "acc_stderr,none": 0.0377525168068637
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.39461883408071746,
            "acc_stderr,none": 0.03280400504755291
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.32038834951456313,
            "acc_stderr,none": 0.046202840822800406
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.4358974358974359,
            "acc_stderr,none": 0.03248577511578401
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.38,
            "acc_stderr,none": 0.048783173121456316
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.37037037037037035,
            "acc_stderr,none": 0.01726860756000578
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.3562091503267974,
            "acc_stderr,none": 0.027420477662629228
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.2198581560283688,
            "acc_stderr,none": 0.024706141070705477
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.1875,
            "acc_stderr,none": 0.023709788253811766
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.3253012048192771,
            "acc_stderr,none": 0.03647168523683226
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.2934676633084173,
            "acc_stderr,none": 0.00818128243976792
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.20175438596491227,
            "acc_stderr,none": 0.037752050135836386
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.32323232323232326,
            "acc_stderr,none": 0.03332299921070643
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.2538860103626943,
            "acc_stderr,none": 0.03141024780565318
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.27692307692307694,
            "acc_stderr,none": 0.022688042352424994
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.2689075630252101,
            "acc_stderr,none": 0.02880139219363127
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.3174311926605505,
            "acc_stderr,none": 0.0199571521984605
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.3893129770992366,
            "acc_stderr,none": 0.04276486542814591
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.29248366013071897,
            "acc_stderr,none": 0.018403415710109793
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.2909090909090909,
            "acc_stderr,none": 0.04350271442923243
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.20816326530612245,
            "acc_stderr,none": 0.025991117672813292
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.3383084577114428,
            "acc_stderr,none": 0.03345563070339193
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.41,
            "acc_stderr,none": 0.049431107042371025
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.24865207738661593,
            "acc_stderr,none": 0.0076626997718791485
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.24,
            "acc_stderr,none": 0.04292346959909282
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.2962962962962963,
            "acc_stderr,none": 0.03944624162501116
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.2631578947368421,
            "acc_stderr,none": 0.03583496176361064
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.2986111111111111,
            "acc_stderr,none": 0.03827052357950756
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.2,
            "acc_stderr,none": 0.040201512610368445
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.32,
            "acc_stderr,none": 0.04688261722621504
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.32,
            "acc_stderr,none": 0.046882617226215034
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.2647058823529412,
            "acc_stderr,none": 0.04389869956808778
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.34,
            "acc_stderr,none": 0.04760952285695236
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.26382978723404255,
            "acc_stderr,none": 0.028809989854102973
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.2689655172413793,
            "acc_stderr,none": 0.036951833116502325
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.18783068783068782,
            "acc_stderr,none": 0.020115734141521104
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.27419354838709675,
            "acc_stderr,none": 0.02537813997088521
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.1921182266009852,
            "acc_stderr,none": 0.027719315709614778
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.3,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.22962962962962963,
            "acc_stderr,none": 0.025644108639267624
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2119205298013245,
            "acc_stderr,none": 0.03336767086567977
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.1527777777777778,
            "acc_stderr,none": 0.024536326026134217
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.3482142857142857,
            "acc_stderr,none": 0.04521829902833585
        },
        "harness|arc:easy|0": {
            "acc,none": 0.4696969696969697,
            "acc_stderr,none": 0.010240923608726537,
            "acc_norm,none": 0.41919191919191917,
            "acc_norm_stderr,none": 0.01012490528249118,
            "alias": "arc_easy"
        },
        "harness|piqa|0": {
            "acc,none": 0.6632208922742111,
            "acc_stderr,none": 0.011026738925251179,
            "acc_norm,none": 0.6594124047878128,
            "acc_norm_stderr,none": 0.011057027540404739,
            "alias": "piqa"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.42577722320751654,
            "acc_stderr,none": 0.015077974583911373,
            "alias": "truthfulqa_mc2"
        },
        "harness|boolq|0": {
            "acc,none": 0.4379204892966361,
            "acc_stderr,none": 0.008677388652709263,
            "alias": "boolq"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.188,
            "acc_stderr,none": 0.01749067888034626,
            "acc_norm,none": 0.312,
            "acc_norm_stderr,none": 0.020740596536488087,
            "alias": "openbookqa"
        },
        "harness|winogrande|0": {
            "acc,none": 0.5430149960536701,
            "acc_stderr,none": 0.01400038676159829,
            "alias": "winogrande"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2484700122399021,
            "acc_stderr,none": 0.015127427096520677,
            "alias": "truthfulqa_mc1"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 30.634680468634887,
            "perplexity_stderr,none": 1.491757631028465,
            "acc,none": 0.4001552493692994,
            "acc_stderr,none": 0.0068256774766065256,
            "alias": "lambada_openai"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.24658703071672355,
            "acc_stderr,none": 0.012595726268790122,
            "acc_norm,none": 0.29266211604095566,
            "acc_norm_stderr,none": 0.013295916103619411,
            "alias": "arc_challenge"
        }
    },
    "task_info": {
        "model": "Intel/Qwen1.5-0.5B-Chat-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 1,
        "architectures": "QwenForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 1,
        "model_size": 0.5,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 200,
        "lr": 0.005,
        "minmax_lr": 0.005,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float16",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|hellaswag|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|arc:easy|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|openbookqa|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|lambada:openai|0": 1.0,
        "harness|arc:challenge|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714494322.3458931,
    "config": {
        "model": "hf",
        "model_args": "pretrained=Intel/Qwen1.5-0.5B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
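Note: every result file in this commit shares the same schema (config_general, results, task_info, quantization_config, versions, n-shot, config), so headline numbers can be pulled out generically. A small stdlib-only sketch; the key paths follow the structure shown above, and the two paths are files added in this commit:

import json
from pathlib import Path

def headline(path: str) -> dict:
    # Extract model identity and two headline metrics from one result file.
    data = json.loads(Path(path).read_text())
    general, results = data["config_general"], data["results"]
    return {
        "model": general["model_name"],
        "quant_type": general["quant_type"],
        "mmlu_acc": results["harness|mmlu|0"]["acc,none"],
        "hellaswag_acc_norm": results["harness|hellaswag|0"]["acc_norm,none"],
    }

for path in ("Intel/results_2024-05-01-00-22-37.json",
             "Intel/results_2024-05-01-00-56-14.json"):
    print(headline(path))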
Intel/results_2024-05-01-02-20-44.json
ADDED
@@ -0,0 +1,596 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-01-02-20-44",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Intel/gemma-7b-int4-inc",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 7.18,
+        "model_params": 7.82,
+        "quant_type": "AutoRound",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|arc:easy|0": {
+            "acc,none": 0.8152356902356902,
+            "acc_stderr,none": 0.007963772171570793,
+            "acc_norm,none": 0.8085016835016835,
+            "acc_norm_stderr,none": 0.00807404447731971,
+            "alias": "arc_easy"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8281345565749235,
+            "acc_stderr,none": 0.006598379269781497,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.42284386411413843,
+            "acc_stderr,none": 0.014468335415445921,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5017064846416383,
+            "acc_stderr,none": 0.014611305705056995,
+            "acc_norm,none": 0.5324232081911263,
+            "acc_norm_stderr,none": 0.014580637569995426,
+            "alias": "arc_challenge"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.28886168910648713,
+            "acc_stderr,none": 0.015866346401384308,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8057671381936888,
+            "acc_stderr,none": 0.009230209366168288,
+            "acc_norm,none": 0.8128400435255713,
+            "acc_norm_stderr,none": 0.009100273290473552,
+            "alias": "piqa"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.745067087608524,
+            "acc_stderr,none": 0.012248806969376422,
+            "alias": "winogrande"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6110952855718559,
+            "acc_stderr,none": 0.003862144549291668,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5464399574920298,
+            "acc_stderr,none": 0.006685840827507319
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.42857142857142855,
+            "acc_stderr,none": 0.04426266681379909
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7151515151515152,
+            "acc_stderr,none": 0.035243908445117815
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7696078431372549,
+            "acc_stderr,none": 0.029554292605695063
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7890295358649789,
+            "acc_stderr,none": 0.02655837250266192
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7851239669421488,
+            "acc_stderr,none": 0.037494924487096966
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7592592592592593,
+            "acc_stderr,none": 0.041331194402438376
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.754601226993865,
+            "acc_stderr,none": 0.03380939813943354
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6734104046242775,
+            "acc_stderr,none": 0.025248264774242836
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2424581005586592,
+            "acc_stderr,none": 0.014333522059217887
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6913183279742765,
+            "acc_stderr,none": 0.02623696588115326
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7160493827160493,
+            "acc_stderr,none": 0.025089478523765134
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.46479791395045633,
+            "acc_stderr,none": 0.01273854737130395
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.847953216374269,
+            "acc_stderr,none": 0.02753912288906145
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6906984229159961,
+            "acc_stderr,none": 0.007976549912624318
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001974
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.690566037735849,
+            "acc_stderr,none": 0.028450154794118637
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5895953757225434,
+            "acc_stderr,none": 0.037507570448955356
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.42,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6905829596412556,
+            "acc_stderr,none": 0.031024411740572206
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8446601941747572,
+            "acc_stderr,none": 0.03586594738573974
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8974358974358975,
+            "acc_stderr,none": 0.019875655027867443
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.68,
+            "acc_stderr,none": 0.04688261722621504
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.8071519795657727,
+            "acc_stderr,none": 0.014108533515757433
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6993464052287581,
+            "acc_stderr,none": 0.026256053835718964
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.48226950354609927,
+            "acc_stderr,none": 0.02980873964223777
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6433823529411765,
+            "acc_stderr,none": 0.029097209568411955
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4939759036144578,
+            "acc_stderr,none": 0.03892212195333045
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7062073448163796,
+            "acc_stderr,none": 0.008040998950033304
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.43859649122807015,
+            "acc_stderr,none": 0.04668000738510455
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.797979797979798,
+            "acc_stderr,none": 0.02860620428922989
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8238341968911918,
+            "acc_stderr,none": 0.027493504244548064
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.617948717948718,
+            "acc_stderr,none": 0.02463554916390823
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6512605042016807,
+            "acc_stderr,none": 0.030956636328566548
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8128440366972477,
+            "acc_stderr,none": 0.016722684526200154
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7099236641221374,
+            "acc_stderr,none": 0.03980066246467765
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6356209150326797,
+            "acc_stderr,none": 0.019469518221573695
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6454545454545455,
+            "acc_stderr,none": 0.04582004841505417
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7183673469387755,
+            "acc_stderr,none": 0.02879518557429127
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7611940298507462,
+            "acc_stderr,none": 0.030147775935409217
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.85,
+            "acc_stderr,none": 0.03588702812826371
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5363146209958769,
+            "acc_stderr,none": 0.00854104744342032
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6,
+            "acc_stderr,none": 0.04232073695151589
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6776315789473685,
+            "acc_stderr,none": 0.038035102483515854
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7777777777777778,
+            "acc_stderr,none": 0.03476590104304134
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.46,
+            "acc_stderr,none": 0.05009082659620332
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.050251890762960605
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.048523658709391
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.45098039215686275,
+            "acc_stderr,none": 0.049512182523962604
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.71,
+            "acc_stderr,none": 0.045604802157206845
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.6085106382978723,
+            "acc_stderr,none": 0.03190701242326812
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5655172413793104,
+            "acc_stderr,none": 0.04130740879555497
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.42592592592592593,
+            "acc_stderr,none": 0.025467149045469557
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7709677419354839,
+            "acc_stderr,none": 0.023904914311782655
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.5517241379310345,
+            "acc_stderr,none": 0.03499113137676744
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.048783173121456316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.02874204090394849
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.36423841059602646,
+            "acc_stderr,none": 0.03929111781242742
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5462962962962963,
+            "acc_stderr,none": 0.03395322726375797
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.41964285714285715,
+            "acc_stderr,none": 0.04684099321077106
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.391288578457952,
+            "perplexity_stderr,none": 0.06834212955306411,
+            "acc,none": 0.7252086163399961,
+            "acc_stderr,none": 0.006219351548299038,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.338,
+            "acc_stderr,none": 0.02117566569520941,
+            "acc_norm,none": 0.45,
+            "acc_norm_stderr,none": 0.022270877485360444,
+            "alias": "openbookqa"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6032662816172077,
+            "acc_stderr,none": 0.004882200364432386,
+            "acc_norm,none": 0.799044015136427,
+            "acc_norm_stderr,none": 0.003998962580974678,
+            "alias": "hellaswag"
+        }
+    },
+    "task_info": {
+        "model": "Intel/gemma-7b-int4-inc",
+        "local": true,
+        "revision": "main",
+        "private": false,
+        "params": 7,
+        "architectures": "GemmaForCausalLM",
+        "quant_type": "AutoRound",
+        "precision": "4bit",
+        "model_params": 7,
+        "model_size": 4,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-23T15:44:22Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "autoround_version": "0.1",
+        "bits": 4,
+        "damp_percent": 0.01,
+        "desc_act": false,
+        "enable_minmax_tuning": true,
+        "group_size": 128,
+        "is_marlin_format": false,
+        "iters": 1000,
+        "lr": 0.001,
+        "minmax_lr": 0.002,
+        "model_file_base_name": "model",
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "scale_dtype": "torch.float16",
+        "static_groups": false,
+        "sym": false,
+        "true_sequential": false,
+        "use_quant_input": true
+    },
+    "versions": {
+        "harness|arc:easy|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|piqa|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|hellaswag|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714496298.0373547,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Intel/gemma-7b-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
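
Every results_*.json added by this commit follows the same schema: "config_general" (model identity, dtype, size), "results" (per-task lm-evaluation-harness metrics, with MMLU category rollups aliased as " - humanities", " - stem", and so on), "task_info", "quantization_config", "versions", "n-shot", a Unix "date", and the harness "config". A minimal sketch of reading one file and listing its top-level task accuracies follows; the file path and the helper name `summarize_results` are illustrative assumptions, not part of this repo's tooling:

```python
# Minimal sketch, assuming a local copy of one results file from this commit.
import json

def summarize_results(path):
    """Load one results_*.json and print each top-level harness accuracy."""
    with open(path) as f:
        data = json.load(f)
    cfg = data["config_general"]
    print(cfg["model_name"], cfg["quant_type"], cfg["precision"])
    for task, metrics in data["results"].items():
        acc = metrics.get("acc,none")
        # Aliases beginning with " - " are MMLU sub-category rollups; skip them
        # so only the top-level tasks (arc_easy, mmlu, piqa, ...) are printed.
        if acc is not None and not metrics.get("alias", "").startswith(" - "):
            print(f"{task}: acc={acc:.4f}")

summarize_results("Intel/results_2024-05-01-02-20-44.json")  # hypothetical local path
```
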
Intel/results_2024-05-01-04-10-41.json ADDED @@ -0,0 +1,596 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-01-04-10-41",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.6,
+        "model_params": 10.57,
+        "quant_type": "AutoRound",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.7695343330702447,
+            "acc_stderr,none": 0.011835872164836671,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.064920029771224,
+            "perplexity_stderr,none": 0.07016086763498619,
+            "acc,none": 0.7335532699398408,
+            "acc_stderr,none": 0.006159324694087315,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.356,
+            "acc_stderr,none": 0.02143471235607264,
+            "acc_norm,none": 0.46,
+            "acc_norm_stderr,none": 0.022311333245289666,
+            "alias": "openbookqa"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8265993265993266,
+            "acc_stderr,none": 0.007768570412816704,
+            "acc_norm,none": 0.8211279461279462,
+            "acc_norm_stderr,none": 0.007864024474332735,
+            "alias": "arc_easy"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.6049488054607508,
+            "acc_stderr,none": 0.014285898292938162,
+            "acc_norm,none": 0.6313993174061433,
+            "acc_norm_stderr,none": 0.014097810678042194,
+            "alias": "arc_challenge"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8073993471164309,
+            "acc_stderr,none": 0.009200649707017568,
+            "acc_norm,none": 0.8079434167573449,
+            "acc_norm_stderr,none": 0.00919074029512647,
+            "alias": "piqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5605875152998776,
+            "acc_stderr,none": 0.017374520482513714,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.7179802896446443,
+            "acc_stderr,none": 0.014981196795486781,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6829316869149572,
+            "acc_stderr,none": 0.004643832742876648,
+            "acc_norm,none": 0.8527185819557856,
+            "acc_norm_stderr,none": 0.0035366196730199986,
+            "alias": "hellaswag"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.882874617737003,
+            "acc_stderr,none": 0.005624288190378992,
+            "alias": "boolq"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6242700470018516,
+            "acc_stderr,none": 0.0038176950123177803,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5685441020191286,
+            "acc_stderr,none": 0.006612407808418779
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.4126984126984127,
+            "acc_stderr,none": 0.04403438954768177
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.8121212121212121,
+            "acc_stderr,none": 0.03050193405942914
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8235294117647058,
+            "acc_stderr,none": 0.026756401538078945
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8607594936708861,
+            "acc_stderr,none": 0.022535526352692712
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7933884297520661,
+            "acc_stderr,none": 0.03695980128098826
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7592592592592593,
+            "acc_stderr,none": 0.041331194402438376
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6993865030674846,
+            "acc_stderr,none": 0.03602511318806771
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.7167630057803468,
+            "acc_stderr,none": 0.024257901705323374
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.2547486033519553,
+            "acc_stderr,none": 0.014572650383409155
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.707395498392283,
+            "acc_stderr,none": 0.02583989833487798
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.02409347123262133
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.49022164276401564,
+            "acc_stderr,none": 0.012767793787729333
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.783625730994152,
+            "acc_stderr,none": 0.031581495393387324
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.7058255551979401,
+            "acc_stderr,none": 0.007888326651888832
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.7132075471698113,
+            "acc_stderr,none": 0.02783491252754407
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6358381502890174,
+            "acc_stderr,none": 0.03669072477416907
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695236
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6816143497757847,
+            "acc_stderr,none": 0.03126580522513713
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8155339805825242,
+            "acc_stderr,none": 0.03840423627288276
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8846153846153846,
+            "acc_stderr,none": 0.020930193185179333
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.71,
+            "acc_stderr,none": 0.04560480215720684
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.80970625798212,
+            "acc_stderr,none": 0.01403694585038138
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7189542483660131,
+            "acc_stderr,none": 0.025738854797818716
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5141843971631206,
+            "acc_stderr,none": 0.02981549448368206
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.7058823529411765,
+            "acc_stderr,none": 0.027678468642144703
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.536144578313253,
+            "acc_stderr,none": 0.03882310850890594
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7299317517062074,
+            "acc_stderr,none": 0.007800110585101146
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.43859649122807015,
+            "acc_stderr,none": 0.04668000738510455
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.8333333333333334,
+            "acc_stderr,none": 0.026552207828215296
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8860103626943006,
+            "acc_stderr,none": 0.022935144053919432
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.658974358974359,
+            "acc_stderr,none": 0.02403548967633508
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.680672268907563,
+            "acc_stderr,none": 0.030283995525884403
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8256880733944955,
+            "acc_stderr,none": 0.016265675632010347
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.732824427480916,
+            "acc_stderr,none": 0.038808483010823944
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6470588235294118,
+            "acc_stderr,none": 0.019333142020797164
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6636363636363637,
+            "acc_stderr,none": 0.04525393596302506
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7061224489795919,
+            "acc_stderr,none": 0.029162738410249765
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8208955223880597,
+            "acc_stderr,none": 0.027113286753111837
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.88,
+            "acc_stderr,none": 0.03265986323710905
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5239454487789407,
+            "acc_stderr,none": 0.008564264452351101
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6,
+            "acc_stderr,none": 0.04232073695151589
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7236842105263158,
+            "acc_stderr,none": 0.03639057569952929
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7083333333333334,
+            "acc_stderr,none": 0.03800968060554858
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.45,
+            "acc_stderr,none": 0.05000000000000001
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.3627450980392157,
+            "acc_stderr,none": 0.04784060704105655
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.71,
+            "acc_stderr,none": 0.045604802157206845
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5702127659574469,
+            "acc_stderr,none": 0.03236214467715563
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5586206896551724,
+            "acc_stderr,none": 0.04137931034482757
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.47619047619047616,
+            "acc_stderr,none": 0.025722097064388525
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7580645161290323,
+            "acc_stderr,none": 0.024362599693031114
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.47783251231527096,
+            "acc_stderr,none": 0.03514528562175007
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.34814814814814815,
+            "acc_stderr,none": 0.02904560029061625
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.31125827814569534,
+            "acc_stderr,none": 0.03780445850526733
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5185185185185185,
+            "acc_stderr,none": 0.03407632093854052
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.45535714285714285,
+            "acc_stderr,none": 0.04726835553719099
+        }
+    },
+    "task_info": {
+        "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc",
+        "local": true,
+        "revision": "main",
+        "private": false,
+        "params": 10.7,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "AutoRound",
+        "precision": "4bit",
+        "model_params": 10.7,
+        "model_size": 5.6,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-23T15:44:22Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "autoround_version": "0.1",
+        "bits": 4,
+        "damp_percent": 0.01,
+        "desc_act": false,
+        "enable_minmax_tuning": true,
+        "group_size": 128,
+        "is_marlin_format": false,
+        "iters": 200,
+        "lr": 0.005,
+        "minmax_lr": 0.002,
+        "model_file_base_name": "model",
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "scale_dtype": "torch.float16",
+        "static_groups": false,
+        "sym": false,
+        "true_sequential": false,
+        "use_quant_input": false
+    },
+    "versions": {
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714501508.9874723,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
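
The trailing "config" block of each file records how lm-evaluation-harness was invoked: the "hf" model backend, the full `model_args` string, the batch size, and the device, with every task run zero-shot per the "n-shot" map. A minimal sketch of replaying such a run follows, assuming lm-eval v0.4.x (`lm_eval.simple_evaluate`); the exact harness version behind these files is not recorded, so treat this as an approximation rather than the exact pipeline:

```python
# Minimal sketch, assuming lm-eval v0.4.x is installed (pip install lm-eval).
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",  # matches "model": "hf" in the config block above
    model_args="pretrained=Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc,"
               "trust_remote_code=True,dtype=float16",
    tasks=["arc_easy", "arc_challenge", "mmlu"],  # a subset of the recorded tasks
    num_fewshot=0,   # every entry in "n-shot" is 0
    batch_size=2,    # "batch_size": 2
    device="cuda",   # "device": "cuda"
)
print(results["results"])
```
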
Intel/results_2024-05-01-05-22-27.json ADDED @@ -0,0 +1,596 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-05-01-05-22-27",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "Intel/Phi-3-mini-4k-instruct-int4-inc",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "4bit",
|
15 |
+
"model_size": 2.28,
|
16 |
+
"model_params": 3.66,
|
17 |
+
"quant_type": "AutoRound",
|
18 |
+
"precision": "4bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|piqa|0": {
|
22 |
+
"acc,none": 0.7932535364526659,
|
23 |
+
"acc_stderr,none": 0.009448665514183262,
|
24 |
+
"acc_norm,none": 0.795429815016322,
|
25 |
+
"acc_norm_stderr,none": 0.009411688039193606,
|
26 |
+
"alias": "piqa"
|
27 |
+
},
|
28 |
+
"harness|hellaswag|0": {
|
29 |
+
"acc,none": 0.5945030870344553,
|
30 |
+
"acc_stderr,none": 0.004899845087183108,
|
31 |
+
"acc_norm,none": 0.7794264090818562,
|
32 |
+
"acc_norm_stderr,none": 0.00413786037078583,
|
33 |
+
"alias": "hellaswag"
|
34 |
+
},
|
35 |
+
"harness|arc:easy|0": {
|
36 |
+
"acc,none": 0.8333333333333334,
|
37 |
+
"acc_stderr,none": 0.007647191129018641,
|
38 |
+
"acc_norm,none": 0.8085016835016835,
|
39 |
+
"acc_norm_stderr,none": 0.008074044477319723,
|
40 |
+
"alias": "arc_easy"
|
41 |
+
},
|
42 |
+
"harness|boolq|0": {
|
43 |
+
"acc,none": 0.8617737003058104,
|
44 |
+
"acc_stderr,none": 0.006036490185165252,
|
45 |
+
"alias": "boolq"
|
46 |
+
},
|
47 |
+
"harness|arc:challenge|0": {
|
48 |
+
"acc,none": 0.5708191126279863,
|
49 |
+
"acc_stderr,none": 0.014464085894870653,
|
50 |
+
"acc_norm,none": 0.5827645051194539,
|
51 |
+
"acc_norm_stderr,none": 0.014409825518403079,
|
52 |
+
"alias": "arc_challenge"
|
53 |
+
},
|
54 |
+
"harness|mmlu|0": {
|
55 |
+
"acc,none": 0.666215638797892,
|
56 |
+
"acc_stderr,none": 0.003791130008412831,
|
57 |
+
"alias": "mmlu"
|
58 |
+
},
|
59 |
+
"harness|mmlu_humanities|0": {
|
60 |
+
"alias": " - humanities",
|
61 |
+
"acc,none": 0.6374070138150904,
|
62 |
+
"acc_stderr,none": 0.006758438130469111
|
63 |
+
},
|
64 |
+
"harness|mmlu_formal_logic|0": {
|
65 |
+
"alias": " - formal_logic",
|
66 |
+
"acc,none": 0.5317460317460317,
|
67 |
+
"acc_stderr,none": 0.04463112720677173
|
68 |
+
},
|
69 |
+
"harness|mmlu_high_school_european_history|0": {
|
70 |
+
"alias": " - high_school_european_history",
|
71 |
+
"acc,none": 0.8121212121212121,
|
72 |
+
"acc_stderr,none": 0.03050193405942914
|
73 |
+
},
|
74 |
+
"harness|mmlu_high_school_us_history|0": {
|
75 |
+
"alias": " - high_school_us_history",
|
76 |
+
"acc,none": 0.803921568627451,
|
77 |
+
"acc_stderr,none": 0.027865942286639325
|
78 |
+
},
|
79 |
+
"harness|mmlu_high_school_world_history|0": {
|
80 |
+
"alias": " - high_school_world_history",
|
81 |
+
"acc,none": 0.8185654008438819,
|
82 |
+
"acc_stderr,none": 0.02508596114457965
|
83 |
+
},
|
84 |
+
"harness|mmlu_international_law|0": {
|
85 |
+
"alias": " - international_law",
|
86 |
+
"acc,none": 0.8347107438016529,
|
87 |
+
"acc_stderr,none": 0.03390780612972776
|
88 |
+
},
|
89 |
+
"harness|mmlu_jurisprudence|0": {
|
90 |
+
"alias": " - jurisprudence",
|
91 |
+
"acc,none": 0.7407407407407407,
|
92 |
+
"acc_stderr,none": 0.042365112580946315
|
93 |
+
},
|
94 |
+
"harness|mmlu_logical_fallacies|0": {
|
95 |
+
"alias": " - logical_fallacies",
|
96 |
+
"acc,none": 0.8159509202453987,
|
97 |
+
"acc_stderr,none": 0.030446777687971726
|
98 |
+
},
|
99 |
+
"harness|mmlu_moral_disputes|0": {
|
100 |
+
"alias": " - moral_disputes",
|
101 |
+
"acc,none": 0.7225433526011561,
|
102 |
+
"acc_stderr,none": 0.024105712607754307
|
103 |
+
},
|
104 |
+
"harness|mmlu_moral_scenarios|0": {
|
105 |
+
"alias": " - moral_scenarios",
|
106 |
+
"acc,none": 0.5519553072625698,
|
107 |
+
"acc_stderr,none": 0.016631976628930595
|
108 |
+
},
|
109 |
+
"harness|mmlu_philosophy|0": {
|
110 |
+
"alias": " - philosophy",
|
111 |
+
"acc,none": 0.7009646302250804,
|
112 |
+
"acc_stderr,none": 0.026003301117885135
|
113 |
+
},
|
114 |
+
"harness|mmlu_prehistory|0": {
|
115 |
+
"alias": " - prehistory",
|
116 |
+
"acc,none": 0.7746913580246914,
|
117 |
+
"acc_stderr,none": 0.02324620264781975
|
118 |
+
},
|
119 |
+
"harness|mmlu_professional_law|0": {
|
120 |
+
"alias": " - professional_law",
|
121 |
+
"acc,none": 0.5052151238591917,
|
122 |
+
"acc_stderr,none": 0.01276954144965255
|
123 |
+
},
|
124 |
+
"harness|mmlu_world_religions|0": {
|
125 |
+
"alias": " - world_religions",
|
126 |
+
"acc,none": 0.8070175438596491,
|
127 |
+
"acc_stderr,none": 0.030267457554898465
|
128 |
+
},
|
129 |
+
"harness|mmlu_other|0": {
|
130 |
+
"alias": " - other",
|
131 |
+
"acc,none": 0.7022851625362085,
|
132 |
+
"acc_stderr,none": 0.0078979256605874
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.68,
            "acc_stderr,none": 0.04688261722621504
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.7584905660377359,
            "acc_stderr,none": 0.026341480371118352
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.6416184971098265,
            "acc_stderr,none": 0.03656343653353159
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.36,
            "acc_stderr,none": 0.048241815132442176
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.672645739910314,
            "acc_stderr,none": 0.03149384670994131
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.8155339805825242,
            "acc_stderr,none": 0.03840423627288276
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.8846153846153846,
            "acc_stderr,none": 0.02093019318517933
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.71,
            "acc_stderr,none": 0.04560480215720683
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.8071519795657727,
            "acc_stderr,none": 0.014108533515757431
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6895424836601307,
            "acc_stderr,none": 0.0264930332251459
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.5,
            "acc_stderr,none": 0.029827499313594685
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.6911764705882353,
            "acc_stderr,none": 0.028064998167040094
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.4939759036144578,
            "acc_stderr,none": 0.03892212195333047
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.7741306467338317,
            "acc_stderr,none": 0.007389924316755524
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.5614035087719298,
            "acc_stderr,none": 0.04668000738510455
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.8383838383838383,
            "acc_stderr,none": 0.026225919863629293
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.8808290155440415,
            "acc_stderr,none": 0.02338193534812144
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.7,
            "acc_stderr,none": 0.02323458108842849
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.7941176470588235,
            "acc_stderr,none": 0.026265024608275882
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.8807339449541285,
            "acc_stderr,none": 0.01389572929258898
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.732824427480916,
            "acc_stderr,none": 0.03880848301082396
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.704248366013072,
            "acc_stderr,none": 0.018463154132632813
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.6454545454545455,
            "acc_stderr,none": 0.04582004841505417
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.7714285714285715,
            "acc_stderr,none": 0.026882144922307744
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.845771144278607,
            "acc_stderr,none": 0.02553843336857833
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.83,
            "acc_stderr,none": 0.03775251680686371
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.5683476054551221,
            "acc_stderr,none": 0.008412846957194594
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.37,
            "acc_stderr,none": 0.04852365870939099
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.6518518518518519,
            "acc_stderr,none": 0.041153246103369526
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.75,
            "acc_stderr,none": 0.03523807393012047
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.7777777777777778,
            "acc_stderr,none": 0.03476590104304134
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.46,
            "acc_stderr,none": 0.05009082659620332
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.46,
            "acc_stderr,none": 0.05009082659620333
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117317
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.39215686274509803,
            "acc_stderr,none": 0.04858083574266345
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.78,
            "acc_stderr,none": 0.041633319989322605
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.6468085106382979,
            "acc_stderr,none": 0.031245325202761923
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.6068965517241379,
            "acc_stderr,none": 0.040703290137070705
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.5158730158730159,
            "acc_stderr,none": 0.025738330639412152
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.8096774193548387,
            "acc_stderr,none": 0.02233170761182307
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.5665024630541872,
            "acc_stderr,none": 0.03486731727419873
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.67,
            "acc_stderr,none": 0.04725815626252607
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.32222222222222224,
            "acc_stderr,none": 0.028493465091028597
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.4370860927152318,
            "acc_stderr,none": 0.040500357222306355
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.5694444444444444,
            "acc_stderr,none": 0.03376922151252336
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.5,
            "acc_stderr,none": 0.04745789978762494
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.57314569155882,
            "acc_stderr,none": 0.015370888671817379,
            "alias": "truthfulqa_mc2"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 4.232981066177356,
            "perplexity_stderr,none": 0.1029519445833899,
            "acc,none": 0.6813506695129051,
            "acc_stderr,none": 0.006491632434663263,
            "alias": "lambada_openai"
        },
        "harness|winogrande|0": {
            "acc,none": 0.7348066298342542,
            "acc_stderr,none": 0.01240654946619286,
            "alias": "winogrande"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.386,
            "acc_stderr,none": 0.02179352921928116,
            "acc_norm,none": 0.468,
            "acc_norm_stderr,none": 0.0223371864790443,
            "alias": "openbookqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.386780905752754,
            "acc_stderr,none": 0.017048857010515107,
            "alias": "truthfulqa_mc1"
        }
    },
    "task_info": {
        "model": "Intel/Phi-3-mini-4k-instruct-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 1,
        "architectures": "Phi3ForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 1,
        "model_size": 2.2,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.001,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float32",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": false
    },
    "versions": {
        "harness|piqa|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|arc:easy|0": 1.0,
        "harness|boolq|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|lambada:openai|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714509384.8957896,
    "config": {
        "model": "hf",
        "model_args": "pretrained=Intel/Phi-3-mini-4k-instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 4,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
Intel/results_2024-05-06-18-48-05.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-06-18-48-05",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/Baichuan2-7B-Chat-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 5.43,
        "model_params": 6.53,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.47472158177098,
            "acc_stderr,none": 0.015422315616226206,
            "alias": "truthfulqa_mc2"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.4104095563139932,
            "acc_stderr,none": 0.014374922192642662,
            "acc_norm,none": 0.42406143344709896,
            "acc_norm_stderr,none": 0.014441889627464396,
            "alias": "arc_challenge"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 3.971916376229947,
            "perplexity_stderr,none": 0.1125823000420938,
            "acc,none": 0.6751406947409276,
            "acc_stderr,none": 0.006524644766835827,
            "alias": "lambada_openai"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.31946144430844553,
            "acc_stderr,none": 0.016322644182960505,
            "alias": "truthfulqa_mc1"
        },
        "harness|boolq|0": {
            "acc,none": 0.7825688073394496,
            "acc_stderr,none": 0.007214641080602786,
            "alias": "boolq"
        },
        "harness|mmlu|0": {
            "acc,none": 0.5061956986184304,
            "acc_stderr,none": 0.004020003727804312,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.45993623804463335,
            "acc_stderr,none": 0.006890141515530341
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.3253968253968254,
            "acc_stderr,none": 0.041905964388711366
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.6727272727272727,
            "acc_stderr,none": 0.03663974994391242
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.696078431372549,
            "acc_stderr,none": 0.03228210387037894
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.7088607594936709,
            "acc_stderr,none": 0.029571601065753374
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.6198347107438017,
            "acc_stderr,none": 0.04431324501968432
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.6296296296296297,
            "acc_stderr,none": 0.04668408033024931
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.588957055214724,
            "acc_stderr,none": 0.038656978537853624
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.5028901734104047,
            "acc_stderr,none": 0.02691864538323901
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2446927374301676,
            "acc_stderr,none": 0.014378169884098423
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.6012861736334405,
            "acc_stderr,none": 0.0278093225857745
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.5679012345679012,
            "acc_stderr,none": 0.02756301097160667
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.3741851368970013,
            "acc_stderr,none": 0.012359335618172056
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.7309941520467836,
            "acc_stderr,none": 0.03401052620104089
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.5902800128741551,
            "acc_stderr,none": 0.00854032139746814
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.61,
            "acc_stderr,none": 0.04902071300001974
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.569811320754717,
            "acc_stderr,none": 0.030471445867183235
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.48554913294797686,
            "acc_stderr,none": 0.03810871630454764
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.3,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.5650224215246636,
            "acc_stderr,none": 0.033272833702713445
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.6407766990291263,
            "acc_stderr,none": 0.047504583990416946
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.7777777777777778,
            "acc_stderr,none": 0.0272360139461967
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.6,
            "acc_stderr,none": 0.049236596391733084
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.7279693486590039,
            "acc_stderr,none": 0.015913367447500517
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.5816993464052288,
            "acc_stderr,none": 0.02824513402438729
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.3829787234042553,
            "acc_stderr,none": 0.02899908090480618
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.5220588235294118,
            "acc_stderr,none": 0.030343264224213528
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.4578313253012048,
            "acc_stderr,none": 0.0387862677100236
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.5823854403639909,
            "acc_stderr,none": 0.00859896000077255
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.24561403508771928,
            "acc_stderr,none": 0.040493392977481404
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.6666666666666666,
            "acc_stderr,none": 0.03358618145732523
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.7305699481865285,
            "acc_stderr,none": 0.03201867122877793
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.44358974358974357,
            "acc_stderr,none": 0.0251891498947642
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.4957983193277311,
            "acc_stderr,none": 0.0324773433444811
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.7009174311926606,
            "acc_stderr,none": 0.019630417285415182
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.6870229007633588,
            "acc_stderr,none": 0.04066962905677697
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.477124183006536,
            "acc_stderr,none": 0.020206653187884786
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.6545454545454545,
            "acc_stderr,none": 0.04554619617541054
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.5877551020408164,
            "acc_stderr,none": 0.031512360446742674
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.7213930348258707,
            "acc_stderr,none": 0.031700561834973086
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.75,
            "acc_stderr,none": 0.04351941398892446
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.41801458928005075,
            "acc_stderr,none": 0.008577192198635467
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.32,
            "acc_stderr,none": 0.04688261722621503
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.4888888888888889,
            "acc_stderr,none": 0.043182754919779756
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.5460526315789473,
            "acc_stderr,none": 0.04051646342874143
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.5347222222222222,
            "acc_stderr,none": 0.04171115858181618
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.41,
            "acc_stderr,none": 0.04943110704237102
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.52,
            "acc_stderr,none": 0.050211673156867795
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.33,
            "acc_stderr,none": 0.04725815626252604
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.28431372549019607,
            "acc_stderr,none": 0.04488482852329017
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.64,
            "acc_stderr,none": 0.048241815132442176
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.43829787234042555,
            "acc_stderr,none": 0.03243618636108101
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.4068965517241379,
            "acc_stderr,none": 0.04093793981266237
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.29365079365079366,
            "acc_stderr,none": 0.023456037383982022
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.5806451612903226,
            "acc_stderr,none": 0.02807158890109185
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.4088669950738916,
            "acc_stderr,none": 0.034590588158832314
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.51,
            "acc_stderr,none": 0.05024183937956913
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.24444444444444444,
            "acc_stderr,none": 0.026202766534652148
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.33112582781456956,
            "acc_stderr,none": 0.038425817186598696
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.46296296296296297,
            "acc_stderr,none": 0.03400603625538272
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.3392857142857143,
            "acc_stderr,none": 0.04493949068613538
        },
        "harness|piqa|0": {
            "acc,none": 0.735582154515778,
            "acc_stderr,none": 0.010289787244767182,
            "acc_norm,none": 0.735582154515778,
            "acc_norm_stderr,none": 0.010289787244767175,
            "alias": "piqa"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5289782911770564,
            "acc_stderr,none": 0.004981394110706147,
            "acc_norm,none": 0.7034455287791277,
            "acc_norm_stderr,none": 0.004558049018764614,
            "alias": "hellaswag"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.31,
            "acc_stderr,none": 0.0207040410217248,
            "acc_norm,none": 0.398,
            "acc_norm_stderr,none": 0.021912377885779977,
            "alias": "openbookqa"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.7201178451178452,
            "acc_stderr,none": 0.00921207752465653,
            "acc_norm,none": 0.6721380471380471,
            "acc_norm_stderr,none": 0.009632587076170016,
            "alias": "arc_easy"
        },
        "harness|winogrande|0": {
            "acc,none": 0.6858721389108129,
            "acc_stderr,none": 0.013045416716072566,
            "alias": "winogrande"
        }
    },
    "task_info": {
        "model": "Intel/Baichuan2-7B-Chat-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 7,
        "architectures": "BaiChuan2ForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 7,
        "model_size": 4,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.002,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float16",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|piqa|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|arc:easy|0": 1.0,
        "harness|winogrande|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714987804.8012214,
    "config": {
        "model": "hf",
        "model_args": "pretrained=Intel/Baichuan2-7B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 2,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
Intel/results_2024-05-06-20-48-10.json
ADDED
@@ -0,0 +1,596 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-06-20-48-10",
        "total_evaluation_time_secondes": "",
        "model_name": "Intel/Baichuan2-13B-Chat-int4-inc",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 9.14,
        "model_params": 12.72,
        "quant_type": "AutoRound",
        "precision": "4bit"
    },
    "results": {
        "harness|lambada:openai|0": {
            "perplexity,none": 3.1866066378427735,
            "perplexity_stderr,none": 0.08196592947860162,
            "acc,none": 0.7131767902192897,
            "acc_stderr,none": 0.006301120995354314,
            "alias": "lambada_openai"
        },
        "harness|winogrande|0": {
            "acc,none": 0.7300710339384373,
            "acc_stderr,none": 0.012476433372002596,
            "alias": "winogrande"
        },
        "harness|boolq|0": {
            "acc,none": 0.8211009174311926,
            "acc_stderr,none": 0.006703395833491562,
            "alias": "boolq"
        },
        "harness|piqa|0": {
            "acc,none": 0.7584330794341676,
            "acc_stderr,none": 0.009986718001804486,
            "acc_norm,none": 0.7589771490750816,
            "acc_norm_stderr,none": 0.009979042717267312,
            "alias": "piqa"
        },
        "harness|mmlu|0": {
            "acc,none": 0.557826520438684,
            "acc_stderr,none": 0.003961001443358886,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.5115834218916047,
            "acc_stderr,none": 0.0068344977913292115
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.42063492063492064,
            "acc_stderr,none": 0.04415438226743743
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.7333333333333333,
            "acc_stderr,none": 0.03453131801885417
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.7941176470588235,
            "acc_stderr,none": 0.028379449451588663
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.7341772151898734,
            "acc_stderr,none": 0.028756799629658335
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.6859504132231405,
            "acc_stderr,none": 0.04236964753041018
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.6851851851851852,
            "acc_stderr,none": 0.04489931073591312
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.6993865030674846,
            "acc_stderr,none": 0.0360251131880677
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.6127167630057804,
            "acc_stderr,none": 0.026226158605124655
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.24692737430167597,
            "acc_stderr,none": 0.014422292204808838
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.6237942122186495,
            "acc_stderr,none": 0.027513925683549427
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.6296296296296297,
            "acc_stderr,none": 0.026869490744815247
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.4335071707953064,
            "acc_stderr,none": 0.012656810383983972
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.7602339181286549,
            "acc_stderr,none": 0.03274485211946956
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.638236240746701,
            "acc_stderr,none": 0.008303983171879983
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.58,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.6,
            "acc_stderr,none": 0.03015113445777629
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.5375722543352601,
            "acc_stderr,none": 0.0380168510452446
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.36,
            "acc_stderr,none": 0.048241815132442176
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.6278026905829597,
            "acc_stderr,none": 0.032443052830087304
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.7766990291262136,
            "acc_stderr,none": 0.04123553189891431
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.8247863247863247,
            "acc_stderr,none": 0.02490443909891822
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.66,
            "acc_stderr,none": 0.04760952285695237
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.7854406130268199,
            "acc_stderr,none": 0.014680033956893346
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6078431372549019,
            "acc_stderr,none": 0.027956046165424516
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.4432624113475177,
            "acc_stderr,none": 0.029634838473766006
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.5661764705882353,
            "acc_stderr,none": 0.03010563657001663
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.46987951807228917,
            "acc_stderr,none": 0.03885425420866767
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.6441338966525837,
            "acc_stderr,none": 0.008375137422879046
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.35964912280701755,
            "acc_stderr,none": 0.04514496132873632
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.7121212121212122,
            "acc_stderr,none": 0.03225883512300992
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.7979274611398963,
            "acc_stderr,none": 0.02897908979429673
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.5282051282051282,
            "acc_stderr,none": 0.0253106392549339
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.5252100840336135,
            "acc_stderr,none": 0.0324371805513741
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.7614678899082569,
            "acc_stderr,none": 0.018272575810231867
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.6946564885496184,
            "acc_stderr,none": 0.04039314978724562
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.5604575163398693,
            "acc_stderr,none": 0.02007942040808792
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.5727272727272728,
            "acc_stderr,none": 0.04738198703545483
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.6612244897959184,
            "acc_stderr,none": 0.030299506562154188
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.7761194029850746,
            "acc_stderr,none": 0.029475250236017193
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.85,
            "acc_stderr,none": 0.0358870281282637
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.46336822074215034,
            "acc_stderr,none": 0.008568121791309898
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.3,
            "acc_stderr,none": 0.046056618647183814
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.5259259259259259,
            "acc_stderr,none": 0.04313531696750575
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.6052631578947368,
            "acc_stderr,none": 0.039777499346220734
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.6388888888888888,
            "acc_stderr,none": 0.04016660030451232
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.41,
            "acc_stderr,none": 0.04943110704237102
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.49,
            "acc_stderr,none": 0.05024183937956911
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.4019607843137255,
            "acc_stderr,none": 0.04878608714466996
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.71,
            "acc_stderr,none": 0.04560480215720685
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.4851063829787234,
            "acc_stderr,none": 0.032671518489247764
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.503448275862069,
            "acc_stderr,none": 0.04166567577101579
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.37037037037037035,
            "acc_stderr,none": 0.02487081525105711
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.7096774193548387,
            "acc_stderr,none": 0.02582210611941589
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.43842364532019706,
            "acc_stderr,none": 0.03491207857486519
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.58,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.26666666666666666,
            "acc_stderr,none": 0.026962424325073824
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.3708609271523179,
            "acc_stderr,none": 0.03943966699183629
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.38425925925925924,
            "acc_stderr,none": 0.03317354514310742
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.3392857142857143,
            "acc_stderr,none": 0.04493949068613539
        },
        "harness|arc:easy|0": {
            "acc,none": 0.7491582491582491,
            "acc_stderr,none": 0.008895183010487391,
            "acc_norm,none": 0.7041245791245792,
            "acc_norm_stderr,none": 0.00936585413414007,
            "alias": "arc_easy"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.4735494880546075,
            "acc_stderr,none": 0.014590931358120163,
            "acc_norm,none": 0.4718430034129693,
            "acc_norm_stderr,none": 0.014588204105102203,
            "alias": "arc_challenge"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.5149555773005338,
            "acc_stderr,none": 0.01579541827157361,
            "alias": "truthfulqa_mc2"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.3635250917992656,
            "acc_stderr,none": 0.016838862883965834,
            "alias": "truthfulqa_mc1"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5670185222067318,
            "acc_stderr,none": 0.004944755230598397,
            "acc_norm,none": 0.7461661023700458,
            "acc_norm_stderr,none": 0.004343142545094187,
            "alias": "hellaswag"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.312,
            "acc_stderr,none": 0.02074059653648807,
            "acc_norm,none": 0.43,
            "acc_norm_stderr,none": 0.02216263442665284,
            "alias": "openbookqa"
        }
    },
    "task_info": {
        "model": "Intel/Baichuan2-13B-Chat-int4-inc",
        "local": true,
        "revision": "main",
        "private": false,
        "params": 13,
        "architectures": "BaiChuanForCausalLM",
        "quant_type": "AutoRound",
        "precision": "4bit",
        "model_params": 13,
        "model_size": 7.3,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-23T15:44:22Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "autoround_version": "0.1",
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "enable_minmax_tuning": true,
        "group_size": 128,
        "is_marlin_format": false,
        "iters": 1000,
        "lr": 0.001,
        "minmax_lr": 0.002,
        "model_file_base_name": "model",
        "model_name_or_path": null,
        "quant_method": "gptq",
        "scale_dtype": "torch.float16",
        "static_groups": false,
        "sym": false,
        "true_sequential": false,
        "use_quant_input": true
    },
    "versions": {
        "harness|lambada:openai|0": 1.0,
        "harness|winogrande|0": 1.0,
        "harness|boolq|0": 2.0,
        "harness|piqa|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
|
481 |
+
"harness|mmlu_sociology|0": 0.0,
|
482 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
483 |
+
"harness|mmlu_stem|0": null,
|
484 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
485 |
+
"harness|mmlu_anatomy|0": 0.0,
|
486 |
+
"harness|mmlu_astronomy|0": 0.0,
|
487 |
+
"harness|mmlu_college_biology|0": 0.0,
|
488 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_college_physics|0": 0.0,
|
492 |
+
"harness|mmlu_computer_security|0": 0.0,
|
493 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
494 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
495 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
496 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
497 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
501 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
502 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
503 |
+
"harness|arc:easy|0": 1.0,
|
504 |
+
"harness|arc:challenge|0": 1.0,
|
505 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
506 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
507 |
+
"harness|hellaswag|0": 1.0,
|
508 |
+
"harness|openbookqa|0": 1.0
|
509 |
+
},
|
510 |
+
"n-shot": {
|
511 |
+
"arc_challenge": 0,
|
512 |
+
"arc_easy": 0,
|
513 |
+
"boolq": 0,
|
514 |
+
"hellaswag": 0,
|
515 |
+
"lambada_openai": 0,
|
516 |
+
"mmlu": 0,
|
517 |
+
"mmlu_abstract_algebra": 0,
|
518 |
+
"mmlu_anatomy": 0,
|
519 |
+
"mmlu_astronomy": 0,
|
520 |
+
"mmlu_business_ethics": 0,
|
521 |
+
"mmlu_clinical_knowledge": 0,
|
522 |
+
"mmlu_college_biology": 0,
|
523 |
+
"mmlu_college_chemistry": 0,
|
524 |
+
"mmlu_college_computer_science": 0,
|
525 |
+
"mmlu_college_mathematics": 0,
|
526 |
+
"mmlu_college_medicine": 0,
|
527 |
+
"mmlu_college_physics": 0,
|
528 |
+
"mmlu_computer_security": 0,
|
529 |
+
"mmlu_conceptual_physics": 0,
|
530 |
+
"mmlu_econometrics": 0,
|
531 |
+
"mmlu_electrical_engineering": 0,
|
532 |
+
"mmlu_elementary_mathematics": 0,
|
533 |
+
"mmlu_formal_logic": 0,
|
534 |
+
"mmlu_global_facts": 0,
|
535 |
+
"mmlu_high_school_biology": 0,
|
536 |
+
"mmlu_high_school_chemistry": 0,
|
537 |
+
"mmlu_high_school_computer_science": 0,
|
538 |
+
"mmlu_high_school_european_history": 0,
|
539 |
+
"mmlu_high_school_geography": 0,
|
540 |
+
"mmlu_high_school_government_and_politics": 0,
|
541 |
+
"mmlu_high_school_macroeconomics": 0,
|
542 |
+
"mmlu_high_school_mathematics": 0,
|
543 |
+
"mmlu_high_school_microeconomics": 0,
|
544 |
+
"mmlu_high_school_physics": 0,
|
545 |
+
"mmlu_high_school_psychology": 0,
|
546 |
+
"mmlu_high_school_statistics": 0,
|
547 |
+
"mmlu_high_school_us_history": 0,
|
548 |
+
"mmlu_high_school_world_history": 0,
|
549 |
+
"mmlu_human_aging": 0,
|
550 |
+
"mmlu_human_sexuality": 0,
|
551 |
+
"mmlu_humanities": 0,
|
552 |
+
"mmlu_international_law": 0,
|
553 |
+
"mmlu_jurisprudence": 0,
|
554 |
+
"mmlu_logical_fallacies": 0,
|
555 |
+
"mmlu_machine_learning": 0,
|
556 |
+
"mmlu_management": 0,
|
557 |
+
"mmlu_marketing": 0,
|
558 |
+
"mmlu_medical_genetics": 0,
|
559 |
+
"mmlu_miscellaneous": 0,
|
560 |
+
"mmlu_moral_disputes": 0,
|
561 |
+
"mmlu_moral_scenarios": 0,
|
562 |
+
"mmlu_nutrition": 0,
|
563 |
+
"mmlu_other": 0,
|
564 |
+
"mmlu_philosophy": 0,
|
565 |
+
"mmlu_prehistory": 0,
|
566 |
+
"mmlu_professional_accounting": 0,
|
567 |
+
"mmlu_professional_law": 0,
|
568 |
+
"mmlu_professional_medicine": 0,
|
569 |
+
"mmlu_professional_psychology": 0,
|
570 |
+
"mmlu_public_relations": 0,
|
571 |
+
"mmlu_security_studies": 0,
|
572 |
+
"mmlu_social_sciences": 0,
|
573 |
+
"mmlu_sociology": 0,
|
574 |
+
"mmlu_stem": 0,
|
575 |
+
"mmlu_us_foreign_policy": 0,
|
576 |
+
"mmlu_virology": 0,
|
577 |
+
"mmlu_world_religions": 0,
|
578 |
+
"openbookqa": 0,
|
579 |
+
"piqa": 0,
|
580 |
+
"truthfulqa_mc1": 0,
|
581 |
+
"truthfulqa_mc2": 0,
|
582 |
+
"winogrande": 0
|
583 |
+
},
|
584 |
+
"date": 1714992606.0254002,
|
585 |
+
"config": {
|
586 |
+
"model": "hf",
|
587 |
+
"model_args": "pretrained=Intel/Baichuan2-13B-Chat-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
588 |
+
"batch_size": 2,
|
589 |
+
"batch_sizes": [],
|
590 |
+
"device": "cuda",
|
591 |
+
"use_cache": null,
|
592 |
+
"limit": null,
|
593 |
+
"bootstrap_iters": 100000,
|
594 |
+
"gen_kwargs": null
|
595 |
+
}
|
596 |
+
}
|
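Every results file added in this commit shares the same schema: `config_general` metadata, a `results` map with one `harness|<task>|<n-shot>` entry per task, then `task_info`, `quantization_config`, `versions`, `n-shot`, and the harness `config`. A minimal parsing sketch in Python — the local file path is illustrative, and only keys that appear in the dumps above are assumed:

```python
import json

# Illustrative local path; any results_*.json file from this commit has the same shape.
with open("results_2024-05-09-05-59-04.json") as f:
    data = json.load(f)

# Each entry under "results" is keyed "harness|<task>|<n-shot>" and stores
# metric/filter pairs such as "acc,none" and "acc_stderr,none".
for task, metrics in sorted(data["results"].items()):
    if "acc,none" in metrics:
        acc = metrics["acc,none"]
        err = metrics.get("acc_stderr,none", float("nan"))
        print(f"{task}: {acc:.4f} +/- {err:.4f}")
```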
Intel/results_2024-05-09-05-59-04.json
ADDED
@@ -0,0 +1,596 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-09-05-59-04",
    "total_evaluation_time_secondes": "",
    "model_name": "Intel/Llama-2-7b-chat-hf-int4-inc",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 3.5,
    "model_params": 7.0,
    "quant_type": "AutoRound",
    "precision": "4bit"
  },
  "results": {
    "harness|arc:easy|0": {
      "acc,none": 0.7335858585858586,
      "acc_stderr,none": 0.009071357971078681,
      "acc_norm,none": 0.6982323232323232,
      "acc_norm_stderr,none": 0.009418994158522532,
      "alias": "arc_easy"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.46442705279789154,
      "acc_stderr,none": 0.015722742603615374,
      "alias": "truthfulqa_mc2"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 3.330271144590987,
      "perplexity_stderr,none": 0.08880199591810012,
      "acc,none": 0.7017271492334562,
      "acc_stderr,none": 0.006373868144287134,
      "alias": "lambada_openai"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.5699063931487751,
      "acc_stderr,none": 0.004940771559475488,
      "acc_norm,none": 0.7482573192591118,
      "acc_norm_stderr,none": 0.004331271717773906,
      "alias": "hellaswag"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.31456548347613217,
      "acc_stderr,none": 0.01625524199317917,
      "alias": "truthfulqa_mc1"
    },
    "harness|piqa|0": {
      "acc,none": 0.766050054406964,
      "acc_stderr,none": 0.00987723689513747,
      "acc_norm,none": 0.764417845484222,
      "acc_norm_stderr,none": 0.009901067586473909,
      "alias": "piqa"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.4402730375426621,
      "acc_stderr,none": 0.014506769524804246,
      "acc_norm,none": 0.43686006825938567,
      "acc_norm_stderr,none": 0.014494421584256525,
      "alias": "arc_challenge"
    },
    "harness|winogrande|0": {
      "acc,none": 0.6582478295185478,
      "acc_stderr,none": 0.013330103018622847,
      "alias": "winogrande"
    },
    "harness|mmlu|0": {
      "acc,none": 0.4620424440962826,
      "acc_stderr,none": 0.004045238527354896,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.42848034006376196,
      "acc_stderr,none": 0.0069328072054616755
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.25396825396825395,
      "acc_stderr,none": 0.03893259610604674
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.5636363636363636,
      "acc_stderr,none": 0.03872592983524753
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.6519607843137255,
      "acc_stderr,none": 0.03343311240488419
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.620253164556962,
      "acc_stderr,none": 0.031591887529658504
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.6033057851239669,
      "acc_stderr,none": 0.04465869780531009
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.5555555555555556,
      "acc_stderr,none": 0.04803752235190192
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.5705521472392638,
      "acc_stderr,none": 0.038890666191127216
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.5057803468208093,
      "acc_stderr,none": 0.026917296179149123
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2424581005586592,
      "acc_stderr,none": 0.014333522059217887
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.5144694533762058,
      "acc_stderr,none": 0.02838619808417768
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.5277777777777778,
      "acc_stderr,none": 0.027777777777777804
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.35658409387222945,
      "acc_stderr,none": 0.012233642989273891
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.672514619883041,
      "acc_stderr,none": 0.035993357714560276
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.5494045703250724,
      "acc_stderr,none": 0.008696781011633008
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.51,
      "acc_stderr,none": 0.05024183937956913
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.539622641509434,
      "acc_stderr,none": 0.030676096599389184
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.3988439306358382,
      "acc_stderr,none": 0.03733626655383509
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.41,
      "acc_stderr,none": 0.049431107042371025
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.5964125560538116,
      "acc_stderr,none": 0.03292802819330314
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.6407766990291263,
      "acc_stderr,none": 0.04750458399041696
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.7307692307692307,
      "acc_stderr,none": 0.02905858830374884
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.51,
      "acc_stderr,none": 0.05024183937956911
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.6717752234993615,
      "acc_stderr,none": 0.016791685640192892
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.5261437908496732,
      "acc_stderr,none": 0.028590752958852387
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.36879432624113473,
      "acc_stderr,none": 0.028782227561347254
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.4227941176470588,
      "acc_stderr,none": 0.030008562845003476
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.4578313253012048,
      "acc_stderr,none": 0.0387862677100236
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.5271368215794605,
      "acc_stderr,none": 0.008780899524181778
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.2894736842105263,
      "acc_stderr,none": 0.04266339443159394
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.5757575757575758,
      "acc_stderr,none": 0.035212249088415845
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.6683937823834197,
      "acc_stderr,none": 0.03397636541089118
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.4230769230769231,
      "acc_stderr,none": 0.02504919787604234
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.37815126050420167,
      "acc_stderr,none": 0.031499305777849054
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.6165137614678899,
      "acc_stderr,none": 0.02084715664191598
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.5572519083969466,
      "acc_stderr,none": 0.04356447202665069
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.4444444444444444,
      "acc_stderr,none": 0.02010258389588718
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.5727272727272728,
      "acc_stderr,none": 0.04738198703545483
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.5387755102040817,
      "acc_stderr,none": 0.031912820526692774
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.7263681592039801,
      "acc_stderr,none": 0.03152439186555402
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.362511893434824,
      "acc_stderr,none": 0.008406035153052648
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.3,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.48148148148148145,
      "acc_stderr,none": 0.043163785995113245
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.46710526315789475,
      "acc_stderr,none": 0.040601270352363966
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.4583333333333333,
      "acc_stderr,none": 0.04166666666666665
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.24,
      "acc_stderr,none": 0.04292346959909283
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.3,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.29,
      "acc_stderr,none": 0.04560480215720684
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.18627450980392157,
      "acc_stderr,none": 0.038739587141493545
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.58,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.39574468085106385,
      "acc_stderr,none": 0.03196758697835362
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.45517241379310347,
      "acc_stderr,none": 0.04149886942192117
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.2962962962962963,
      "acc_stderr,none": 0.02351729433596329
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.5064516129032258,
      "acc_stderr,none": 0.028441638233540505
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.3103448275862069,
      "acc_stderr,none": 0.032550867699701024
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.38,
      "acc_stderr,none": 0.04878317312145633
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.2962962962962963,
      "acc_stderr,none": 0.027840811495871927
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.271523178807947,
      "acc_stderr,none": 0.036313298039696525
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.2638888888888889,
      "acc_stderr,none": 0.03005820270430985
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.39285714285714285,
      "acc_stderr,none": 0.04635550135609976
    },
    "harness|boolq|0": {
      "acc,none": 0.8064220183486238,
      "acc_stderr,none": 0.006910376454601405,
      "alias": "boolq"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.334,
      "acc_stderr,none": 0.021113492347743727,
      "acc_norm,none": 0.438,
      "acc_norm_stderr,none": 0.022210326363977417,
      "alias": "openbookqa"
    }
  },
  "task_info": {
    "model": "Intel/Llama-2-7b-chat-hf-int4-inc",
    "local": true,
    "revision": "main",
    "private": false,
    "params": 7.0,
    "architectures": "LlamaForCausalLM",
    "quant_type": "AutoRound",
    "precision": "4bit",
    "model_params": 7.0,
    "model_size": 3.5,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-05-08T15:44:22Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "autoround_version": "0.2.0.dev",
    "bits": 4,
    "damp_percent": 0.01,
    "desc_act": false,
    "enable_minmax_tuning": true,
    "enable_quanted_input": true,
    "group_size": 128,
    "is_marlin_format": false,
    "iters": 200,
    "lr": 0.005,
    "minmax_lr": 0.005,
    "model_file_base_name": "model",
    "model_name_or_path": null,
    "quant_method": "gptq",
    "scale_dtype": "float16",
    "static_groups": false,
    "sym": false,
    "true_sequential": false
  },
  "versions": {
    "harness|arc:easy|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|piqa|0": 1.0,
    "harness|arc:challenge|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|boolq|0": 2.0,
    "harness|openbookqa|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1715201004.6439905,
  "config": {
    "model": "hf",
    "model_args": "pretrained=Intel/Llama-2-7b-chat-hf-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
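The `config` block above records the exact harness invocation: `model=hf`, the `model_args` string, `batch_size=2`, and `device=cuda`, with every task run zero-shot per `n-shot`. A sketch of the equivalent call through lm-evaluation-harness's Python API, assuming the `simple_evaluate` entry point (importable from `lm_eval` in recent releases, `lm_eval.evaluator` in older ones); the task list here is an illustrative subset, not the full suite:

```python
# Sketch, not a verified recipe: assumes lm-evaluation-harness (lm_eval) is installed.
from lm_eval import simple_evaluate

results = simple_evaluate(
    model="hf",  # "model": "hf" in the config block
    model_args="pretrained=Intel/Llama-2-7b-chat-hf-int4-inc,trust_remote_code=True,dtype=float16",
    tasks=["arc_easy", "arc_challenge", "hellaswag"],  # subset of the zero-shot tasks above
    num_fewshot=0,   # every entry in "n-shot" is 0
    batch_size=2,    # "batch_size": 2
    device="cuda",   # "device": "cuda"
)
print(results["results"]["arc_easy"]["acc,none"])
```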
Intel/results_2024-05-10-10-37-29.json
ADDED
@@ -0,0 +1,596 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-05-10-10-37-29",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "Intel/Meta-Llama-3-8B-Instruct-int4-inc",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "4bit",
|
15 |
+
"model_size": 5.4,
|
16 |
+
"model_params": 7.2,
|
17 |
+
"quant_type": "AutoRound",
|
18 |
+
"precision": "4bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|truthfulqa:mc2|0": {
|
22 |
+
"acc,none": 0.5146934010760681,
|
23 |
+
"acc_stderr,none": 0.0152582547538094,
|
24 |
+
"alias": "truthfulqa_mc2"
|
25 |
+
},
|
26 |
+
"harness|hellaswag|0": {
|
27 |
+
"acc,none": 0.570902210714997,
|
28 |
+
"acc_stderr,none": 0.0049393581455613,
|
29 |
+
"acc_norm,none": 0.7518422624975104,
|
30 |
+
"acc_norm_stderr,none": 0.004310610616845716,
|
31 |
+
"alias": "hellaswag"
|
32 |
+
},
|
33 |
+
"harness|arc:easy|0": {
|
34 |
+
"acc,none": 0.811026936026936,
|
35 |
+
"acc_stderr,none": 0.00803314829980193,
|
36 |
+
"acc_norm,none": 0.789983164983165,
|
37 |
+
"acc_norm_stderr,none": 0.008358034622322224,
|
38 |
+
"alias": "arc_easy"
|
39 |
+
},
|
40 |
+
"harness|mmlu|0": {
|
41 |
+
"acc,none": 0.6241276171485544,
|
42 |
+
"acc_stderr,none": 0.00383558025213648,
|
43 |
+
"alias": "mmlu"
|
44 |
+
},
|
45 |
+
"harness|mmlu_humanities|0": {
|
46 |
+
"alias": " - humanities",
|
47 |
+
"acc,none": 0.5628055260361318,
|
48 |
+
"acc_stderr,none": 0.006689295581999755
|
49 |
+
},
|
50 |
+
"harness|mmlu_formal_logic|0": {
|
51 |
+
"alias": " - formal_logic",
|
52 |
+
"acc,none": 0.49206349206349204,
|
53 |
+
"acc_stderr,none": 0.044715725362943486
|
54 |
+
},
|
55 |
+
"harness|mmlu_high_school_european_history|0": {
|
56 |
+
"alias": " - high_school_european_history",
|
57 |
+
"acc,none": 0.7151515151515152,
|
58 |
+
"acc_stderr,none": 0.03524390844511781
|
59 |
+
},
|
60 |
+
"harness|mmlu_high_school_us_history|0": {
|
61 |
+
"alias": " - high_school_us_history",
|
62 |
+
"acc,none": 0.8137254901960784,
|
63 |
+
"acc_stderr,none": 0.027325470966716333
|
64 |
+
},
|
65 |
+
"harness|mmlu_high_school_world_history|0": {
|
66 |
+
"alias": " - high_school_world_history",
|
67 |
+
"acc,none": 0.810126582278481,
|
68 |
+
"acc_stderr,none": 0.02553010046023351
|
69 |
+
},
|
70 |
+
"harness|mmlu_international_law|0": {
|
71 |
+
"alias": " - international_law",
|
72 |
+
"acc,none": 0.743801652892562,
|
73 |
+
"acc_stderr,none": 0.03984979653302872
|
74 |
+
},
|
75 |
+
"harness|mmlu_jurisprudence|0": {
|
76 |
+
"alias": " - jurisprudence",
|
77 |
+
"acc,none": 0.8055555555555556,
|
78 |
+
"acc_stderr,none": 0.038260763248848646
|
79 |
+
},
|
80 |
+
"harness|mmlu_logical_fallacies|0": {
|
81 |
+
"alias": " - logical_fallacies",
|
82 |
+
"acc,none": 0.7791411042944786,
|
83 |
+
"acc_stderr,none": 0.032591773927421776
|
84 |
+
},
|
85 |
+
"harness|mmlu_moral_disputes|0": {
|
86 |
+
"alias": " - moral_disputes",
|
87 |
+
"acc,none": 0.7023121387283237,
|
88 |
+
"acc_stderr,none": 0.024617055388677006
|
89 |
+
},
|
90 |
+
"harness|mmlu_moral_scenarios|0": {
|
91 |
+
"alias": " - moral_scenarios",
|
92 |
+
"acc,none": 0.26256983240223464,
|
93 |
+
"acc_stderr,none": 0.014716824273017765
|
94 |
+
},
|
95 |
+
"harness|mmlu_philosophy|0": {
|
96 |
+
"alias": " - philosophy",
|
97 |
+
"acc,none": 0.6913183279742765,
|
98 |
+
"acc_stderr,none": 0.026236965881153256
|
99 |
+
},
|
100 |
+
"harness|mmlu_prehistory|0": {
|
101 |
+
"alias": " - prehistory",
|
102 |
+
"acc,none": 0.7376543209876543,
|
103 |
+
"acc_stderr,none": 0.024477222856135114
|
104 |
+
},
|
105 |
+
"harness|mmlu_professional_law|0": {
|
106 |
+
"alias": " - professional_law",
|
107 |
+
"acc,none": 0.4830508474576271,
|
108 |
+
"acc_stderr,none": 0.012762896889210867
|
109 |
+
},
|
110 |
+
"harness|mmlu_world_religions|0": {
|
111 |
+
"alias": " - world_religions",
|
112 |
+
"acc,none": 0.7777777777777778,
|
113 |
+
"acc_stderr,none": 0.03188578017686398
|
114 |
+
},
|
115 |
+
"harness|mmlu_other|0": {
|
116 |
+
"alias": " - other",
|
117 |
+
"acc,none": 0.7093659478596717,
|
118 |
+
"acc_stderr,none": 0.007876660614156239
|
119 |
+
},
|
120 |
+
"harness|mmlu_business_ethics|0": {
|
121 |
+
"alias": " - business_ethics",
|
122 |
+
"acc,none": 0.67,
|
123 |
+
"acc_stderr,none": 0.04725815626252607
|
124 |
+
},
|
125 |
+
"harness|mmlu_clinical_knowledge|0": {
|
126 |
+
"alias": " - clinical_knowledge",
|
127 |
+
"acc,none": 0.7056603773584905,
|
128 |
+
"acc_stderr,none": 0.02804918631569524
|
129 |
+
},
|
130 |
+
"harness|mmlu_college_medicine|0": {
|
131 |
+
"alias": " - college_medicine",
|
132 |
+
"acc,none": 0.653179190751445,
|
133 |
+
"acc_stderr,none": 0.03629146670159663
|
134 |
+
},
|
135 |
+
"harness|mmlu_global_facts|0": {
|
136 |
+
"alias": " - global_facts",
|
137 |
+
"acc,none": 0.41,
|
138 |
+
"acc_stderr,none": 0.04943110704237102
|
139 |
+
},
|
140 |
+
"harness|mmlu_human_aging|0": {
|
141 |
+
"alias": " - human_aging",
|
142 |
+
"acc,none": 0.6860986547085202,
|
143 |
+
"acc_stderr,none": 0.031146796482972465
|
144 |
+
},
|
145 |
+
"harness|mmlu_management|0": {
|
146 |
+
"alias": " - management",
|
147 |
+
"acc,none": 0.8252427184466019,
|
148 |
+
"acc_stderr,none": 0.037601780060266196
|
149 |
+
},
|
150 |
+
"harness|mmlu_marketing|0": {
|
151 |
+
"alias": " - marketing",
|
152 |
+
"acc,none": 0.8931623931623932,
|
153 |
+
"acc_stderr,none": 0.020237149008990922
|
154 |
+
},
|
155 |
+
"harness|mmlu_medical_genetics|0": {
|
156 |
+
"alias": " - medical_genetics",
|
157 |
+
"acc,none": 0.79,
|
158 |
+
"acc_stderr,none": 0.040936018074033256
|
159 |
+
},
|
160 |
+
"harness|mmlu_miscellaneous|0": {
|
161 |
+
"alias": " - miscellaneous",
|
162 |
+
"acc,none": 0.8033205619412516,
|
163 |
+
"acc_stderr,none": 0.014214138556913912
|
164 |
+
},
|
165 |
+
"harness|mmlu_nutrition|0": {
|
166 |
+
"alias": " - nutrition",
|
167 |
+
"acc,none": 0.7189542483660131,
|
168 |
+
"acc_stderr,none": 0.025738854797818723
|
169 |
+
},
|
170 |
+
"harness|mmlu_professional_accounting|0": {
|
171 |
+
"alias": " - professional_accounting",
|
172 |
+
"acc,none": 0.5319148936170213,
|
173 |
+
"acc_stderr,none": 0.029766675075873866
|
174 |
+
},
|
175 |
+
"harness|mmlu_professional_medicine|0": {
|
176 |
+
"alias": " - professional_medicine",
|
177 |
+
"acc,none": 0.7022058823529411,
|
178 |
+
"acc_stderr,none": 0.027778298701545443
|
179 |
+
},
|
180 |
+
"harness|mmlu_virology|0": {
|
181 |
+
"alias": " - virology",
|
182 |
+
"acc,none": 0.4819277108433735,
|
183 |
+
"acc_stderr,none": 0.03889951252827216
|
184 |
+
},
|
185 |
+
"harness|mmlu_social_sciences|0": {
|
186 |
+
"alias": " - social_sciences",
|
187 |
+
"acc,none": 0.7331816704582386,
|
188 |
+
"acc_stderr,none": 0.007832357132624299
|
189 |
+
},
|
190 |
+
"harness|mmlu_econometrics|0": {
|
191 |
+
"alias": " - econometrics",
|
192 |
+
"acc,none": 0.49122807017543857,
|
193 |
+
"acc_stderr,none": 0.04702880432049615
|
194 |
+
},
|
195 |
+
"harness|mmlu_high_school_geography|0": {
|
196 |
+
"alias": " - high_school_geography",
|
197 |
+
"acc,none": 0.7676767676767676,
|
198 |
+
"acc_stderr,none": 0.03008862949021749
|
199 |
+
},
|
200 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
201 |
+
"alias": " - high_school_government_and_politics",
|
202 |
+
"acc,none": 0.8652849740932642,
|
203 |
+
"acc_stderr,none": 0.024639789097709437
|
204 |
+
},
|
205 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
206 |
+
"alias": " - high_school_macroeconomics",
|
207 |
+
"acc,none": 0.6435897435897436,
|
208 |
+
"acc_stderr,none": 0.024283140529467305
|
209 |
+
},
|
210 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
211 |
+
"alias": " - high_school_microeconomics",
|
212 |
+
"acc,none": 0.7100840336134454,
|
213 |
+
"acc_stderr,none": 0.029472485833136094
|
214 |
+
},
|
215 |
+
"harness|mmlu_high_school_psychology|0": {
|
216 |
+
"alias": " - high_school_psychology",
|
217 |
+
"acc,none": 0.8128440366972477,
|
218 |
+
"acc_stderr,none": 0.016722684526200165
|
219 |
+
},
|
220 |
+
"harness|mmlu_human_sexuality|0": {
|
221 |
+
"alias": " - human_sexuality",
|
222 |
+
"acc,none": 0.7709923664122137,
|
223 |
+
"acc_stderr,none": 0.036853466317118506
|
224 |
+
},
|
225 |
+
"harness|mmlu_professional_psychology|0": {
|
226 |
+
"alias": " - professional_psychology",
|
227 |
+
"acc,none": 0.6699346405228758,
|
228 |
+
"acc_stderr,none": 0.019023726160724553
|
229 |
+
},
|
230 |
+
"harness|mmlu_public_relations|0": {
|
231 |
+
"alias": " - public_relations",
|
232 |
+
"acc,none": 0.6818181818181818,
|
233 |
+
"acc_stderr,none": 0.044612721759105085
|
234 |
+
},
|
235 |
+
"harness|mmlu_security_studies|0": {
|
236 |
+
"alias": " - security_studies",
|
237 |
+
"acc,none": 0.7306122448979592,
|
238 |
+
"acc_stderr,none": 0.02840125202902294
|
239 |
+
},
|
240 |
+
"harness|mmlu_sociology|0": {
|
241 |
+
"alias": " - sociology",
|
242 |
+
"acc,none": 0.8407960199004975,
|
243 |
+
"acc_stderr,none": 0.02587064676616914
|
244 |
+
},
|
245 |
+
"harness|mmlu_us_foreign_policy|0": {
|
246 |
+
"alias": " - us_foreign_policy",
|
247 |
+
"acc,none": 0.84,
|
248 |
+
"acc_stderr,none": 0.03684529491774709
|
249 |
+
},
|
250 |
+
"harness|mmlu_stem|0": {
|
251 |
+
"alias": " - stem",
|
252 |
+
"acc,none": 0.5252140818268316,
|
253 |
+
"acc_stderr,none": 0.008572192732014554
|
254 |
+
},
|
255 |
+
"harness|mmlu_abstract_algebra|0": {
|
256 |
+
"alias": " - abstract_algebra",
|
257 |
+
"acc,none": 0.27,
|
258 |
+
"acc_stderr,none": 0.044619604333847415
|
259 |
+
},
|
260 |
+
"harness|mmlu_anatomy|0": {
|
261 |
+
"alias": " - anatomy",
|
262 |
+
"acc,none": 0.6222222222222222,
|
263 |
+
"acc_stderr,none": 0.04188307537595853
|
264 |
+
},
|
265 |
+
"harness|mmlu_astronomy|0": {
|
266 |
+
"alias": " - astronomy",
|
267 |
+
"acc,none": 0.6842105263157895,
|
268 |
+
"acc_stderr,none": 0.0378272898086547
|
269 |
+
},
|
270 |
+
"harness|mmlu_college_biology|0": {
|
271 |
+
"alias": " - college_biology",
|
272 |
+
"acc,none": 0.7291666666666666,
|
273 |
+
"acc_stderr,none": 0.037161774375660164
|
274 |
+
},
|
275 |
+
"harness|mmlu_college_chemistry|0": {
|
276 |
+
"alias": " - college_chemistry",
|
277 |
+
"acc,none": 0.41,
|
278 |
+
"acc_stderr,none": 0.049431107042371025
|
279 |
+
},
|
280 |
+
"harness|mmlu_college_computer_science|0": {
|
281 |
+
"alias": " - college_computer_science",
|
282 |
+
"acc,none": 0.47,
|
283 |
+
"acc_stderr,none": 0.05016135580465919
|
284 |
+
},
|
285 |
+
"harness|mmlu_college_mathematics|0": {
|
286 |
+
"alias": " - college_mathematics",
|
287 |
+
"acc,none": 0.32,
|
288 |
+
"acc_stderr,none": 0.046882617226215034
|
289 |
+
},
|
290 |
+
"harness|mmlu_college_physics|0": {
|
291 |
+
"alias": " - college_physics",
|
292 |
+
"acc,none": 0.46078431372549017,
|
293 |
+
"acc_stderr,none": 0.04959859966384181
|
294 |
+
},
|
295 |
+
"harness|mmlu_computer_security|0": {
|
296 |
+
"alias": " - computer_security",
|
297 |
+
"acc,none": 0.75,
|
298 |
+
"acc_stderr,none": 0.04351941398892446
|
299 |
+
},
|
300 |
+
"harness|mmlu_conceptual_physics|0": {
|
301 |
+
"alias": " - conceptual_physics",
|
302 |
+
"acc,none": 0.5404255319148936,
|
303 |
+
"acc_stderr,none": 0.03257901482099834
|
304 |
+
},
|
305 |
+
"harness|mmlu_electrical_engineering|0": {
|
306 |
+
"alias": " - electrical_engineering",
|
307 |
+
"acc,none": 0.6206896551724138,
|
308 |
+
"acc_stderr,none": 0.040434618619167466
|
309 |
+
},
|
310 |
+
"harness|mmlu_elementary_mathematics|0": {
|
311 |
+
"alias": " - elementary_mathematics",
|
312 |
+
"acc,none": 0.4497354497354497,
|
313 |
+
"acc_stderr,none": 0.02562085704293665
|
314 |
+
},
|
315 |
+
"harness|mmlu_high_school_biology|0": {
|
316 |
+
"alias": " - high_school_biology",
|
317 |
+
"acc,none": 0.7419354838709677,
|
318 |
+
"acc_stderr,none": 0.024892469172462836
|
319 |
+
},
|
320 |
+
"harness|mmlu_high_school_chemistry|0": {
|
321 |
+
"alias": " - high_school_chemistry",
|
322 |
+
"acc,none": 0.45320197044334976,
|
323 |
+
"acc_stderr,none": 0.03502544650845872
|
324 |
+
},
|
325 |
+
"harness|mmlu_high_school_computer_science|0": {
|
326 |
+
"alias": " - high_school_computer_science",
|
327 |
+
"acc,none": 0.68,
|
328 |
+
"acc_stderr,none": 0.04688261722621504
|
329 |
+
},
|
330 |
+
"harness|mmlu_high_school_mathematics|0": {
|
331 |
+
"alias": " - high_school_mathematics",
|
332 |
+
"acc,none": 0.3592592592592593,
|
333 |
+
"acc_stderr,none": 0.029252905927251976
|
334 |
+
},
|
335 |
+
"harness|mmlu_high_school_physics|0": {
|
336 |
+
"alias": " - high_school_physics",
|
337 |
+
"acc,none": 0.4370860927152318,
|
338 |
+
"acc_stderr,none": 0.04050035722230636
|
339 |
+
},
|
340 |
+
"harness|mmlu_high_school_statistics|0": {
|
341 |
+
"alias": " - high_school_statistics",
|
342 |
+
"acc,none": 0.46296296296296297,
|
343 |
+
"acc_stderr,none": 0.03400603625538272
|
344 |
+
},
|
345 |
+
"harness|mmlu_machine_learning|0": {
|
346 |
+
"alias": " - machine_learning",
|
347 |
+
"acc,none": 0.48214285714285715,
|
348 |
+
"acc_stderr,none": 0.047427623612430116
|
349 |
+
},
|
350 |
+
"harness|piqa|0": {
|
351 |
+
"acc,none": 0.7861806311207835,
|
352 |
+
"acc_stderr,none": 0.009565994206915594,
|
353 |
+
"acc_norm,none": 0.7856365614798694,
|
354 |
+
"acc_norm_stderr,none": 0.009574842136050933,
|
355 |
+
"alias": "piqa"
|
356 |
+
},
|
357 |
+
"harness|boolq|0": {
|
358 |
+
"acc,none": 0.8321100917431192,
|
359 |
+
"acc_stderr,none": 0.006537252053566842,
|
360 |
+
"alias": "boolq"
|
361 |
+
},
|
362 |
+
"harness|truthfulqa:mc1|0": {
|
363 |
+
"acc,none": 0.3635250917992656,
|
364 |
+
"acc_stderr,none": 0.016838862883965838,
|
365 |
+
"alias": "truthfulqa_mc1"
|
366 |
+
},
|
367 |
+
"harness|winogrande|0": {
|
368 |
+
"acc,none": 0.7213891081294396,
|
369 |
+
"acc_stderr,none": 0.01259989664949388,
|
370 |
+
"alias": "winogrande"
|
371 |
+
},
|
372 |
+
"harness|lambada:openai|0": {
|
373 |
+
"perplexity,none": 3.24000265267783,
|
374 |
+
"perplexity_stderr,none": 0.0824803761631906,
|
375 |
+
"acc,none": 0.7131767902192897,
|
376 |
+
"acc_stderr,none": 0.006301120995354307,
|
377 |
+
"alias": "lambada_openai"
|
378 |
+
},
|
379 |
+
"harness|arc:challenge|0": {
|
380 |
+
"acc,none": 0.5187713310580204,
|
381 |
+
"acc_stderr,none": 0.014601090150633964,
|
382 |
+
"acc_norm,none": 0.5699658703071673,
|
383 |
+
"acc_norm_stderr,none": 0.014467631559137994,
|
384 |
+
"alias": "arc_challenge"
|
385 |
+
},
|
386 |
+
"harness|openbookqa|0": {
|
387 |
+
"acc,none": 0.352,
|
388 |
+
"acc_stderr,none": 0.021380042385946048,
|
389 |
+
"acc_norm,none": 0.428,
|
390 |
+
"acc_norm_stderr,none": 0.022149790663861926,
|
391 |
+
"alias": "openbookqa"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Intel/Meta-Llama-3-8B-Instruct-int4-inc",
|
396 |
+
"local": true,
|
397 |
+
"revision": "main",
|
398 |
+
"private": false,
|
399 |
+
"params": 7.2,
|
400 |
+
"architectures": "LlamaForCausalLM",
|
401 |
+
"quant_type": "AutoRound",
|
402 |
+
"precision": "4bit",
|
403 |
+
"model_params": 7.2,
|
404 |
+
"model_size": 5.4,
|
405 |
+
"weight_dtype": "int4",
|
406 |
+
"compute_dtype": "float16",
|
407 |
+
"gguf_ftype": "*Q4_0.gguf",
|
408 |
+
"hardware": "gpu",
|
409 |
+
"status": "Finished",
|
410 |
+
"submitted_time": "2024-05-08T15:44:22Z",
|
411 |
+
"model_type": "quantization",
|
412 |
+
"job_id": -1,
|
413 |
+
"job_start_time": null,
|
414 |
+
"scripts": "ITREX"
|
415 |
+
},
|
416 |
+
"quantization_config": {
|
417 |
+
"autoround_version": "0.2.0.dev",
|
418 |
+
"bits": 4,
|
419 |
+
"damp_percent": 0.01,
|
420 |
+
"desc_act": false,
|
421 |
+
"enable_minmax_tuning": true,
|
422 |
+
"enable_quanted_input": true,
|
423 |
+
"group_size": 128,
|
424 |
+
"is_marlin_format": false,
|
425 |
+
"iters": 200,
|
426 |
+
"lr": 0.005,
|
427 |
+
"minmax_lr": 0.005,
|
428 |
+
"model_file_base_name": "model",
|
429 |
+
"model_name_or_path": null,
|
430 |
+
"quant_method": "gptq",
|
431 |
+
"scale_dtype": "float16",
|
432 |
+
"static_groups": false,
|
433 |
+
"sym": false,
|
434 |
+
"true_sequential": false
|
435 |
+
},
|
436 |
+
"versions": {
|
437 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
438 |
+
"harness|hellaswag|0": 1.0,
|
439 |
+
"harness|arc:easy|0": 1.0,
|
440 |
+
"harness|mmlu|0": null,
|
441 |
+
"harness|mmlu_humanities|0": null,
|
442 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
443 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
444 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
445 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
446 |
+
"harness|mmlu_international_law|0": 0.0,
|
447 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
448 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
449 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
450 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
451 |
+
"harness|mmlu_philosophy|0": 0.0,
|
452 |
+
"harness|mmlu_prehistory|0": 0.0,
|
453 |
+
"harness|mmlu_professional_law|0": 0.0,
|
454 |
+
"harness|mmlu_world_religions|0": 0.0,
|
455 |
+
"harness|mmlu_other|0": null,
|
456 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
457 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
458 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
459 |
+
"harness|mmlu_global_facts|0": 0.0,
|
460 |
+
"harness|mmlu_human_aging|0": 0.0,
|
461 |
+
"harness|mmlu_management|0": 0.0,
|
462 |
+
"harness|mmlu_marketing|0": 0.0,
|
463 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
464 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
465 |
+
"harness|mmlu_nutrition|0": 0.0,
|
466 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
467 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
468 |
+
"harness|mmlu_virology|0": 0.0,
|
469 |
+
"harness|mmlu_social_sciences|0": null,
|
470 |
+
"harness|mmlu_econometrics|0": 0.0,
|
471 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
472 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
476 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
477 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
478 |
+
"harness|mmlu_public_relations|0": 0.0,
|
479 |
+
"harness|mmlu_security_studies|0": 0.0,
|
480 |
+
"harness|mmlu_sociology|0": 0.0,
|
481 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
482 |
+
"harness|mmlu_stem|0": null,
|
483 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
484 |
+
"harness|mmlu_anatomy|0": 0.0,
|
485 |
+
"harness|mmlu_astronomy|0": 0.0,
|
486 |
+
"harness|mmlu_college_biology|0": 0.0,
|
487 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
488 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
489 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
490 |
+
"harness|mmlu_college_physics|0": 0.0,
|
491 |
+
"harness|mmlu_computer_security|0": 0.0,
|
492 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
493 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
494 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
495 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
496 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
497 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
501 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
502 |
+
"harness|piqa|0": 1.0,
|
503 |
+
"harness|boolq|0": 2.0,
|
504 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
505 |
+
"harness|winogrande|0": 1.0,
|
506 |
+
"harness|lambada:openai|0": 1.0,
|
507 |
+
"harness|arc:challenge|0": 1.0,
|
508 |
+
"harness|openbookqa|0": 1.0
|
509 |
+
},
|
510 |
+
"n-shot": {
|
511 |
+
"arc_challenge": 0,
|
512 |
+
"arc_easy": 0,
|
513 |
+
"boolq": 0,
|
514 |
+
"hellaswag": 0,
|
515 |
+
"lambada_openai": 0,
|
516 |
+
"mmlu": 0,
|
517 |
+
"mmlu_abstract_algebra": 0,
|
518 |
+
"mmlu_anatomy": 0,
|
519 |
+
"mmlu_astronomy": 0,
|
520 |
+
"mmlu_business_ethics": 0,
|
521 |
+
"mmlu_clinical_knowledge": 0,
|
522 |
+
"mmlu_college_biology": 0,
|
523 |
+
"mmlu_college_chemistry": 0,
|
524 |
+
"mmlu_college_computer_science": 0,
|
525 |
+
"mmlu_college_mathematics": 0,
|
526 |
+
"mmlu_college_medicine": 0,
|
527 |
+
"mmlu_college_physics": 0,
|
528 |
+
"mmlu_computer_security": 0,
|
529 |
+
"mmlu_conceptual_physics": 0,
|
530 |
+
"mmlu_econometrics": 0,
|
531 |
+
"mmlu_electrical_engineering": 0,
|
532 |
+
"mmlu_elementary_mathematics": 0,
|
533 |
+
"mmlu_formal_logic": 0,
|
534 |
+
"mmlu_global_facts": 0,
|
535 |
+
"mmlu_high_school_biology": 0,
|
536 |
+
"mmlu_high_school_chemistry": 0,
|
537 |
+
"mmlu_high_school_computer_science": 0,
|
538 |
+
"mmlu_high_school_european_history": 0,
|
539 |
+
"mmlu_high_school_geography": 0,
|
540 |
+
"mmlu_high_school_government_and_politics": 0,
|
541 |
+
"mmlu_high_school_macroeconomics": 0,
|
542 |
+
"mmlu_high_school_mathematics": 0,
|
543 |
+
"mmlu_high_school_microeconomics": 0,
|
544 |
+
"mmlu_high_school_physics": 0,
|
545 |
+
"mmlu_high_school_psychology": 0,
|
546 |
+
"mmlu_high_school_statistics": 0,
|
547 |
+
"mmlu_high_school_us_history": 0,
|
548 |
+
"mmlu_high_school_world_history": 0,
|
549 |
+
"mmlu_human_aging": 0,
|
550 |
+
"mmlu_human_sexuality": 0,
|
551 |
+
"mmlu_humanities": 0,
|
552 |
+
"mmlu_international_law": 0,
|
553 |
+
"mmlu_jurisprudence": 0,
|
554 |
+
"mmlu_logical_fallacies": 0,
|
555 |
+
"mmlu_machine_learning": 0,
|
556 |
+
"mmlu_management": 0,
|
557 |
+
"mmlu_marketing": 0,
|
558 |
+
"mmlu_medical_genetics": 0,
|
559 |
+
"mmlu_miscellaneous": 0,
|
560 |
+
"mmlu_moral_disputes": 0,
|
561 |
+
"mmlu_moral_scenarios": 0,
|
562 |
+
"mmlu_nutrition": 0,
|
563 |
+
"mmlu_other": 0,
|
564 |
+
"mmlu_philosophy": 0,
|
565 |
+
"mmlu_prehistory": 0,
|
566 |
+
"mmlu_professional_accounting": 0,
|
567 |
+
"mmlu_professional_law": 0,
|
568 |
+
"mmlu_professional_medicine": 0,
|
569 |
+
"mmlu_professional_psychology": 0,
|
570 |
+
"mmlu_public_relations": 0,
|
571 |
+
"mmlu_security_studies": 0,
|
572 |
+
"mmlu_social_sciences": 0,
|
573 |
+
"mmlu_sociology": 0,
|
574 |
+
"mmlu_stem": 0,
|
575 |
+
"mmlu_us_foreign_policy": 0,
|
576 |
+
"mmlu_virology": 0,
|
577 |
+
"mmlu_world_religions": 0,
|
578 |
+
"openbookqa": 0,
|
579 |
+
"piqa": 0,
|
580 |
+
"truthfulqa_mc1": 0,
|
581 |
+
"truthfulqa_mc2": 0,
|
582 |
+
"winogrande": 0
|
583 |
+
},
|
584 |
+
"date": 1715304126.9653401,
|
585 |
+
"config": {
|
586 |
+
"model": "hf",
|
587 |
+
"model_args": "pretrained=Intel/Meta-Llama-3-8B-Instruct-int4-inc,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
588 |
+
"batch_size": 2,
|
589 |
+
"batch_sizes": [],
|
590 |
+
"device": "cuda",
|
591 |
+
"use_cache": null,
|
592 |
+
"limit": null,
|
593 |
+
"bootstrap_iters": 100000,
|
594 |
+
"gen_kwargs": null
|
595 |
+
}
|
596 |
+
}
|
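Each results file in this commit shares the same layout: a "config_general" header, per-task scores under "results", task versions and n-shot settings, and the harness invocation under "config". A minimal sketch of reading one back with the Python standard library (the local filename below is hypothetical):

import json

# Hypothetical local copy of one results file from this dataset.
with open("results.json") as f:
    report = json.load(f)

# Each task is keyed as "harness|<task>|<n-shot>"; the aggregate MMLU
# score and its per-subject entries live under the same "results" mapping.
for task, metrics in report["results"].items():
    acc = metrics.get("acc,none")
    if acc is not None:
        print(f"{task}: {acc:.4f}")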
Intel/results_2024-05-18-13-12-26.json
ADDED
@@ -0,0 +1,595 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-18-13-12-26",
    "total_evaluation_time_secondes": "",
    "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.3",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.6,
    "model_params": 10.7,
    "quant_type": "AutoRound",
    "precision": "4bit"
  },
  "results": {
    "harness|winogrande|0": {
      "acc,none": 0.7584846093133386,
      "acc_stderr,none": 0.01202898378201188,
      "alias": "winogrande"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.6040955631399317,
      "acc_stderr,none": 0.014291228393536592,
      "acc_norm,none": 0.6305460750853242,
      "acc_norm_stderr,none": 0.014104578366491887,
      "alias": "arc_challenge"
    },
    "harness|mmlu|0": {
      "acc,none": 0.6271186440677966,
      "acc_stderr,none": 0.00382640388007823,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5732199787460148,
      "acc_stderr,none": 0.006665399518756252
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.3888888888888889,
      "acc_stderr,none": 0.04360314860077459
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7757575757575758,
      "acc_stderr,none": 0.032568666616811015
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8382352941176471,
      "acc_stderr,none": 0.025845017986926924
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.8396624472573839,
      "acc_stderr,none": 0.023884380925965672
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.8099173553719008,
      "acc_stderr,none": 0.035817969517092825
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7592592592592593,
      "acc_stderr,none": 0.041331194402438376
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7300613496932515,
      "acc_stderr,none": 0.0348782516849789
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.7254335260115607,
      "acc_stderr,none": 0.02402774515526501
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.2927374301675978,
      "acc_stderr,none": 0.01521810954441019
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.7041800643086816,
      "acc_stderr,none": 0.025922371788818788
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7376543209876543,
      "acc_stderr,none": 0.024477222856135114
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.48435462842242505,
      "acc_stderr,none": 0.012763982838120962
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.8011695906432749,
      "acc_stderr,none": 0.03061111655743253
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.7048599935629224,
      "acc_stderr,none": 0.007909488801050074
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.62,
      "acc_stderr,none": 0.048783173121456316
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7132075471698113,
      "acc_stderr,none": 0.02783491252754407
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6242774566473989,
      "acc_stderr,none": 0.036928207672648664
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.41,
      "acc_stderr,none": 0.04943110704237102
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6681614349775785,
      "acc_stderr,none": 0.03160295143776679
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8155339805825242,
      "acc_stderr,none": 0.03840423627288276
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8717948717948718,
      "acc_stderr,none": 0.02190190511507333
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.76,
      "acc_stderr,none": 0.042923469599092816
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.8007662835249042,
      "acc_stderr,none": 0.014283378044296413
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.7450980392156863,
      "acc_stderr,none": 0.02495418432487991
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.4858156028368794,
      "acc_stderr,none": 0.02981549448368206
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.7205882352941176,
      "acc_stderr,none": 0.027257202606114948
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.536144578313253,
      "acc_stderr,none": 0.03882310850890593
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7367565810854728,
      "acc_stderr,none": 0.007759497044594671
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.5,
      "acc_stderr,none": 0.047036043419179864
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.8282828282828283,
      "acc_stderr,none": 0.026869716187429917
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8808290155440415,
      "acc_stderr,none": 0.023381935348121448
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.658974358974359,
      "acc_stderr,none": 0.02403548967633508
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.6932773109243697,
      "acc_stderr,none": 0.029953823891887037
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8293577981651377,
      "acc_stderr,none": 0.016129271025099878
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.7404580152671756,
      "acc_stderr,none": 0.03844876139785271
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.6503267973856209,
      "acc_stderr,none": 0.01929196189506638
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.7,
      "acc_stderr,none": 0.04389311454644287
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7061224489795919,
      "acc_stderr,none": 0.02916273841024977
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.8308457711442786,
      "acc_stderr,none": 0.02650859065623324
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.9,
      "acc_stderr,none": 0.030151134457776348
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.5239454487789407,
      "acc_stderr,none": 0.00856605560521422
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.37,
      "acc_stderr,none": 0.04852365870939098
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.5925925925925926,
      "acc_stderr,none": 0.04244633238353228
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.6973684210526315,
      "acc_stderr,none": 0.03738520676119667
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.7291666666666666,
      "acc_stderr,none": 0.037161774375660164
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.45,
      "acc_stderr,none": 0.05
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.49,
      "acc_stderr,none": 0.05024183937956911
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.29,
      "acc_stderr,none": 0.04560480215720683
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.4019607843137255,
      "acc_stderr,none": 0.04878608714466996
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.7,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5446808510638298,
      "acc_stderr,none": 0.03255525359340355
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5448275862068965,
      "acc_stderr,none": 0.04149886942192118
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.47883597883597884,
      "acc_stderr,none": 0.025728230952130716
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7612903225806451,
      "acc_stderr,none": 0.024251071262208837
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.458128078817734,
      "acc_stderr,none": 0.03505630140785741
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.65,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.35555555555555557,
      "acc_stderr,none": 0.029185714949857416
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.2980132450331126,
      "acc_stderr,none": 0.03734535676787198
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5416666666666666,
      "acc_stderr,none": 0.03398110890294636
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.44642857142857145,
      "acc_stderr,none": 0.04718471485219588
    },
    "harness|arc:easy|0": {
      "acc,none": 0.8354377104377104,
      "acc_stderr,none": 0.007608348390282838,
      "acc_norm,none": 0.8202861952861953,
      "acc_norm_stderr,none": 0.007878465068489264,
      "alias": "arc_easy"
    },
    "harness|boolq|0": {
      "acc,none": 0.8862385321100917,
      "acc_stderr,none": 0.005553483010095675,
      "alias": "boolq"
    },
    "harness|piqa|0": {
      "acc,none": 0.8128400435255713,
      "acc_stderr,none": 0.00910027329047355,
      "acc_norm,none": 0.8122959738846572,
      "acc_norm_stderr,none": 0.009110440292132567,
      "alias": "piqa"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.382,
      "acc_stderr,none": 0.021750820591250834,
      "acc_norm,none": 0.482,
      "acc_norm_stderr,none": 0.02236856511738799,
      "alias": "openbookqa"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.6845249950209121,
      "acc_stderr,none": 0.004637550478007366,
      "acc_norm,none": 0.8606851224855606,
      "acc_norm_stderr,none": 0.003455671196993104,
      "alias": "hellaswag"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 3.024395600515001,
      "perplexity_stderr,none": 0.06930475108030439,
      "acc,none": 0.7351057636328352,
      "acc_stderr,none": 0.006147849695828245,
      "alias": "lambada_openai"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.7129938777873611,
      "acc_stderr,none": 0.014994700631570199,
      "alias": "truthfulqa_mc2"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.5691554467564259,
      "acc_stderr,none": 0.017335272475332366,
      "alias": "truthfulqa_mc1"
    }
  },
  "task_info": {
    "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.3",
    "revision": "main",
    "private": false,
    "params": 10.7,
    "architectures": "LlamaForCausalLM",
    "quant_type": "AutoRound",
    "precision": "4bit",
    "model_params": 10.7,
    "model_size": 5.6,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Waiting",
    "submitted_time": "2024-05-11T11:55:16Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "autoround_version": "0.2.0.dev",
    "bits": 4,
    "damp_percent": 0.01,
    "desc_act": false,
    "enable_minmax_tuning": true,
    "enable_quanted_input": true,
    "group_size": 128,
    "is_marlin_format": false,
    "iters": 1000,
    "lr": 0.001,
    "minmax_lr": 0.01,
    "model_file_base_name": "model",
    "model_name_or_path": null,
    "quant_method": "gptq",
    "scale_dtype": "float16",
    "static_groups": false,
    "sym": false,
    "true_sequential": false
  },
  "versions": {
    "harness|winogrande|0": 1.0,
    "harness|arc:challenge|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|arc:easy|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|piqa|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|hellaswag|0": 1.0,
    "harness|lambada:openai|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|truthfulqa:mc1|0": 2.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1715999695.543391,
  "config": {
    "model": "hf",
    "model_args": "pretrained=/dataset/SOLAR-10.7B-Instruct-v1.0-samples1024-1w-2-tbs-16_iter1000,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 1,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
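The "model_size" (5.6, in GB) and "model_params" (10.7, in billions) fields above imply roughly 4.2 stored bits per parameter, consistent with 4-bit weights plus the per-group scales and zero points that a group_size of 128 requires. A small sketch of that arithmetic, assuming the units just stated (the schema itself does not spell them out):

def effective_bits_per_param(model_size_gb: float, model_params_billions: float) -> float:
    # Stored bits per parameter: checkpoint bytes * 8 / parameter count.
    return model_size_gb * 8 / model_params_billions

# 5.6 GB checkpoint for 10.7B parameters -> ~4.19 bits/parameter,
# i.e. 4-bit weights plus quantization metadata overhead.
print(effective_bits_per_param(5.6, 10.7))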
Intel/results_2024-05-18-15-50-33.json
ADDED
@@ -0,0 +1,595 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-18-15-50-33",
    "total_evaluation_time_secondes": "",
    "model_name": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.2",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.6,
    "model_params": 10.7,
    "quant_type": "AutoRound",
    "precision": "4bit"
  },
  "results": {
    "harness|boolq|0": {
      "acc,none": 0.8871559633027523,
      "acc_stderr,none": 0.00553390672214442,
      "alias": "boolq"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.37,
      "acc_stderr,none": 0.02161328916516578,
      "acc_norm,none": 0.47,
      "acc_norm_stderr,none": 0.022342748192502843,
      "alias": "openbookqa"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 3.09900146016817,
      "perplexity_stderr,none": 0.07101046844025238,
      "acc,none": 0.7281195420143606,
      "acc_stderr,none": 0.006198725615232723,
      "alias": "lambada_openai"
    },
    "harness|mmlu|0": {
      "acc,none": 0.6329582680529839,
      "acc_stderr,none": 0.003826857804473612,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5806588735387885,
      "acc_stderr,none": 0.006695269750055442
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.4126984126984127,
      "acc_stderr,none": 0.04403438954768176
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7818181818181819,
      "acc_stderr,none": 0.03225078108306289
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8284313725490197,
      "acc_stderr,none": 0.02646056956124066
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.8607594936708861,
      "acc_stderr,none": 0.022535526352692712
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.8016528925619835,
      "acc_stderr,none": 0.03640118271990947
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7870370370370371,
      "acc_stderr,none": 0.0395783547198098
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7239263803680982,
      "acc_stderr,none": 0.035123852837050495
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.7196531791907514,
      "acc_stderr,none": 0.024182427496577612
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.329608938547486,
      "acc_stderr,none": 0.015721531075183866
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.707395498392283,
      "acc_stderr,none": 0.02583989833487798
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.7623456790123457,
      "acc_stderr,none": 0.02368359183700855
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4791395045632334,
      "acc_stderr,none": 0.012759117066518012
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.7719298245614035,
      "acc_stderr,none": 0.032180937956023566
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.7077566784679755,
      "acc_stderr,none": 0.007899220244504642
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.67,
      "acc_stderr,none": 0.04725815626252609
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.6943396226415094,
      "acc_stderr,none": 0.028353298073322663
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.6416184971098265,
      "acc_stderr,none": 0.03656343653353159
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.38,
      "acc_stderr,none": 0.04878317312145632
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6816143497757847,
      "acc_stderr,none": 0.03126580522513713
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.8058252427184466,
      "acc_stderr,none": 0.03916667762822584
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8547008547008547,
      "acc_stderr,none": 0.02308663508684141
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.74,
      "acc_stderr,none": 0.04408440022768078
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.8148148148148148,
      "acc_stderr,none": 0.013890862162876166
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.7091503267973857,
      "acc_stderr,none": 0.02600480036395213
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.5070921985815603,
      "acc_stderr,none": 0.02982449855912901
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.7389705882352942,
      "acc_stderr,none": 0.026679252270103124
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.5481927710843374,
      "acc_stderr,none": 0.03874371556587953
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.7413064673383165,
      "acc_stderr,none": 0.0077164131254551915
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.45614035087719296,
      "acc_stderr,none": 0.046854730419077895
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.8181818181818182,
      "acc_stderr,none": 0.02747960301053878
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8860103626943006,
      "acc_stderr,none": 0.022935144053919426
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.6666666666666666,
      "acc_stderr,none": 0.023901157979402534
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.6932773109243697,
      "acc_stderr,none": 0.02995382389188703
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8348623853211009,
      "acc_stderr,none": 0.015919557829976064
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.7480916030534351,
      "acc_stderr,none": 0.03807387116306086
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.6715686274509803,
      "acc_stderr,none": 0.018999707383162662
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.7090909090909091,
      "acc_stderr,none": 0.04350271442923243
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7183673469387755,
      "acc_stderr,none": 0.028795185574291286
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.8159203980099502,
      "acc_stderr,none": 0.02740385941078683
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.89,
      "acc_stderr,none": 0.031446603773522014
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.531557247066286,
      "acc_stderr,none": 0.008564466581764778
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.36,
      "acc_stderr,none": 0.04824181513244218
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.6074074074074074,
      "acc_stderr,none": 0.04218506215368879
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.7039473684210527,
      "acc_stderr,none": 0.037150621549989056
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.7708333333333334,
      "acc_stderr,none": 0.035146974678623884
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.42,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.42,
      "acc_stderr,none": 0.04960449637488584
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.34,
      "acc_stderr,none": 0.04760952285695236
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.37254901960784315,
      "acc_stderr,none": 0.04810840148082633
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.7,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5914893617021276,
      "acc_stderr,none": 0.032134180267015755
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5655172413793104,
      "acc_stderr,none": 0.04130740879555498
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.46825396825396826,
      "acc_stderr,none": 0.025699352832131792
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7580645161290323,
      "acc_stderr,none": 0.024362599693031103
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.4729064039408867,
      "acc_stderr,none": 0.03512819077876106
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.65,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.3814814814814815,
      "acc_stderr,none": 0.02961671892749758
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.3443708609271523,
      "acc_stderr,none": 0.03879687024073327
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5138888888888888,
      "acc_stderr,none": 0.03408655867977749
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.48214285714285715,
      "acc_stderr,none": 0.047427623612430116
    },
    "harness|hellaswag|0": {
      "acc,none": 0.6846245767775344,
      "acc_stderr,none": 0.004637155743563884,
      "acc_norm,none": 0.8569010157339175,
      "acc_norm_stderr,none": 0.003494581076398528,
      "alias": "hellaswag"
    },
    "harness|winogrande|0": {
      "acc,none": 0.7569060773480663,
      "acc_stderr,none": 0.012055665630431043,
      "alias": "winogrande"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.5998293515358362,
      "acc_stderr,none": 0.01431719778780917,
      "acc_norm,none": 0.6228668941979523,
      "acc_norm_stderr,none": 0.014163366896192598,
      "alias": "arc_challenge"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.5691554467564259,
      "acc_stderr,none": 0.01733527247533237,
      "alias": "truthfulqa_mc1"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.8257575757575758,
      "acc_stderr,none": 0.007783437255488237,
      "acc_norm,none": 0.8186026936026936,
      "acc_norm_stderr,none": 0.007907153952801702,
      "alias": "arc_easy"
    },
    "harness|piqa|0": {
      "acc,none": 0.7959738846572362,
      "acc_stderr,none": 0.009402378102942617,
      "acc_norm,none": 0.7986942328618063,
      "acc_norm_stderr,none": 0.009355431098990435,
      "alias": "piqa"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.7146477892593979,
      "acc_stderr,none": 0.01488794988014781,
      "alias": "truthfulqa_mc2"
    }
  },
  "task_info": {
    "model": "Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc-v0.2",
    "revision": "main",
    "private": false,
    "params": 10.7,
    "architectures": "LlamaForCausalLM",
    "quant_type": "AutoRound",
    "precision": "4bit",
    "model_params": 10.7,
    "model_size": 5.6,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Waiting",
    "submitted_time": "2024-05-11T11:55:16Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "autoround_version": "0.2.0.dev",
    "bits": 4,
    "damp_percent": 0.01,
    "desc_act": false,
    "enable_minmax_tuning": true,
    "enable_quanted_input": true,
    "group_size": 128,
    "is_marlin_format": false,
    "iters": 200,
    "lr": 0.005,
    "minmax_lr": 0.01,
    "model_file_base_name": "model",
    "model_name_or_path": null,
    "quant_method": "gptq",
    "scale_dtype": "float16",
    "static_groups": false,
    "sym": false,
    "true_sequential": false
  },
  "versions": {
    "harness|boolq|0": 2.0,
    "harness|openbookqa|0": 1.0,
    "harness|lambada:openai|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|hellaswag|0": 1.0,
    "harness|winogrande|0": 1.0,
    "harness|arc:challenge|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|arc:easy|0": 1.0,
    "harness|piqa|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1716009494.4991646,
  "config": {
    "model": "hf",
    "model_args": "pretrained=/dataset/SOLAR-10.7B-Instruct-v1.0-samples1024-1w-2-tbs-16,trust_remote_code=True,dtype=float16,_commit_hash=main",
    "batch_size": 1,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
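The trailing "config" block in each file records the lm-evaluation-harness invocation (backend, model_args, batch size, device). A sketch of an equivalent run through the harness's Python API, assuming a recent 0.4.x release where simple_evaluate is exported at package level; the task list is abbreviated, and a released checkpoint name stands in for the local /dataset/... path recorded above:

from lm_eval import simple_evaluate  # lm-evaluation-harness >= 0.4 (assumed)

# Mirrors the "config" block above with zero-shot tasks, batch size 1, CUDA.
results = simple_evaluate(
    model="hf",
    model_args="pretrained=Intel/SOLAR-10.7B-Instruct-v1.0-int4-inc,trust_remote_code=True,dtype=float16",
    tasks=["mmlu", "arc_challenge", "winogrande"],  # abbreviated task list
    num_fewshot=0,
    batch_size=1,
    device="cuda",
)
print(results["results"])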
Nan-Do/results_2024-05-21-12-09-53.json
ADDED
@@ -0,0 +1,579 @@
+{
+    "config_general": {
+        "lighteval_sha": "no",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-21-12-09-53",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 13.621387264,
+        "model_params": 24.153427968,
+        "quant_type": "llama.cpp",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|hellaswag|0": {
+            "acc,none": 0.6935869348735312,
+            "acc_stderr,none": 0.004600612000422642,
+            "acc_norm,none": 0.8740290778729337,
+            "acc_norm_stderr,none": 0.00331138449815877,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.8760852407261247,
+            "acc_stderr,none": 0.009260146295063713,
+            "alias": "winogrande"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8749235474006116,
+            "acc_stderr,none": 0.005785826015982954,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5960832313341493,
+            "acc_stderr,none": 0.01717727682258428,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8220892274211099,
+            "acc_stderr,none": 0.008922899948085587,
+            "acc_norm,none": 0.8378672470076169,
+            "acc_norm_stderr,none": 0.008599405082519758,
+            "alias": "piqa"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6144423871243413,
+            "acc_stderr,none": 0.0038680796901472215,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5613177470775771,
+            "acc_stderr,none": 0.0067944330451592365
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.48412698412698413,
+            "acc_stderr,none": 0.04469881854072606
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7636363636363637,
+            "acc_stderr,none": 0.033175059300091805
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.803921568627451,
+            "acc_stderr,none": 0.027865942286639325
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8059071729957806,
+            "acc_stderr,none": 0.025744902532290927
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.743801652892562,
+            "acc_stderr,none": 0.03984979653302872
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.04186091791394607
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7177914110429447,
+            "acc_stderr,none": 0.03536117886664743
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6763005780346821,
+            "acc_stderr,none": 0.025190181327608415
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.3217877094972067,
+            "acc_stderr,none": 0.015624236160792577
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6881028938906752,
+            "acc_stderr,none": 0.02631185807185416
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7222222222222222,
+            "acc_stderr,none": 0.024922001168886335
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.4602346805736636,
+            "acc_stderr,none": 0.012729785386598568
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7894736842105263,
+            "acc_stderr,none": 0.031267817146631786
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6887672996459607,
+            "acc_stderr,none": 0.007961111803791486
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.6,
+            "acc_stderr,none": 0.049236596391733084
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6867924528301886,
+            "acc_stderr,none": 0.02854479331905533
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6416184971098265,
+            "acc_stderr,none": 0.03656343653353159
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6547085201793722,
+            "acc_stderr,none": 0.03191100192835794
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8058252427184466,
+            "acc_stderr,none": 0.03916667762822586
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8632478632478633,
+            "acc_stderr,none": 0.02250903393707781
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.04512608598542127
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.8148148148148148,
+            "acc_stderr,none": 0.013890862162876168
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7091503267973857,
+            "acc_stderr,none": 0.02600480036395213
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.46808510638297873,
+            "acc_stderr,none": 0.02976667507587387
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6691176470588235,
+            "acc_stderr,none": 0.02858270975389843
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5120481927710844,
+            "acc_stderr,none": 0.03891364495835817
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7240818979525512,
+            "acc_stderr,none": 0.00786854316152042
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.4824561403508772,
+            "acc_stderr,none": 0.04700708033551038
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7626262626262627,
+            "acc_stderr,none": 0.030313710538198892
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8756476683937824,
+            "acc_stderr,none": 0.023814477086593552
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6615384615384615,
+            "acc_stderr,none": 0.023991500500313043
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6596638655462185,
+            "acc_stderr,none": 0.030778057422931673
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8348623853211009,
+            "acc_stderr,none": 0.015919557829976068
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7557251908396947,
+            "acc_stderr,none": 0.03768335959728745
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6323529411764706,
+            "acc_stderr,none": 0.019506291693954854
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6272727272727273,
+            "acc_stderr,none": 0.04631381319425464
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7183673469387755,
+            "acc_stderr,none": 0.02879518557429129
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8258706467661692,
+            "acc_stderr,none": 0.026814951200421603
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.86,
+            "acc_stderr,none": 0.034873508801977676
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5134792261338408,
+            "acc_stderr,none": 0.008570381942057599
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.04512608598542126
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.562962962962963,
+            "acc_stderr,none": 0.042849586397534
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6710526315789473,
+            "acc_stderr,none": 0.03823428969926603
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7152777777777778,
+            "acc_stderr,none": 0.03773809990686934
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.049236596391733084
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.55,
+            "acc_stderr,none": 0.04999999999999999
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.4019607843137255,
+            "acc_stderr,none": 0.04878608714466997
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252607
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5404255319148936,
+            "acc_stderr,none": 0.032579014820998356
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5448275862068965,
+            "acc_stderr,none": 0.04149886942192117
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.42592592592592593,
+            "acc_stderr,none": 0.02546714904546955
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7580645161290323,
+            "acc_stderr,none": 0.024362599693031096
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4729064039408867,
+            "acc_stderr,none": 0.03512819077876106
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.68,
+            "acc_stderr,none": 0.04688261722621505
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3592592592592593,
+            "acc_stderr,none": 0.02925290592725198
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.32450331125827814,
+            "acc_stderr,none": 0.038227469376587525
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.49537037037037035,
+            "acc_stderr,none": 0.03409825519163572
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5178571428571429,
+            "acc_stderr,none": 0.04742762361243011
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.659556313993174,
+            "acc_stderr,none": 0.013847460518892973,
+            "acc_norm,none": 0.6655290102389079,
+            "acc_norm_stderr,none": 0.013787460322441372,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 5.498205577502831,
+            "perplexity_stderr,none": 0.1438667511133427,
+            "acc,none": 0.48166116825150396,
+            "acc_stderr,none": 0.006961290586136397,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.021930844120728505,
+            "acc_norm,none": 0.488,
+            "acc_norm_stderr,none": 0.02237662679792717,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.7543078040959663,
+            "acc_stderr,none": 0.014116813297013678,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8434343434343434,
+            "acc_stderr,none": 0.0074566212681594565,
+            "acc_norm,none": 0.7845117845117845,
+            "acc_norm_stderr,none": 0.008436837633389658,
+            "alias": "arc_easy"
+        }
+    },
+    "task_info": {
+        "model": "Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF",
+        "revision": "main",
+        "private": false,
+        "params": null,
+        "architectures": "?",
+        "quant_type": "llama.cpp",
+        "precision": "4bit",
+        "model_params": null,
+        "model_size": null,
+        "weight_dtype": "int4",
+        "compute_dtype": "bfloat16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "cpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-18T02:32:17Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "llama_cpp"
+    },
+    "quantization_config": {
+        "quant_method": "llama.cpp",
+        "ftype": "*Q4_0.gguf"
+    },
+    "versions": {
+        "harness|hellaswag|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|piqa|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1716158716.7148077,
+    "config": {
+        "model": "WrapperGGUFLM",
+        "model_args": "gguf_model=Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF,ftype=*Q4_0.gguf,dtype=bfloat16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
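The "quantization_config" and "config" blocks above record how this GGUF submission is resolved: "ftype" is a glob ("*Q4_0.gguf") matched against the repository's file list, and "WrapperGGUFLM" wraps a llama.cpp-backed model run through the harness. A minimal sketch of that resolution step, assuming the huggingface_hub and llama-cpp-python packages (an illustration, not the leaderboard's actual harness code; n_ctx and the prompt are made-up values):

from fnmatch import fnmatch

from huggingface_hub import hf_hub_download, list_repo_files
from llama_cpp import Llama

repo_id = "Nan-Do/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B-GGUF"
ftype = "*Q4_0.gguf"  # verbatim from the quantization_config above

# Resolve the recorded glob against the repo, then fetch the matching weight file.
gguf_file = next(f for f in list_repo_files(repo_id) if fnmatch(f, ftype))
model_path = hf_hub_download(repo_id=repo_id, filename=gguf_file)

llm = Llama(model_path=model_path, n_ctx=2048)  # n_ctx is an assumed setting
out = llm("Question: Name a primary color. Answer:", max_tokens=8)
print(out["choices"][0]["text"])

Storing a pattern rather than a concrete filename lets one submission entry cover repositories that ship several quantization levels side by side; only the Q4_0 file is fetched here.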
PrunaAI/results_2024-05-13-21-54-31.json ADDED
@@ -0,0 +1,579 @@
+{
+    "config_general": {
+        "lighteval_sha": "no",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-13-21-54-31",
+        "total_evaluation_time_secondes": "",
+        "model_name": "PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 2.181729792,
+        "model_params": 3.821079552,
+        "quant_type": "llama.cpp",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|hellaswag|0": {
+            "acc,none": 0.5686118303126867,
+            "acc_stderr,none": 0.004942578520987334,
+            "acc_norm,none": 0.752141007767377,
+            "acc_norm_stderr,none": 0.004308870978210358,
+            "alias": "hellaswag"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5713573565019228,
+            "acc_stderr,none": 0.003994070978563716,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5270988310308182,
+            "acc_stderr,none": 0.007098258374624685
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.47619047619047616,
+            "acc_stderr,none": 0.04467062628403273
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.46060606060606063,
+            "acc_stderr,none": 0.03892207016552013
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.6029411764705882,
+            "acc_stderr,none": 0.03434131164719128
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.5274261603375527,
+            "acc_stderr,none": 0.03249822718301303
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7851239669421488,
+            "acc_stderr,none": 0.03749492448709699
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6018518518518519,
+            "acc_stderr,none": 0.04732332615978814
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7177914110429447,
+            "acc_stderr,none": 0.03536117886664742
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5982658959537572,
+            "acc_stderr,none": 0.026394104177643627
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.41899441340782123,
+            "acc_stderr,none": 0.016501579306861677
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6302250803858521,
+            "acc_stderr,none": 0.027417996705630998
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6944444444444444,
+            "acc_stderr,none": 0.025630824975621344
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.4471968709256845,
+            "acc_stderr,none": 0.012698825252435111
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7602339181286549,
+            "acc_stderr,none": 0.03274485211946956
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.638236240746701,
+            "acc_stderr,none": 0.008388402876897084
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.630188679245283,
+            "acc_stderr,none": 0.029711421880107933
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5491329479768786,
+            "acc_stderr,none": 0.03794012674697028
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.39,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6098654708520179,
+            "acc_stderr,none": 0.03273766725459156
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7766990291262136,
+            "acc_stderr,none": 0.04123553189891431
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8418803418803419,
+            "acc_stderr,none": 0.023902325549560417
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695238
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7458492975734355,
+            "acc_stderr,none": 0.01556925469204577
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5980392156862745,
+            "acc_stderr,none": 0.02807415894760066
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5035460992907801,
+            "acc_stderr,none": 0.02982674915328092
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.5661764705882353,
+            "acc_stderr,none": 0.030105636570016636
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4578313253012048,
+            "acc_stderr,none": 0.0387862677100236
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.68020799480013,
+            "acc_stderr,none": 0.008161750542677104
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.34210526315789475,
+            "acc_stderr,none": 0.04462917535336937
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7121212121212122,
+            "acc_stderr,none": 0.03225883512300992
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8134715025906736,
+            "acc_stderr,none": 0.028112091210117474
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6358974358974359,
+            "acc_stderr,none": 0.02439667298509477
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6596638655462185,
+            "acc_stderr,none": 0.030778057422931673
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8256880733944955,
+            "acc_stderr,none": 0.016265675632010354
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.5725190839694656,
+            "acc_stderr,none": 0.04338920305792401
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6405228758169934,
+            "acc_stderr,none": 0.01941253924203216
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6090909090909091,
+            "acc_stderr,none": 0.04673752333670238
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.5306122448979592,
+            "acc_stderr,none": 0.031949171367580624
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7960199004975125,
+            "acc_stderr,none": 0.02849317624532608
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.77,
+            "acc_stderr,none": 0.042295258468165044
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4652711703139867,
+            "acc_stderr,none": 0.008511108943898386
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5777777777777777,
+            "acc_stderr,none": 0.04266763404099582
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6644736842105263,
+            "acc_stderr,none": 0.03842498559395267
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6666666666666666,
+            "acc_stderr,none": 0.039420826399272135
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.42,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.35294117647058826,
+            "acc_stderr,none": 0.04755129616062946
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.65,
+            "acc_stderr,none": 0.047937248544110196
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5319148936170213,
+            "acc_stderr,none": 0.03261936918467382
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.4206896551724138,
+            "acc_stderr,none": 0.0411391498118926
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.35978835978835977,
+            "acc_stderr,none": 0.02471807594412928
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.6935483870967742,
+            "acc_stderr,none": 0.02622648565255388
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4827586206896552,
+            "acc_stderr,none": 0.035158955511657
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001974
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.22962962962962963,
+            "acc_stderr,none": 0.025644108639267624
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.3443708609271523,
+            "acc_stderr,none": 0.03879687024073327
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.46296296296296297,
+            "acc_stderr,none": 0.03400603625538272
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.41964285714285715,
+            "acc_stderr,none": 0.04684099321077106
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8510703363914373,
+            "acc_stderr,none": 0.006226813679382005,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5298984108646814,
+            "acc_stderr,none": 0.015543848691376448,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7916666666666666,
+            "acc_stderr,none": 0.008333333333333191,
+            "acc_norm,none": 0.7714646464646465,
+            "acc_norm_stderr,none": 0.008615944722488503,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.37576499388004897,
+            "acc_stderr,none": 0.016954584060214297,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.342,
+            "acc_stderr,none": 0.021236147199899254,
+            "acc_norm,none": 0.422,
+            "acc_norm_stderr,none": 0.022109039310618552,
+            "alias": "openbookqa"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 6.567239054538494,
+            "perplexity_stderr,none": 0.1852860668326811,
+            "acc,none": 0.3089462449058801,
+            "acc_stderr,none": 0.006437384484045087,
+            "alias": "lambada_openai"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7905331882480957,
+            "acc_stderr,none": 0.009494302979819803,
+            "acc_norm,none": 0.7916213275299239,
+            "acc_norm_stderr,none": 0.009476125383049464,
+            "alias": "piqa"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5315699658703071,
+            "acc_stderr,none": 0.014582236460866978,
+            "acc_norm,none": 0.5443686006825939,
+            "acc_norm_stderr,none": 0.014553749939306868,
+            "alias": "arc_challenge"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7316495659037096,
+            "acc_stderr,none": 0.012453340359561195,
+            "alias": "winogrande"
+        }
+    },
+    "task_info": {
+        "model": "PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed",
+        "revision": "main",
+        "private": false,
+        "params": null,
+        "architectures": "?",
+        "quant_type": "llama.cpp",
+        "precision": "4bit",
+        "model_params": null,
+        "model_size": null,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "cpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-10T07:32:46Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "llama_cpp"
+    },
+    "quantization_config": {
+        "quant_method": "llama.cpp",
+        "ftype": "*Q4_0.gguf"
+    },
+    "versions": {
+        "harness|hellaswag|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|winogrande|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715589898.9681585,
+    "config": {
+        "model": "WrapperGGUFLM",
+        "model_args": "gguf_model=PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
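Every results file in this commit shares one schema: metric keys carry the filter suffix ("acc,none", "acc_stderr,none"), aggregate tasks have a plain "alias", and per-subject MMLU rows are marked by an alias beginning with " - ". A small reader sketch under those assumptions (the path is just an example; any file added in this commit has the same shape):

import json

# Load one of the result files added in this commit.
with open("PrunaAI/results_2024-05-13-21-54-31.json") as f:
    report = json.load(f)

cfg = report["config_general"]
print(cfg["model_name"], cfg["quant_type"], cfg["precision"])

for task, metrics in report["results"].items():
    # Skip per-subject MMLU rows; their alias starts with " - ".
    if str(metrics.get("alias", "")).startswith(" - "):
        continue
    acc = metrics.get("acc,none")
    if acc is not None:
        stderr = metrics.get("acc_stderr,none", 0.0)
        print(f"{task}: acc={acc:.4f} +/- {stderr:.4f}")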
Qwen/results_2024-04-26-20-25-12.json ADDED
@@ -0,0 +1,599 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-26-20-25-12",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 0.47,
+        "model_params": 0.31,
+        "quant_type": "GPTQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|hellaswag|0": {
+            "acc,none": 0.3571997610037841,
+            "acc_stderr,none": 0.004781950883460504,
+            "acc_norm,none": 0.4403505277833101,
+            "acc_norm_stderr,none": 0.004954146286513353,
+            "alias": "hellaswag"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.3174049280729241,
+            "acc_stderr,none": 0.0038919020479297904,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.32709883103081827,
+            "acc_stderr,none": 0.0067828905413636915
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.29365079365079366,
+            "acc_stderr,none": 0.040735243221471276
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.4666666666666667,
+            "acc_stderr,none": 0.03895658065271847
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.3627450980392157,
+            "acc_stderr,none": 0.03374499356319354
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.45569620253164556,
+            "acc_stderr,none": 0.032419206846933335
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.45454545454545453,
+            "acc_stderr,none": 0.045454545454545456
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.4166666666666667,
+            "acc_stderr,none": 0.04766075165356461
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.39263803680981596,
+            "acc_stderr,none": 0.03836740907831029
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.37572254335260113,
+            "acc_stderr,none": 0.026074314851657083
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23910614525139665,
+            "acc_stderr,none": 0.014265554192331154
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.3440514469453376,
+            "acc_stderr,none": 0.026981478043648047
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.3395061728395062,
+            "acc_stderr,none": 0.026348564412011624
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.30182529335071706,
+            "acc_stderr,none": 0.01172435051810589
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.3216374269005848,
+            "acc_stderr,none": 0.03582529442573122
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.35017701963308656,
+            "acc_stderr,none": 0.008463796213785452
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.43,
+            "acc_stderr,none": 0.049756985195624284
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.30943396226415093,
+            "acc_stderr,none": 0.028450154794118627
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.28901734104046245,
+            "acc_stderr,none": 0.034564257450869995
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.3632286995515695,
+            "acc_stderr,none": 0.03227790442850499
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.42718446601941745,
+            "acc_stderr,none": 0.04897957737781168
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.47435897435897434,
+            "acc_stderr,none": 0.03271298896811159
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.049888765156985884
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.39080459770114945,
+            "acc_stderr,none": 0.01744836606706253
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.3790849673202614,
+            "acc_stderr,none": 0.027780141207023355
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.2907801418439716,
+            "acc_stderr,none": 0.027090664368353178
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.17279411764705882,
+            "acc_stderr,none": 0.02296606758558178
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.3132530120481928,
+            "acc_stderr,none": 0.03610805018031024
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.303542411439714,
+            "acc_stderr,none": 0.008223945690983177
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.19298245614035087,
+            "acc_stderr,none": 0.03712454853721368
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.29292929292929293,
+            "acc_stderr,none": 0.032424979581788145
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.2538860103626943,
+            "acc_stderr,none": 0.03141024780565317
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.28205128205128205,
+            "acc_stderr,none": 0.022815813098896597
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.24789915966386555,
+            "acc_stderr,none": 0.028047967224176892
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.3302752293577982,
+            "acc_stderr,none": 0.02016446633634298
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.44274809160305345,
+            "acc_stderr,none": 0.043564472026650695
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.2957516339869281,
+            "acc_stderr,none": 0.018463154132632817
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.36363636363636365,
+            "acc_stderr,none": 0.04607582090719976
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.20816326530612245,
+            "acc_stderr,none": 0.02599111767281329
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.39800995024875624,
+            "acc_stderr,none": 0.03461199429040013
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.46,
+            "acc_stderr,none": 0.05009082659620332
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.28417380272756104,
+            "acc_stderr,none": 0.008000179731584128
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.27,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.34074074074074073,
+            "acc_stderr,none": 0.04094376269996793
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.29605263157894735,
+            "acc_stderr,none": 0.037150621549989056
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2986111111111111,
+            "acc_stderr,none": 0.03827052357950756
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.2,
+            "acc_stderr,none": 0.04020151261036845
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.37,
+            "acc_stderr,none": 0.04852365870939099
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.045126085985421276
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.24509803921568626,
+            "acc_stderr,none": 0.042801058373643966
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.38,
+            "acc_stderr,none": 0.04878317312145633
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.25957446808510637,
+            "acc_stderr,none": 0.02865917937429232
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.36551724137931035,
+            "acc_stderr,none": 0.04013124195424386
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.3253968253968254,
+            "acc_stderr,none": 0.024130158299762613
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.31290322580645163,
+            "acc_stderr,none": 0.02637756702864586
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.24630541871921183,
+            "acc_stderr,none": 0.030315099285617736
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.22592592592592592,
+            "acc_stderr,none": 0.02549753263960954
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.2251655629139073,
+            "acc_stderr,none": 0.03410435282008937
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.19444444444444445,
+            "acc_stderr,none": 0.02699145450203673
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.2767857142857143,
+            "acc_stderr,none": 0.042466243366976256
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.194,
+            "acc_stderr,none": 0.017701827855304608,
+            "acc_norm,none": 0.304,
+            "acc_norm_stderr,none": 0.020591649571224932,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc2|0": {
+
"acc,none": 0.42460255683280484,
|
347 |
+
"acc_stderr,none": 0.015083696980807306,
|
348 |
+
"alias": "truthfulqa_mc2"
|
349 |
+
},
|
350 |
+
"harness|boolq|0": {
|
351 |
+
"acc,none": 0.41039755351681956,
|
352 |
+
"acc_stderr,none": 0.008603488048617516,
|
353 |
+
"alias": "boolq"
|
354 |
+
},
|
355 |
+
"harness|truthfulqa:mc1|0": {
|
356 |
+
"acc,none": 0.26193390452876375,
|
357 |
+
"acc_stderr,none": 0.015392118805015023,
|
358 |
+
"alias": "truthfulqa_mc1"
|
359 |
+
},
|
360 |
+
"harness|piqa|0": {
|
361 |
+
"acc,none": 0.6746463547334058,
|
362 |
+
"acc_stderr,none": 0.010931036623525191,
|
363 |
+
"acc_norm,none": 0.6632208922742111,
|
364 |
+
"acc_norm_stderr,none": 0.01102673892525118,
|
365 |
+
"alias": "piqa"
|
366 |
+
},
|
367 |
+
"harness|arc:easy|0": {
|
368 |
+
"acc,none": 0.5214646464646465,
|
369 |
+
"acc_stderr,none": 0.010250325159456659,
|
370 |
+
"acc_norm,none": 0.4772727272727273,
|
371 |
+
"acc_norm_stderr,none": 0.01024917909060598,
|
372 |
+
"alias": "arc_easy"
|
373 |
+
},
|
374 |
+
"harness|lambada:openai|0": {
|
375 |
+
"perplexity,none": 32.89382650895189,
|
376 |
+
"perplexity_stderr,none": 1.6153213247657885,
|
377 |
+
"acc,none": 0.39782650882980786,
|
378 |
+
"acc_stderr,none": 0.0068189852131081774,
|
379 |
+
"alias": "lambada_openai"
|
380 |
+
},
|
381 |
+
"harness|winogrande|0": {
|
382 |
+
"acc,none": 0.5509076558800315,
|
383 |
+
"acc_stderr,none": 0.01397945938914085,
|
384 |
+
"alias": "winogrande"
|
385 |
+
},
|
386 |
+
"harness|arc:challenge|0": {
|
387 |
+
"acc,none": 0.25426621160409557,
|
388 |
+
"acc_stderr,none": 0.012724999945157744,
|
389 |
+
"acc_norm,none": 0.2738907849829352,
|
390 |
+
"acc_norm_stderr,none": 0.013032004972989505,
|
391 |
+
"alias": "arc_challenge"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 0.5,
|
399 |
+
"architectures": "Qwen2ForCausalLM",
|
400 |
+
"quant_type": "GPTQ",
|
401 |
+
"precision": "4bit",
|
402 |
+
"model_params": 0.5,
|
403 |
+
"model_size": 0.25,
|
404 |
+
"weight_dtype": "int4",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-26T09:50:00Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"batch_size": 1,
|
417 |
+
"bits": 4,
|
418 |
+
"block_name_to_quantize": null,
|
419 |
+
"cache_block_outputs": true,
|
420 |
+
"damp_percent": 0.01,
|
421 |
+
"dataset": null,
|
422 |
+
"desc_act": false,
|
423 |
+
"exllama_config": {
|
424 |
+
"version": 1
|
425 |
+
},
|
426 |
+
"group_size": 128,
|
427 |
+
"max_input_length": null,
|
428 |
+
"model_seqlen": null,
|
429 |
+
"module_name_preceding_first_block": null,
|
430 |
+
"modules_in_block_to_quantize": null,
|
431 |
+
"pad_token_id": null,
|
432 |
+
"quant_method": "gptq",
|
433 |
+
"sym": true,
|
434 |
+
"tokenizer": null,
|
435 |
+
"true_sequential": true,
|
436 |
+
"use_cuda_fp16": false,
|
437 |
+
"use_exllama": true
|
438 |
+
},
|
439 |
+
"versions": {
|
440 |
+
"harness|hellaswag|0": 1.0,
|
441 |
+
"harness|mmlu|0": null,
|
442 |
+
"harness|mmlu_humanities|0": null,
|
443 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
444 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
445 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
446 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
447 |
+
"harness|mmlu_international_law|0": 0.0,
|
448 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
449 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
450 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
451 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
452 |
+
"harness|mmlu_philosophy|0": 0.0,
|
453 |
+
"harness|mmlu_prehistory|0": 0.0,
|
454 |
+
"harness|mmlu_professional_law|0": 0.0,
|
455 |
+
"harness|mmlu_world_religions|0": 0.0,
|
456 |
+
"harness|mmlu_other|0": null,
|
457 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
458 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
459 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
460 |
+
"harness|mmlu_global_facts|0": 0.0,
|
461 |
+
"harness|mmlu_human_aging|0": 0.0,
|
462 |
+
"harness|mmlu_management|0": 0.0,
|
463 |
+
"harness|mmlu_marketing|0": 0.0,
|
464 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
465 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
466 |
+
"harness|mmlu_nutrition|0": 0.0,
|
467 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
468 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
469 |
+
"harness|mmlu_virology|0": 0.0,
|
470 |
+
"harness|mmlu_social_sciences|0": null,
|
471 |
+
"harness|mmlu_econometrics|0": 0.0,
|
472 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
473 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
474 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
475 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
476 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
477 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
478 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
479 |
+
"harness|mmlu_public_relations|0": 0.0,
|
480 |
+
"harness|mmlu_security_studies|0": 0.0,
|
481 |
+
"harness|mmlu_sociology|0": 0.0,
|
482 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
483 |
+
"harness|mmlu_stem|0": null,
|
484 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
485 |
+
"harness|mmlu_anatomy|0": 0.0,
|
486 |
+
"harness|mmlu_astronomy|0": 0.0,
|
487 |
+
"harness|mmlu_college_biology|0": 0.0,
|
488 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_college_physics|0": 0.0,
|
492 |
+
"harness|mmlu_computer_security|0": 0.0,
|
493 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
494 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
495 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
496 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
497 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
498 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
499 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
500 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
501 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
502 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
503 |
+
"harness|openbookqa|0": 1.0,
|
504 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
505 |
+
"harness|boolq|0": 2.0,
|
506 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
507 |
+
"harness|piqa|0": 1.0,
|
508 |
+
"harness|arc:easy|0": 1.0,
|
509 |
+
"harness|lambada:openai|0": 1.0,
|
510 |
+
"harness|winogrande|0": 1.0,
|
511 |
+
"harness|arc:challenge|0": 1.0
|
512 |
+
},
|
513 |
+
"n-shot": {
|
514 |
+
"arc_challenge": 0,
|
515 |
+
"arc_easy": 0,
|
516 |
+
"boolq": 0,
|
517 |
+
"hellaswag": 0,
|
518 |
+
"lambada_openai": 0,
|
519 |
+
"mmlu": 0,
|
520 |
+
"mmlu_abstract_algebra": 0,
|
521 |
+
"mmlu_anatomy": 0,
|
522 |
+
"mmlu_astronomy": 0,
|
523 |
+
"mmlu_business_ethics": 0,
|
524 |
+
"mmlu_clinical_knowledge": 0,
|
525 |
+
"mmlu_college_biology": 0,
|
526 |
+
"mmlu_college_chemistry": 0,
|
527 |
+
"mmlu_college_computer_science": 0,
|
528 |
+
"mmlu_college_mathematics": 0,
|
529 |
+
"mmlu_college_medicine": 0,
|
530 |
+
"mmlu_college_physics": 0,
|
531 |
+
"mmlu_computer_security": 0,
|
532 |
+
"mmlu_conceptual_physics": 0,
|
533 |
+
"mmlu_econometrics": 0,
|
534 |
+
"mmlu_electrical_engineering": 0,
|
535 |
+
"mmlu_elementary_mathematics": 0,
|
536 |
+
"mmlu_formal_logic": 0,
|
537 |
+
"mmlu_global_facts": 0,
|
538 |
+
"mmlu_high_school_biology": 0,
|
539 |
+
"mmlu_high_school_chemistry": 0,
|
540 |
+
"mmlu_high_school_computer_science": 0,
|
541 |
+
"mmlu_high_school_european_history": 0,
|
542 |
+
"mmlu_high_school_geography": 0,
|
543 |
+
"mmlu_high_school_government_and_politics": 0,
|
544 |
+
"mmlu_high_school_macroeconomics": 0,
|
545 |
+
"mmlu_high_school_mathematics": 0,
|
546 |
+
"mmlu_high_school_microeconomics": 0,
|
547 |
+
"mmlu_high_school_physics": 0,
|
548 |
+
"mmlu_high_school_psychology": 0,
|
549 |
+
"mmlu_high_school_statistics": 0,
|
550 |
+
"mmlu_high_school_us_history": 0,
|
551 |
+
"mmlu_high_school_world_history": 0,
|
552 |
+
"mmlu_human_aging": 0,
|
553 |
+
"mmlu_human_sexuality": 0,
|
554 |
+
"mmlu_humanities": 0,
|
555 |
+
"mmlu_international_law": 0,
|
556 |
+
"mmlu_jurisprudence": 0,
|
557 |
+
"mmlu_logical_fallacies": 0,
|
558 |
+
"mmlu_machine_learning": 0,
|
559 |
+
"mmlu_management": 0,
|
560 |
+
"mmlu_marketing": 0,
|
561 |
+
"mmlu_medical_genetics": 0,
|
562 |
+
"mmlu_miscellaneous": 0,
|
563 |
+
"mmlu_moral_disputes": 0,
|
564 |
+
"mmlu_moral_scenarios": 0,
|
565 |
+
"mmlu_nutrition": 0,
|
566 |
+
"mmlu_other": 0,
|
567 |
+
"mmlu_philosophy": 0,
|
568 |
+
"mmlu_prehistory": 0,
|
569 |
+
"mmlu_professional_accounting": 0,
|
570 |
+
"mmlu_professional_law": 0,
|
571 |
+
"mmlu_professional_medicine": 0,
|
572 |
+
"mmlu_professional_psychology": 0,
|
573 |
+
"mmlu_public_relations": 0,
|
574 |
+
"mmlu_security_studies": 0,
|
575 |
+
"mmlu_social_sciences": 0,
|
576 |
+
"mmlu_sociology": 0,
|
577 |
+
"mmlu_stem": 0,
|
578 |
+
"mmlu_us_foreign_policy": 0,
|
579 |
+
"mmlu_virology": 0,
|
580 |
+
"mmlu_world_religions": 0,
|
581 |
+
"openbookqa": 0,
|
582 |
+
"piqa": 0,
|
583 |
+
"truthfulqa_mc1": 0,
|
584 |
+
"truthfulqa_mc2": 0,
|
585 |
+
"winogrande": 0
|
586 |
+
},
|
587 |
+
"date": 1714132429.364645,
|
588 |
+
"config": {
|
589 |
+
"model": "hf",
|
590 |
+
"model_args": "pretrained=Qwen/Qwen1.5-0.5B-Chat-GPTQ-Int4,dtype=float16,_commit_hash=main",
|
591 |
+
"batch_size": 4,
|
592 |
+
"batch_sizes": [],
|
593 |
+
"device": "cuda",
|
594 |
+
"use_cache": null,
|
595 |
+
"limit": null,
|
596 |
+
"bootstrap_iters": 100000,
|
597 |
+
"gen_kwargs": null
|
598 |
+
}
|
599 |
+
}
|
Qwen/results_2024-04-26-21-31-31.json
ADDED
@@ -0,0 +1,583 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-26-21-31-31",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Qwen/Qwen1.5-0.5B-Chat-AWQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 0.78,
+        "model_params": 0.31,
+        "quant_type": "AWQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|piqa|0": {
+            "acc,none": 0.6681175190424374,
+            "acc_stderr,none": 0.010986617776361585,
+            "acc_norm,none": 0.6653971708378672,
+            "acc_norm_stderr,none": 0.011009071725162507,
+            "alias": "piqa"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.192,
+            "acc_stderr,none": 0.017632180454361004,
+            "acc_norm,none": 0.31,
+            "acc_norm_stderr,none": 0.020704041021724805,
+            "alias": "openbookqa"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.468013468013468,
+            "acc_stderr,none": 0.010238767643185712,
+            "acc_norm,none": 0.42297979797979796,
+            "acc_norm_stderr,none": 0.0101373283822091,
+            "alias": "arc_easy"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.24146757679180889,
+            "acc_stderr,none": 0.012506564839739432,
+            "acc_norm,none": 0.26791808873720135,
+            "acc_norm_stderr,none": 0.01294203019513643,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 37.145150113999286,
+            "perplexity_stderr,none": 1.8837992772497014,
+            "acc,none": 0.390064040364836,
+            "acc_stderr,none": 0.006795511465879196,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.2423500611995104,
+            "acc_stderr,none": 0.015000674373570345,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.4158103928931979,
+            "acc_stderr,none": 0.015056624356974616,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.3599880501892053,
+            "acc_stderr,none": 0.004790155370993443,
+            "acc_norm,none": 0.4409480183230432,
+            "acc_norm_stderr,none": 0.0049548591067816485,
+            "alias": "hellaswag"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.5343330702446725,
+            "acc_stderr,none": 0.014019317531542563,
+            "alias": "winogrande"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.306010539809144,
+            "acc_stderr,none": 0.0038620141807728598,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.29798087141339,
+            "acc_stderr,none": 0.006634991308715303
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.30158730158730157,
+            "acc_stderr,none": 0.04104947269903394
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.36363636363636365,
+            "acc_stderr,none": 0.03756335775187897
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.3088235294117647,
+            "acc_stderr,none": 0.03242661719827218
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.379746835443038,
+            "acc_stderr,none": 0.031591887529658504
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.4628099173553719,
+            "acc_stderr,none": 0.04551711196104218
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.3888888888888889,
+            "acc_stderr,none": 0.047128212574267705
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.3067484662576687,
+            "acc_stderr,none": 0.03623089915724146
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.3468208092485549,
+            "acc_stderr,none": 0.025624723994030454
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23798882681564246,
+            "acc_stderr,none": 0.014242630070574885
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.3247588424437299,
+            "acc_stderr,none": 0.026596782287697046
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.33024691358024694,
+            "acc_stderr,none": 0.026168298456732846
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.26988265971316816,
+            "acc_stderr,none": 0.011337381084250397
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.2807017543859649,
+            "acc_stderr,none": 0.034462962170884265
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.3447055037013196,
+            "acc_stderr,none": 0.008436499399115982
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.39,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.30566037735849055,
+            "acc_stderr,none": 0.028353298073322666
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.2947976878612717,
+            "acc_stderr,none": 0.03476599607516479
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.3901345291479821,
+            "acc_stderr,none": 0.03273766725459156
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.3883495145631068,
+            "acc_stderr,none": 0.0482572933735639
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.4658119658119658,
+            "acc_stderr,none": 0.03267942734081228
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.43,
+            "acc_stderr,none": 0.049756985195624284
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.388250319284802,
+            "acc_stderr,none": 0.017427673295544337
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.38235294117647056,
+            "acc_stderr,none": 0.027826109307283704
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.24113475177304963,
+            "acc_stderr,none": 0.025518731049537773
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.19852941176470587,
+            "acc_stderr,none": 0.024231013370541114
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.3192771084337349,
+            "acc_stderr,none": 0.0362933532994786
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.30289242768930774,
+            "acc_stderr,none": 0.008243456801615645
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.23684210526315788,
+            "acc_stderr,none": 0.03999423879281335
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.30303030303030304,
+            "acc_stderr,none": 0.03274287914026867
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.26424870466321243,
+            "acc_stderr,none": 0.031821550509166484
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.28205128205128205,
+            "acc_stderr,none": 0.022815813098896603
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.2647058823529412,
+            "acc_stderr,none": 0.028657491285071973
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.326605504587156,
+            "acc_stderr,none": 0.020106990889937303
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.45038167938931295,
+            "acc_stderr,none": 0.04363643698524779
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.29248366013071897,
+            "acc_stderr,none": 0.01840341571010979
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.36363636363636365,
+            "acc_stderr,none": 0.04607582090719976
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.21224489795918366,
+            "acc_stderr,none": 0.026176967197866767
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.35323383084577115,
+            "acc_stderr,none": 0.03379790611796777
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.42,
+            "acc_stderr,none": 0.04960449637488583
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.28290516967967017,
+            "acc_stderr,none": 0.00799797732902615
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.31,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.04072314811876837
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.2631578947368421,
+            "acc_stderr,none": 0.03583496176361063
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2847222222222222,
+            "acc_stderr,none": 0.03773809990686934
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.30392156862745096,
+            "acc_stderr,none": 0.045766654032077636
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.38,
+            "acc_stderr,none": 0.04878317312145633
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.2680851063829787,
+            "acc_stderr,none": 0.028957342788342347
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.33793103448275863,
+            "acc_stderr,none": 0.039417076320648906
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.2671957671957672,
+            "acc_stderr,none": 0.022789673145776575
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.3193548387096774,
+            "acc_stderr,none": 0.026522709674667765
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.22660098522167488,
+            "acc_stderr,none": 0.029454863835292975
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.047937248544110196
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.23333333333333334,
+            "acc_stderr,none": 0.025787874220959316
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.2847682119205298,
+            "acc_stderr,none": 0.03684881521389023
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.2175925925925926,
+            "acc_stderr,none": 0.028139689444859683
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.23214285714285715,
+            "acc_stderr,none": 0.04007341809755805
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.40397553516819573,
+            "acc_stderr,none": 0.008582268854021396,
+            "alias": "boolq"
+        }
+    },
+    "task_info": {
+        "model": "Qwen/Qwen1.5-0.5B-Chat-AWQ",
+        "revision": "main",
+        "private": false,
+        "params": 1.412,
+        "architectures": "Qwen2ForCausalLM",
+        "quant_type": "AWQ",
+        "precision": "4bit",
+        "model_params": 2.824,
+        "model_size": 1.412,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-26T12:26:52Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "modules_to_not_convert": null,
+        "quant_method": "awq",
+        "version": "gemm",
+        "zero_point": true
+    },
+    "versions": {
+        "harness|piqa|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|boolq|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714134556.4389935,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=Qwen/Qwen1.5-0.5B-Chat-AWQ,dtype=float16,_commit_hash=main",
+        "batch_size": 4,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
Qwen/results_2024-04-28-05-05-52.json
ADDED
@@ -0,0 +1,583 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-04-28-05-05-52",
+        "total_evaluation_time_secondes": "",
+        "model_name": "Qwen/Qwen1.5-7B-Chat-AWQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.86,
+        "model_params": 6.53,
+        "quant_type": "AWQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.6527229676400947,
+            "acc_stderr,none": 0.013380909249751237,
+            "alias": "winogrande"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8366972477064221,
+            "acc_stderr,none": 0.006465073432190007,
+            "alias": "boolq"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7388465723612623,
+            "acc_stderr,none": 0.01024873864993557,
+            "acc_norm,none": 0.7426550598476604,
+            "acc_norm_stderr,none": 0.010199921064792512,
+            "alias": "piqa"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5316967791518732,
+            "acc_stderr,none": 0.015886945830206962,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.4274744027303754,
+            "acc_stderr,none": 0.014456862944650649,
+            "acc_norm,none": 0.45307167235494883,
+            "acc_norm_stderr,none": 0.014546892052005628,
+            "alias": "arc_challenge"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.6839225589225589,
+            "acc_stderr,none": 0.009540440071928294,
+            "acc_norm,none": 0.6241582491582491,
+            "acc_norm_stderr,none": 0.009938436373170633,
+            "alias": "arc_easy"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5802628958374826,
+            "acc_stderr,none": 0.004925072159723824,
+            "acc_norm,none": 0.7638916550487951,
+            "acc_norm_stderr,none": 0.004238215815533083,
+            "alias": "hellaswag"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3659730722154223,
+            "acc_stderr,none": 0.01686294168408839,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5952143569292123,
+            "acc_stderr,none": 0.00396669659542448,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5460148777895856,
+            "acc_stderr,none": 0.006937437849855694
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.04472135954999579
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7575757575757576,
+            "acc_stderr,none": 0.03346409881055953
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7549019607843137,
+            "acc_stderr,none": 0.030190282453501954
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7721518987341772,
+            "acc_stderr,none": 0.02730348459906942
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7024793388429752,
+            "acc_stderr,none": 0.04173349148083499
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7592592592592593,
+            "acc_stderr,none": 0.04133119440243838
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6748466257668712,
+            "acc_stderr,none": 0.03680350371286464
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6647398843930635,
+            "acc_stderr,none": 0.025416003773165555
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.36983240223463687,
+            "acc_stderr,none": 0.016145881256056212
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6752411575562701,
+            "acc_stderr,none": 0.02659678228769705
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6358024691358025,
+            "acc_stderr,none": 0.026774929899722313
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.4348109517601043,
+            "acc_stderr,none": 0.012661233805616307
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7192982456140351,
+            "acc_stderr,none": 0.034462962170884265
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6623752816221435,
+            "acc_stderr,none": 0.008215126567884317
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.04878317312145632
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6792452830188679,
+            "acc_stderr,none": 0.028727502957880263
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5664739884393064,
+            "acc_stderr,none": 0.03778621079092056
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.41,
+            "acc_stderr,none": 0.049431107042371025
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6547085201793722,
+            "acc_stderr,none": 0.03191100192835794
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7572815533980582,
+            "acc_stderr,none": 0.04245022486384495
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8675213675213675,
+            "acc_stderr,none": 0.02220930907316562
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.7,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7598978288633461,
+            "acc_stderr,none": 0.015274685213734191
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6830065359477124,
+            "acc_stderr,none": 0.026643278474508755
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.4397163120567376,
+            "acc_stderr,none": 0.029609912075594113
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6286764705882353,
+            "acc_stderr,none": 0.02934980313976587
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4879518072289157,
+            "acc_stderr,none": 0.038913644958358196
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.6789080272993175,
+            "acc_stderr,none": 0.008213450995422493
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.43859649122807015,
+            "acc_stderr,none": 0.04668000738510455
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7727272727272727,
+            "acc_stderr,none": 0.02985751567338641
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7927461139896373,
+            "acc_stderr,none": 0.02925282329180363
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.5923076923076923,
+            "acc_stderr,none": 0.024915243985987847
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6470588235294118,
+            "acc_stderr,none": 0.031041941304059285
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8018348623853211,
+            "acc_stderr,none": 0.01709057380421789
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7022900763358778,
+            "acc_stderr,none": 0.04010358942462203
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5637254901960784,
+            "acc_stderr,none": 0.02006287424353913
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6272727272727273,
+            "acc_stderr,none": 0.04631381319425465
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6857142857142857,
+            "acc_stderr,none": 0.02971932942241747
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7661691542288557,
+            "acc_stderr,none": 0.029929415408348398
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.83,
+            "acc_stderr,none": 0.03775251680686371
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5207738661592134,
+            "acc_stderr,none": 0.008667800862507506
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.45,
+            "acc_stderr,none": 0.049999999999999996
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5185185185185185,
+            "acc_stderr,none": 0.043163785995113245
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.631578947368421,
+            "acc_stderr,none": 0.03925523381052932
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6180555555555556,
+            "acc_stderr,none": 0.040629907841466674
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.41,
+            "acc_stderr,none": 0.049431107042371025
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.04960449637488583
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.38,
+            "acc_stderr,none": 0.04878317312145633
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.38235294117647056,
+            "acc_stderr,none": 0.04835503696107224
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.76,
+            "acc_stderr,none": 0.042923469599092816
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5106382978723404,
+            "acc_stderr,none": 0.03267862331014063
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5379310344827586,
+            "acc_stderr,none": 0.04154659671707548
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.47354497354497355,
+            "acc_stderr,none": 0.02571523981134675
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7161290322580646,
+            "acc_stderr,none": 0.02564938106302926
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.5320197044334976,
+            "acc_stderr,none": 0.035107665979592154
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.69,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.028742040903948496
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.39072847682119205,
+            "acc_stderr,none": 0.03983798306659807
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5555555555555556,
+            "acc_stderr,none": 0.03388857118502325
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.4017857142857143,
+            "acc_stderr,none": 0.04653333146973646
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.318,
+            "acc_stderr,none": 0.020847571620814014,
+            "acc_norm,none": 0.432,
+            "acc_norm_stderr,none": 0.02217510926561316,
+            "alias": "openbookqa"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 5.595127359503151,
+            "perplexity_stderr,none": 0.19155346558182795,
+            "acc,none": 0.6083834659421696,
+            "acc_stderr,none": 0.006800350287698186,
+            "alias": "lambada_openai"
+        }
+    },
+    "task_info": {
+        "model": "Qwen/Qwen1.5-7B-Chat-AWQ",
+        "revision": "main",
+        "private": false,
+        "params": 8.448,
+        "architectures": "Qwen2ForCausalLM",
+        "quant_type": "AWQ",
+        "precision": "4bit",
+        "model_params": 16.896,
+        "model_size": 8.448,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-04-27T08:04:58Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "modules_to_not_convert": null,
+        "quant_method": "awq",
+        "version": "gemm",
+        "zero_point": true
+    },
+    "versions": {
+        "harness|winogrande|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|piqa|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|lambada:openai|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+
"mmlu_conceptual_physics": 0,
|
517 |
+
"mmlu_econometrics": 0,
|
518 |
+
"mmlu_electrical_engineering": 0,
|
519 |
+
"mmlu_elementary_mathematics": 0,
|
520 |
+
"mmlu_formal_logic": 0,
|
521 |
+
"mmlu_global_facts": 0,
|
522 |
+
"mmlu_high_school_biology": 0,
|
523 |
+
"mmlu_high_school_chemistry": 0,
|
524 |
+
"mmlu_high_school_computer_science": 0,
|
525 |
+
"mmlu_high_school_european_history": 0,
|
526 |
+
"mmlu_high_school_geography": 0,
|
527 |
+
"mmlu_high_school_government_and_politics": 0,
|
528 |
+
"mmlu_high_school_macroeconomics": 0,
|
529 |
+
"mmlu_high_school_mathematics": 0,
|
530 |
+
"mmlu_high_school_microeconomics": 0,
|
531 |
+
"mmlu_high_school_physics": 0,
|
532 |
+
"mmlu_high_school_psychology": 0,
|
533 |
+
"mmlu_high_school_statistics": 0,
|
534 |
+
"mmlu_high_school_us_history": 0,
|
535 |
+
"mmlu_high_school_world_history": 0,
|
536 |
+
"mmlu_human_aging": 0,
|
537 |
+
"mmlu_human_sexuality": 0,
|
538 |
+
"mmlu_humanities": 0,
|
539 |
+
"mmlu_international_law": 0,
|
540 |
+
"mmlu_jurisprudence": 0,
|
541 |
+
"mmlu_logical_fallacies": 0,
|
542 |
+
"mmlu_machine_learning": 0,
|
543 |
+
"mmlu_management": 0,
|
544 |
+
"mmlu_marketing": 0,
|
545 |
+
"mmlu_medical_genetics": 0,
|
546 |
+
"mmlu_miscellaneous": 0,
|
547 |
+
"mmlu_moral_disputes": 0,
|
548 |
+
"mmlu_moral_scenarios": 0,
|
549 |
+
"mmlu_nutrition": 0,
|
550 |
+
"mmlu_other": 0,
|
551 |
+
"mmlu_philosophy": 0,
|
552 |
+
"mmlu_prehistory": 0,
|
553 |
+
"mmlu_professional_accounting": 0,
|
554 |
+
"mmlu_professional_law": 0,
|
555 |
+
"mmlu_professional_medicine": 0,
|
556 |
+
"mmlu_professional_psychology": 0,
|
557 |
+
"mmlu_public_relations": 0,
|
558 |
+
"mmlu_security_studies": 0,
|
559 |
+
"mmlu_social_sciences": 0,
|
560 |
+
"mmlu_sociology": 0,
|
561 |
+
"mmlu_stem": 0,
|
562 |
+
"mmlu_us_foreign_policy": 0,
|
563 |
+
"mmlu_virology": 0,
|
564 |
+
"mmlu_world_religions": 0,
|
565 |
+
"openbookqa": 0,
|
566 |
+
"piqa": 0,
|
567 |
+
"truthfulqa_mc1": 0,
|
568 |
+
"truthfulqa_mc2": 0,
|
569 |
+
"winogrande": 0
|
570 |
+
},
|
571 |
+
"date": 1714230477.5001445,
|
572 |
+
"config": {
|
573 |
+
"model": "hf",
|
574 |
+
"model_args": "pretrained=Qwen/Qwen1.5-7B-Chat-AWQ,dtype=float16,_commit_hash=main",
|
575 |
+
"batch_size": 2,
|
576 |
+
"batch_sizes": [],
|
577 |
+
"device": "cuda",
|
578 |
+
"use_cache": null,
|
579 |
+
"limit": null,
|
580 |
+
"bootstrap_iters": 100000,
|
581 |
+
"gen_kwargs": null
|
582 |
+
}
|
583 |
+
}
|
Qwen/results_2024-04-28-13-06-41.json
ADDED
@@ -0,0 +1,599 @@
{
  "config_general": {
    "lighteval_sha": "1.4",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-04-28-13-06-41",
    "total_evaluation_time_secondes": "",
    "model_name": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 5.86,
    "model_params": 6.54,
    "quant_type": "GPTQ",
    "precision": "4bit"
  },
  "results": {
    "harness|piqa|0": {
      "acc,none": 0.735038084874864,
      "acc_stderr,none": 0.01029655799331606,
      "acc_norm,none": 0.7453754080522307,
      "acc_norm_stderr,none": 0.010164432237060476,
      "alias": "piqa"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.332,
      "acc_stderr,none": 0.021081766571222852,
      "acc_norm,none": 0.416,
      "acc_norm_stderr,none": 0.022064943313928876,
      "alias": "openbookqa"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.6687710437710438,
      "acc_stderr,none": 0.009657641311350919,
      "acc_norm,none": 0.6056397306397306,
      "acc_norm_stderr,none": 0.010028176038393004,
      "alias": "arc_easy"
    },
    "harness|mmlu|0": {
      "acc,none": 0.5904429568437545,
      "acc_stderr,none": 0.003957317221865859,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.5377258235919234,
      "acc_stderr,none": 0.0068626131961731455
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.4523809523809524,
      "acc_stderr,none": 0.044518079590553275
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7696969696969697,
      "acc_stderr,none": 0.0328766675860349
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.7549019607843137,
      "acc_stderr,none": 0.03019028245350194
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.7510548523206751,
      "acc_stderr,none": 0.028146970599422644
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.768595041322314,
      "acc_stderr,none": 0.038498560987940904
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.7685185185185185,
      "acc_stderr,none": 0.04077494709252627
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.656441717791411,
      "acc_stderr,none": 0.03731133519673893
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.653179190751445,
      "acc_stderr,none": 0.02562472399403046
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.29720670391061454,
      "acc_stderr,none": 0.015285313353641592
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.6591639871382636,
      "acc_stderr,none": 0.026920841260776155
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.6481481481481481,
      "acc_stderr,none": 0.026571483480719967
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.4517601043024772,
      "acc_stderr,none": 0.012710662233660247
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.7660818713450293,
      "acc_stderr,none": 0.03246721765117826
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.6501448342452526,
      "acc_stderr,none": 0.008309193510443607
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.65,
      "acc_stderr,none": 0.0479372485441102
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.6830188679245283,
      "acc_stderr,none": 0.028637235639800904
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.5606936416184971,
      "acc_stderr,none": 0.037842719328874674
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.4,
      "acc_stderr,none": 0.049236596391733084
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.6098654708520179,
      "acc_stderr,none": 0.03273766725459157
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.7281553398058253,
      "acc_stderr,none": 0.044052680241409216
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.8333333333333334,
      "acc_stderr,none": 0.024414947304543674
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.7509578544061303,
      "acc_stderr,none": 0.015464676163395972
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.6633986928104575,
      "acc_stderr,none": 0.02705797462449438
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.4432624113475177,
      "acc_stderr,none": 0.029634838473766006
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.6213235294117647,
      "acc_stderr,none": 0.02946513363977613
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.463855421686747,
      "acc_stderr,none": 0.03882310850890594
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.6821579460513487,
      "acc_stderr,none": 0.008189081115990158
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.49122807017543857,
      "acc_stderr,none": 0.04702880432049615
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.7777777777777778,
      "acc_stderr,none": 0.02962022787479048
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.7772020725388601,
      "acc_stderr,none": 0.030031147977641538
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.5974358974358974,
      "acc_stderr,none": 0.024864995159767752
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.6428571428571429,
      "acc_stderr,none": 0.031124619309328177
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8128440366972477,
      "acc_stderr,none": 0.016722684526200148
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.6870229007633588,
      "acc_stderr,none": 0.04066962905677697
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.553921568627451,
      "acc_stderr,none": 0.020109864547181357
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.6272727272727273,
      "acc_stderr,none": 0.04631381319425464
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.7020408163265306,
      "acc_stderr,none": 0.02927956741106567
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.7910447761194029,
      "acc_stderr,none": 0.028748298931728655
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.81,
      "acc_stderr,none": 0.039427724440366234
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.5207738661592134,
      "acc_stderr,none": 0.008650288386489482
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.41,
      "acc_stderr,none": 0.04943110704237101
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.5777777777777777,
      "acc_stderr,none": 0.04266763404099582
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.631578947368421,
      "acc_stderr,none": 0.039255233810529325
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.6527777777777778,
      "acc_stderr,none": 0.039812405437178615
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.42,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.58,
      "acc_stderr,none": 0.049604496374885836
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.33,
      "acc_stderr,none": 0.04725815626252603
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.4019607843137255,
      "acc_stderr,none": 0.04878608714466997
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.74,
      "acc_stderr,none": 0.0440844002276808
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5361702127659574,
      "acc_stderr,none": 0.03260038511835771
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.5310344827586206,
      "acc_stderr,none": 0.04158632762097828
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.4603174603174603,
      "acc_stderr,none": 0.025670080636909193
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.7193548387096774,
      "acc_stderr,none": 0.0255606047210229
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5320197044334976,
      "acc_stderr,none": 0.03510766597959217
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.69,
      "acc_stderr,none": 0.04648231987117316
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.34444444444444444,
      "acc_stderr,none": 0.02897264888484427
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.37748344370860926,
      "acc_stderr,none": 0.03958027231121569
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5324074074074074,
      "acc_stderr,none": 0.03402801581358966
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.38392857142857145,
      "acc_stderr,none": 0.04616143075028546
    },
    "harness|winogrande|0": {
      "acc,none": 0.6432517758484609,
      "acc_stderr,none": 0.01346339395802872,
      "alias": "winogrande"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.541346036842384,
      "acc_stderr,none": 0.01584861213215754,
      "alias": "truthfulqa_mc2"
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.41638225255972694,
      "acc_stderr,none": 0.01440561827943617,
      "acc_norm,none": 0.439419795221843,
      "acc_norm_stderr,none": 0.014503747823580123,
      "alias": "arc_challenge"
    },
    "harness|boolq|0": {
      "acc,none": 0.8403669724770643,
      "acc_stderr,none": 0.006406021659710515,
      "alias": "boolq"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 6.125395895964018,
      "perplexity_stderr,none": 0.21476426826907466,
      "acc,none": 0.5940228992819717,
      "acc_stderr,none": 0.006841706431619957,
      "alias": "lambada_openai"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.3806609547123623,
      "acc_stderr,none": 0.016997627871907915,
      "alias": "truthfulqa_mc1"
    },
    "harness|hellaswag|0": {
      "acc,none": 0.578868751244772,
      "acc_stderr,none": 0.004927314729433566,
      "acc_norm,none": 0.7615016928898626,
      "acc_norm_stderr,none": 0.0042529434040930484,
      "alias": "hellaswag"
    }
  },
  "task_info": {
    "model": "Qwen/Qwen1.5-7B-Chat-GPTQ-Int4",
    "revision": "main",
    "private": false,
    "params": 8.456,
    "architectures": "Qwen2ForCausalLM",
    "quant_type": "GPTQ",
    "precision": "4bit",
    "model_params": 16.912,
    "model_size": 8.456,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*Q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-04-27T08:09:37Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "ITREX"
  },
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": null,
    "cache_block_outputs": true,
    "damp_percent": 0.01,
    "dataset": null,
    "desc_act": false,
    "exllama_config": {
      "version": 1
    },
    "group_size": 128,
    "max_input_length": null,
    "model_seqlen": null,
    "module_name_preceding_first_block": null,
    "modules_in_block_to_quantize": null,
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": false,
    "use_exllama": true
  },
  "versions": {
    "harness|piqa|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|winogrande|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|arc:challenge|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|hellaswag|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1714275925.6022973,
  "config": {
    "model": "hf",
    "model_args": "pretrained=Qwen/Qwen1.5-7B-Chat-GPTQ-Int4,dtype=float16,_commit_hash=main",
    "batch_size": 2,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
Qwen/results_2024-05-05-22-20-44.json
ADDED
@@ -0,0 +1,579 @@
{
  "config_general": {
    "lighteval_sha": "no",
    "num_few_shot_default": null,
    "num_fewshot_seeds": null,
    "override_batch_size": null,
    "max_samples": null,
    "job_id": -1,
    "start_time": null,
    "end_time": "2024-05-05-22-20-44",
    "total_evaluation_time_secondes": "",
    "model_name": "Qwen/Qwen1.5-0.5B-Chat-GGUF",
    "model_sha": "",
    "model_dtype": "4bit",
    "model_size": 0.389045248,
    "model_params": 0.619570176,
    "quant_type": "llama.cpp",
    "precision": "4bit"
  },
  "results": {
    "harness|hellaswag|0": {
      "acc,none": 0.3569010157339175,
      "acc_stderr,none": 0.00478106139087391,
      "acc_norm,none": 0.4396534554869548,
      "acc_norm_stderr,none": 0.004953305461311763,
      "alias": "hellaswag"
    },
    "harness|truthfulqa:mc1|0": {
      "acc,none": 0.23011015911872704,
      "acc_stderr,none": 0.014734557959807762,
      "alias": "truthfulqa_mc1"
    },
    "harness|lambada:openai|0": {
      "perplexity,none": 34.802753466476304,
      "perplexity_stderr,none": 1.7695091328108965,
      "acc,none": 0.26508829807878903,
      "acc_stderr,none": 0.006149289402158153,
      "alias": "lambada_openai"
    },
    "harness|boolq|0": {
      "acc,none": 0.3785932721712538,
      "acc_stderr,none": 0.008483341718024479,
      "alias": "boolq"
    },
    "harness|winogrande|0": {
      "acc,none": 0.5351223362273086,
      "acc_stderr,none": 0.014017773120881587,
      "alias": "winogrande"
    },
    "harness|arc:easy|0": {
      "acc,none": 0.37415824915824913,
      "acc_stderr,none": 0.009929516948977625,
      "acc_norm,none": 0.37247474747474746,
      "acc_norm_stderr,none": 0.009920469215736012,
      "alias": "arc_easy"
    },
    "harness|openbookqa|0": {
      "acc,none": 0.176,
      "acc_stderr,none": 0.01704785202062227,
      "acc_norm,none": 0.27,
      "acc_norm_stderr,none": 0.019874354831287487,
      "alias": "openbookqa"
    },
    "harness|truthfulqa:mc2|0": {
      "acc,none": 0.40641904896162573,
      "acc_stderr,none": 0.014888528969774828,
      "alias": "truthfulqa_mc2"
    },
    "harness|mmlu|0": {
      "acc,none": 0.2296681384418174,
      "acc_stderr,none": 0.00354375915129246,
      "alias": "mmlu"
    },
    "harness|mmlu_humanities|0": {
      "alias": " - humanities",
      "acc,none": 0.24250797024442083,
      "acc_stderr,none": 0.0062462231153999565
    },
    "harness|mmlu_formal_logic|0": {
      "alias": " - formal_logic",
      "acc,none": 0.2857142857142857,
      "acc_stderr,none": 0.04040610178208841
    },
    "harness|mmlu_high_school_european_history|0": {
      "alias": " - high_school_european_history",
      "acc,none": 0.21818181818181817,
      "acc_stderr,none": 0.03225078108306289
    },
    "harness|mmlu_high_school_us_history|0": {
      "alias": " - high_school_us_history",
      "acc,none": 0.25,
      "acc_stderr,none": 0.03039153369274154
    },
    "harness|mmlu_high_school_world_history|0": {
      "alias": " - high_school_world_history",
      "acc,none": 0.270042194092827,
      "acc_stderr,none": 0.028900721906293426
    },
    "harness|mmlu_international_law|0": {
      "alias": " - international_law",
      "acc,none": 0.2396694214876033,
      "acc_stderr,none": 0.03896878985070417
    },
    "harness|mmlu_jurisprudence|0": {
      "alias": " - jurisprudence",
      "acc,none": 0.26851851851851855,
      "acc_stderr,none": 0.04284467968052192
    },
    "harness|mmlu_logical_fallacies|0": {
      "alias": " - logical_fallacies",
      "acc,none": 0.22085889570552147,
      "acc_stderr,none": 0.032591773927421776
    },
    "harness|mmlu_moral_disputes|0": {
      "alias": " - moral_disputes",
      "acc,none": 0.24855491329479767,
      "acc_stderr,none": 0.023267528432100174
    },
    "harness|mmlu_moral_scenarios|0": {
      "alias": " - moral_scenarios",
      "acc,none": 0.23798882681564246,
      "acc_stderr,none": 0.014242630070574885
    },
    "harness|mmlu_philosophy|0": {
      "alias": " - philosophy",
      "acc,none": 0.1864951768488746,
      "acc_stderr,none": 0.02212243977248077
    },
    "harness|mmlu_prehistory|0": {
      "alias": " - prehistory",
      "acc,none": 0.21604938271604937,
      "acc_stderr,none": 0.022899162918445813
    },
    "harness|mmlu_professional_law|0": {
      "alias": " - professional_law",
      "acc,none": 0.24641460234680573,
      "acc_stderr,none": 0.011005971399927244
    },
    "harness|mmlu_world_religions|0": {
      "alias": " - world_religions",
      "acc,none": 0.3216374269005848,
      "acc_stderr,none": 0.03582529442573122
    },
    "harness|mmlu_other|0": {
      "alias": " - other",
      "acc,none": 0.23978113936272932,
      "acc_stderr,none": 0.00764225029165751
    },
    "harness|mmlu_business_ethics|0": {
      "alias": " - business_ethics",
      "acc,none": 0.3,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_clinical_knowledge|0": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.21509433962264152,
      "acc_stderr,none": 0.025288394502891377
    },
    "harness|mmlu_college_medicine|0": {
      "alias": " - college_medicine",
      "acc,none": 0.20809248554913296,
      "acc_stderr,none": 0.030952890217749884
    },
    "harness|mmlu_global_facts|0": {
      "alias": " - global_facts",
      "acc,none": 0.18,
      "acc_stderr,none": 0.038612291966536955
    },
    "harness|mmlu_human_aging|0": {
      "alias": " - human_aging",
      "acc,none": 0.31390134529147984,
      "acc_stderr,none": 0.03114679648297246
    },
    "harness|mmlu_management|0": {
      "alias": " - management",
      "acc,none": 0.17475728155339806,
      "acc_stderr,none": 0.03760178006026621
    },
    "harness|mmlu_marketing|0": {
      "alias": " - marketing",
      "acc,none": 0.2905982905982906,
      "acc_stderr,none": 0.029745048572674057
    },
    "harness|mmlu_medical_genetics|0": {
      "alias": " - medical_genetics",
      "acc,none": 0.3,
      "acc_stderr,none": 0.046056618647183814
    },
    "harness|mmlu_miscellaneous|0": {
      "alias": " - miscellaneous",
      "acc,none": 0.23754789272030652,
      "acc_stderr,none": 0.015218733046150195
    },
    "harness|mmlu_nutrition|0": {
      "alias": " - nutrition",
      "acc,none": 0.22549019607843138,
      "acc_stderr,none": 0.023929155517351284
    },
    "harness|mmlu_professional_accounting|0": {
      "alias": " - professional_accounting",
      "acc,none": 0.23404255319148937,
      "acc_stderr,none": 0.025257861359432407
    },
    "harness|mmlu_professional_medicine|0": {
      "alias": " - professional_medicine",
      "acc,none": 0.18382352941176472,
      "acc_stderr,none": 0.02352924218519311
    },
    "harness|mmlu_virology|0": {
      "alias": " - virology",
      "acc,none": 0.28313253012048195,
      "acc_stderr,none": 0.03507295431370518
    },
    "harness|mmlu_social_sciences|0": {
      "alias": " - social_sciences",
      "acc,none": 0.21741956451088723,
      "acc_stderr,none": 0.007433074431341764
    },
    "harness|mmlu_econometrics|0": {
      "alias": " - econometrics",
      "acc,none": 0.23684210526315788,
      "acc_stderr,none": 0.039994238792813386
    },
    "harness|mmlu_high_school_geography|0": {
      "alias": " - high_school_geography",
      "acc,none": 0.17676767676767677,
      "acc_stderr,none": 0.027178752639044915
    },
    "harness|mmlu_high_school_government_and_politics|0": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.20207253886010362,
      "acc_stderr,none": 0.02897908979429673
    },
    "harness|mmlu_high_school_macroeconomics|0": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.20256410256410257,
      "acc_stderr,none": 0.020377660970371397
    },
    "harness|mmlu_high_school_microeconomics|0": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.21008403361344538,
      "acc_stderr,none": 0.026461398717471874
    },
    "harness|mmlu_high_school_psychology|0": {
      "alias": " - high_school_psychology",
      "acc,none": 0.1926605504587156,
      "acc_stderr,none": 0.016909276884936073
    },
    "harness|mmlu_human_sexuality|0": {
      "alias": " - human_sexuality",
      "acc,none": 0.2595419847328244,
      "acc_stderr,none": 0.03844876139785271
    },
    "harness|mmlu_professional_psychology|0": {
      "alias": " - professional_psychology",
      "acc,none": 0.25,
      "acc_stderr,none": 0.01751781884501444
    },
    "harness|mmlu_public_relations|0": {
      "alias": " - public_relations",
      "acc,none": 0.21818181818181817,
      "acc_stderr,none": 0.03955932861795833
    },
    "harness|mmlu_security_studies|0": {
      "alias": " - security_studies",
      "acc,none": 0.18775510204081633,
      "acc_stderr,none": 0.02500025603954622
    },
    "harness|mmlu_sociology|0": {
      "alias": " - sociology",
      "acc,none": 0.24378109452736318,
      "acc_stderr,none": 0.030360490154014652
    },
    "harness|mmlu_us_foreign_policy|0": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.28,
      "acc_stderr,none": 0.045126085985421276
    },
    "harness|mmlu_stem|0": {
      "alias": " - stem",
      "acc,none": 0.21249603552172533,
      "acc_stderr,none": 0.007271218700485502
    },
    "harness|mmlu_abstract_algebra|0": {
      "alias": " - abstract_algebra",
      "acc,none": 0.22,
      "acc_stderr,none": 0.04163331998932269
    },
    "harness|mmlu_anatomy|0": {
      "alias": " - anatomy",
      "acc,none": 0.18518518518518517,
      "acc_stderr,none": 0.03355677216313142
    },
    "harness|mmlu_astronomy|0": {
      "alias": " - astronomy",
      "acc,none": 0.17763157894736842,
      "acc_stderr,none": 0.031103182383123398
    },
    "harness|mmlu_college_biology|0": {
      "alias": " - college_biology",
      "acc,none": 0.2569444444444444,
      "acc_stderr,none": 0.03653946969442099
    },
    "harness|mmlu_college_chemistry|0": {
      "alias": " - college_chemistry",
      "acc,none": 0.2,
      "acc_stderr,none": 0.040201512610368445
    },
    "harness|mmlu_college_computer_science|0": {
      "alias": " - college_computer_science",
      "acc,none": 0.26,
      "acc_stderr,none": 0.044084400227680794
    },
    "harness|mmlu_college_mathematics|0": {
      "alias": " - college_mathematics",
      "acc,none": 0.21,
      "acc_stderr,none": 0.040936018074033256
    },
    "harness|mmlu_college_physics|0": {
      "alias": " - college_physics",
      "acc,none": 0.21568627450980393,
      "acc_stderr,none": 0.040925639582376556
    },
    "harness|mmlu_computer_security|0": {
      "alias": " - computer_security",
      "acc,none": 0.28,
      "acc_stderr,none": 0.045126085985421276
    },
    "harness|mmlu_conceptual_physics|0": {
      "alias": " - conceptual_physics",
      "acc,none": 0.26382978723404255,
      "acc_stderr,none": 0.02880998985410298
    },
    "harness|mmlu_electrical_engineering|0": {
      "alias": " - electrical_engineering",
      "acc,none": 0.2413793103448276,
      "acc_stderr,none": 0.03565998174135302
    },
    "harness|mmlu_elementary_mathematics|0": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.20899470899470898,
      "acc_stderr,none": 0.020940481565334835
    },
    "harness|mmlu_high_school_biology|0": {
      "alias": " - high_school_biology",
      "acc,none": 0.1774193548387097,
      "acc_stderr,none": 0.021732540689329265
    },
    "harness|mmlu_high_school_chemistry|0": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.15270935960591134,
      "acc_stderr,none": 0.025308904539380624
    },
    "harness|mmlu_high_school_computer_science|0": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.25,
      "acc_stderr,none": 0.04351941398892446
    },
    "harness|mmlu_high_school_mathematics|0": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.2111111111111111,
      "acc_stderr,none": 0.02488211685765508
    },
    "harness|mmlu_high_school_physics|0": {
      "alias": " - high_school_physics",
      "acc,none": 0.1986754966887417,
      "acc_stderr,none": 0.032578473844367746
    },
    "harness|mmlu_high_school_statistics|0": {
      "alias": " - high_school_statistics",
      "acc,none": 0.1527777777777778,
      "acc_stderr,none": 0.02453632602613422
    },
    "harness|mmlu_machine_learning|0": {
      "alias": " - machine_learning",
      "acc,none": 0.3125,
      "acc_stderr,none": 0.043994650575715215
    },
    "harness|arc:challenge|0": {
      "acc,none": 0.23464163822525597,
      "acc_stderr,none": 0.012383873560768664,
      "acc_norm,none": 0.2738907849829352,
      "acc_norm_stderr,none": 0.013032004972989503,
      "alias": "arc_challenge"
    },
    "harness|piqa|0": {
      "acc,none": 0.6675734494015234,
      "acc_stderr,none": 0.01099114155744559,
      "acc_norm,none": 0.6610446137105549,
      "acc_norm_stderr,none": 0.011044144419710638,
      "alias": "piqa"
    }
  },
  "task_info": {
    "model": "Qwen/Qwen1.5-0.5B-Chat-GGUF",
    "revision": "main",
    "private": false,
    "params": 2.0,
    "architectures": "?",
    "quant_type": "llama.cpp",
    "precision": "4bit",
    "model_params": 4.0,
    "model_size": 2.0,
    "weight_dtype": "int4",
    "compute_dtype": "float16",
    "gguf_ftype": "*q4_0.gguf",
    "hardware": "gpu",
    "status": "Pending",
    "submitted_time": "2024-05-01T16:23:22Z",
    "model_type": "quantization",
    "job_id": -1,
    "job_start_time": null,
    "scripts": "llama_cpp"
  },
  "quantization_config": {
    "quant_method": "llama.cpp",
    "ftype": "*q4_0.gguf"
  },
  "versions": {
    "harness|hellaswag|0": 1.0,
    "harness|truthfulqa:mc1|0": 2.0,
    "harness|lambada:openai|0": 1.0,
    "harness|boolq|0": 2.0,
    "harness|winogrande|0": 1.0,
    "harness|arc:easy|0": 1.0,
    "harness|openbookqa|0": 1.0,
    "harness|truthfulqa:mc2|0": 2.0,
    "harness|mmlu|0": null,
    "harness|mmlu_humanities|0": null,
    "harness|mmlu_formal_logic|0": 0.0,
    "harness|mmlu_high_school_european_history|0": 0.0,
    "harness|mmlu_high_school_us_history|0": 0.0,
    "harness|mmlu_high_school_world_history|0": 0.0,
    "harness|mmlu_international_law|0": 0.0,
    "harness|mmlu_jurisprudence|0": 0.0,
    "harness|mmlu_logical_fallacies|0": 0.0,
    "harness|mmlu_moral_disputes|0": 0.0,
    "harness|mmlu_moral_scenarios|0": 0.0,
    "harness|mmlu_philosophy|0": 0.0,
    "harness|mmlu_prehistory|0": 0.0,
    "harness|mmlu_professional_law|0": 0.0,
    "harness|mmlu_world_religions|0": 0.0,
    "harness|mmlu_other|0": null,
    "harness|mmlu_business_ethics|0": 0.0,
    "harness|mmlu_clinical_knowledge|0": 0.0,
    "harness|mmlu_college_medicine|0": 0.0,
    "harness|mmlu_global_facts|0": 0.0,
    "harness|mmlu_human_aging|0": 0.0,
    "harness|mmlu_management|0": 0.0,
    "harness|mmlu_marketing|0": 0.0,
    "harness|mmlu_medical_genetics|0": 0.0,
    "harness|mmlu_miscellaneous|0": 0.0,
    "harness|mmlu_nutrition|0": 0.0,
    "harness|mmlu_professional_accounting|0": 0.0,
    "harness|mmlu_professional_medicine|0": 0.0,
    "harness|mmlu_virology|0": 0.0,
    "harness|mmlu_social_sciences|0": null,
    "harness|mmlu_econometrics|0": 0.0,
    "harness|mmlu_high_school_geography|0": 0.0,
    "harness|mmlu_high_school_government_and_politics|0": 0.0,
    "harness|mmlu_high_school_macroeconomics|0": 0.0,
    "harness|mmlu_high_school_microeconomics|0": 0.0,
    "harness|mmlu_high_school_psychology|0": 0.0,
    "harness|mmlu_human_sexuality|0": 0.0,
    "harness|mmlu_professional_psychology|0": 0.0,
    "harness|mmlu_public_relations|0": 0.0,
    "harness|mmlu_security_studies|0": 0.0,
    "harness|mmlu_sociology|0": 0.0,
    "harness|mmlu_us_foreign_policy|0": 0.0,
    "harness|mmlu_stem|0": null,
    "harness|mmlu_abstract_algebra|0": 0.0,
    "harness|mmlu_anatomy|0": 0.0,
    "harness|mmlu_astronomy|0": 0.0,
    "harness|mmlu_college_biology|0": 0.0,
    "harness|mmlu_college_chemistry|0": 0.0,
    "harness|mmlu_college_computer_science|0": 0.0,
    "harness|mmlu_college_mathematics|0": 0.0,
    "harness|mmlu_college_physics|0": 0.0,
    "harness|mmlu_computer_security|0": 0.0,
    "harness|mmlu_conceptual_physics|0": 0.0,
    "harness|mmlu_electrical_engineering|0": 0.0,
    "harness|mmlu_elementary_mathematics|0": 0.0,
    "harness|mmlu_high_school_biology|0": 0.0,
    "harness|mmlu_high_school_chemistry|0": 0.0,
    "harness|mmlu_high_school_computer_science|0": 0.0,
    "harness|mmlu_high_school_mathematics|0": 0.0,
    "harness|mmlu_high_school_physics|0": 0.0,
    "harness|mmlu_high_school_statistics|0": 0.0,
    "harness|mmlu_machine_learning|0": 0.0,
    "harness|arc:challenge|0": 1.0,
    "harness|piqa|0": 1.0
  },
  "n-shot": {
    "arc_challenge": 0,
    "arc_easy": 0,
    "boolq": 0,
    "hellaswag": 0,
    "lambada_openai": 0,
    "mmlu": 0,
    "mmlu_abstract_algebra": 0,
    "mmlu_anatomy": 0,
    "mmlu_astronomy": 0,
    "mmlu_business_ethics": 0,
    "mmlu_clinical_knowledge": 0,
    "mmlu_college_biology": 0,
    "mmlu_college_chemistry": 0,
    "mmlu_college_computer_science": 0,
    "mmlu_college_mathematics": 0,
    "mmlu_college_medicine": 0,
    "mmlu_college_physics": 0,
    "mmlu_computer_security": 0,
    "mmlu_conceptual_physics": 0,
    "mmlu_econometrics": 0,
    "mmlu_electrical_engineering": 0,
    "mmlu_elementary_mathematics": 0,
    "mmlu_formal_logic": 0,
    "mmlu_global_facts": 0,
    "mmlu_high_school_biology": 0,
    "mmlu_high_school_chemistry": 0,
    "mmlu_high_school_computer_science": 0,
    "mmlu_high_school_european_history": 0,
    "mmlu_high_school_geography": 0,
    "mmlu_high_school_government_and_politics": 0,
    "mmlu_high_school_macroeconomics": 0,
    "mmlu_high_school_mathematics": 0,
    "mmlu_high_school_microeconomics": 0,
    "mmlu_high_school_physics": 0,
    "mmlu_high_school_psychology": 0,
    "mmlu_high_school_statistics": 0,
    "mmlu_high_school_us_history": 0,
    "mmlu_high_school_world_history": 0,
    "mmlu_human_aging": 0,
    "mmlu_human_sexuality": 0,
    "mmlu_humanities": 0,
    "mmlu_international_law": 0,
    "mmlu_jurisprudence": 0,
    "mmlu_logical_fallacies": 0,
    "mmlu_machine_learning": 0,
    "mmlu_management": 0,
    "mmlu_marketing": 0,
    "mmlu_medical_genetics": 0,
    "mmlu_miscellaneous": 0,
    "mmlu_moral_disputes": 0,
    "mmlu_moral_scenarios": 0,
    "mmlu_nutrition": 0,
    "mmlu_other": 0,
    "mmlu_philosophy": 0,
    "mmlu_prehistory": 0,
    "mmlu_professional_accounting": 0,
    "mmlu_professional_law": 0,
    "mmlu_professional_medicine": 0,
    "mmlu_professional_psychology": 0,
    "mmlu_public_relations": 0,
    "mmlu_security_studies": 0,
    "mmlu_social_sciences": 0,
    "mmlu_sociology": 0,
    "mmlu_stem": 0,
    "mmlu_us_foreign_policy": 0,
    "mmlu_virology": 0,
    "mmlu_world_religions": 0,
    "openbookqa": 0,
    "piqa": 0,
    "truthfulqa_mc1": 0,
    "truthfulqa_mc2": 0,
    "winogrande": 0
  },
  "date": 1714746426.7282674,
  "config": {
    "model": "WrapperGGUFLM",
    "model_args": "gguf_model=Qwen/Qwen1.5-0.5B-Chat-GGUF,ftype=*q4_0.gguf,dtype=float16,_commit_hash=main",
    "batch_size": 1,
    "batch_sizes": [],
    "device": "cuda",
    "use_cache": null,
    "limit": null,
    "bootstrap_iters": 100000,
    "gen_kwargs": null
  }
}
Qwen/results_2024-05-07-09-41-53.json
ADDED
@@ -0,0 +1,579 @@
+{
+  "config_general": {
+    "lighteval_sha": "no",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-07-09-41-53",
+    "total_evaluation_time_secondes": "",
+    "model_name": "Qwen/Qwen1.5-7B-Chat-GGUF",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 4.505956352,
+    "model_params": 7.721324544,
+    "quant_type": "llama.cpp",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|openbookqa|0": {
+      "acc,none": 0.3,
+      "acc_stderr,none": 0.020514426225628043,
+      "acc_norm,none": 0.396,
+      "acc_norm_stderr,none": 0.021893529941665817,
+      "alias": "openbookqa"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.7399347116430903,
+      "acc_stderr,none": 0.01023489324906131,
+      "acc_norm,none": 0.7393906420021763,
+      "acc_norm_stderr,none": 0.010241826155811618,
+      "alias": "piqa"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.5445532573203143,
+      "acc_stderr,none": 0.01620516610689256,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.6393054459352802,
+      "acc_stderr,none": 0.01349606439423403,
+      "alias": "winogrande"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.3537331701346389,
+      "acc_stderr,none": 0.016737814358846147,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.4104095563139932,
+      "acc_stderr,none": 0.014374922192642662,
+      "acc_norm,none": 0.4232081911262799,
+      "acc_norm_stderr,none": 0.014438036220848025,
+      "alias": "arc_challenge"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5438159729137622,
+      "acc_stderr,none": 0.004970585328297622,
+      "acc_norm,none": 0.6963752240589524,
+      "acc_norm_stderr,none": 0.00458882795877513,
+      "alias": "hellaswag"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.6136363636363636,
+      "acc_stderr,none": 0.009991296778159622,
+      "acc_norm,none": 0.6056397306397306,
+      "acc_norm_stderr,none": 0.010028176038393002,
+      "alias": "arc_easy"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.7981651376146789,
+      "acc_stderr,none": 0.00701999832474464,
+      "alias": "boolq"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 8.629176527841318,
+      "perplexity_stderr,none": 0.42869315340212266,
+      "acc,none": 0.4244129633223365,
+      "acc_stderr,none": 0.006885918770006381,
+      "alias": "lambada_openai"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.5183734510753454,
+      "acc_stderr,none": 0.00400817537653083,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.4359192348565356,
+      "acc_stderr,none": 0.006934954327448632
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.3968253968253968,
+      "acc_stderr,none": 0.0437588849272706
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.3333333333333333,
+      "acc_stderr,none": 0.03681050869161549
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.37254901960784315,
+      "acc_stderr,none": 0.03393388584958406
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.2742616033755274,
+      "acc_stderr,none": 0.029041333510598046
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.6528925619834711,
+      "acc_stderr,none": 0.043457245702925335
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.7777777777777778,
+      "acc_stderr,none": 0.040191074725573483
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.6134969325153374,
+      "acc_stderr,none": 0.03825825548848607
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.5086705202312138,
+      "acc_stderr,none": 0.02691504735536981
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.3754189944134078,
+      "acc_stderr,none": 0.01619510424846353
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.639871382636656,
+      "acc_stderr,none": 0.027264297599804015
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.6234567901234568,
+      "acc_stderr,none": 0.026959344518747787
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.32790091264667537,
+      "acc_stderr,none": 0.01198993664066653
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.7368421052631579,
+      "acc_stderr,none": 0.03377310252209205
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.5616350177019633,
+      "acc_stderr,none": 0.008371099697022879
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.61,
+      "acc_stderr,none": 0.04902071300001974
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.5962264150943396,
+      "acc_stderr,none": 0.03019761160019795
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.4624277456647399,
+      "acc_stderr,none": 0.0380168510452446
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.37,
+      "acc_stderr,none": 0.04852365870939099
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.5336322869955157,
+      "acc_stderr,none": 0.033481800170603065
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.7669902912621359,
+      "acc_stderr,none": 0.04185832598928315
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.8205128205128205,
+      "acc_stderr,none": 0.02514093595033544
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.54,
+      "acc_stderr,none": 0.05009082659620332
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.6998722860791826,
+      "acc_stderr,none": 0.016389249691317425
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.5915032679738562,
+      "acc_stderr,none": 0.028146405993096358
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.39361702127659576,
+      "acc_stderr,none": 0.029144544781596154
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.17647058823529413,
+      "acc_stderr,none": 0.023157468308559366
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.463855421686747,
+      "acc_stderr,none": 0.03882310850890594
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.6301592460188495,
+      "acc_stderr,none": 0.008418371310872776
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.34210526315789475,
+      "acc_stderr,none": 0.04462917535336936
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.7727272727272727,
+      "acc_stderr,none": 0.029857515673386414
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.7823834196891192,
+      "acc_stderr,none": 0.02977866303775295
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.5846153846153846,
+      "acc_stderr,none": 0.02498535492310233
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.6302521008403361,
+      "acc_stderr,none": 0.03135709599613591
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.7761467889908257,
+      "acc_stderr,none": 0.017871217767790236
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.6717557251908397,
+      "acc_stderr,none": 0.04118438565806299
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.4820261437908497,
+      "acc_stderr,none": 0.020214761037872408
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.5818181818181818,
+      "acc_stderr,none": 0.04724577405731572
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.5428571428571428,
+      "acc_stderr,none": 0.031891418324213966
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.6865671641791045,
+      "acc_stderr,none": 0.032801882053486435
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.77,
+      "acc_stderr,none": 0.04229525846816505
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.48969235648588644,
+      "acc_stderr,none": 0.008718408268467998
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.42,
+      "acc_stderr,none": 0.04960449637488584
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.4888888888888889,
+      "acc_stderr,none": 0.04318275491977976
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.5855263157894737,
+      "acc_stderr,none": 0.04008973785779206
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.5694444444444444,
+      "acc_stderr,none": 0.04140685639111503
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.4,
+      "acc_stderr,none": 0.04923659639173309
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.41,
+      "acc_stderr,none": 0.04943110704237101
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.04725815626252604
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.4019607843137255,
+      "acc_stderr,none": 0.04878608714466996
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.73,
+      "acc_stderr,none": 0.044619604333847394
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.5531914893617021,
+      "acc_stderr,none": 0.0325005368436584
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.5310344827586206,
+      "acc_stderr,none": 0.04158632762097828
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.4523809523809524,
+      "acc_stderr,none": 0.025634258115554958
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.6709677419354839,
+      "acc_stderr,none": 0.026729499068349958
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.5073891625615764,
+      "acc_stderr,none": 0.035176035403610105
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.6,
+      "acc_stderr,none": 0.04923659639173309
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.3333333333333333,
+      "acc_stderr,none": 0.028742040903948496
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.3509933774834437,
+      "acc_stderr,none": 0.03896981964257375
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.4351851851851852,
+      "acc_stderr,none": 0.03381200005643525
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.45535714285714285,
+      "acc_stderr,none": 0.04726835553719099
+    }
+  },
+  "task_info": {
+    "model": "Qwen/Qwen1.5-7B-Chat-GGUF",
+    "revision": "main",
+    "private": false,
+    "params": 28.0,
+    "architectures": "?",
+    "quant_type": "llama.cpp",
+    "precision": "4bit",
+    "model_params": 56.0,
+    "model_size": 28.0,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*q4_0.gguf",
+    "hardware": "cpu",
+    "status": "Pending",
+    "submitted_time": "2024-05-01T16:23:43Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "llama_cpp"
+  },
+  "quantization_config": {
+    "quant_method": "llama.cpp",
+    "ftype": "*q4_0.gguf"
+  },
+  "versions": {
+    "harness|openbookqa|0": 1.0,
+    "harness|piqa|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|winogrande|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1714965259.0617445,
+  "config": {
+    "model": "WrapperGGUFLM",
+    "model_args": "gguf_model=Qwen/Qwen1.5-7B-Chat-GGUF,ftype=*q4_0.gguf,dtype=float16,_commit_hash=main",
+    "batch_size": 1,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
SanctumAI/results_2024-05-03-22-24-42.json
ADDED
@@ -0,0 +1,579 @@
+{
+  "config_general": {
+    "lighteval_sha": "no",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-03-22-24-42",
+    "total_evaluation_time_secondes": "",
+    "model_name": "SanctumAI/Phi-3-mini-4k-instruct-GGUF",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 2.175438336,
+    "model_params": 3.821079552,
+    "quant_type": "llama.cpp",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|openbookqa|0": {
+      "acc,none": 0.338,
+      "acc_stderr,none": 0.02117566569520941,
+      "acc_norm,none": 0.438,
+      "acc_norm_stderr,none": 0.022210326363977417,
+      "alias": "openbookqa"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5976897032463653,
+      "acc_stderr,none": 0.004893617014975288,
+      "acc_norm,none": 0.7741485759808803,
+      "acc_norm_stderr,none": 0.004172872282984298,
+      "alias": "hellaswag"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.8047138047138047,
+      "acc_stderr,none": 0.008134384386937893,
+      "acc_norm,none": 0.7916666666666666,
+      "acc_norm_stderr,none": 0.008333333333333193,
+      "alias": "arc_easy"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.5196245733788396,
+      "acc_stderr,none": 0.014600132075947089,
+      "acc_norm,none": 0.5503412969283277,
+      "acc_norm_stderr,none": 0.014537144444284736,
+      "alias": "arc_challenge"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.8636085626911315,
+      "acc_stderr,none": 0.0060026713013815024,
+      "alias": "boolq"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.4186046511627907,
+      "acc_stderr,none": 0.01727001528447686,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.6075128202890306,
+      "acc_stderr,none": 0.01539728015231391,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.655177325167355,
+      "acc_stderr,none": 0.003811132023917817,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.6125398512221042,
+      "acc_stderr,none": 0.006814771868222836
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.5476190476190477,
+      "acc_stderr,none": 0.044518079590553275
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.8,
+      "acc_stderr,none": 0.031234752377721175
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.7794117647058824,
+      "acc_stderr,none": 0.02910225438967409
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.8016877637130801,
+      "acc_stderr,none": 0.025955020841621105
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.7768595041322314,
+      "acc_stderr,none": 0.03800754475228733
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.7407407407407407,
+      "acc_stderr,none": 0.04236511258094633
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.8159509202453987,
+      "acc_stderr,none": 0.030446777687971716
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.7109826589595376,
+      "acc_stderr,none": 0.02440517393578323
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.48044692737430167,
+      "acc_stderr,none": 0.016709709877661995
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.6977491961414791,
+      "acc_stderr,none": 0.02608270069539966
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.7623456790123457,
+      "acc_stderr,none": 0.023683591837008557
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.4869621903520209,
+      "acc_stderr,none": 0.012765893883835332
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.8070175438596491,
+      "acc_stderr,none": 0.030267457554898465
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.7006758931445124,
+      "acc_stderr,none": 0.007945113982147629
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.68,
+      "acc_stderr,none": 0.04688261722621504
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.7320754716981132,
+      "acc_stderr,none": 0.027257260322494845
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.653179190751445,
+      "acc_stderr,none": 0.036291466701596636
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.36,
+      "acc_stderr,none": 0.04824181513244218
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.6771300448430493,
+      "acc_stderr,none": 0.031381476375754995
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.8446601941747572,
+      "acc_stderr,none": 0.035865947385739734
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.8675213675213675,
+      "acc_stderr,none": 0.02220930907316561
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.7,
+      "acc_stderr,none": 0.046056618647183814
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.8058748403575989,
+      "acc_stderr,none": 0.014143970276657574
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.696078431372549,
+      "acc_stderr,none": 0.026336613469046637
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.5354609929078015,
+      "acc_stderr,none": 0.029752389657427047
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.6507352941176471,
+      "acc_stderr,none": 0.02895975519682486
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.03892494720807614
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.769580760480988,
+      "acc_stderr,none": 0.00743221355809227
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.543859649122807,
+      "acc_stderr,none": 0.046854730419077895
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.8080808080808081,
+      "acc_stderr,none": 0.028057791672989017
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.8860103626943006,
+      "acc_stderr,none": 0.022935144053919432
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.7076923076923077,
+      "acc_stderr,none": 0.02306043838085774
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.819327731092437,
+      "acc_stderr,none": 0.024991964966600756
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.8660550458715597,
+      "acc_stderr,none": 0.014602811435592635
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.7022900763358778,
+      "acc_stderr,none": 0.04010358942462203
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.6944444444444444,
+      "acc_stderr,none": 0.018635594034423972
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.6545454545454545,
+      "acc_stderr,none": 0.04554619617541054
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.7387755102040816,
+      "acc_stderr,none": 0.028123429335142783
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.8656716417910447,
+      "acc_stderr,none": 0.024112678240900826
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.88,
+      "acc_stderr,none": 0.03265986323710906
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.5623215984776403,
+      "acc_stderr,none": 0.008412272866798855
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.36,
+      "acc_stderr,none": 0.04824181513244218
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.6370370370370371,
+      "acc_stderr,none": 0.04153948404742398
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.7368421052631579,
+      "acc_stderr,none": 0.03583496176361072
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.8125,
+      "acc_stderr,none": 0.032639560491693344
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.45,
+      "acc_stderr,none": 0.049999999999999996
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.47,
+      "acc_stderr,none": 0.05016135580465919
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.04725815626252604
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.3431372549019608,
+      "acc_stderr,none": 0.04724007352383889
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.74,
+      "acc_stderr,none": 0.04408440022768078
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.6212765957446809,
+      "acc_stderr,none": 0.03170995606040655
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.5724137931034483,
+      "acc_stderr,none": 0.04122737111370333
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.4894179894179894,
+      "acc_stderr,none": 0.02574554227604549
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.8290322580645161,
+      "acc_stderr,none": 0.021417242936321558
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.5615763546798029,
+      "acc_stderr,none": 0.03491207857486518
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.66,
+      "acc_stderr,none": 0.04760952285695237
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.337037037037037,
+      "acc_stderr,none": 0.028820884666253252
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.4304635761589404,
+      "acc_stderr,none": 0.04042809961395634
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.5787037037037037,
+      "acc_stderr,none": 0.03367462138896078
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.04745789978762494
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.779651795429815,
+      "acc_stderr,none": 0.009670535456853164,
+      "acc_norm,none": 0.7682263329706203,
+      "acc_norm_stderr,none": 0.009845143772794029,
+      "alias": "piqa"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 5.970033653408986,
+      "perplexity_stderr,none": 0.17556862043175503,
+      "acc,none": 0.33242771201241994,
+      "acc_stderr,none": 0.006563112265118183,
+      "alias": "lambada_openai"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.7016574585635359,
+      "acc_stderr,none": 0.01285888501003043,
+      "alias": "winogrande"
+    }
+  },
+  "task_info": {
+    "model": "SanctumAI/Phi-3-mini-4k-instruct-GGUF",
+    "revision": "main",
+    "private": false,
+    "params": 0.0,
+    "architectures": "?",
+    "quant_type": "llama.cpp",
+    "precision": "4bit",
+    "model_params": 0,
+    "model_size": 0.0,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-05-01T16:17:48Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "llama_cpp"
+  },
+  "quantization_config": {
+    "quant_method": "llama.cpp",
+    "ftype": "*Q4_0.gguf"
+  },
+  "versions": {
+    "harness|openbookqa|0": 1.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|piqa|0": 1.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|winogrande|0": 1.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1714697373.5350165,
+  "config": {
+    "model": "WrapperGGUFLM",
+    "model_args": "gguf_model=SanctumAI/Phi-3-mini-4k-instruct-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
+    "batch_size": 1,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
TechxGenus/results_2024-05-01-22-34-56.json
ADDED
@@ -0,0 +1,583 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-01-22-34-56",
+    "total_evaluation_time_secondes": "",
+    "model_name": "TechxGenus/gemma-7b-AWQ",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 7.17,
+    "model_params": 7.81,
+    "quant_type": "AWQ",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.2766217870257038,
+      "acc_stderr,none": 0.015659605755326916,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.7976060935799782,
+      "acc_stderr,none": 0.00937428968280767,
+      "acc_norm,none": 0.809031556039173,
+      "acc_norm_stderr,none": 0.00917083663701189,
+      "alias": "piqa"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.8302752293577982,
+      "acc_stderr,none": 0.0065656268366337065,
+      "alias": "boolq"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.316,
+      "acc_stderr,none": 0.020812359515855857,
+      "acc_norm,none": 0.454,
+      "acc_norm_stderr,none": 0.022288147591176945,
+      "alias": "openbookqa"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.40628500623097014,
+      "acc_stderr,none": 0.01403539875143181,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.7458563535911602,
+      "acc_stderr,none": 0.01223630721970827,
+      "alias": "winogrande"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.4974402730375427,
+      "acc_stderr,none": 0.014611199329843774,
+      "acc_norm,none": 0.514505119453925,
+      "acc_norm_stderr,none": 0.01460524108137005,
+      "alias": "arc_challenge"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.8101851851851852,
+      "acc_stderr,none": 0.00804684052785223,
+      "acc_norm,none": 0.7992424242424242,
+      "acc_norm_stderr,none": 0.008219462692991503,
+      "alias": "arc_easy"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.6053980914399658,
+      "acc_stderr,none": 0.0038873124960644378,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.536663124335813,
+      "acc_stderr,none": 0.006716199085190922
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.4126984126984127,
+      "acc_stderr,none": 0.04403438954768176
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.7090909090909091,
+      "acc_stderr,none": 0.03546563019624337
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.7794117647058824,
+      "acc_stderr,none": 0.029102254389674082
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.7763713080168776,
+      "acc_stderr,none": 0.027123298205229966
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.7603305785123967,
+      "acc_stderr,none": 0.03896878985070416
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.6759259259259259,
+      "acc_stderr,none": 0.04524596007030048
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.7423312883435583,
+      "acc_stderr,none": 0.03436150827846917
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.6820809248554913,
+      "acc_stderr,none": 0.025070713719153186
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.2424581005586592,
+      "acc_stderr,none": 0.014333522059217887
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.6816720257234726,
+      "acc_stderr,none": 0.026457225067811032
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.7098765432098766,
+      "acc_stderr,none": 0.025251173936495033
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.45045632333767927,
+      "acc_stderr,none": 0.012707390438502346
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.8245614035087719,
+      "acc_stderr,none": 0.029170885500727654
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.6868361763759253,
+      "acc_stderr,none": 0.0080630455468288
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.6,
+      "acc_stderr,none": 0.049236596391733084
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.6716981132075471,
+      "acc_stderr,none": 0.02890159361241178
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.6473988439306358,
+      "acc_stderr,none": 0.036430371689585475
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.43,
+      "acc_stderr,none": 0.049756985195624284
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.6591928251121076,
+      "acc_stderr,none": 0.031811497470553604
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.8349514563106796,
+      "acc_stderr,none": 0.036756688322331886
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.8504273504273504,
+      "acc_stderr,none": 0.02336505149175372
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.68,
+      "acc_stderr,none": 0.046882617226215034
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.7982120051085568,
+      "acc_stderr,none": 0.014351702181636863
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.7222222222222222,
+      "acc_stderr,none": 0.02564686309713791
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.5035460992907801,
+      "acc_stderr,none": 0.02982674915328092
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.6323529411764706,
+      "acc_stderr,none": 0.029289413409403192
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.4879518072289157,
+      "acc_stderr,none": 0.038913644958358196
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.6967825804354891,
+      "acc_stderr,none": 0.008091643946869077
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.41228070175438597,
+      "acc_stderr,none": 0.046306532033665956
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.7777777777777778,
+      "acc_stderr,none": 0.02962022787479048
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.8082901554404145,
+      "acc_stderr,none": 0.02840895362624528
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.6025641025641025,
+      "acc_stderr,none": 0.024811920017903836
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.6134453781512605,
+      "acc_stderr,none": 0.03163145807552378
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.818348623853211,
+      "acc_stderr,none": 0.016530617409266878
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.7099236641221374,
+      "acc_stderr,none": 0.03980066246467765
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.6241830065359477,
+      "acc_stderr,none": 0.01959402113657745
+    },
+    "harness|mmlu_public_relations|0": {
|
260 |
+
"alias": " - public_relations",
|
261 |
+
"acc,none": 0.6545454545454545,
|
262 |
+
"acc_stderr,none": 0.04554619617541054
|
263 |
+
},
|
264 |
+
"harness|mmlu_security_studies|0": {
|
265 |
+
"alias": " - security_studies",
|
266 |
+
"acc,none": 0.7061224489795919,
|
267 |
+
"acc_stderr,none": 0.029162738410249762
|
268 |
+
},
|
269 |
+
"harness|mmlu_sociology|0": {
|
270 |
+
"alias": " - sociology",
|
271 |
+
"acc,none": 0.7960199004975125,
|
272 |
+
"acc_stderr,none": 0.02849317624532607
|
273 |
+
},
|
274 |
+
"harness|mmlu_us_foreign_policy|0": {
|
275 |
+
"alias": " - us_foreign_policy",
|
276 |
+
"acc,none": 0.8,
|
277 |
+
"acc_stderr,none": 0.04020151261036846
|
278 |
+
},
|
279 |
+
"harness|mmlu_stem|0": {
|
280 |
+
"alias": " - stem",
|
281 |
+
"acc,none": 0.538534728829686,
|
282 |
+
"acc_stderr,none": 0.008589914937669538
|
283 |
+
},
|
284 |
+
"harness|mmlu_abstract_algebra|0": {
|
285 |
+
"alias": " - abstract_algebra",
|
286 |
+
"acc,none": 0.32,
|
287 |
+
"acc_stderr,none": 0.04688261722621504
|
288 |
+
},
|
289 |
+
"harness|mmlu_anatomy|0": {
|
290 |
+
"alias": " - anatomy",
|
291 |
+
"acc,none": 0.5703703703703704,
|
292 |
+
"acc_stderr,none": 0.04276349494376599
|
293 |
+
},
|
294 |
+
"harness|mmlu_astronomy|0": {
|
295 |
+
"alias": " - astronomy",
|
296 |
+
"acc,none": 0.6973684210526315,
|
297 |
+
"acc_stderr,none": 0.03738520676119668
|
298 |
+
},
|
299 |
+
"harness|mmlu_college_biology|0": {
|
300 |
+
"alias": " - college_biology",
|
301 |
+
"acc,none": 0.7569444444444444,
|
302 |
+
"acc_stderr,none": 0.03586879280080343
|
303 |
+
},
|
304 |
+
"harness|mmlu_college_chemistry|0": {
|
305 |
+
"alias": " - college_chemistry",
|
306 |
+
"acc,none": 0.43,
|
307 |
+
"acc_stderr,none": 0.049756985195624284
|
308 |
+
},
|
309 |
+
"harness|mmlu_college_computer_science|0": {
|
310 |
+
"alias": " - college_computer_science",
|
311 |
+
"acc,none": 0.52,
|
312 |
+
"acc_stderr,none": 0.050211673156867795
|
313 |
+
},
|
314 |
+
"harness|mmlu_college_mathematics|0": {
|
315 |
+
"alias": " - college_mathematics",
|
316 |
+
"acc,none": 0.35,
|
317 |
+
"acc_stderr,none": 0.04793724854411019
|
318 |
+
},
|
319 |
+
"harness|mmlu_college_physics|0": {
|
320 |
+
"alias": " - college_physics",
|
321 |
+
"acc,none": 0.4019607843137255,
|
322 |
+
"acc_stderr,none": 0.04878608714466996
|
323 |
+
},
|
324 |
+
"harness|mmlu_computer_security|0": {
|
325 |
+
"alias": " - computer_security",
|
326 |
+
"acc,none": 0.68,
|
327 |
+
"acc_stderr,none": 0.046882617226215034
|
328 |
+
},
|
329 |
+
"harness|mmlu_conceptual_physics|0": {
|
330 |
+
"alias": " - conceptual_physics",
|
331 |
+
"acc,none": 0.6085106382978723,
|
332 |
+
"acc_stderr,none": 0.03190701242326812
|
333 |
+
},
|
334 |
+
"harness|mmlu_electrical_engineering|0": {
|
335 |
+
"alias": " - electrical_engineering",
|
336 |
+
"acc,none": 0.5793103448275863,
|
337 |
+
"acc_stderr,none": 0.0411391498118926
|
338 |
+
},
|
339 |
+
"harness|mmlu_elementary_mathematics|0": {
|
340 |
+
"alias": " - elementary_mathematics",
|
341 |
+
"acc,none": 0.4576719576719577,
|
342 |
+
"acc_stderr,none": 0.025658868862058325
|
343 |
+
},
|
344 |
+
"harness|mmlu_high_school_biology|0": {
|
345 |
+
"alias": " - high_school_biology",
|
346 |
+
"acc,none": 0.7580645161290323,
|
347 |
+
"acc_stderr,none": 0.024362599693031096
|
348 |
+
},
|
349 |
+
"harness|mmlu_high_school_chemistry|0": {
|
350 |
+
"alias": " - high_school_chemistry",
|
351 |
+
"acc,none": 0.541871921182266,
|
352 |
+
"acc_stderr,none": 0.03505630140785741
|
353 |
+
},
|
354 |
+
"harness|mmlu_high_school_computer_science|0": {
|
355 |
+
"alias": " - high_school_computer_science",
|
356 |
+
"acc,none": 0.64,
|
357 |
+
"acc_stderr,none": 0.048241815132442176
|
358 |
+
},
|
359 |
+
"harness|mmlu_high_school_mathematics|0": {
|
360 |
+
"alias": " - high_school_mathematics",
|
361 |
+
"acc,none": 0.3814814814814815,
|
362 |
+
"acc_stderr,none": 0.02961671892749759
|
363 |
+
},
|
364 |
+
"harness|mmlu_high_school_physics|0": {
|
365 |
+
"alias": " - high_school_physics",
|
366 |
+
"acc,none": 0.41721854304635764,
|
367 |
+
"acc_stderr,none": 0.0402614149763461
|
368 |
+
},
|
369 |
+
"harness|mmlu_high_school_statistics|0": {
|
370 |
+
"alias": " - high_school_statistics",
|
371 |
+
"acc,none": 0.5185185185185185,
|
372 |
+
"acc_stderr,none": 0.034076320938540516
|
373 |
+
},
|
374 |
+
"harness|mmlu_machine_learning|0": {
|
375 |
+
"alias": " - machine_learning",
|
376 |
+
"acc,none": 0.42857142857142855,
|
377 |
+
"acc_stderr,none": 0.04697113923010212
|
378 |
+
},
|
379 |
+
"harness|lambada:openai|0": {
|
380 |
+
"perplexity,none": 3.390937355467875,
|
381 |
+
"perplexity_stderr,none": 0.06679719420613348,
|
382 |
+
"acc,none": 0.7360760721909567,
|
383 |
+
"acc_stderr,none": 0.006140628245086325,
|
384 |
+
"alias": "lambada_openai"
|
385 |
+
},
|
386 |
+
"harness|hellaswag|0": {
|
387 |
+
"acc,none": 0.5981876120294762,
|
388 |
+
"acc_stderr,none": 0.004892624490937232,
|
389 |
+
"acc_norm,none": 0.7983469428400717,
|
390 |
+
"acc_norm_stderr,none": 0.0040041446222330935,
|
391 |
+
"alias": "hellaswag"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "TechxGenus/gemma-7b-AWQ",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 10.44,
|
399 |
+
"architectures": "GemmaForCausalLM",
|
400 |
+
"quant_type": "AWQ",
|
401 |
+
"precision": "4bit",
|
402 |
+
"model_params": 20.88,
|
403 |
+
"model_size": 10.44,
|
404 |
+
"weight_dtype": "int4",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-05-01T07:36:03Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"bits": 4,
|
417 |
+
"group_size": 128,
|
418 |
+
"modules_to_not_convert": null,
|
419 |
+
"quant_method": "awq",
|
420 |
+
"version": "gemm",
|
421 |
+
"zero_point": true
|
422 |
+
},
|
423 |
+
"versions": {
|
424 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
425 |
+
"harness|piqa|0": 1.0,
|
426 |
+
"harness|boolq|0": 2.0,
|
427 |
+
"harness|openbookqa|0": 1.0,
|
428 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
429 |
+
"harness|winogrande|0": 1.0,
|
430 |
+
"harness|arc:challenge|0": 1.0,
|
431 |
+
"harness|arc:easy|0": 1.0,
|
432 |
+
"harness|mmlu|0": null,
|
433 |
+
"harness|mmlu_humanities|0": null,
|
434 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
435 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
436 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
437 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
438 |
+
"harness|mmlu_international_law|0": 0.0,
|
439 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
440 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
441 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
442 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
443 |
+
"harness|mmlu_philosophy|0": 0.0,
|
444 |
+
"harness|mmlu_prehistory|0": 0.0,
|
445 |
+
"harness|mmlu_professional_law|0": 0.0,
|
446 |
+
"harness|mmlu_world_religions|0": 0.0,
|
447 |
+
"harness|mmlu_other|0": null,
|
448 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
449 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
450 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
451 |
+
"harness|mmlu_global_facts|0": 0.0,
|
452 |
+
"harness|mmlu_human_aging|0": 0.0,
|
453 |
+
"harness|mmlu_management|0": 0.0,
|
454 |
+
"harness|mmlu_marketing|0": 0.0,
|
455 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
456 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
457 |
+
"harness|mmlu_nutrition|0": 0.0,
|
458 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
459 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
460 |
+
"harness|mmlu_virology|0": 0.0,
|
461 |
+
"harness|mmlu_social_sciences|0": null,
|
462 |
+
"harness|mmlu_econometrics|0": 0.0,
|
463 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
464 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
465 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
466 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
467 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
468 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
469 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
470 |
+
"harness|mmlu_public_relations|0": 0.0,
|
471 |
+
"harness|mmlu_security_studies|0": 0.0,
|
472 |
+
"harness|mmlu_sociology|0": 0.0,
|
473 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
474 |
+
"harness|mmlu_stem|0": null,
|
475 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
476 |
+
"harness|mmlu_anatomy|0": 0.0,
|
477 |
+
"harness|mmlu_astronomy|0": 0.0,
|
478 |
+
"harness|mmlu_college_biology|0": 0.0,
|
479 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
480 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
481 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
482 |
+
"harness|mmlu_college_physics|0": 0.0,
|
483 |
+
"harness|mmlu_computer_security|0": 0.0,
|
484 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
485 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
486 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
488 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
492 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
493 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
494 |
+
"harness|lambada:openai|0": 1.0,
|
495 |
+
"harness|hellaswag|0": 1.0
|
496 |
+
},
|
497 |
+
"n-shot": {
|
498 |
+
"arc_challenge": 0,
|
499 |
+
"arc_easy": 0,
|
500 |
+
"boolq": 0,
|
501 |
+
"hellaswag": 0,
|
502 |
+
"lambada_openai": 0,
|
503 |
+
"mmlu": 0,
|
504 |
+
"mmlu_abstract_algebra": 0,
|
505 |
+
"mmlu_anatomy": 0,
|
506 |
+
"mmlu_astronomy": 0,
|
507 |
+
"mmlu_business_ethics": 0,
|
508 |
+
"mmlu_clinical_knowledge": 0,
|
509 |
+
"mmlu_college_biology": 0,
|
510 |
+
"mmlu_college_chemistry": 0,
|
511 |
+
"mmlu_college_computer_science": 0,
|
512 |
+
"mmlu_college_mathematics": 0,
|
513 |
+
"mmlu_college_medicine": 0,
|
514 |
+
"mmlu_college_physics": 0,
|
515 |
+
"mmlu_computer_security": 0,
|
516 |
+
"mmlu_conceptual_physics": 0,
|
517 |
+
"mmlu_econometrics": 0,
|
518 |
+
"mmlu_electrical_engineering": 0,
|
519 |
+
"mmlu_elementary_mathematics": 0,
|
520 |
+
"mmlu_formal_logic": 0,
|
521 |
+
"mmlu_global_facts": 0,
|
522 |
+
"mmlu_high_school_biology": 0,
|
523 |
+
"mmlu_high_school_chemistry": 0,
|
524 |
+
"mmlu_high_school_computer_science": 0,
|
525 |
+
"mmlu_high_school_european_history": 0,
|
526 |
+
"mmlu_high_school_geography": 0,
|
527 |
+
"mmlu_high_school_government_and_politics": 0,
|
528 |
+
"mmlu_high_school_macroeconomics": 0,
|
529 |
+
"mmlu_high_school_mathematics": 0,
|
530 |
+
"mmlu_high_school_microeconomics": 0,
|
531 |
+
"mmlu_high_school_physics": 0,
|
532 |
+
"mmlu_high_school_psychology": 0,
|
533 |
+
"mmlu_high_school_statistics": 0,
|
534 |
+
"mmlu_high_school_us_history": 0,
|
535 |
+
"mmlu_high_school_world_history": 0,
|
536 |
+
"mmlu_human_aging": 0,
|
537 |
+
"mmlu_human_sexuality": 0,
|
538 |
+
"mmlu_humanities": 0,
|
539 |
+
"mmlu_international_law": 0,
|
540 |
+
"mmlu_jurisprudence": 0,
|
541 |
+
"mmlu_logical_fallacies": 0,
|
542 |
+
"mmlu_machine_learning": 0,
|
543 |
+
"mmlu_management": 0,
|
544 |
+
"mmlu_marketing": 0,
|
545 |
+
"mmlu_medical_genetics": 0,
|
546 |
+
"mmlu_miscellaneous": 0,
|
547 |
+
"mmlu_moral_disputes": 0,
|
548 |
+
"mmlu_moral_scenarios": 0,
|
549 |
+
"mmlu_nutrition": 0,
|
550 |
+
"mmlu_other": 0,
|
551 |
+
"mmlu_philosophy": 0,
|
552 |
+
"mmlu_prehistory": 0,
|
553 |
+
"mmlu_professional_accounting": 0,
|
554 |
+
"mmlu_professional_law": 0,
|
555 |
+
"mmlu_professional_medicine": 0,
|
556 |
+
"mmlu_professional_psychology": 0,
|
557 |
+
"mmlu_public_relations": 0,
|
558 |
+
"mmlu_security_studies": 0,
|
559 |
+
"mmlu_social_sciences": 0,
|
560 |
+
"mmlu_sociology": 0,
|
561 |
+
"mmlu_stem": 0,
|
562 |
+
"mmlu_us_foreign_policy": 0,
|
563 |
+
"mmlu_virology": 0,
|
564 |
+
"mmlu_world_religions": 0,
|
565 |
+
"openbookqa": 0,
|
566 |
+
"piqa": 0,
|
567 |
+
"truthfulqa_mc1": 0,
|
568 |
+
"truthfulqa_mc2": 0,
|
569 |
+
"winogrande": 0
|
570 |
+
},
|
571 |
+
"date": 1714549105.0151005,
|
572 |
+
"config": {
|
573 |
+
"model": "hf",
|
574 |
+
"model_args": "pretrained=TechxGenus/gemma-7b-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
575 |
+
"batch_size": 2,
|
576 |
+
"batch_sizes": [],
|
577 |
+
"device": "cuda",
|
578 |
+
"use_cache": null,
|
579 |
+
"limit": null,
|
580 |
+
"bootstrap_iters": 100000,
|
581 |
+
"gen_kwargs": null
|
582 |
+
}
|
583 |
+
}
|
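Every results file added by this commit follows the same layout: per-benchmark scores keyed "harness|<task>|<num_fewshot>" under "results", submission metadata under "task_info", and the quantizer settings under "quantization_config". As a minimal stdlib-only sketch of reading one of these files back (the "results.json" path is a placeholder, not a file in this repo):

import json

# Placeholder path: substitute any of the per-run JSON files in this commit.
with open("results.json") as f:
    report = json.load(f)

# Zero-shot MMLU aggregate and its standard error.
mmlu = report["results"]["harness|mmlu|0"]
print(mmlu["acc,none"], mmlu["acc_stderr,none"])

# Quantization recipe used for the run (e.g. AWQ above: bits=4, group_size=128).
print(report["task_info"]["quant_type"], report["quantization_config"])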
TechxGenus/results_2024-05-02-00-07-14.json
ADDED
@@ -0,0 +1,588 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-05-02-00-07-14",
        "total_evaluation_time_secondes": "",
        "model_name": "TechxGenus/gemma-7b-GPTQ",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 7.18,
        "model_params": 7.82,
        "quant_type": "GPTQ",
        "precision": "4bit"
    },
    "results": {
        "harness|lambada:openai|0": {
            "perplexity,none": 5.932209341148522,
            "perplexity_stderr,none": 0.1341079006392553,
            "acc,none": 0.5761692218125364,
            "acc_stderr,none": 0.006884673454916892,
            "alias": "lambada_openai"
        },
        "harness|boolq|0": {
            "acc,none": 0.8027522935779816,
            "acc_stderr,none": 0.006959680427057393,
            "alias": "boolq"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.308,
            "acc_stderr,none": 0.0206670329874661,
            "acc_norm,none": 0.422,
            "acc_norm_stderr,none": 0.022109039310618552,
            "alias": "openbookqa"
        },
        "harness|piqa|0": {
            "acc,none": 0.7834602829162133,
            "acc_stderr,none": 0.00960998471438461,
            "acc_norm,none": 0.794885745375408,
            "acc_norm_stderr,none": 0.009420971671017915,
            "alias": "piqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2729498164014688,
            "acc_stderr,none": 0.015594753632006525,
            "alias": "truthfulqa_mc1"
        },
        "harness|mmlu|0": {
            "acc,none": 0.5710724967953283,
            "acc_stderr,none": 0.003966391569739342,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.4973432518597237,
            "acc_stderr,none": 0.0068655705992840856
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.38095238095238093,
            "acc_stderr,none": 0.043435254289490965
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.6303030303030303,
            "acc_stderr,none": 0.03769430314512567
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.7303921568627451,
            "acc_stderr,none": 0.03114557065948678
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.7046413502109705,
            "acc_stderr,none": 0.029696338713422882
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.71900826446281,
            "acc_stderr,none": 0.04103203830514512
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.6018518518518519,
            "acc_stderr,none": 0.04732332615978814
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.6380368098159509,
            "acc_stderr,none": 0.037757007291414416
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.5751445086705202,
            "acc_stderr,none": 0.026613350840261746
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2424581005586592,
            "acc_stderr,none": 0.014333522059217887
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.6463022508038585,
            "acc_stderr,none": 0.027155208103200875
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.6604938271604939,
            "acc_stderr,none": 0.026348564412011638
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.4230769230769231,
            "acc_stderr,none": 0.01261820406658839
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.7953216374269005,
            "acc_stderr,none": 0.030944459778533204
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.6523978113936273,
            "acc_stderr,none": 0.008314718279747594
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.61,
            "acc_stderr,none": 0.04902071300001974
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.6641509433962264,
            "acc_stderr,none": 0.029067220146644823
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.5722543352601156,
            "acc_stderr,none": 0.03772446857518026
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.37,
            "acc_stderr,none": 0.04852365870939099
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.6636771300448431,
            "acc_stderr,none": 0.031708824268455
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.8349514563106796,
            "acc_stderr,none": 0.036756688322331886
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.7905982905982906,
            "acc_stderr,none": 0.026655699653922737
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.61,
            "acc_stderr,none": 0.04902071300001975
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.7484035759897829,
            "acc_stderr,none": 0.015517322365529617
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.6830065359477124,
            "acc_stderr,none": 0.026643278474508755
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.46808510638297873,
            "acc_stderr,none": 0.029766675075873866
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.6176470588235294,
            "acc_stderr,none": 0.029520095697687754
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.4759036144578313,
            "acc_stderr,none": 0.03887971849597264
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.6626584335391615,
            "acc_stderr,none": 0.008305114704313213
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.35964912280701755,
            "acc_stderr,none": 0.04514496132873632
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.7272727272727273,
            "acc_stderr,none": 0.03173071239071724
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.7979274611398963,
            "acc_stderr,none": 0.02897908979429673
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.5820512820512821,
            "acc_stderr,none": 0.02500732988246122
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.6302521008403361,
            "acc_stderr,none": 0.03135709599613591
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.7834862385321101,
            "acc_stderr,none": 0.01765871059444313
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.6183206106870229,
            "acc_stderr,none": 0.0426073515764456
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.5669934640522876,
            "acc_stderr,none": 0.020045442473324227
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.5727272727272728,
            "acc_stderr,none": 0.04738198703545483
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.6816326530612244,
            "acc_stderr,none": 0.029822533793982066
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.7910447761194029,
            "acc_stderr,none": 0.028748298931728658
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.79,
            "acc_stderr,none": 0.040936018074033256
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.5115762765620044,
            "acc_stderr,none": 0.008616921534276603
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.33,
            "acc_stderr,none": 0.047258156262526045
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.5481481481481482,
            "acc_stderr,none": 0.04299268905480864
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.6776315789473685,
            "acc_stderr,none": 0.03803510248351585
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.7430555555555556,
            "acc_stderr,none": 0.03653946969442099
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.45,
            "acc_stderr,none": 0.05
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.48,
            "acc_stderr,none": 0.050211673156867795
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.37,
            "acc_stderr,none": 0.048523658709391
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.3431372549019608,
            "acc_stderr,none": 0.04724007352383889
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.6,
            "acc_stderr,none": 0.04923659639173309
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.5617021276595745,
            "acc_stderr,none": 0.03243618636108101
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.5517241379310345,
            "acc_stderr,none": 0.041443118108781526
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.4074074074074074,
            "acc_stderr,none": 0.025305906241590632
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.7419354838709677,
            "acc_stderr,none": 0.02489246917246283
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.5123152709359606,
            "acc_stderr,none": 0.035169204442208966
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.55,
            "acc_stderr,none": 0.04999999999999999
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.34814814814814815,
            "acc_stderr,none": 0.029045600290616258
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.40397350993377484,
            "acc_stderr,none": 0.040064856853653415
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.5462962962962963,
            "acc_stderr,none": 0.033953227263757976
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.38392857142857145,
            "acc_stderr,none": 0.04616143075028547
        },
        "harness|winogrande|0": {
            "acc,none": 0.7269139700078927,
            "acc_stderr,none": 0.012522020105869457,
            "alias": "winogrande"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5654252141007767,
            "acc_stderr,none": 0.004946879874422689,
            "acc_norm,none": 0.7689703246365266,
            "acc_norm_stderr,none": 0.004206299481687509,
            "alias": "hellaswag"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.48464163822525597,
            "acc_stderr,none": 0.014604496129394911,
            "acc_norm,none": 0.5093856655290102,
            "acc_norm_stderr,none": 0.014608816322065003,
            "alias": "arc_challenge"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.7946127946127947,
            "acc_stderr,none": 0.008289582587432945,
            "acc_norm,none": 0.7714646464646465,
            "acc_norm_stderr,none": 0.008615944722488483,
            "alias": "arc_easy"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.4114860662572397,
            "acc_stderr,none": 0.013981988649185982,
            "alias": "truthfulqa_mc2"
        }
    },
    "task_info": {
        "model": "TechxGenus/gemma-7b-GPTQ",
        "revision": "main",
        "private": false,
        "params": 10.452,
        "architectures": "GemmaForCausalLM",
        "quant_type": "GPTQ",
        "precision": "4bit",
        "model_params": 20.904,
        "model_size": 10.452,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-05-01T07:38:56Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "bits": 4,
        "damp_percent": 0.01,
        "desc_act": false,
        "group_size": 128,
        "is_marlin_format": false,
        "model_file_base_name": null,
        "model_name_or_path": null,
        "quant_method": "gptq",
        "static_groups": false,
        "sym": true,
        "true_sequential": true
    },
    "versions": {
        "harness|lambada:openai|0": 1.0,
        "harness|boolq|0": 2.0,
        "harness|openbookqa|0": 1.0,
        "harness|piqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|winogrande|0": 1.0,
        "harness|hellaswag|0": 1.0,
        "harness|arc:challenge|0": 1.0,
        "harness|arc:easy|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714574230.2024841,
    "config": {
        "model": "hf",
        "model_args": "pretrained=TechxGenus/gemma-7b-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
        "batch_size": 2,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
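The "config" block of each file records the lm-evaluation-harness launch settings; its "model_args" field is a single comma-separated string of key=value pairs rather than nested JSON. A hedged sketch of splitting it back into keyword arguments (this assumes the simple "k=v,k=v" form visible above and is not the harness's own parser; values stay strings, e.g. "True" rather than a boolean):

# model_args string copied from the "config" section above.
model_args = "pretrained=TechxGenus/gemma-7b-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main"

# Split "k=v,k=v,..." into a dict of string keys and string values.
kwargs = dict(pair.split("=", 1) for pair in model_args.split(","))
print(kwargs["pretrained"])  # TechxGenus/gemma-7b-GPTQ
print(kwargs["dtype"])       # float16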
TheBloke/results_2024-04-27-02-47-01.json
ADDED
@@ -0,0 +1,586 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-04-27-02-47-01",
        "total_evaluation_time_secondes": "",
        "model_name": "TheBloke/Llama-2-7B-Chat-GPTQ",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 3.9,
        "model_params": 6.54,
        "quant_type": "GPTQ",
        "precision": "4bit"
    },
    "results": {
        "harness|piqa|0": {
            "acc,none": 0.763873775843308,
            "acc_stderr,none": 0.009908965890558213,
            "acc_norm,none": 0.7665941240478781,
            "acc_norm_stderr,none": 0.009869247889521012,
            "alias": "piqa"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.2913096695226438,
            "acc_stderr,none": 0.015905987048184828,
            "alias": "truthfulqa_mc1"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5614419438358893,
            "acc_stderr,none": 0.0049519641319213225,
            "acc_norm,none": 0.7391953794064927,
            "acc_norm_stderr,none": 0.004381761941552722,
            "alias": "hellaswag"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.44047324022379325,
            "acc_stderr,none": 0.01555687649014822,
            "alias": "truthfulqa_mc2"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.328,
            "acc_stderr,none": 0.021017027165175492,
            "acc_norm,none": 0.4,
            "acc_norm_stderr,none": 0.021930844120728505,
            "alias": "openbookqa"
        },
        "harness|mmlu|0": {
            "acc,none": 0.43156245549067085,
            "acc_stderr,none": 0.00403805715600188,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.4074388947927736,
            "acc_stderr,none": 0.0069121839021850364
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.23809523809523808,
            "acc_stderr,none": 0.03809523809523812
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.5636363636363636,
            "acc_stderr,none": 0.03872592983524753
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.5882352941176471,
            "acc_stderr,none": 0.0345423658538061
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.5822784810126582,
            "acc_stderr,none": 0.032103530322412685
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.5537190082644629,
            "acc_stderr,none": 0.0453793517794788
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.5185185185185185,
            "acc_stderr,none": 0.04830366024635331
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.5276073619631901,
            "acc_stderr,none": 0.039223782906109894
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.48554913294797686,
            "acc_stderr,none": 0.02690784985628254
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2424581005586592,
            "acc_stderr,none": 0.014333522059217887
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.4983922829581994,
            "acc_stderr,none": 0.02839794490780661
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.5246913580246914,
            "acc_stderr,none": 0.02778680093142745
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.32920469361147325,
            "acc_stderr,none": 0.0120020916669023
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.6549707602339181,
            "acc_stderr,none": 0.036459813773888065
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.5098165432893467,
            "acc_stderr,none": 0.008732608676943622
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.42,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.4679245283018868,
            "acc_stderr,none": 0.03070948699255655
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.3699421965317919,
            "acc_stderr,none": 0.036812296333943194
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.41,
            "acc_stderr,none": 0.049431107042371025
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.5381165919282511,
            "acc_stderr,none": 0.03346015011973228
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.5825242718446602,
            "acc_stderr,none": 0.048828405482122375
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.688034188034188,
            "acc_stderr,none": 0.030351527323344937
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.42,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.6500638569604087,
            "acc_stderr,none": 0.017055679797150426
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.47058823529411764,
            "acc_stderr,none": 0.028580341065138286
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.3262411347517731,
            "acc_stderr,none": 0.027968453043563168
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.41544117647058826,
            "acc_stderr,none": 0.029935342707877743
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.43373493975903615,
            "acc_stderr,none": 0.03858158940685515
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.4774130646733832,
            "acc_stderr,none": 0.008798512511707348
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.24561403508771928,
            "acc_stderr,none": 0.04049339297748141
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.5151515151515151,
            "acc_stderr,none": 0.03560716516531061
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.616580310880829,
            "acc_stderr,none": 0.03508984236295342
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.36923076923076925,
            "acc_stderr,none": 0.02446861524147892
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.33613445378151263,
            "acc_stderr,none": 0.03068473711513536
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.563302752293578,
            "acc_stderr,none": 0.021264820158714195
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.5190839694656488,
            "acc_stderr,none": 0.04382094705550988
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.4117647058823529,
            "acc_stderr,none": 0.01991037746310593
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.5181818181818182,
            "acc_stderr,none": 0.04785964010794916
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.4530612244897959,
            "acc_stderr,none": 0.03186785930004129
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.6716417910447762,
            "acc_stderr,none": 0.033206858897443244
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.66,
            "acc_stderr,none": 0.04760952285695237
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.3457025055502696,
            "acc_stderr,none": 0.00832133835630205
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.32,
            "acc_stderr,none": 0.04688261722621504
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.42962962962962964,
            "acc_stderr,none": 0.04276349494376599
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.4473684210526316,
            "acc_stderr,none": 0.04046336883978251
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.4513888888888889,
            "acc_stderr,none": 0.04161402398403279
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.2,
            "acc_stderr,none": 0.04020151261036845
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.31,
            "acc_stderr,none": 0.04648231987117316
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.27,
            "acc_stderr,none": 0.0446196043338474
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.24509803921568626,
            "acc_stderr,none": 0.042801058373643945
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.58,
            "acc_stderr,none": 0.049604496374885836
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.3872340425531915,
            "acc_stderr,none": 0.03184389265339525
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.42758620689655175,
            "acc_stderr,none": 0.04122737111370331
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.2777777777777778,
            "acc_stderr,none": 0.023068188848261117
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.47419354838709676,
            "acc_stderr,none": 0.02840609505765332
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.30049261083743845,
            "acc_stderr,none": 0.03225799476233485
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.4,
            "acc_stderr,none": 0.04923659639173309
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.24444444444444444,
            "acc_stderr,none": 0.026202766534652148
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2980132450331126,
            "acc_stderr,none": 0.037345356767871984
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.2222222222222222,
            "acc_stderr,none": 0.028353212866863445
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.36607142857142855,
            "acc_stderr,none": 0.04572372358737431
        },
        "harness|winogrande|0": {
            "acc,none": 0.6511444356748224,
            "acc_stderr,none": 0.013395059320137336,
            "alias": "winogrande"
        },
        "harness|boolq|0": {
            "acc,none": 0.8012232415902141,
            "acc_stderr,none": 0.00697994677614537,
            "alias": "boolq"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 3.5526815329331782,
            "perplexity_stderr,none": 0.09428366649260858,
            "acc,none": 0.6966815447312246,
            "acc_stderr,none": 0.006404402872809118,
            "alias": "lambada_openai"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.42406143344709896,
            "acc_stderr,none": 0.014441889627464394,
            "acc_norm,none": 0.42918088737201365,
            "acc_norm_stderr,none": 0.014464085894870653,
            "alias": "arc_challenge"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.7264309764309764,
            "acc_stderr,none": 0.009147424438490745,
            "acc_norm,none": 0.6784511784511784,
            "acc_norm_stderr,none": 0.00958409157564063,
            "alias": "arc_easy"
        }
    },
    "task_info": {
        "model": "TheBloke/Llama-2-7B-Chat-GPTQ",
        "revision": "main",
        "private": false,
        "params": 4.524,
        "architectures": "LlamaForCausalLM",
        "quant_type": "GPTQ",
        "precision": "4bit",
        "model_params": 9.048,
        "model_size": 4.524,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-26T17:24:28Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "bits": 4,
        "group_size": 128,
        "damp_percent": 0.01,
        "desc_act": false,
        "sym": true,
        "true_sequential": true,
        "model_name_or_path": null,
        "model_file_base_name": "model",
        "quant_method": "gptq"
    },
    "versions": {
        "harness|piqa|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|hellaswag|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|openbookqa|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
|
448 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
449 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
450 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
451 |
+
"harness|mmlu_global_facts|0": 0.0,
|
452 |
+
"harness|mmlu_human_aging|0": 0.0,
|
453 |
+
"harness|mmlu_management|0": 0.0,
|
454 |
+
"harness|mmlu_marketing|0": 0.0,
|
455 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
456 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
457 |
+
"harness|mmlu_nutrition|0": 0.0,
|
458 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
459 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
460 |
+
"harness|mmlu_virology|0": 0.0,
|
461 |
+
"harness|mmlu_social_sciences|0": null,
|
462 |
+
"harness|mmlu_econometrics|0": 0.0,
|
463 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
464 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
465 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
466 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
467 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
468 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
469 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
470 |
+
"harness|mmlu_public_relations|0": 0.0,
|
471 |
+
"harness|mmlu_security_studies|0": 0.0,
|
472 |
+
"harness|mmlu_sociology|0": 0.0,
|
473 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
474 |
+
"harness|mmlu_stem|0": null,
|
475 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
476 |
+
"harness|mmlu_anatomy|0": 0.0,
|
477 |
+
"harness|mmlu_astronomy|0": 0.0,
|
478 |
+
"harness|mmlu_college_biology|0": 0.0,
|
479 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
480 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
481 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
482 |
+
"harness|mmlu_college_physics|0": 0.0,
|
483 |
+
"harness|mmlu_computer_security|0": 0.0,
|
484 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
485 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
486 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
488 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
492 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
493 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
494 |
+
"harness|winogrande|0": 1.0,
|
495 |
+
"harness|boolq|0": 2.0,
|
496 |
+
"harness|lambada:openai|0": 1.0,
|
497 |
+
"harness|arc:challenge|0": 1.0,
|
498 |
+
"harness|arc:easy|0": 1.0
|
499 |
+
},
|
500 |
+
"n-shot": {
|
501 |
+
"arc_challenge": 0,
|
502 |
+
"arc_easy": 0,
|
503 |
+
"boolq": 0,
|
504 |
+
"hellaswag": 0,
|
505 |
+
"lambada_openai": 0,
|
506 |
+
"mmlu": 0,
|
507 |
+
"mmlu_abstract_algebra": 0,
|
508 |
+
"mmlu_anatomy": 0,
|
509 |
+
"mmlu_astronomy": 0,
|
510 |
+
"mmlu_business_ethics": 0,
|
511 |
+
"mmlu_clinical_knowledge": 0,
|
512 |
+
"mmlu_college_biology": 0,
|
513 |
+
"mmlu_college_chemistry": 0,
|
514 |
+
"mmlu_college_computer_science": 0,
|
515 |
+
"mmlu_college_mathematics": 0,
|
516 |
+
"mmlu_college_medicine": 0,
|
517 |
+
"mmlu_college_physics": 0,
|
518 |
+
"mmlu_computer_security": 0,
|
519 |
+
"mmlu_conceptual_physics": 0,
|
520 |
+
"mmlu_econometrics": 0,
|
521 |
+
"mmlu_electrical_engineering": 0,
|
522 |
+
"mmlu_elementary_mathematics": 0,
|
523 |
+
"mmlu_formal_logic": 0,
|
524 |
+
"mmlu_global_facts": 0,
|
525 |
+
"mmlu_high_school_biology": 0,
|
526 |
+
"mmlu_high_school_chemistry": 0,
|
527 |
+
"mmlu_high_school_computer_science": 0,
|
528 |
+
"mmlu_high_school_european_history": 0,
|
529 |
+
"mmlu_high_school_geography": 0,
|
530 |
+
"mmlu_high_school_government_and_politics": 0,
|
531 |
+
"mmlu_high_school_macroeconomics": 0,
|
532 |
+
"mmlu_high_school_mathematics": 0,
|
533 |
+
"mmlu_high_school_microeconomics": 0,
|
534 |
+
"mmlu_high_school_physics": 0,
|
535 |
+
"mmlu_high_school_psychology": 0,
|
536 |
+
"mmlu_high_school_statistics": 0,
|
537 |
+
"mmlu_high_school_us_history": 0,
|
538 |
+
"mmlu_high_school_world_history": 0,
|
539 |
+
"mmlu_human_aging": 0,
|
540 |
+
"mmlu_human_sexuality": 0,
|
541 |
+
"mmlu_humanities": 0,
|
542 |
+
"mmlu_international_law": 0,
|
543 |
+
"mmlu_jurisprudence": 0,
|
544 |
+
"mmlu_logical_fallacies": 0,
|
545 |
+
"mmlu_machine_learning": 0,
|
546 |
+
"mmlu_management": 0,
|
547 |
+
"mmlu_marketing": 0,
|
548 |
+
"mmlu_medical_genetics": 0,
|
549 |
+
"mmlu_miscellaneous": 0,
|
550 |
+
"mmlu_moral_disputes": 0,
|
551 |
+
"mmlu_moral_scenarios": 0,
|
552 |
+
"mmlu_nutrition": 0,
|
553 |
+
"mmlu_other": 0,
|
554 |
+
"mmlu_philosophy": 0,
|
555 |
+
"mmlu_prehistory": 0,
|
556 |
+
"mmlu_professional_accounting": 0,
|
557 |
+
"mmlu_professional_law": 0,
|
558 |
+
"mmlu_professional_medicine": 0,
|
559 |
+
"mmlu_professional_psychology": 0,
|
560 |
+
"mmlu_public_relations": 0,
|
561 |
+
"mmlu_security_studies": 0,
|
562 |
+
"mmlu_social_sciences": 0,
|
563 |
+
"mmlu_sociology": 0,
|
564 |
+
"mmlu_stem": 0,
|
565 |
+
"mmlu_us_foreign_policy": 0,
|
566 |
+
"mmlu_virology": 0,
|
567 |
+
"mmlu_world_religions": 0,
|
568 |
+
"openbookqa": 0,
|
569 |
+
"piqa": 0,
|
570 |
+
"truthfulqa_mc1": 0,
|
571 |
+
"truthfulqa_mc2": 0,
|
572 |
+
"winogrande": 0
|
573 |
+
},
|
574 |
+
"date": 1714152422.1714947,
|
575 |
+
"config": {
|
576 |
+
"model": "hf",
|
577 |
+
"model_args": "pretrained=TheBloke/Llama-2-7B-Chat-GPTQ,dtype=float16,_commit_hash=main",
|
578 |
+
"batch_size": 2,
|
579 |
+
"batch_sizes": [],
|
580 |
+
"device": "cuda",
|
581 |
+
"use_cache": null,
|
582 |
+
"limit": null,
|
583 |
+
"bootstrap_iters": 100000,
|
584 |
+
"gen_kwargs": null
|
585 |
+
}
|
586 |
+
}
|
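The quantization_config block above is the standard GPTQ metadata that transformers reads out of a checkpoint's config.json. A minimal sketch of loading this checkpoint the way the run implies (GPU, float16 compute), assuming transformers with GPTQ support (optimum plus auto-gptq) is installed; no quantization arguments are passed because the 4-bit settings travel with the repo:

    from transformers import AutoModelForCausalLM, AutoTokenizer

    model_id = "TheBloke/Llama-2-7B-Chat-GPTQ"  # "model" from task_info above
    tokenizer = AutoTokenizer.from_pretrained(model_id, revision="main")
    # bits=4, group_size=128, desc_act=False, quant_method="gptq" are read from
    # the repo's own quantization_config, so nothing extra is needed here.
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        revision="main",        # "revision" from task_info
        device_map="cuda",      # matches "device": "cuda" / "hardware": "gpu"
        torch_dtype="float16",  # matches "compute_dtype": "float16"
    )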
TheBloke/results_2024-04-27-08-48-07.json
ADDED
@@ -0,0 +1,582 @@
{
    "config_general": {
        "lighteval_sha": "1.4",
        "num_few_shot_default": null,
        "num_fewshot_seeds": null,
        "override_batch_size": null,
        "max_samples": null,
        "job_id": -1,
        "start_time": null,
        "end_time": "2024-04-27-08-48-07",
        "total_evaluation_time_secondes": "",
        "model_name": "TheBloke/Llama-2-7B-Chat-AWQ",
        "model_sha": "",
        "model_dtype": "4bit",
        "model_size": 3.89,
        "model_params": 6.53,
        "quant_type": "AWQ",
        "precision": "4bit"
    },
    "results": {
        "harness|piqa|0": {
            "acc,none": 0.7562568008705114,
            "acc_stderr,none": 0.010017199471500614,
            "acc_norm,none": 0.7622415669205659,
            "acc_norm_stderr,none": 0.009932525779525485,
            "alias": "piqa"
        },
        "harness|lambada:openai|0": {
            "perplexity,none": 3.5058974442285646,
            "perplexity_stderr,none": 0.09435147029917793,
            "acc,none": 0.6902775082476228,
            "acc_stderr,none": 0.00644184552805125,
            "alias": "lambada_openai"
        },
        "harness|truthfulqa:mc1|0": {
            "acc,none": 0.29008567931456547,
            "acc_stderr,none": 0.015886236874209515,
            "alias": "truthfulqa_mc1"
        },
        "harness|boolq|0": {
            "acc,none": 0.8042813455657493,
            "acc_stderr,none": 0.006939251824863106,
            "alias": "boolq"
        },
        "harness|hellaswag|0": {
            "acc,none": 0.5713005377414858,
            "acc_stderr,none": 0.004938787067611794,
            "acc_norm,none": 0.7488548097988449,
            "acc_norm_stderr,none": 0.004327855588466402,
            "alias": "hellaswag"
        },
        "harness|mmlu|0": {
            "acc,none": 0.4528557185586099,
            "acc_stderr,none": 0.004036188883688366,
            "alias": "mmlu"
        },
        "harness|mmlu_humanities|0": {
            "alias": " - humanities",
            "acc,none": 0.42422954303931987,
            "acc_stderr,none": 0.006938084209855627
        },
        "harness|mmlu_formal_logic|0": {
            "alias": " - formal_logic",
            "acc,none": 0.25396825396825395,
            "acc_stderr,none": 0.03893259610604675
        },
        "harness|mmlu_high_school_european_history|0": {
            "alias": " - high_school_european_history",
            "acc,none": 0.5757575757575758,
            "acc_stderr,none": 0.038592681420702636
        },
        "harness|mmlu_high_school_us_history|0": {
            "alias": " - high_school_us_history",
            "acc,none": 0.5931372549019608,
            "acc_stderr,none": 0.03447891136353383
        },
        "harness|mmlu_high_school_world_history|0": {
            "alias": " - high_school_world_history",
            "acc,none": 0.6371308016877637,
            "acc_stderr,none": 0.031299208255302136
        },
        "harness|mmlu_international_law|0": {
            "alias": " - international_law",
            "acc,none": 0.5867768595041323,
            "acc_stderr,none": 0.04495087843548408
        },
        "harness|mmlu_jurisprudence|0": {
            "alias": " - jurisprudence",
            "acc,none": 0.5462962962962963,
            "acc_stderr,none": 0.04812917324536823
        },
        "harness|mmlu_logical_fallacies|0": {
            "alias": " - logical_fallacies",
            "acc,none": 0.5828220858895705,
            "acc_stderr,none": 0.038741028598180814
        },
        "harness|mmlu_moral_disputes|0": {
            "alias": " - moral_disputes",
            "acc,none": 0.48554913294797686,
            "acc_stderr,none": 0.026907849856282532
        },
        "harness|mmlu_moral_scenarios|0": {
            "alias": " - moral_scenarios",
            "acc,none": 0.2424581005586592,
            "acc_stderr,none": 0.014333522059217887
        },
        "harness|mmlu_philosophy|0": {
            "alias": " - philosophy",
            "acc,none": 0.5209003215434084,
            "acc_stderr,none": 0.028373270961069414
        },
        "harness|mmlu_prehistory|0": {
            "alias": " - prehistory",
            "acc,none": 0.5185185185185185,
            "acc_stderr,none": 0.027801656212323667
        },
        "harness|mmlu_professional_law|0": {
            "alias": " - professional_law",
            "acc,none": 0.3546284224250326,
            "acc_stderr,none": 0.012218576439090174
        },
        "harness|mmlu_world_religions|0": {
            "alias": " - world_religions",
            "acc,none": 0.6608187134502924,
            "acc_stderr,none": 0.03631053496488904
        },
        "harness|mmlu_other|0": {
            "alias": " - other",
            "acc,none": 0.536530415191503,
            "acc_stderr,none": 0.008694819121553921
        },
        "harness|mmlu_business_ethics|0": {
            "alias": " - business_ethics",
            "acc,none": 0.49,
            "acc_stderr,none": 0.05024183937956911
        },
        "harness|mmlu_clinical_knowledge|0": {
            "alias": " - clinical_knowledge",
            "acc,none": 0.49433962264150944,
            "acc_stderr,none": 0.03077090076385131
        },
        "harness|mmlu_college_medicine|0": {
            "alias": " - college_medicine",
            "acc,none": 0.36416184971098264,
            "acc_stderr,none": 0.036690724774169084
        },
        "harness|mmlu_global_facts|0": {
            "alias": " - global_facts",
            "acc,none": 0.35,
            "acc_stderr,none": 0.047937248544110196
        },
        "harness|mmlu_human_aging|0": {
            "alias": " - human_aging",
            "acc,none": 0.5829596412556054,
            "acc_stderr,none": 0.03309266936071722
        },
        "harness|mmlu_management|0": {
            "alias": " - management",
            "acc,none": 0.6699029126213593,
            "acc_stderr,none": 0.04656147110012351
        },
        "harness|mmlu_marketing|0": {
            "alias": " - marketing",
            "acc,none": 0.6965811965811965,
            "acc_stderr,none": 0.030118210106942652
        },
        "harness|mmlu_medical_genetics|0": {
            "alias": " - medical_genetics",
            "acc,none": 0.51,
            "acc_stderr,none": 0.05024183937956911
        },
        "harness|mmlu_miscellaneous|0": {
            "alias": " - miscellaneous",
            "acc,none": 0.6756066411238825,
            "acc_stderr,none": 0.016740929047162702
        },
        "harness|mmlu_nutrition|0": {
            "alias": " - nutrition",
            "acc,none": 0.49673202614379086,
            "acc_stderr,none": 0.02862930519400354
        },
        "harness|mmlu_professional_accounting|0": {
            "alias": " - professional_accounting",
            "acc,none": 0.36524822695035464,
            "acc_stderr,none": 0.028723863853281278
        },
        "harness|mmlu_professional_medicine|0": {
            "alias": " - professional_medicine",
            "acc,none": 0.43014705882352944,
            "acc_stderr,none": 0.030074971917302875
        },
        "harness|mmlu_virology|0": {
            "alias": " - virology",
            "acc,none": 0.45180722891566266,
            "acc_stderr,none": 0.03874371556587953
        },
        "harness|mmlu_social_sciences|0": {
            "alias": " - social_sciences",
            "acc,none": 0.5128371790705233,
            "acc_stderr,none": 0.008748422821694416
        },
        "harness|mmlu_econometrics|0": {
            "alias": " - econometrics",
            "acc,none": 0.2894736842105263,
            "acc_stderr,none": 0.04266339443159395
        },
        "harness|mmlu_high_school_geography|0": {
            "alias": " - high_school_geography",
            "acc,none": 0.6060606060606061,
            "acc_stderr,none": 0.03481285338232962
        },
        "harness|mmlu_high_school_government_and_politics|0": {
            "alias": " - high_school_government_and_politics",
            "acc,none": 0.6632124352331606,
            "acc_stderr,none": 0.03410780251836183
        },
        "harness|mmlu_high_school_macroeconomics|0": {
            "alias": " - high_school_macroeconomics",
            "acc,none": 0.36153846153846153,
            "acc_stderr,none": 0.024359581465397
        },
        "harness|mmlu_high_school_microeconomics|0": {
            "alias": " - high_school_microeconomics",
            "acc,none": 0.36134453781512604,
            "acc_stderr,none": 0.031204691225150016
        },
        "harness|mmlu_high_school_psychology|0": {
            "alias": " - high_school_psychology",
            "acc,none": 0.6165137614678899,
            "acc_stderr,none": 0.02084715664191598
        },
        "harness|mmlu_human_sexuality|0": {
            "alias": " - human_sexuality",
            "acc,none": 0.5343511450381679,
            "acc_stderr,none": 0.04374928560599738
        },
        "harness|mmlu_professional_psychology|0": {
            "alias": " - professional_psychology",
            "acc,none": 0.434640522875817,
            "acc_stderr,none": 0.020054269200726452
        },
        "harness|mmlu_public_relations|0": {
            "alias": " - public_relations",
            "acc,none": 0.5272727272727272,
            "acc_stderr,none": 0.04782001791380061
        },
        "harness|mmlu_security_studies|0": {
            "alias": " - security_studies",
            "acc,none": 0.5224489795918368,
            "acc_stderr,none": 0.03197694118713672
        },
        "harness|mmlu_sociology|0": {
            "alias": " - sociology",
            "acc,none": 0.6965174129353234,
            "acc_stderr,none": 0.03251006816458618
        },
        "harness|mmlu_us_foreign_policy|0": {
            "alias": " - us_foreign_policy",
            "acc,none": 0.72,
            "acc_stderr,none": 0.04512608598542128
        },
        "harness|mmlu_stem|0": {
            "alias": " - stem",
            "acc,none": 0.35458293688550585,
            "acc_stderr,none": 0.00834404825169343
        },
        "harness|mmlu_abstract_algebra|0": {
            "alias": " - abstract_algebra",
            "acc,none": 0.27,
            "acc_stderr,none": 0.0446196043338474
        },
        "harness|mmlu_anatomy|0": {
            "alias": " - anatomy",
            "acc,none": 0.45185185185185184,
            "acc_stderr,none": 0.04299268905480864
        },
        "harness|mmlu_astronomy|0": {
            "alias": " - astronomy",
            "acc,none": 0.4605263157894737,
            "acc_stderr,none": 0.04056242252249033
        },
        "harness|mmlu_college_biology|0": {
            "alias": " - college_biology",
            "acc,none": 0.5,
            "acc_stderr,none": 0.04181210050035455
        },
        "harness|mmlu_college_chemistry|0": {
            "alias": " - college_chemistry",
            "acc,none": 0.21,
            "acc_stderr,none": 0.040936018074033256
        },
        "harness|mmlu_college_computer_science|0": {
            "alias": " - college_computer_science",
            "acc,none": 0.33,
            "acc_stderr,none": 0.04725815626252604
        },
        "harness|mmlu_college_mathematics|0": {
            "alias": " - college_mathematics",
            "acc,none": 0.29,
            "acc_stderr,none": 0.045604802157206845
        },
        "harness|mmlu_college_physics|0": {
            "alias": " - college_physics",
            "acc,none": 0.23529411764705882,
            "acc_stderr,none": 0.042207736591714534
        },
        "harness|mmlu_computer_security|0": {
            "alias": " - computer_security",
            "acc,none": 0.57,
            "acc_stderr,none": 0.04975698519562428
        },
        "harness|mmlu_conceptual_physics|0": {
            "alias": " - conceptual_physics",
            "acc,none": 0.3702127659574468,
            "acc_stderr,none": 0.031565646822367836
        },
        "harness|mmlu_electrical_engineering|0": {
            "alias": " - electrical_engineering",
            "acc,none": 0.4206896551724138,
            "acc_stderr,none": 0.0411391498118926
        },
        "harness|mmlu_elementary_mathematics|0": {
            "alias": " - elementary_mathematics",
            "acc,none": 0.2777777777777778,
            "acc_stderr,none": 0.023068188848261117
        },
        "harness|mmlu_high_school_biology|0": {
            "alias": " - high_school_biology",
            "acc,none": 0.5258064516129032,
            "acc_stderr,none": 0.02840609505765332
        },
        "harness|mmlu_high_school_chemistry|0": {
            "alias": " - high_school_chemistry",
            "acc,none": 0.31527093596059114,
            "acc_stderr,none": 0.032690808719701876
        },
        "harness|mmlu_high_school_computer_science|0": {
            "alias": " - high_school_computer_science",
            "acc,none": 0.4,
            "acc_stderr,none": 0.049236596391733084
        },
        "harness|mmlu_high_school_mathematics|0": {
            "alias": " - high_school_mathematics",
            "acc,none": 0.27037037037037037,
            "acc_stderr,none": 0.02708037281514566
        },
        "harness|mmlu_high_school_physics|0": {
            "alias": " - high_school_physics",
            "acc,none": 0.2781456953642384,
            "acc_stderr,none": 0.03658603262763743
        },
        "harness|mmlu_high_school_statistics|0": {
            "alias": " - high_school_statistics",
            "acc,none": 0.24074074074074073,
            "acc_stderr,none": 0.02915752218460559
        },
        "harness|mmlu_machine_learning|0": {
            "alias": " - machine_learning",
            "acc,none": 0.33035714285714285,
            "acc_stderr,none": 0.04464285714285714
        },
        "harness|winogrande|0": {
            "acc,none": 0.6471981057616417,
            "acc_stderr,none": 0.013429728101788947,
            "alias": "winogrande"
        },
        "harness|openbookqa|0": {
            "acc,none": 0.318,
            "acc_stderr,none": 0.020847571620814007,
            "acc_norm,none": 0.422,
            "acc_norm_stderr,none": 0.022109039310618552,
            "alias": "openbookqa"
        },
        "harness|truthfulqa:mc2|0": {
            "acc,none": 0.4429339932483719,
            "acc_stderr,none": 0.015653506812546597,
            "alias": "truthfulqa_mc2"
        },
        "harness|arc:challenge|0": {
            "acc,none": 0.4300341296928328,
            "acc_stderr,none": 0.014467631559137994,
            "acc_norm,none": 0.431740614334471,
            "acc_norm_stderr,none": 0.014474591427196204,
            "alias": "arc_challenge"
        },
        "harness|arc:easy|0": {
            "acc,none": 0.726010101010101,
            "acc_stderr,none": 0.009151805901544028,
            "acc_norm,none": 0.6696127946127947,
            "acc_norm_stderr,none": 0.009651430216428185,
            "alias": "arc_easy"
        }
    },
    "task_info": {
        "model": "TheBloke/Llama-2-7B-Chat-AWQ",
        "revision": "main",
        "private": false,
        "params": 4.516,
        "architectures": "LlamaForCausalLM",
        "quant_type": "AWQ",
        "precision": "4bit",
        "model_params": 9.032,
        "model_size": 4.516,
        "weight_dtype": "int4",
        "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf",
        "hardware": "gpu",
        "status": "Pending",
        "submitted_time": "2024-04-26T17:26:23Z",
        "model_type": "quantization",
        "job_id": -1,
        "job_start_time": null,
        "scripts": "ITREX"
    },
    "quantization_config": {
        "quant_method": "awq",
        "zero_point": true,
        "group_size": 128,
        "bits": 4,
        "version": "gemm"
    },
    "versions": {
        "harness|piqa|0": 1.0,
        "harness|lambada:openai|0": 1.0,
        "harness|truthfulqa:mc1|0": 2.0,
        "harness|boolq|0": 2.0,
        "harness|hellaswag|0": 1.0,
        "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null,
        "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0,
        "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0,
        "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0,
        "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0,
        "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0,
        "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0,
        "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null,
        "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0,
        "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0,
        "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0,
        "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0,
        "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0,
        "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0,
        "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null,
        "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0,
        "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0,
        "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0,
        "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0,
        "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0,
        "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0,
        "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0,
        "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0,
        "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0,
        "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0,
        "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0,
        "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0,
        "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0,
        "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0,
        "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0,
        "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0,
        "harness|winogrande|0": 1.0,
        "harness|openbookqa|0": 1.0,
        "harness|truthfulqa:mc2|0": 2.0,
        "harness|arc:challenge|0": 1.0,
        "harness|arc:easy|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0,
        "arc_easy": 0,
        "boolq": 0,
        "hellaswag": 0,
        "lambada_openai": 0,
        "mmlu": 0,
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_humanities": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_other": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_social_sciences": 0,
        "mmlu_sociology": 0,
        "mmlu_stem": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0,
        "openbookqa": 0,
        "piqa": 0,
        "truthfulqa_mc1": 0,
        "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714157340.7852106,
    "config": {
        "model": "hf",
        "model_args": "pretrained=TheBloke/Llama-2-7B-Chat-AWQ,dtype=float16,_commit_hash=main",
        "batch_size": 2,
        "batch_sizes": [],
        "device": "cuda",
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null
    }
}
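The closing config block records how lm-evaluation-harness was driven for this file (model "hf", batch_size 2, device "cuda"). A minimal sketch of the equivalent programmatic call, assuming lm-eval 0.4.x; the task list is an illustrative subset of the suite reported above, not the full set:

    import lm_eval

    results = lm_eval.simple_evaluate(
        model="hf",  # "model": "hf" in the config block
        model_args="pretrained=TheBloke/Llama-2-7B-Chat-AWQ,dtype=float16",
        tasks=["piqa", "boolq", "arc_easy", "arc_challenge"],  # illustrative subset
        batch_size=2,   # "batch_size": 2
        device="cuda",  # "device": "cuda"
    )
    # Per-task metrics come back under the same keys the JSON stores,
    # e.g. "acc,none" for plain accuracy.
    print(results["results"]["boolq"]["acc,none"])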
TheBloke/results_2024-04-27-23-05-56.json
ADDED
@@ -0,0 +1,579 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "no",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-04-27-23-05-56",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "TheBloke/Llama-2-7B-Chat-GGUF",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "4bit",
|
15 |
+
"model_size": 3.825065984,
|
16 |
+
"model_params": 6.738415616,
|
17 |
+
"quant_type": "llama.cpp",
|
18 |
+
"precision": "4bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|truthfulqa:mc1|0": {
|
22 |
+
"acc,none": 0.29865361077111385,
|
23 |
+
"acc_stderr,none": 0.016021570613768542,
|
24 |
+
"alias": "truthfulqa_mc1"
|
25 |
+
},
|
26 |
+
"harness|lambada:openai|0": {
|
27 |
+
"perplexity,none": 4.835081603467283,
|
28 |
+
"perplexity_stderr,none": 0.15195496196061345,
|
29 |
+
"acc,none": 0.4865127110421114,
|
30 |
+
"acc_stderr,none": 0.006963442876327696,
|
31 |
+
"alias": "lambada_openai"
|
32 |
+
},
|
33 |
+
"harness|arc:challenge|0": {
|
34 |
+
"acc,none": 0.42662116040955633,
|
35 |
+
"acc_stderr,none": 0.014453185592920293,
|
36 |
+
"acc_norm,none": 0.4325938566552901,
|
37 |
+
"acc_norm_stderr,none": 0.014478005694182526,
|
38 |
+
"alias": "arc_challenge"
|
39 |
+
},
|
40 |
+
"harness|openbookqa|0": {
|
41 |
+
"acc,none": 0.31,
|
42 |
+
"acc_stderr,none": 0.02070404102172479,
|
43 |
+
"acc_norm,none": 0.434,
|
44 |
+
"acc_norm_stderr,none": 0.022187215803029008,
|
45 |
+
"alias": "openbookqa"
|
46 |
+
},
|
47 |
+
"harness|hellaswag|0": {
|
48 |
+
"acc,none": 0.5728938458474407,
|
49 |
+
"acc_stderr,none": 0.004936470085238477,
|
50 |
+
"acc_norm,none": 0.7499502091216889,
|
51 |
+
"acc_norm_stderr,none": 0.004321564303822491,
|
52 |
+
"alias": "hellaswag"
|
53 |
+
},
|
54 |
+
"harness|boolq|0": {
|
55 |
+
"acc,none": 0.8024464831804281,
|
56 |
+
"acc_stderr,none": 0.006963746631628736,
|
57 |
+
"alias": "boolq"
|
58 |
+
},
|
59 |
+
"harness|mmlu|0": {
|
60 |
+
"acc,none": 0.4325594644637516,
|
61 |
+
"acc_stderr,none": 0.00404401665456216,
|
62 |
+
"alias": "mmlu"
|
63 |
+
},
|
64 |
+
"harness|mmlu_humanities|0": {
|
65 |
+
"alias": " - humanities",
|
66 |
+
"acc,none": 0.3895855472901169,
|
67 |
+
"acc_stderr,none": 0.006936149839962311
|
68 |
+
},
|
69 |
+
"harness|mmlu_formal_logic|0": {
|
70 |
+
"alias": " - formal_logic",
|
71 |
+
"acc,none": 0.20634920634920634,
|
72 |
+
"acc_stderr,none": 0.03619604524124252
|
73 |
+
},
|
74 |
+
"harness|mmlu_high_school_european_history|0": {
|
75 |
+
"alias": " - high_school_european_history",
|
76 |
+
"acc,none": 0.3878787878787879,
|
77 |
+
"acc_stderr,none": 0.0380491365397101
|
78 |
+
},
|
79 |
+
"harness|mmlu_high_school_us_history|0": {
|
80 |
+
"alias": " - high_school_us_history",
|
81 |
+
"acc,none": 0.5049019607843137,
|
82 |
+
"acc_stderr,none": 0.03509143375606787
|
83 |
+
},
|
84 |
+
"harness|mmlu_high_school_world_history|0": {
|
85 |
+
"alias": " - high_school_world_history",
|
86 |
+
"acc,none": 0.42616033755274263,
|
87 |
+
"acc_stderr,none": 0.03219035703131774
|
88 |
+
},
|
89 |
+
"harness|mmlu_international_law|0": {
|
90 |
+
"alias": " - international_law",
|
91 |
+
"acc,none": 0.5371900826446281,
|
92 |
+
"acc_stderr,none": 0.045517111961042175
|
93 |
+
},
|
94 |
+
"harness|mmlu_jurisprudence|0": {
|
95 |
+
"alias": " - jurisprudence",
|
96 |
+
"acc,none": 0.49074074074074076,
|
97 |
+
"acc_stderr,none": 0.04832853553437055
|
98 |
+
},
|
99 |
+
"harness|mmlu_logical_fallacies|0": {
|
100 |
+
"alias": " - logical_fallacies",
|
101 |
+
"acc,none": 0.4723926380368098,
|
102 |
+
"acc_stderr,none": 0.0392237829061099
|
103 |
+
},
|
104 |
+
"harness|mmlu_moral_disputes|0": {
|
105 |
+
"alias": " - moral_disputes",
|
106 |
+
"acc,none": 0.4884393063583815,
|
107 |
+
"acc_stderr,none": 0.026911898686377906
|
108 |
+
},
|
109 |
+
"harness|mmlu_moral_scenarios|0": {
|
110 |
+
"alias": " - moral_scenarios",
|
111 |
+
"acc,none": 0.2424581005586592,
|
112 |
+
"acc_stderr,none": 0.014333522059217887
|
113 |
+
},
|
114 |
+
"harness|mmlu_philosophy|0": {
|
115 |
+
"alias": " - philosophy",
|
116 |
+
"acc,none": 0.5080385852090032,
|
117 |
+
"acc_stderr,none": 0.028394421370984545
|
118 |
+
},
|
119 |
+
"harness|mmlu_prehistory|0": {
|
120 |
+
"alias": " - prehistory",
|
121 |
+
"acc,none": 0.5061728395061729,
|
122 |
+
"acc_stderr,none": 0.027818623962583295
|
123 |
+
},
|
124 |
+
"harness|mmlu_professional_law|0": {
|
125 |
+
"alias": " - professional_law",
|
126 |
+
"acc,none": 0.34419817470664926,
|
127 |
+
"acc_stderr,none": 0.012134433741002574
|
128 |
+
},
|
129 |
+
"harness|mmlu_world_religions|0": {
|
130 |
+
"alias": " - world_religions",
|
131 |
+
"acc,none": 0.631578947368421,
|
132 |
+
"acc_stderr,none": 0.03699658017656878
|
133 |
+
},
|
134 |
+
"harness|mmlu_other|0": {
|
135 |
+
"alias": " - other",
|
136 |
+
"acc,none": 0.5236562600579338,
|
137 |
+
"acc_stderr,none": 0.008731196155986961
|
138 |
+
},
|
139 |
+
"harness|mmlu_business_ethics|0": {
|
140 |
+
"alias": " - business_ethics",
|
141 |
+
"acc,none": 0.43,
|
142 |
+
"acc_stderr,none": 0.049756985195624284
|
143 |
+
},
|
144 |
+
"harness|mmlu_clinical_knowledge|0": {
|
145 |
+
"alias": " - clinical_knowledge",
|
146 |
+
"acc,none": 0.5132075471698113,
|
147 |
+
"acc_stderr,none": 0.030762134874500482
|
148 |
+
},
|
149 |
+
"harness|mmlu_college_medicine|0": {
|
150 |
+
"alias": " - college_medicine",
|
151 |
+
"acc,none": 0.3872832369942196,
|
152 |
+
"acc_stderr,none": 0.037143259063020656
|
153 |
+
},
|
154 |
+
"harness|mmlu_global_facts|0": {
|
155 |
+
"alias": " - global_facts",
|
156 |
+
"acc,none": 0.36,
|
157 |
+
"acc_stderr,none": 0.04824181513244218
|
158 |
+
},
|
159 |
+
"harness|mmlu_human_aging|0": {
|
160 |
+
"alias": " - human_aging",
|
161 |
+
"acc,none": 0.5739910313901345,
|
162 |
+
"acc_stderr,none": 0.033188332862172806
|
163 |
+
},
|
164 |
+
"harness|mmlu_management|0": {
|
165 |
+
"alias": " - management",
|
166 |
+
"acc,none": 0.6116504854368932,
|
167 |
+
"acc_stderr,none": 0.0482572933735639
|
168 |
+
},
|
169 |
+
"harness|mmlu_marketing|0": {
|
170 |
+
"alias": " - marketing",
|
171 |
+
"acc,none": 0.7051282051282052,
|
172 |
+
"acc_stderr,none": 0.029872577708891172
|
173 |
+
},
|
174 |
+
"harness|mmlu_medical_genetics|0": {
|
175 |
+
"alias": " - medical_genetics",
|
176 |
+
"acc,none": 0.46,
|
177 |
+
"acc_stderr,none": 0.05009082659620332
|
178 |
+
},
|
179 |
+
"harness|mmlu_miscellaneous|0": {
|
180 |
+
"alias": " - miscellaneous",
|
181 |
+
"acc,none": 0.648786717752235,
|
182 |
+
"acc_stderr,none": 0.01706998205149943
|
183 |
+
},
|
184 |
+
"harness|mmlu_nutrition|0": {
|
185 |
+
"alias": " - nutrition",
|
186 |
+
"acc,none": 0.5032679738562091,
|
187 |
+
"acc_stderr,none": 0.028629305194003543
|
188 |
+
},
|
189 |
+
"harness|mmlu_professional_accounting|0": {
|
190 |
+
"alias": " - professional_accounting",
|
191 |
+
"acc,none": 0.35815602836879434,
|
192 |
+
"acc_stderr,none": 0.028602085862759415
|
193 |
+
},
|
194 |
+
"harness|mmlu_professional_medicine|0": {
|
195 |
+
"alias": " - professional_medicine",
|
196 |
+
"acc,none": 0.39338235294117646,
|
197 |
+
"acc_stderr,none": 0.029674288281311183
|
198 |
+
},
|
199 |
+
"harness|mmlu_virology|0": {
|
200 |
+
"alias": " - virology",
|
201 |
+
"acc,none": 0.4397590361445783,
|
202 |
+
"acc_stderr,none": 0.03864139923699122
|
203 |
+
},
|
204 |
+
"harness|mmlu_social_sciences|0": {
|
205 |
+
"alias": " - social_sciences",
|
206 |
+
"acc,none": 0.49496262593435164,
|
207 |
+
"acc_stderr,none": 0.008805335177966452
|
208 |
+
},
|
209 |
+
"harness|mmlu_econometrics|0": {
|
210 |
+
"alias": " - econometrics",
|
211 |
+
"acc,none": 0.3333333333333333,
|
212 |
+
"acc_stderr,none": 0.044346007015849245
|
213 |
+
},
|
214 |
+
"harness|mmlu_high_school_geography|0": {
|
215 |
+
"alias": " - high_school_geography",
|
216 |
+
"acc,none": 0.5707070707070707,
|
217 |
+
"acc_stderr,none": 0.03526552724601199
|
218 |
+
},
|
219 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
220 |
+
"alias": " - high_school_government_and_politics",
|
221 |
+
"acc,none": 0.6373056994818653,
|
222 |
+
"acc_stderr,none": 0.034697137917043715
|
223 |
+
},
|
224 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
225 |
+
"alias": " - high_school_macroeconomics",
|
226 |
+
"acc,none": 0.382051282051282,
|
227 |
+
"acc_stderr,none": 0.02463554916390823
|
228 |
+
},
|
229 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
230 |
+
"alias": " - high_school_microeconomics",
|
231 |
+
"acc,none": 0.3277310924369748,
|
232 |
+
"acc_stderr,none": 0.030489911417673227
|
233 |
+
},
|
234 |
+
"harness|mmlu_high_school_psychology|0": {
|
235 |
+
"alias": " - high_school_psychology",
|
236 |
+
"acc,none": 0.5798165137614679,
|
237 |
+
"acc_stderr,none": 0.0211624200482735
|
238 |
+
},
|
239 |
+
"harness|mmlu_human_sexuality|0": {
|
240 |
+
"alias": " - human_sexuality",
|
241 |
+
"acc,none": 0.549618320610687,
|
242 |
+
"acc_stderr,none": 0.04363643698524779
|
243 |
+
},
|
244 |
+
"harness|mmlu_professional_psychology|0": {
|
245 |
+
"alias": " - professional_psychology",
|
246 |
+
"acc,none": 0.4215686274509804,
|
247 |
+
"acc_stderr,none": 0.019977422600227474
|
248 |
+
},
|
249 |
+
"harness|mmlu_public_relations|0": {
|
250 |
+
"alias": " - public_relations",
|
251 |
+
"acc,none": 0.5181818181818182,
|
252 |
+
"acc_stderr,none": 0.04785964010794916
|
253 |
+
},
|
254 |
+
"harness|mmlu_security_studies|0": {
|
255 |
+
"alias": " - security_studies",
|
256 |
+
"acc,none": 0.46938775510204084,
|
257 |
+
"acc_stderr,none": 0.031949171367580624
|
258 |
+
},
|
259 |
+
"harness|mmlu_sociology|0": {
|
260 |
+
"alias": " - sociology",
|
261 |
+
"acc,none": 0.6517412935323383,
|
262 |
+
"acc_stderr,none": 0.03368787466115459
|
263 |
+
},
|
264 |
+
"harness|mmlu_us_foreign_policy|0": {
|
265 |
+
"alias": " - us_foreign_policy",
|
266 |
+
"acc,none": 0.73,
|
267 |
+
"acc_stderr,none": 0.0446196043338474
|
268 |
+
},
|
269 |
+
"harness|mmlu_stem|0": {
|
270 |
+
"alias": " - stem",
|
271 |
+
"acc,none": 0.3460196638122423,
|
272 |
+
"acc_stderr,none": 0.008328893618355438
|
273 |
+
},
|
274 |
+
"harness|mmlu_abstract_algebra|0": {
|
275 |
+
"alias": " - abstract_algebra",
|
276 |
+
"acc,none": 0.26,
|
277 |
+
"acc_stderr,none": 0.04408440022768077
|
278 |
+
},
|
279 |
+
"harness|mmlu_anatomy|0": {
|
280 |
+
"alias": " - anatomy",
|
281 |
+
"acc,none": 0.4444444444444444,
|
282 |
+
"acc_stderr,none": 0.04292596718256981
|
283 |
+
},
|
284 |
+
"harness|mmlu_astronomy|0": {
|
285 |
+
"alias": " - astronomy",
|
286 |
+
"acc,none": 0.4276315789473684,
|
287 |
+
"acc_stderr,none": 0.04026097083296559
|
288 |
+
},
|
289 |
+
"harness|mmlu_college_biology|0": {
|
290 |
+
"alias": " - college_biology",
|
291 |
+
"acc,none": 0.4652777777777778,
|
292 |
+
"acc_stderr,none": 0.04171115858181618
|
293 |
+
},
|
294 |
+
"harness|mmlu_college_chemistry|0": {
|
295 |
+
"alias": " - college_chemistry",
|
296 |
+
"acc,none": 0.21,
|
297 |
+
"acc_stderr,none": 0.040936018074033256
|
298 |
+
},
|
299 |
+
"harness|mmlu_college_computer_science|0": {
|
300 |
+
"alias": " - college_computer_science",
|
301 |
+
"acc,none": 0.32,
|
302 |
+
"acc_stderr,none": 0.046882617226215034
|
303 |
+
},
|
304 |
+
"harness|mmlu_college_mathematics|0": {
|
305 |
+
"alias": " - college_mathematics",
|
306 |
+
"acc,none": 0.3,
|
307 |
+
"acc_stderr,none": 0.046056618647183814
|
308 |
+
},
|
309 |
+
"harness|mmlu_college_physics|0": {
|
310 |
+
"alias": " - college_physics",
|
311 |
+
"acc,none": 0.22549019607843138,
|
312 |
+
"acc_stderr,none": 0.041583075330832865
|
313 |
+
},
|
314 |
+
"harness|mmlu_computer_security|0": {
|
315 |
+
"alias": " - computer_security",
|
316 |
+
"acc,none": 0.55,
|
317 |
+
"acc_stderr,none": 0.049999999999999996
|
318 |
+
},
|
319 |
+
"harness|mmlu_conceptual_physics|0": {
|
320 |
+
"alias": " - conceptual_physics",
|
321 |
+
"acc,none": 0.3659574468085106,
|
322 |
+
"acc_stderr,none": 0.03148955829745529
|
323 |
+
},
|
324 |
+
"harness|mmlu_electrical_engineering|0": {
|
325 |
+
"alias": " - electrical_engineering",
|
326 |
+
"acc,none": 0.42758620689655175,
|
327 |
+
"acc_stderr,none": 0.041227371113703316
|
328 |
+
},
|
329 |
+
"harness|mmlu_elementary_mathematics|0": {
|
330 |
+
"alias": " - elementary_mathematics",
|
331 |
+
"acc,none": 0.2751322751322751,
|
332 |
+
"acc_stderr,none": 0.023000086859068646
|
333 |
+
},
|
334 |
+
"harness|mmlu_high_school_biology|0": {
|
335 |
+
"alias": " - high_school_biology",
|
336 |
+
"acc,none": 0.4838709677419355,
|
337 |
+
"acc_stderr,none": 0.028429203176724555
|
338 |
+
},
|
339 |
+
"harness|mmlu_high_school_chemistry|0": {
|
340 |
+
"alias": " - high_school_chemistry",
|
341 |
+
"acc,none": 0.3103448275862069,
|
342 |
+
"acc_stderr,none": 0.032550867699701024
|
343 |
+
},
|
344 |
+
"harness|mmlu_high_school_computer_science|0": {
|
345 |
+
"alias": " - high_school_computer_science",
|
346 |
+
"acc,none": 0.41,
|
347 |
+
"acc_stderr,none": 0.049431107042371025
|
348 |
+
},
|
349 |
+
"harness|mmlu_high_school_mathematics|0": {
|
350 |
+
"alias": " - high_school_mathematics",
|
351 |
+
"acc,none": 0.27037037037037037,
|
352 |
+
"acc_stderr,none": 0.02708037281514566
|
353 |
+
},
|
354 |
+
"harness|mmlu_high_school_physics|0": {
|
355 |
+
"alias": " - high_school_physics",
|
356 |
+
"acc,none": 0.271523178807947,
|
357 |
+
"acc_stderr,none": 0.036313298039696525
|
358 |
+
},
|
359 |
+
"harness|mmlu_high_school_statistics|0": {
|
360 |
+
"alias": " - high_school_statistics",
|
361 |
+
"acc,none": 0.22685185185185186,
|
362 |
+
"acc_stderr,none": 0.02856165010242226
|
363 |
+
},
|
364 |
+
"harness|mmlu_machine_learning|0": {
|
365 |
+
"alias": " - machine_learning",
|
366 |
+
"acc,none": 0.38392857142857145,
|
367 |
+
"acc_stderr,none": 0.04616143075028547
|
368 |
+
},
|
369 |
+
"harness|arc:easy|0": {
|
370 |
+
"acc,none": 0.7281144781144782,
|
371 |
+
"acc_stderr,none": 0.009129795867310492,
|
372 |
+
"acc_norm,none": 0.6784511784511784,
|
373 |
+
"acc_norm_stderr,none": 0.009584091575640625,
|
374 |
+
"alias": "arc_easy"
|
375 |
+
},
|
376 |
+
"harness|winogrande|0": {
|
377 |
+
"acc,none": 0.6779794790844514,
|
378 |
+
"acc_stderr,none": 0.013132070202071081,
|
379 |
+
"alias": "winogrande"
|
380 |
+
},
|
381 |
+
"harness|piqa|0": {
|
382 |
+
"acc,none": 0.749183895538629,
|
383 |
+
"acc_stderr,none": 0.010113869547069044,
|
384 |
+
"acc_norm,none": 0.7568008705114254,
|
385 |
+
"acc_norm_stderr,none": 0.010009611953858934,
|
386 |
+
"alias": "piqa"
|
387 |
+
},
|
388 |
+
"harness|truthfulqa:mc2|0": {
|
389 |
+
"acc,none": 0.4434567291361564,
|
390 |
+
"acc_stderr,none": 0.015648678178541143,
|
391 |
+
"alias": "truthfulqa_mc2"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "TheBloke/Llama-2-7B-Chat-GGUF",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 28.0,
|
399 |
+
"architectures": "?",
|
400 |
+
"quant_type": "llama.cpp",
|
401 |
+
"precision": "4bit",
|
402 |
+
"model_params": 56.0,
|
403 |
+
"model_size": 28.0,
|
404 |
+
"weight_dtype": "int4",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-26T17:46:46Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "llama_cpp"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"quant_method": "llama.cpp",
|
417 |
+
"ftype": "*Q4_0.gguf"
|
418 |
+
},
|
419 |
+
"versions": {
|
420 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
421 |
+
"harness|lambada:openai|0": 1.0,
|
422 |
+
"harness|arc:challenge|0": 1.0,
|
423 |
+
"harness|openbookqa|0": 1.0,
|
424 |
+
"harness|hellaswag|0": 1.0,
|
425 |
+
"harness|boolq|0": 2.0,
|
426 |
+
"harness|mmlu|0": null,
|
427 |
+
"harness|mmlu_humanities|0": null,
|
428 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
429 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
430 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
431 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
432 |
+
"harness|mmlu_international_law|0": 0.0,
|
433 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
434 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
435 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
436 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
437 |
+
"harness|mmlu_philosophy|0": 0.0,
|
438 |
+
"harness|mmlu_prehistory|0": 0.0,
|
439 |
+
"harness|mmlu_professional_law|0": 0.0,
|
440 |
+
"harness|mmlu_world_religions|0": 0.0,
|
441 |
+
"harness|mmlu_other|0": null,
|
442 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
443 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
444 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
445 |
+
"harness|mmlu_global_facts|0": 0.0,
|
446 |
+
"harness|mmlu_human_aging|0": 0.0,
|
447 |
+
"harness|mmlu_management|0": 0.0,
|
448 |
+
"harness|mmlu_marketing|0": 0.0,
|
449 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
450 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
451 |
+
"harness|mmlu_nutrition|0": 0.0,
|
452 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
453 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
454 |
+
"harness|mmlu_virology|0": 0.0,
|
455 |
+
"harness|mmlu_social_sciences|0": null,
|
456 |
+
"harness|mmlu_econometrics|0": 0.0,
|
457 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
458 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
459 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
460 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
461 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
462 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
463 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
464 |
+
"harness|mmlu_public_relations|0": 0.0,
|
465 |
+
"harness|mmlu_security_studies|0": 0.0,
|
466 |
+
"harness|mmlu_sociology|0": 0.0,
|
467 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
468 |
+
"harness|mmlu_stem|0": null,
|
469 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
470 |
+
"harness|mmlu_anatomy|0": 0.0,
|
471 |
+
"harness|mmlu_astronomy|0": 0.0,
|
472 |
+
"harness|mmlu_college_biology|0": 0.0,
|
473 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
474 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
475 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
476 |
+
"harness|mmlu_college_physics|0": 0.0,
|
477 |
+
"harness|mmlu_computer_security|0": 0.0,
|
478 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
479 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
480 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
481 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
482 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
483 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
484 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
485 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
486 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
487 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
488 |
+
"harness|arc:easy|0": 1.0,
|
489 |
+
"harness|winogrande|0": 1.0,
|
490 |
+
"harness|piqa|0": 1.0,
|
491 |
+
"harness|truthfulqa:mc2|0": 2.0
|
492 |
+
},
|
493 |
+
"n-shot": {
|
494 |
+
"arc_challenge": 0,
|
495 |
+
"arc_easy": 0,
|
496 |
+
"boolq": 0,
|
497 |
+
"hellaswag": 0,
|
498 |
+
"lambada_openai": 0,
|
499 |
+
"mmlu": 0,
|
500 |
+
"mmlu_abstract_algebra": 0,
|
501 |
+
"mmlu_anatomy": 0,
|
502 |
+
"mmlu_astronomy": 0,
|
503 |
+
"mmlu_business_ethics": 0,
|
504 |
+
"mmlu_clinical_knowledge": 0,
|
505 |
+
"mmlu_college_biology": 0,
|
506 |
+
"mmlu_college_chemistry": 0,
|
507 |
+
"mmlu_college_computer_science": 0,
|
508 |
+
"mmlu_college_mathematics": 0,
|
509 |
+
"mmlu_college_medicine": 0,
|
510 |
+
"mmlu_college_physics": 0,
|
511 |
+
"mmlu_computer_security": 0,
|
512 |
+
"mmlu_conceptual_physics": 0,
|
513 |
+
"mmlu_econometrics": 0,
|
514 |
+
"mmlu_electrical_engineering": 0,
|
515 |
+
"mmlu_elementary_mathematics": 0,
|
516 |
+
"mmlu_formal_logic": 0,
|
517 |
+
"mmlu_global_facts": 0,
|
518 |
+
"mmlu_high_school_biology": 0,
|
519 |
+
"mmlu_high_school_chemistry": 0,
|
520 |
+
"mmlu_high_school_computer_science": 0,
|
521 |
+
"mmlu_high_school_european_history": 0,
|
522 |
+
"mmlu_high_school_geography": 0,
|
523 |
+
"mmlu_high_school_government_and_politics": 0,
|
524 |
+
"mmlu_high_school_macroeconomics": 0,
|
525 |
+
"mmlu_high_school_mathematics": 0,
|
526 |
+
"mmlu_high_school_microeconomics": 0,
|
527 |
+
"mmlu_high_school_physics": 0,
|
528 |
+
"mmlu_high_school_psychology": 0,
|
529 |
+
"mmlu_high_school_statistics": 0,
|
530 |
+
"mmlu_high_school_us_history": 0,
|
531 |
+
"mmlu_high_school_world_history": 0,
|
532 |
+
"mmlu_human_aging": 0,
|
533 |
+
"mmlu_human_sexuality": 0,
|
534 |
+
"mmlu_humanities": 0,
|
535 |
+
"mmlu_international_law": 0,
|
536 |
+
"mmlu_jurisprudence": 0,
|
537 |
+
"mmlu_logical_fallacies": 0,
|
538 |
+
"mmlu_machine_learning": 0,
|
539 |
+
"mmlu_management": 0,
|
540 |
+
"mmlu_marketing": 0,
|
541 |
+
"mmlu_medical_genetics": 0,
|
542 |
+
"mmlu_miscellaneous": 0,
|
543 |
+
"mmlu_moral_disputes": 0,
|
544 |
+
"mmlu_moral_scenarios": 0,
|
545 |
+
"mmlu_nutrition": 0,
|
546 |
+
"mmlu_other": 0,
|
547 |
+
"mmlu_philosophy": 0,
|
548 |
+
"mmlu_prehistory": 0,
|
549 |
+
"mmlu_professional_accounting": 0,
|
550 |
+
"mmlu_professional_law": 0,
|
551 |
+
"mmlu_professional_medicine": 0,
|
552 |
+
"mmlu_professional_psychology": 0,
|
553 |
+
"mmlu_public_relations": 0,
|
554 |
+
"mmlu_security_studies": 0,
|
555 |
+
"mmlu_social_sciences": 0,
|
556 |
+
"mmlu_sociology": 0,
|
557 |
+
"mmlu_stem": 0,
|
558 |
+
"mmlu_us_foreign_policy": 0,
|
559 |
+
"mmlu_virology": 0,
|
560 |
+
"mmlu_world_religions": 0,
|
561 |
+
"openbookqa": 0,
|
562 |
+
"piqa": 0,
|
563 |
+
"truthfulqa_mc1": 0,
|
564 |
+
"truthfulqa_mc2": 0,
|
565 |
+
"winogrande": 0
|
566 |
+
},
|
567 |
+
"date": 1714179285.517894,
|
568 |
+
"config": {
|
569 |
+
"model": "WrapperGGUFLM",
|
570 |
+
"model_args": "gguf_model=TheBloke/Llama-2-7B-Chat-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
|
571 |
+
"batch_size": 1,
|
572 |
+
"batch_sizes": [],
|
573 |
+
"device": "cuda",
|
574 |
+
"use_cache": null,
|
575 |
+
"limit": null,
|
576 |
+
"bootstrap_iters": 100000,
|
577 |
+
"gen_kwargs": null
|
578 |
+
}
|
579 |
+
}
|
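Every results file added in this commit shares the same lm-evaluation-harness layout: "config_general" (model metadata), "results" (per-task metrics such as "acc,none" and "acc_stderr,none"), "versions", "n-shot", and the run "config". A minimal sketch for pulling the headline accuracies out of one of these files; the path is one of the files added below, and the key names assume the layout shown above:

    import json

    # Minimal sketch: load one of the results files added in this commit
    # and print each task's zero-shot accuracy. The path is illustrative.
    with open("TheBloke/results_2024-04-29-01-54-05.json") as f:
        report = json.load(f)

    for task, metrics in report["results"].items():
        # Per-task entries are dicts; MMLU category rollups are included too.
        if isinstance(metrics, dict) and "acc,none" in metrics:
            print(f"{task}: {metrics['acc,none']:.4f}")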
TheBloke/results_2024-04-29-01-54-05.json
ADDED
@@ -0,0 +1,583 @@
{
    "config_general": {
        "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null,
        "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null,
        "end_time": "2024-04-29-01-54-05", "total_evaluation_time_secondes": "",
        "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-AWQ", "model_sha": "", "model_dtype": "4bit",
        "model_size": 4.15, "model_params": 7.03, "quant_type": "AWQ", "precision": "4bit"
    },
    "results": {
        "harness|truthfulqa:mc2|0": { "acc,none": 0.670141078497724, "acc_stderr,none": 0.01521788784865362, "alias": "truthfulqa_mc2" },
        "harness|truthfulqa:mc1|0": { "acc,none": 0.5091799265605875, "acc_stderr,none": 0.017500550724819753, "alias": "truthfulqa_mc1" },
        "harness|mmlu|0": { "acc,none": 0.5812562313060817, "acc_stderr,none": 0.003955803368572514, "alias": "mmlu" },
        "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.5298618490967056, "acc_stderr,none": 0.006858786328047652 },
        "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.3492063492063492, "acc_stderr,none": 0.04263906892795132 },
        "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.7333333333333333, "acc_stderr,none": 0.03453131801885417 },
        "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7696078431372549, "acc_stderr,none": 0.029554292605695053 },
        "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7679324894514767, "acc_stderr,none": 0.02747974455080852 },
        "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.743801652892562, "acc_stderr,none": 0.03984979653302872 },
        "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7037037037037037, "acc_stderr,none": 0.04414343666854933 },
        "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.7423312883435583, "acc_stderr,none": 0.03436150827846917 },
        "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.6502890173410405, "acc_stderr,none": 0.025674281456531025 },
        "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.33519553072625696, "acc_stderr,none": 0.015788007190185884 },
        "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.639871382636656, "acc_stderr,none": 0.027264297599804012 },
        "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.6574074074074074, "acc_stderr,none": 0.026406145973625655 },
        "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.4106910039113429, "acc_stderr,none": 0.012564871542534353 },
        "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.7894736842105263, "acc_stderr,none": 0.031267817146631786 },
        "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.652075957515288, "acc_stderr,none": 0.008252496734366984 },
        "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.63, "acc_stderr,none": 0.04852365870939098 },
        "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6566037735849056, "acc_stderr,none": 0.02922452646912479 },
        "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.5549132947976878, "acc_stderr,none": 0.03789401760283647 },
        "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.38, "acc_stderr,none": 0.048783173121456316 },
        "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.5919282511210763, "acc_stderr,none": 0.03298574607842821 },
        "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7184466019417476, "acc_stderr,none": 0.04453254836326466 },
        "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8675213675213675, "acc_stderr,none": 0.022209309073165616 },
        "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.62, "acc_stderr,none": 0.04878317312145633 },
        "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.7752234993614304, "acc_stderr,none": 0.014927447101937157 },
        "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.02699254433929723 },
        "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.46099290780141844, "acc_stderr,none": 0.02973659252642444 },
        "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6139705882352942, "acc_stderr,none": 0.029573269134411127 },
        "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4578313253012048, "acc_stderr,none": 0.0387862677100236 },
        "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.6733831654208645, "acc_stderr,none": 0.008218778690022905 },
        "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.38596491228070173, "acc_stderr,none": 0.045796394220704334 },
        "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.7121212121212122, "acc_stderr,none": 0.03225883512300993 },
        "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.7979274611398963, "acc_stderr,none": 0.028979089794296732 },
        "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.5538461538461539, "acc_stderr,none": 0.02520357177302833 },
        "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6512605042016807, "acc_stderr,none": 0.030956636328566548 },
        "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7889908256880734, "acc_stderr,none": 0.01749392240411265 },
        "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.6870229007633588, "acc_stderr,none": 0.04066962905677697 },
        "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.5784313725490197, "acc_stderr,none": 0.01997742260022747 },
        "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6545454545454545, "acc_stderr,none": 0.04554619617541054 },
        "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.6653061224489796, "acc_stderr,none": 0.030209235226242307 },
        "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.835820895522388, "acc_stderr,none": 0.02619392354445413 },
        "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.85, "acc_stderr,none": 0.03588702812826371 },
        "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.49825562955915004, "acc_stderr,none": 0.008669172503365446 },
        "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.29, "acc_stderr,none": 0.045604802157206845 },
        "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5925925925925926, "acc_stderr,none": 0.042446332383532286 },
        "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.625, "acc_stderr,none": 0.039397364351956274 },
        "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.6736111111111112, "acc_stderr,none": 0.03921067198982266 },
        "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.41, "acc_stderr,none": 0.049431107042371025 },
        "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.59, "acc_stderr,none": 0.04943110704237102 },
        "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.37, "acc_stderr,none": 0.048523658709391 },
        "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.4019607843137255, "acc_stderr,none": 0.04878608714466996 },
        "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.68, "acc_stderr,none": 0.046882617226215034 },
        "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.48936170212765956, "acc_stderr,none": 0.03267862331014063 },
        "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5172413793103449, "acc_stderr,none": 0.04164188720169375 },
        "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.41534391534391535, "acc_stderr,none": 0.02537952491077841 },
        "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6903225806451613, "acc_stderr,none": 0.026302774983517418 },
        "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.5024630541871922, "acc_stderr,none": 0.035179450386910616 },
        "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.6, "acc_stderr,none": 0.049236596391733084 },
        "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.028742040903948496 },
        "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3576158940397351, "acc_stderr,none": 0.03913453431177258 },
        "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.4675925925925926, "acc_stderr,none": 0.03402801581358966 },
        "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5, "acc_stderr,none": 0.04745789978762494 },
        "harness|hellaswag|0": { "acc,none": 0.6559450308703445, "acc_stderr,none": 0.004740882120999965, "acc_norm,none": 0.8328022306313483, "acc_norm_stderr,none": 0.003723897305645456, "alias": "hellaswag" },
        "harness|openbookqa|0": { "acc,none": 0.342, "acc_stderr,none": 0.02123614719989926, "acc_norm,none": 0.454, "acc_norm_stderr,none": 0.022288147591176945, "alias": "openbookqa" },
        "harness|boolq|0": { "acc,none": 0.8510703363914373, "acc_stderr,none": 0.006226813679382003, "alias": "boolq" },
        "harness|piqa|0": { "acc,none": 0.7986942328618063, "acc_stderr,none": 0.00935543109899043, "acc_norm,none": 0.8057671381936888, "acc_norm_stderr,none": 0.009230209366168259, "alias": "piqa" },
        "harness|winogrande|0": { "acc,none": 0.7411207576953434, "acc_stderr,none": 0.012310515810993376, "alias": "winogrande" },
        "harness|arc:challenge|0": { "acc,none": 0.537542662116041, "acc_stderr,none": 0.01457014449507558, "acc_norm,none": 0.5597269624573379, "acc_norm_stderr,none": 0.014506769524804243, "alias": "arc_challenge" },
        "harness|lambada:openai|0": { "perplexity,none": 3.5114616007527477, "perplexity_stderr,none": 0.07624527890354445, "acc,none": 0.7098777411216767, "acc_stderr,none": 0.006322580641394919, "alias": "lambada_openai" },
        "harness|arc:easy|0": { "acc,none": 0.8042929292929293, "acc_stderr,none": 0.008141015407566888, "acc_norm,none": 0.7571548821548821, "acc_norm_stderr,none": 0.008798836444222035, "alias": "arc_easy" }
    },
    "task_info": {
        "model": "TheBloke/Mistral-7B-Instruct-v0.2-AWQ", "revision": "main", "private": false,
        "params": 4.784, "architectures": "MistralForCausalLM", "quant_type": "AWQ", "precision": "4bit",
        "model_params": 9.568, "model_size": 4.784, "weight_dtype": "int4", "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending",
        "submitted_time": "2024-04-28T08:53:22Z", "model_type": "quantization", "job_id": -1,
        "job_start_time": null, "scripts": "ITREX"
    },
    "quantization_config": {
        "bits": 4, "group_size": 128, "modules_to_not_convert": [], "quant_method": "awq",
        "version": "gemm", "zero_point": true
    },
    "versions": {
        "harness|truthfulqa:mc2|0": 2.0, "harness|truthfulqa:mc1|0": 2.0, "harness|mmlu|0": null,
        "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0, "harness|hellaswag|0": 1.0, "harness|openbookqa|0": 1.0,
        "harness|boolq|0": 2.0, "harness|piqa|0": 1.0, "harness|winogrande|0": 1.0,
        "harness|arc:challenge|0": 1.0, "harness|lambada:openai|0": 1.0, "harness|arc:easy|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0,
        "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0,
        "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0,
        "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0,
        "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0,
        "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714303185.1043832,
    "config": {
        "model": "hf",
        "model_args": "pretrained=TheBloke/Mistral-7B-Instruct-v0.2-AWQ,dtype=float16,_commit_hash=main",
        "batch_size": 2, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null,
        "bootstrap_iters": 100000, "gen_kwargs": null
    }
}
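The run above loads the AWQ checkpoint through plain transformers ("model": "hf" in its config), with the "quantization_config" (4-bit weights, group size 128, GEMM kernels, zero point) read from the checkpoint itself. A hedged sketch of how such a checkpoint is typically loaded; recent transformers versions with the autoawq package installed pick up the embedded quantization settings automatically:

    from transformers import AutoModelForCausalLM, AutoTokenizer

    # Sketch, not the evaluation harness itself: the checkpoint's embedded
    # quantization_config (bits=4, group_size=128, version="gemm",
    # zero_point=True) is applied on load; requires autoawq installed.
    model_id = "TheBloke/Mistral-7B-Instruct-v0.2-AWQ"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="cuda")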
TheBloke/results_2024-04-29-22-05-21.json
ADDED
@@ -0,0 +1,579 @@
{
    "config_general": {
        "lighteval_sha": "no", "num_few_shot_default": null, "num_fewshot_seeds": null,
        "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null,
        "end_time": "2024-04-29-22-05-21", "total_evaluation_time_secondes": "",
        "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "model_sha": "", "model_dtype": "4bit",
        "model_size": 4.108181504, "model_params": 7.241732096, "quant_type": "llama.cpp", "precision": "4bit"
    },
    "results": {
        "harness|winogrande|0": { "acc,none": 0.734017363851618, "acc_stderr,none": 0.012418323153051046, "alias": "winogrande" },
        "harness|piqa|0": { "acc,none": 0.7758433079434167, "acc_stderr,none": 0.009729897956410032, "acc_norm,none": 0.7845484221980413, "acc_norm_stderr,none": 0.00959246311565809, "alias": "piqa" },
        "harness|truthfulqa:mc2|0": { "acc,none": 0.6784488430446446, "acc_stderr,none": 0.015160742121748417, "alias": "truthfulqa_mc2" },
        "harness|hellaswag|0": { "acc,none": 0.668990240987851, "acc_stderr,none": 0.004696148339570966, "acc_norm,none": 0.8399721171081458, "acc_norm_stderr,none": 0.0036588262081015126, "alias": "hellaswag" },
        "harness|openbookqa|0": { "acc,none": 0.388, "acc_stderr,none": 0.021814300984787635, "acc_norm,none": 0.476, "acc_norm_stderr,none": 0.0223572738810164, "alias": "openbookqa" },
        "harness|truthfulqa:mc1|0": { "acc,none": 0.5312117503059975, "acc_stderr,none": 0.017469364874577547, "alias": "truthfulqa_mc1" },
        "harness|lambada:openai|0": { "perplexity,none": 4.439210478325373, "perplexity_stderr,none": 0.10946463394642139, "acc,none": 0.5010673394139337, "acc_stderr,none": 0.006965961785703057, "alias": "lambada_openai" },
        "harness|arc:easy|0": { "acc,none": 0.7790404040404041, "acc_stderr,none": 0.00851343094701945, "acc_norm,none": 0.7058080808080808, "acc_norm_stderr,none": 0.009350328648861737, "alias": "arc_easy" },
        "harness|boolq|0": { "acc,none": 0.854434250764526, "acc_stderr,none": 0.00616824285431075, "alias": "boolq" },
        "harness|mmlu|0": { "acc,none": 0.5844609030052699, "acc_stderr,none": 0.003963750018814539, "alias": "mmlu" },
        "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.5430393198724761, "acc_stderr,none": 0.0068896056546608395 },
        "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.3888888888888889, "acc_stderr,none": 0.04360314860077459 },
        "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.7151515151515152, "acc_stderr,none": 0.03524390844511781 },
        "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7843137254901961, "acc_stderr,none": 0.028867431449849303 },
        "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7468354430379747, "acc_stderr,none": 0.028304657943035293 },
        "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7355371900826446, "acc_stderr,none": 0.040261875275912046 },
        "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.7037037037037037, "acc_stderr,none": 0.04414343666854933 },
        "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.7423312883435583, "acc_stderr,none": 0.03436150827846917 },
        "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.6589595375722543, "acc_stderr,none": 0.025522474632121615 },
        "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.35195530726256985, "acc_stderr,none": 0.015972668523689063 },
        "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.6559485530546624, "acc_stderr,none": 0.026981478043648033 },
        "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.6635802469135802, "acc_stderr,none": 0.026289734945952922 },
        "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.4335071707953064, "acc_stderr,none": 0.012656810383983967 },
        "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.8070175438596491, "acc_stderr,none": 0.030267457554898465 },
        "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.6530415191503057, "acc_stderr,none": 0.008275074231324661 },
        "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.61, "acc_stderr,none": 0.04902071300001974 },
        "harness|mmlu_clinical_knowledge|0": { "alias": " - clinical_knowledge", "acc,none": 0.6452830188679245, "acc_stderr,none": 0.02944517532819958 },
        "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.5433526011560693, "acc_stderr,none": 0.03798106566014498 },
        "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.35, "acc_stderr,none": 0.047937248544110196 },
        "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6233183856502242, "acc_stderr,none": 0.032521134899291884 },
        "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.7378640776699029, "acc_stderr,none": 0.04354631077260595 },
        "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.8504273504273504, "acc_stderr,none": 0.02336505149175372 },
        "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.61, "acc_stderr,none": 0.04902071300001975 },
        "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.768837803320562, "acc_stderr,none": 0.015075523238101086 },
        "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6405228758169934, "acc_stderr,none": 0.027475969910660952 },
        "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.48226950354609927, "acc_stderr,none": 0.02980873964223777 },
        "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.6617647058823529, "acc_stderr,none": 0.028739328513983572 },
        "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4759036144578313, "acc_stderr,none": 0.03887971849597264 },
        "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.6746831329216769, "acc_stderr,none": 0.008223038429253208 },
        "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.4824561403508772, "acc_stderr,none": 0.04700708033551038 },
        "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.7373737373737373, "acc_stderr,none": 0.03135305009533084 },
        "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.7979274611398963, "acc_stderr,none": 0.028979089794296732 },
        "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.5461538461538461, "acc_stderr,none": 0.025242770987126184 },
        "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.6134453781512605, "acc_stderr,none": 0.03163145807552378 },
        "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7834862385321101, "acc_stderr,none": 0.017658710594443152 },
        "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.6793893129770993, "acc_stderr,none": 0.04093329229834278 },
        "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.5686274509803921, "acc_stderr,none": 0.02003639376835264 },
        "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.7, "acc_stderr,none": 0.04389311454644287 },
        "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.6775510204081633, "acc_stderr,none": 0.029923100563683903 },
        "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.8407960199004975, "acc_stderr,none": 0.02587064676616913 },
        "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.86, "acc_stderr,none": 0.03487350880197768 },
        "harness|mmlu_stem|0": { "alias": " - stem", "acc,none": 0.4906438312718046, "acc_stderr,none": 0.008661995829603667 },
        "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.27, "acc_stderr,none": 0.044619604333847415 },
        "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5407407407407407, "acc_stderr,none": 0.04304979692464242 },
        "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.625, "acc_stderr,none": 0.039397364351956274 },
        "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.6319444444444444, "acc_stderr,none": 0.04032999053960719 },
        "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.36, "acc_stderr,none": 0.04824181513244218 },
        "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.58, "acc_stderr,none": 0.049604496374885836 },
        "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.35, "acc_stderr,none": 0.047937248544110175 },
        "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.35294117647058826, "acc_stderr,none": 0.047551296160629475 },
        "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.67, "acc_stderr,none": 0.04725815626252609 },
        "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.5106382978723404, "acc_stderr,none": 0.03267862331014063 },
        "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.5517241379310345, "acc_stderr,none": 0.04144311810878152 },
        "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.4074074074074074, "acc_stderr,none": 0.025305906241590636 },
        "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6903225806451613, "acc_stderr,none": 0.026302774983517418 },
        "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.4729064039408867, "acc_stderr,none": 0.035128190778761066 },
        "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.61, "acc_stderr,none": 0.04902071300001974 },
        "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.028742040903948492 },
        "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.3708609271523179, "acc_stderr,none": 0.03943966699183629 },
        "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.4675925925925926, "acc_stderr,none": 0.03402801581358966 },
        "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.5089285714285714, "acc_stderr,none": 0.04745033255489123 },
        "harness|arc:challenge|0": { "acc,none": 0.5349829351535836, "acc_stderr,none": 0.014575583922019663, "acc_norm,none": 0.5503412969283277, "acc_norm_stderr,none": 0.014537144444284736, "alias": "arc_challenge" }
    },
    "task_info": {
        "model": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "revision": "main", "private": false,
        "params": 28.0, "architectures": "?", "quant_type": "llama.cpp", "precision": "4bit",
        "model_params": 56.0, "model_size": 28.0, "weight_dtype": "int4", "compute_dtype": "float16",
        "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending",
        "submitted_time": "2024-04-28T09:07:10Z", "model_type": "quantization", "job_id": -1,
        "job_start_time": null, "scripts": "llama_cpp"
    },
    "quantization_config": { "quant_method": "llama.cpp", "ftype": "*Q4_0.gguf" },
    "versions": {
        "harness|winogrande|0": 1.0, "harness|piqa|0": 1.0, "harness|truthfulqa:mc2|0": 2.0,
        "harness|hellaswag|0": 1.0, "harness|openbookqa|0": 1.0, "harness|truthfulqa:mc1|0": 2.0,
        "harness|lambada:openai|0": 1.0, "harness|arc:easy|0": 1.0, "harness|boolq|0": 2.0,
        "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0,
        "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0,
        "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0,
        "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0,
        "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0,
        "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0,
        "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0,
        "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0,
        "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0,
        "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0,
        "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0,
        "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0,
        "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0,
        "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0,
        "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0,
        "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0,
        "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0,
        "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0,
        "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0,
        "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0,
        "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null,
        "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0,
        "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0,
        "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0,
        "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0,
        "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0,
        "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0,
        "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 0.0,
        "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0,
        "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0,
        "harness|mmlu_machine_learning|0": 0.0, "harness|arc:challenge|0": 1.0
    },
    "n-shot": {
        "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0,
        "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0,
        "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0,
        "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0,
        "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0,
        "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0,
        "winogrande": 0
    },
    "date": 1714355029.9857757,
    "config": {
        "model": "WrapperGGUFLM",
        "model_args": "gguf_model=TheBloke/Mistral-7B-Instruct-v0.2-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
        "batch_size": 1, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null,
        "bootstrap_iters": 100000, "gen_kwargs": null
    }
}
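This GGUF run goes through the WrapperGGUFLM adapter with the ftype glob "*Q4_0.gguf" selecting the 4-bit file from the repo. A hedged sketch of loading the same quantized file with llama-cpp-python (not the wrapper used here; Llama.from_pretrained also needs huggingface-hub installed):

    from llama_cpp import Llama

    # Fetch and load the Q4_0 file matching the same glob as the eval config.
    llm = Llama.from_pretrained(
        repo_id="TheBloke/Mistral-7B-Instruct-v0.2-GGUF",
        filename="*Q4_0.gguf",
        n_ctx=2048,
    )
    out = llm("Q: What does Q4_0 mean? A:", max_tokens=48)
    print(out["choices"][0]["text"])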
TheBloke/results_2024-05-02-16-52-29.json
ADDED
@@ -0,0 +1,582 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-02-16-52-29",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.96,
+        "model_params": 10.55,
+        "quant_type": "AWQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5981228668941979,
+            "acc_stderr,none": 0.014327268614578274,
+            "acc_norm,none": 0.6322525597269625,
+            "acc_norm_stderr,none": 0.014090995618168473,
+            "alias": "arc_challenge"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.374,
+            "acc_stderr,none": 0.021660710347204487,
+            "acc_norm,none": 0.494,
+            "acc_norm_stderr,none": 0.022381462412439324,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5471236230110159,
+            "acc_stderr,none": 0.01742558984831402,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.8112078346028292,
+            "acc_stderr,none": 0.009130687388952816,
+            "acc_norm,none": 0.8144722524483133,
+            "acc_norm_stderr,none": 0.009069597302603996,
+            "alias": "piqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8798165137614679,
+            "acc_stderr,none": 0.005687363587870172,
+            "alias": "boolq"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8308080808080808,
+            "acc_stderr,none": 0.007693223639488826,
+            "acc_norm,none": 0.8101851851851852,
+            "acc_norm_stderr,none": 0.008046840527852234,
+            "alias": "arc_easy"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.185362930040927,
+            "perplexity_stderr,none": 0.07406409479719334,
+            "acc,none": 0.7279254803027363,
+            "acc_stderr,none": 0.006200111064998447,
+            "alias": "lambada_openai"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6232730380287709,
+            "acc_stderr,none": 0.0038533964574598407,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5727948990435706,
+            "acc_stderr,none": 0.006732570609347105
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.373015873015873,
+            "acc_stderr,none": 0.04325506042017086
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.806060606060606,
+            "acc_stderr,none": 0.030874145136562097
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8333333333333334,
+            "acc_stderr,none": 0.026156867523931055
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8270042194092827,
+            "acc_stderr,none": 0.024621562866768445
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.8264462809917356,
+            "acc_stderr,none": 0.0345727283691767
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7685185185185185,
+            "acc_stderr,none": 0.04077494709252626
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6993865030674846,
+            "acc_stderr,none": 0.03602511318806771
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.7023121387283237,
+            "acc_stderr,none": 0.024617055388676982
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.3307262569832402,
+            "acc_stderr,none": 0.01573502625896612
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6945337620578779,
+            "acc_stderr,none": 0.026160584450140453
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7438271604938271,
+            "acc_stderr,none": 0.0242885336377261
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.47392438070404175,
+            "acc_stderr,none": 0.012752858346533134
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7543859649122807,
+            "acc_stderr,none": 0.033014059469872487
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.7016414547795301,
+            "acc_stderr,none": 0.007934723097613417
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6867924528301886,
+            "acc_stderr,none": 0.028544793319055326
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6473988439306358,
+            "acc_stderr,none": 0.036430371689585475
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.695067264573991,
+            "acc_stderr,none": 0.030898610882477515
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8155339805825242,
+            "acc_stderr,none": 0.03840423627288276
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8547008547008547,
+            "acc_stderr,none": 0.023086635086841407
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.73,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.80970625798212,
+            "acc_stderr,none": 0.01403694585038138
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7156862745098039,
+            "acc_stderr,none": 0.025829163272757468
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5035460992907801,
+            "acc_stderr,none": 0.02982674915328092
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.7022058823529411,
+            "acc_stderr,none": 0.027778298701545443
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5180722891566265,
+            "acc_stderr,none": 0.038899512528272166
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7237569060773481,
+            "acc_stderr,none": 0.007875459074235897
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.47368421052631576,
+            "acc_stderr,none": 0.046970851366478626
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.803030303030303,
+            "acc_stderr,none": 0.02833560973246336
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8808290155440415,
+            "acc_stderr,none": 0.02338193534812142
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6358974358974359,
+            "acc_stderr,none": 0.024396672985094767
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6764705882352942,
+            "acc_stderr,none": 0.03038835355188678
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8201834862385321,
+            "acc_stderr,none": 0.016465345467391534
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7480916030534351,
+            "acc_stderr,none": 0.038073871163060866
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6519607843137255,
+            "acc_stderr,none": 0.01927099870822398
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6272727272727273,
+            "acc_stderr,none": 0.04631381319425465
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6938775510204082,
+            "acc_stderr,none": 0.02950489645459596
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8208955223880597,
+            "acc_stderr,none": 0.027113286753111837
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.87,
+            "acc_stderr,none": 0.03379976689896309
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5233111322549953,
+            "acc_stderr,none": 0.008565620869325358
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5851851851851851,
+            "acc_stderr,none": 0.04256193767901408
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7039473684210527,
+            "acc_stderr,none": 0.037150621549989056
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7291666666666666,
+            "acc_stderr,none": 0.037161774375660164
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.050251890762960605
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.04560480215720684
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.38235294117647056,
+            "acc_stderr,none": 0.04835503696107224
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.71,
+            "acc_stderr,none": 0.045604802157206845
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5617021276595745,
+            "acc_stderr,none": 0.03243618636108101
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5379310344827586,
+            "acc_stderr,none": 0.04154659671707548
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.4470899470899471,
+            "acc_stderr,none": 0.025606723995777025
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7677419354838709,
+            "acc_stderr,none": 0.02402225613030824
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.458128078817734,
+            "acc_stderr,none": 0.03505630140785741
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.048783173121456316
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.362962962962963,
+            "acc_stderr,none": 0.02931820364520686
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.3509933774834437,
+            "acc_stderr,none": 0.03896981964257374
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5370370370370371,
+            "acc_stderr,none": 0.03400603625538271
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.04745789978762494
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.7003691298306434,
+            "acc_stderr,none": 0.015084794104413395,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7458563535911602,
+            "acc_stderr,none": 0.012236307219708278,
+            "alias": "winogrande"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6806413065126469,
+            "acc_stderr,none": 0.004652753439460115,
+            "acc_norm,none": 0.8593905596494722,
+            "acc_norm_stderr,none": 0.0034690778470563856,
+            "alias": "hellaswag"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ",
+        "revision": "main",
+        "private": false,
+        "params": 6.652,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "AWQ",
+        "precision": "4bit",
+        "model_params": 13.304,
+        "model_size": 6.652,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-01T16:10:00Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "quant_method": "awq",
+        "version": "gemm",
+        "zero_point": true
+    },
+    "versions": {
+        "harness|arc:challenge|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|piqa|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|hellaswag|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714605116.6428869,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=TheBloke/SOLAR-10.7B-Instruct-v1.0-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
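Each results file in this commit follows the same layout: run metadata in "config_general", per-task scores in "results" keyed as "harness|<task>|<n_shot>", then "task_info", "quantization_config", "versions", "n-shot", a Unix "date", and the harness "config". As a minimal sketch of how such a file might be consumed (the local path is hypothetical; the keys are taken from the file above):

import json

# Hypothetical local copy of the results file added above.
path = "TheBloke/results_2024-05-02-16-52-29.json"

with open(path) as f:
    data = json.load(f)

meta = data["config_general"]
print(meta["model_name"], meta["quant_type"], meta["precision"])

# "results" keys look like "harness|arc:challenge|0"; most tasks report
# "acc,none", and lambada_openai additionally reports "perplexity,none".
for key, metrics in sorted(data["results"].items()):
    task = key.split("|")[1]
    if "acc,none" in metrics:
        print(f"{task}: acc={metrics['acc,none']:.4f}")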
TheBloke/results_2024-05-03-08-18-06.json
ADDED
@@ -0,0 +1,579 @@
+{
+    "config_general": {
+        "lighteval_sha": "no",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-03-08-18-06",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 6.071640064,
+        "model_params": 10.731524096,
+        "quant_type": "llama.cpp",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|piqa|0": {
+            "acc,none": 0.823177366702938,
+            "acc_stderr,none": 0.008901456201658638,
+            "acc_norm,none": 0.8264417845484222,
+            "acc_norm_stderr,none": 0.008836375101386918,
+            "alias": "piqa"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7592738752959748,
+            "acc_stderr,none": 0.012015559212224188,
+            "alias": "winogrande"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.9791798149200446,
+            "perplexity_stderr,none": 0.09951611112279504,
+            "acc,none": 0.524160683097225,
+            "acc_stderr,none": 0.006957840284118759,
+            "alias": "lambada_openai"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.372,
+            "acc_stderr,none": 0.0216371979857224,
+            "acc_norm,none": 0.49,
+            "acc_norm_stderr,none": 0.02237859698923078,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5630354957160343,
+            "acc_stderr,none": 0.017363844503195974,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6204244409628258,
+            "acc_stderr,none": 0.0038773551606319846,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5615302869287991,
+            "acc_stderr,none": 0.006910974026687457
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.4365079365079365,
+            "acc_stderr,none": 0.04435932892851466
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6060606060606061,
+            "acc_stderr,none": 0.0381549430868893
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.6127450980392157,
+            "acc_stderr,none": 0.03418931233833344
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.6919831223628692,
+            "acc_stderr,none": 0.030052389335605695
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.8016528925619835,
+            "acc_stderr,none": 0.03640118271990947
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7685185185185185,
+            "acc_stderr,none": 0.04077494709252626
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7361963190184049,
+            "acc_stderr,none": 0.03462419931615624
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.7196531791907514,
+            "acc_stderr,none": 0.024182427496577605
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.358659217877095,
+            "acc_stderr,none": 0.016040454426164478
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6784565916398714,
+            "acc_stderr,none": 0.026527724079528872
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7469135802469136,
+            "acc_stderr,none": 0.024191808600713002
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.4810951760104302,
+            "acc_stderr,none": 0.012761104871472652
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.8011695906432749,
+            "acc_stderr,none": 0.03061111655743253
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.7064692629546186,
+            "acc_stderr,none": 0.007895000819957698
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.65,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6867924528301886,
+            "acc_stderr,none": 0.02854479331905533
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6473988439306358,
+            "acc_stderr,none": 0.036430371689585475
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.04793724854411019
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.7040358744394619,
+            "acc_stderr,none": 0.0306365913486998
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7864077669902912,
+            "acc_stderr,none": 0.040580420156460344
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8760683760683761,
+            "acc_stderr,none": 0.021586494001281382
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.73,
+            "acc_stderr,none": 0.044619604333847394
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.8199233716475096,
+            "acc_stderr,none": 0.013740797258579823
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7091503267973857,
+            "acc_stderr,none": 0.02600480036395213
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.524822695035461,
+            "acc_stderr,none": 0.02979071924382972
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6911764705882353,
+            "acc_stderr,none": 0.028064998167040094
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5421686746987951,
+            "acc_stderr,none": 0.038786267710023595
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7299317517062074,
+            "acc_stderr,none": 0.007807463005859416
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.5087719298245614,
+            "acc_stderr,none": 0.047028804320496165
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.797979797979798,
+            "acc_stderr,none": 0.028606204289229893
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8652849740932642,
+            "acc_stderr,none": 0.024639789097709443
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6538461538461539,
+            "acc_stderr,none": 0.024121125416941187
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.7058823529411765,
+            "acc_stderr,none": 0.02959732973097809
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8422018348623853,
+            "acc_stderr,none": 0.015630022970092455
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7175572519083969,
+            "acc_stderr,none": 0.03948406125768361
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6339869281045751,
+            "acc_stderr,none": 0.019488025745529665
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6636363636363637,
+            "acc_stderr,none": 0.04525393596302505
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6775510204081633,
+            "acc_stderr,none": 0.029923100563683903
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.845771144278607,
+            "acc_stderr,none": 0.025538433368578348
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.9,
+            "acc_stderr,none": 0.03015113445777634
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5166508087535681,
+            "acc_stderr,none": 0.008558572662836687
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.04793724854411021
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5703703703703704,
+            "acc_stderr,none": 0.04276349494376599
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7236842105263158,
+            "acc_stderr,none": 0.03639057569952929
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7222222222222222,
+            "acc_stderr,none": 0.037455547914624576
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.48,
+            "acc_stderr,none": 0.050211673156867795
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695235
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.4019607843137255,
+            "acc_stderr,none": 0.04878608714466996
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.71,
+            "acc_stderr,none": 0.04560480215720684
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5659574468085107,
+            "acc_stderr,none": 0.03240038086792747
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5379310344827586,
+            "acc_stderr,none": 0.04154659671707548
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.4312169312169312,
+            "acc_stderr,none": 0.0255064816981382
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7709677419354839,
+            "acc_stderr,none": 0.023904914311782644
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4433497536945813,
+            "acc_stderr,none": 0.03495334582162933
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3592592592592593,
+            "acc_stderr,none": 0.029252905927251972
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.31125827814569534,
+            "acc_stderr,none": 0.03780445850526733
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.49537037037037035,
+            "acc_stderr,none": 0.03409825519163572
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.5089285714285714,
+            "acc_stderr,none": 0.04745033255489123
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.882874617737003,
+            "acc_stderr,none": 0.005624288190378989,
+            "alias": "boolq"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6772555267874926,
+            "acc_stderr,none": 0.004665704208339031,
+            "acc_norm,none": 0.8599880501892053,
+            "acc_norm_stderr,none": 0.0034629026011362076,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8337542087542088,
+            "acc_stderr,none": 0.00763945790688671,
+            "acc_norm,none": 0.8173400673400674,
+            "acc_norm_stderr,none": 0.00792850371920913,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.7113200322582366,
+            "acc_stderr,none": 0.014928830439186688,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.6040955631399317,
+            "acc_stderr,none": 0.01429122839353659,
+            "acc_norm,none": 0.6245733788395904,
+            "acc_norm_stderr,none": 0.014150631435111728,
+            "alias": "arc_challenge"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF",
+        "revision": "main",
+        "private": false,
+        "params": 42.8,
+        "architectures": "?",
+        "quant_type": "llama.cpp",
+        "precision": "4bit",
+        "model_params": 85.6,
+        "model_size": 42.8,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-01T16:10:42Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "llama_cpp"
+    },
+    "quantization_config": {
+        "quant_method": "llama.cpp",
+        "ftype": "*Q4_0.gguf"
+    },
+    "versions": {
+        "harness|piqa|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|boolq|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:challenge|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1714640508.2427852,
+    "config": {
+        "model": "WrapperGGUFLM",
+        "model_args": "gguf_model=TheBloke/SOLAR-10.7B-Instruct-v1.0-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
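Unlike the AWQ file above, this llama.cpp/GGUF entry stores only "quant_method" and a GGUF "ftype" glob in "quantization_config", and its harness "config.model" is "WrapperGGUFLM" rather than "hf". A hedged sketch of backend-aware handling, using only field names seen in these two files (the function and any local path are illustrative, not part of the dataset):

import json

def describe_quant(path: str) -> str:
    # Summarize quantization settings for either layout seen in this commit:
    # AWQ/GPTQ files carry bits/group_size, llama.cpp files carry an ftype glob.
    with open(path) as f:
        cfg = json.load(f)["quantization_config"]
    if cfg.get("quant_method") == "llama.cpp":
        return f"llama.cpp ({cfg['ftype']})"
    return f"{cfg['quant_method']} {cfg['bits']}-bit, group_size={cfg['group_size']}"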
TheBloke/results_2024-05-07-15-11-38.json
ADDED
@@ -0,0 +1,586 @@
1 |
+
{
|
2 |
+
"config_general": {
|
3 |
+
"lighteval_sha": "1.4",
|
4 |
+
"num_few_shot_default": null,
|
5 |
+
"num_fewshot_seeds": null,
|
6 |
+
"override_batch_size": null,
|
7 |
+
"max_samples": null,
|
8 |
+
"job_id": -1,
|
9 |
+
"start_time": null,
|
10 |
+
"end_time": "2024-05-07-15-11-38",
|
11 |
+
"total_evaluation_time_secondes": "",
|
12 |
+
"model_name": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ",
|
13 |
+
"model_sha": "",
|
14 |
+
"model_dtype": "4bit",
|
15 |
+
"model_size": 5.98,
|
16 |
+
"model_params": 10.57,
|
17 |
+
"quant_type": "GPTQ",
|
18 |
+
"precision": "4bit"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"harness|piqa|0": {
|
22 |
+
"acc,none": 0.8079434167573449,
|
23 |
+
"acc_stderr,none": 0.009190740295126482,
|
24 |
+
"acc_norm,none": 0.8063112078346029,
|
25 |
+
"acc_norm_stderr,none": 0.009220384152336643,
|
26 |
+
"alias": "piqa"
|
27 |
+
},
|
28 |
+
"harness|mmlu|0": {
|
29 |
+
"acc,none": 0.6274747187010398,
|
30 |
+
"acc_stderr,none": 0.003850177736503787,
|
31 |
+
"alias": "mmlu"
|
32 |
+
},
|
33 |
+
"harness|mmlu_humanities|0": {
|
34 |
+
"alias": " - humanities",
|
35 |
+
"acc,none": 0.5804463336875664,
|
36 |
+
"acc_stderr,none": 0.006792090451321374
|
37 |
+
},
|
38 |
+
"harness|mmlu_formal_logic|0": {
|
39 |
+
"alias": " - formal_logic",
|
40 |
+
"acc,none": 0.42063492063492064,
|
41 |
+
"acc_stderr,none": 0.04415438226743744
|
42 |
+
},
|
43 |
+
"harness|mmlu_high_school_european_history|0": {
|
44 |
+
"alias": " - high_school_european_history",
|
45 |
+
"acc,none": 0.7818181818181819,
|
46 |
+
"acc_stderr,none": 0.03225078108306289
|
47 |
+
},
|
48 |
+
"harness|mmlu_high_school_us_history|0": {
|
49 |
+
"alias": " - high_school_us_history",
|
50 |
+
"acc,none": 0.8137254901960784,
|
51 |
+
"acc_stderr,none": 0.027325470966716323
|
52 |
+
},
|
53 |
+
"harness|mmlu_high_school_world_history|0": {
|
54 |
+
"alias": " - high_school_world_history",
|
55 |
+
"acc,none": 0.8059071729957806,
|
56 |
+
"acc_stderr,none": 0.02574490253229091
|
57 |
+
},
|
58 |
+
"harness|mmlu_international_law|0": {
|
59 |
+
"alias": " - international_law",
|
60 |
+
"acc,none": 0.768595041322314,
|
61 |
+
"acc_stderr,none": 0.038498560987940904
|
62 |
+
},
|
63 |
+
"harness|mmlu_jurisprudence|0": {
|
64 |
+
"alias": " - jurisprudence",
|
65 |
+
"acc,none": 0.7777777777777778,
|
66 |
+
"acc_stderr,none": 0.040191074725573483
|
67 |
+
},
|
68 |
+
"harness|mmlu_logical_fallacies|0": {
|
69 |
+
"alias": " - logical_fallacies",
|
70 |
+
"acc,none": 0.7116564417177914,
|
71 |
+
"acc_stderr,none": 0.035590395316173425
|
72 |
+
},
|
73 |
+
"harness|mmlu_moral_disputes|0": {
|
74 |
+
"alias": " - moral_disputes",
|
75 |
+
"acc,none": 0.7138728323699421,
|
76 |
+
"acc_stderr,none": 0.02433214677913412
|
77 |
+
},
|
78 |
+
"harness|mmlu_moral_scenarios|0": {
|
79 |
+
"alias": " - moral_scenarios",
|
80 |
+
"acc,none": 0.3452513966480447,
|
81 |
+
"acc_stderr,none": 0.015901432608930358
|
82 |
+
},
|
83 |
+
"harness|mmlu_philosophy|0": {
|
84 |
+
"alias": " - philosophy",
|
85 |
+
"acc,none": 0.6720257234726688,
|
86 |
+
"acc_stderr,none": 0.02666441088693761
|
87 |
+
},
|
88 |
+
"harness|mmlu_prehistory|0": {
|
89 |
+
"alias": " - prehistory",
|
90 |
+
"acc,none": 0.7438271604938271,
|
91 |
+
"acc_stderr,none": 0.024288533637726095
|
92 |
+
},
|
93 |
+
"harness|mmlu_professional_law|0": {
|
94 |
+
"alias": " - professional_law",
|
95 |
+
"acc,none": 0.4973924380704042,
|
96 |
+
"acc_stderr,none": 0.012770062445433172
|
97 |
+
},
|
98 |
+
"harness|mmlu_world_religions|0": {
|
99 |
+
"alias": " - world_religions",
|
100 |
+
"acc,none": 0.7602339181286549,
|
101 |
+
"acc_stderr,none": 0.03274485211946956
|
102 |
+
},
|
103 |
+
"harness|mmlu_other|0": {
|
104 |
+
"alias": " - other",
|
105 |
+
"acc,none": 0.7122626327647248,
|
106 |
+
"acc_stderr,none": 0.00782546582386488
|
107 |
+
},
|
108 |
+
"harness|mmlu_business_ethics|0": {
|
109 |
+
"alias": " - business_ethics",
|
110 |
+
"acc,none": 0.63,
|
111 |
+
"acc_stderr,none": 0.048523658709390974
|
112 |
+
},
|
113 |
+
"harness|mmlu_clinical_knowledge|0": {
|
114 |
+
"alias": " - clinical_knowledge",
|
115 |
+
"acc,none": 0.7094339622641509,
|
116 |
+
"acc_stderr,none": 0.02794321998933713
|
117 |
+
},
|
118 |
+
"harness|mmlu_college_medicine|0": {
|
119 |
+
"alias": " - college_medicine",
|
120 |
+
"acc,none": 0.6589595375722543,
|
121 |
+
"acc_stderr,none": 0.036146654241808254
|
122 |
+
},
|
123 |
+
"harness|mmlu_global_facts|0": {
|
124 |
+
"alias": " - global_facts",
|
125 |
+
"acc,none": 0.38,
|
126 |
+
"acc_stderr,none": 0.04878317312145633
|
127 |
+
},
|
128 |
+
"harness|mmlu_human_aging|0": {
|
129 |
+
"alias": " - human_aging",
|
130 |
+
"acc,none": 0.6860986547085202,
|
131 |
+
"acc_stderr,none": 0.031146796482972465
|
132 |
+
},
|
133 |
+
"harness|mmlu_management|0": {
|
134 |
+
"alias": " - management",
|
135 |
+
"acc,none": 0.8058252427184466,
|
136 |
+
"acc_stderr,none": 0.039166677628225836
|
137 |
+
},
|
138 |
+
"harness|mmlu_marketing|0": {
|
139 |
+
"alias": " - marketing",
|
140 |
+
"acc,none": 0.8846153846153846,
|
141 |
+
"acc_stderr,none": 0.020930193185179333
|
142 |
+
},
|
143 |
+
"harness|mmlu_medical_genetics|0": {
|
144 |
+
"alias": " - medical_genetics",
|
145 |
+
"acc,none": 0.74,
|
146 |
+
"acc_stderr,none": 0.04408440022768078
|
147 |
+
},
|
148 |
+
"harness|mmlu_miscellaneous|0": {
|
149 |
+
"alias": " - miscellaneous",
|
150 |
+
"acc,none": 0.8250319284802043,
|
151 |
+
"acc_stderr,none": 0.01358661921990334
|
152 |
+
},
|
153 |
+
"harness|mmlu_nutrition|0": {
|
154 |
+
"alias": " - nutrition",
|
155 |
+
"acc,none": 0.696078431372549,
|
156 |
+
"acc_stderr,none": 0.026336613469046647
|
157 |
+
},
|
158 |
+
"harness|mmlu_professional_accounting|0": {
|
159 |
+
"alias": " - professional_accounting",
|
160 |
+
"acc,none": 0.5035460992907801,
|
161 |
+
"acc_stderr,none": 0.02982674915328092
|
162 |
+
},
|
163 |
+
"harness|mmlu_professional_medicine|0": {
|
164 |
+
"alias": " - professional_medicine",
|
165 |
+
"acc,none": 0.7463235294117647,
|
166 |
+
"acc_stderr,none": 0.02643132987078952
|
167 |
+
},
|
168 |
+
"harness|mmlu_virology|0": {
|
169 |
+
"alias": " - virology",
|
170 |
+
"acc,none": 0.536144578313253,
|
171 |
+
"acc_stderr,none": 0.03882310850890593
|
172 |
+
},
|
173 |
+
"harness|mmlu_social_sciences|0": {
|
174 |
+
"alias": " - social_sciences",
|
175 |
+
"acc,none": 0.7237569060773481,
|
176 |
+
"acc_stderr,none": 0.007880951300681182
|
177 |
+
},
|
178 |
+
"harness|mmlu_econometrics|0": {
|
179 |
+
"alias": " - econometrics",
|
180 |
+
"acc,none": 0.49122807017543857,
|
181 |
+
"acc_stderr,none": 0.04702880432049615
|
182 |
+
},
|
183 |
+
"harness|mmlu_high_school_geography|0": {
|
184 |
+
"alias": " - high_school_geography",
|
185 |
+
"acc,none": 0.803030303030303,
|
186 |
+
"acc_stderr,none": 0.02833560973246336
|
187 |
+
},
|
188 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
189 |
+
"alias": " - high_school_government_and_politics",
|
190 |
+
"acc,none": 0.8756476683937824,
|
191 |
+
"acc_stderr,none": 0.02381447708659355
|
192 |
+
},
|
193 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
194 |
+
"alias": " - high_school_macroeconomics",
|
195 |
+
"acc,none": 0.6282051282051282,
+            "acc_stderr,none": 0.024503472557110932
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6680672268907563,
+            "acc_stderr,none": 0.030588697013783642
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8201834862385321,
+            "acc_stderr,none": 0.016465345467391545
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7175572519083969,
+            "acc_stderr,none": 0.03948406125768361
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6535947712418301,
+            "acc_stderr,none": 0.019249785691717203
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6454545454545455,
+            "acc_stderr,none": 0.04582004841505417
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7061224489795919,
+            "acc_stderr,none": 0.029162738410249762
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8308457711442786,
+            "acc_stderr,none": 0.026508590656233257
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.87,
+            "acc_stderr,none": 0.03379976689896309
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.520139549635268,
+            "acc_stderr,none": 0.008524961863333031
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695235
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6148148148148148,
+            "acc_stderr,none": 0.042039210401562783
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.7105263157894737,
+            "acc_stderr,none": 0.036906779861372814
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7430555555555556,
+            "acc_stderr,none": 0.03653946969442099
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.41,
+            "acc_stderr,none": 0.049431107042371025
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.49,
+            "acc_stderr,none": 0.05024183937956911
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.35294117647058826,
+            "acc_stderr,none": 0.04755129616062946
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.68,
+            "acc_stderr,none": 0.046882617226215034
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5872340425531914,
+            "acc_stderr,none": 0.03218471141400351
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5379310344827586,
+            "acc_stderr,none": 0.04154659671707548
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.41005291005291006,
+            "acc_stderr,none": 0.025331202438944437
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7677419354838709,
+            "acc_stderr,none": 0.02402225613030824
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.4630541871921182,
+            "acc_stderr,none": 0.035083705204426656
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.65,
+            "acc_stderr,none": 0.047937248544110196
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3333333333333333,
+            "acc_stderr,none": 0.028742040903948482
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.3443708609271523,
+            "acc_stderr,none": 0.03879687024073327
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.5416666666666666,
+            "acc_stderr,none": 0.03398110890294636
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.48214285714285715,
+            "acc_stderr,none": 0.047427623612430116
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.1608315965568927,
+            "perplexity_stderr,none": 0.07289151576789415,
+            "acc,none": 0.7294779739957307,
+            "acc_stderr,none": 0.006188985712381461,
+            "alias": "lambada_openai"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.7074062163257564,
+            "acc_stderr,none": 0.015118268786707145,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.352,
+            "acc_stderr,none": 0.021380042385946044,
+            "acc_norm,none": 0.478,
+            "acc_norm_stderr,none": 0.022361396739207867,
+            "alias": "openbookqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8788990825688073,
+            "acc_stderr,none": 0.005706052483368355,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5618115055079559,
+            "acc_stderr,none": 0.01736923616440442,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.676458872734515,
+            "acc_stderr,none": 0.004668710689192406,
+            "acc_norm,none": 0.858195578570006,
+            "acc_norm_stderr,none": 0.003481364840771094,
+            "alias": "hellaswag"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.6092150170648464,
+            "acc_stderr,none": 0.014258563880513778,
+            "acc_norm,none": 0.6322525597269625,
+            "acc_norm_stderr,none": 0.014090995618168478,
+            "alias": "arc_challenge"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8333333333333334,
+            "acc_stderr,none": 0.007647191129018639,
+            "acc_norm,none": 0.8080808080808081,
+            "acc_norm_stderr,none": 0.00808080808080797,
+            "alias": "arc_easy"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7537490134175217,
+            "acc_stderr,none": 0.012108365307437509,
+            "alias": "winogrande"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ",
+        "revision": "main",
+        "private": false,
+        "params": 6.668,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "GPTQ",
+        "precision": "4bit",
+        "model_params": 13.336,
+        "model_size": 6.668,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-07T04:22:25Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "damp_percent": 0.1,
+        "desc_act": true,
+        "sym": true,
+        "true_sequential": true,
+        "model_name_or_path": null,
+        "model_file_base_name": "model",
+        "quant_method": "gptq"
+    },
+    "versions": {
+        "harness|piqa|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|winogrande|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715055897.2560651,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=TheBloke/SOLAR-10.7B-Instruct-v1.0-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
TheBloke/results_2024-05-07-21-25-07.json
ADDED
@@ -0,0 +1,579 @@
+{
+    "config_general": {
+        "lighteval_sha": "no",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-07-21-25-07",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/phi-2-GGUF",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 1.60065536,
+        "model_params": 2.77968384,
+        "quant_type": "llama.cpp",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|hellaswag|0": {
+            "acc,none": 0.49790878311093406,
+            "acc_stderr,none": 0.004989737768749929,
+            "acc_norm,none": 0.6547500497908784,
+            "acc_norm_stderr,none": 0.004744780201276645,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.4292929292929293,
+            "acc_stderr,none": 0.010156678075911101,
+            "acc_norm,none": 0.4385521885521885,
+            "acc_norm_stderr,none": 0.010182010275471116,
+            "alias": "arc_easy"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.3782874617737003,
+            "acc_stderr,none": 0.008482001133931,
+            "alias": "boolq"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.22945449366187154,
+            "acc_stderr,none": 0.0035426239458926224,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.24208289054197663,
+            "acc_stderr,none": 0.0062426684031394305
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.2857142857142857,
+            "acc_stderr,none": 0.04040610178208841
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.21818181818181817,
+            "acc_stderr,none": 0.03225078108306289
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.03039153369274154
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.270042194092827,
+            "acc_stderr,none": 0.028900721906293426
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.2396694214876033,
+            "acc_stderr,none": 0.03896878985070417
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.25925925925925924,
+            "acc_stderr,none": 0.04236511258094634
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.22085889570552147,
+            "acc_stderr,none": 0.032591773927421776
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.24855491329479767,
+            "acc_stderr,none": 0.023267528432100174
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23798882681564246,
+            "acc_stderr,none": 0.014242630070574885
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.1864951768488746,
+            "acc_stderr,none": 0.02212243977248077
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.21604938271604937,
+            "acc_stderr,none": 0.022899162918445813
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.2457627118644068,
+            "acc_stderr,none": 0.01099615663514269
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.3216374269005848,
+            "acc_stderr,none": 0.03582529442573122
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.23978113936272932,
+            "acc_stderr,none": 0.00764225029165751
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.21509433962264152,
+            "acc_stderr,none": 0.025288394502891377
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.20809248554913296,
+            "acc_stderr,none": 0.030952890217749884
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.18,
+            "acc_stderr,none": 0.038612291966536955
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.31390134529147984,
+            "acc_stderr,none": 0.03114679648297246
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.17475728155339806,
+            "acc_stderr,none": 0.03760178006026621
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.2905982905982906,
+            "acc_stderr,none": 0.029745048572674057
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.23754789272030652,
+            "acc_stderr,none": 0.015218733046150195
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.22549019607843138,
+            "acc_stderr,none": 0.023929155517351284
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.23404255319148937,
+            "acc_stderr,none": 0.025257861359432407
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.18382352941176472,
+            "acc_stderr,none": 0.02352924218519311
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.28313253012048195,
+            "acc_stderr,none": 0.03507295431370518
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.2170945726356841,
+            "acc_stderr,none": 0.007428786285788534
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.23684210526315788,
+            "acc_stderr,none": 0.039994238792813386
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.17676767676767677,
+            "acc_stderr,none": 0.027178752639044915
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.19689119170984457,
+            "acc_stderr,none": 0.02869787397186069
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.20256410256410257,
+            "acc_stderr,none": 0.020377660970371397
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.21008403361344538,
+            "acc_stderr,none": 0.026461398717471874
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.1926605504587156,
+            "acc_stderr,none": 0.016909276884936073
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.2595419847328244,
+            "acc_stderr,none": 0.03844876139785271
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.01751781884501444
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.21818181818181817,
+            "acc_stderr,none": 0.03955932861795833
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.18775510204081633,
+            "acc_stderr,none": 0.02500025603954622
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.24378109452736318,
+            "acc_stderr,none": 0.030360490154014652
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.045126085985421276
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.21249603552172533,
+            "acc_stderr,none": 0.007271218700485502
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.22,
+            "acc_stderr,none": 0.04163331998932269
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.18518518518518517,
+            "acc_stderr,none": 0.03355677216313142
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.17763157894736842,
+            "acc_stderr,none": 0.031103182383123398
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.2569444444444444,
+            "acc_stderr,none": 0.03653946969442099
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.2,
+            "acc_stderr,none": 0.040201512610368445
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.26,
+            "acc_stderr,none": 0.044084400227680794
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.21,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.21568627450980393,
+            "acc_stderr,none": 0.040925639582376556
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.045126085985421276
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.26382978723404255,
+            "acc_stderr,none": 0.02880998985410298
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.2413793103448276,
+            "acc_stderr,none": 0.03565998174135302
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.20899470899470898,
+            "acc_stderr,none": 0.020940481565334835
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.1774193548387097,
+            "acc_stderr,none": 0.021732540689329265
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.15270935960591134,
+            "acc_stderr,none": 0.025308904539380624
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.25,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.2111111111111111,
+            "acc_stderr,none": 0.02488211685765508
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.1986754966887417,
+            "acc_stderr,none": 0.032578473844367746
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.1527777777777778,
+            "acc_stderr,none": 0.02453632602613422
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.3125,
+            "acc_stderr,none": 0.043994650575715215
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 1.0694247819579417,
+            "perplexity_stderr,none": 0.01328762392218948,
+            "acc,none": 0.7234620609353775,
+            "acc_stderr,none": 0.006231567654090111,
+            "alias": "lambada_openai"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.3583617747440273,
+            "acc_stderr,none": 0.014012883334859864,
+            "acc_norm,none": 0.3856655290102389,
+            "acc_norm_stderr,none": 0.014224250973257172,
+            "alias": "arc_challenge"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7595212187159956,
+            "acc_stderr,none": 0.009971345364651074,
+            "acc_norm,none": 0.7622415669205659,
+            "acc_norm_stderr,none": 0.009932525779525483,
+            "alias": "piqa"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.274,
+            "acc_stderr,none": 0.01996610354027947,
+            "acc_norm,none": 0.318,
+            "acc_norm_stderr,none": 0.020847571620814014,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.4664689437320238,
+            "acc_stderr,none": 0.014567932598063655,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7411207576953434,
+            "acc_stderr,none": 0.012310515810993376,
+            "alias": "winogrande"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3047735618115055,
+            "acc_stderr,none": 0.016114124156882424,
+            "alias": "truthfulqa_mc1"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/phi-2-GGUF",
+        "revision": "main",
+        "private": false,
+        "params": 0.0,
+        "architectures": "?",
+        "quant_type": "llama.cpp",
+        "precision": "4bit",
+        "model_params": 0,
+        "model_size": 0.0,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "cpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-07T03:59:13Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "llama_cpp"
+    },
+    "quantization_config": {
+        "quant_method": "llama.cpp",
+        "ftype": "*Q4_0.gguf"
+    },
+    "versions": {
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715074986.1140783,
+    "config": {
+        "model": "WrapperGGUFLM",
+        "model_args": "gguf_model=TheBloke/phi-2-GGUF,ftype=*Q4_0.gguf,dtype=float16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
TheBloke/results_2024-05-11-15-26-38.json
ADDED
@@ -0,0 +1,582 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-11-15-26-38",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/Llama-2-13B-chat-AWQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 7.25,
+        "model_params": 12.79,
+        "quant_type": "AWQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|arc:challenge|0": {
+            "acc,none": 0.4564846416382253,
+            "acc_stderr,none": 0.014555949760496439,
+            "acc_norm,none": 0.5025597269624573,
+            "acc_norm_stderr,none": 0.014611199329843784,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 2.943540165174782,
+            "perplexity_stderr,none": 0.07048825902928478,
+            "acc,none": 0.727731418591112,
+            "acc_stderr,none": 0.006201495026535792,
+            "alias": "lambada_openai"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8027522935779816,
+            "acc_stderr,none": 0.0069596804270573975,
+            "alias": "boolq"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.43653026778274867,
+            "acc_stderr,none": 0.01578076424843842,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6022704640509858,
+            "acc_stderr,none": 0.004884287515461496,
+            "acc_norm,none": 0.7892850029874527,
+            "acc_norm_stderr,none": 0.0040698290284162586,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7798821548821548,
+            "acc_stderr,none": 0.008501788774716771,
+            "acc_norm,none": 0.7436868686868687,
+            "acc_norm_stderr,none": 0.008958775997918346,
+            "alias": "arc_easy"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.779107725788901,
+            "acc_stderr,none": 0.009679088048842217,
+            "acc_norm,none": 0.7927094668117519,
+            "acc_norm_stderr,none": 0.009457844699952377,
+            "alias": "piqa"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.27906976744186046,
+            "acc_stderr,none": 0.01570210709062791,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.352,
+            "acc_stderr,none": 0.02138004238594605,
+            "acc_norm,none": 0.44,
+            "acc_norm_stderr,none": 0.022221331534143057,
+            "alias": "openbookqa"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7111286503551697,
+            "acc_stderr,none": 0.012738241271018443,
+            "alias": "winogrande"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5237857855006409,
+            "acc_stderr,none": 0.003985973730899309,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.485653560042508,
+            "acc_stderr,none": 0.006840270638361877
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.24603174603174602,
+            "acc_stderr,none": 0.038522733649243156
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6666666666666666,
+            "acc_stderr,none": 0.0368105086916155
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7107843137254902,
+            "acc_stderr,none": 0.03182231867647553
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7468354430379747,
+            "acc_stderr,none": 0.028304657943035282
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.6859504132231405,
+            "acc_stderr,none": 0.042369647530410184
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6388888888888888,
+            "acc_stderr,none": 0.04643454608906275
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6932515337423313,
+            "acc_stderr,none": 0.036230899157241474
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5924855491329479,
+            "acc_stderr,none": 0.0264545781469315
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.24916201117318434,
+            "acc_stderr,none": 0.014465893829859926
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6109324758842444,
+            "acc_stderr,none": 0.027690337536485376
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6018518518518519,
+            "acc_stderr,none": 0.02723741509459247
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.39895697522816165,
+            "acc_stderr,none": 0.012506757655293674
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7719298245614035,
+            "acc_stderr,none": 0.03218093795602357
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6057289990344383,
+            "acc_stderr,none": 0.008413631260310068
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.52,
+            "acc_stderr,none": 0.050211673156867795
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.5622641509433962,
+            "acc_stderr,none": 0.03053333843046752
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.4277456647398844,
+            "acc_stderr,none": 0.03772446857518027
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.31,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6143497757847534,
+            "acc_stderr,none": 0.03266842214289202
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7281553398058253,
+            "acc_stderr,none": 0.044052680241409216
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7991452991452992,
+            "acc_stderr,none": 0.026246772946890477
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.04878317312145632
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7637292464878672,
+            "acc_stderr,none": 0.015190473717037495
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6111111111111112,
+            "acc_stderr,none": 0.02791405551046802
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.425531914893617,
+            "acc_stderr,none": 0.029494827600144366
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.4852941176470588,
+            "acc_stderr,none": 0.03035969707904611
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.46987951807228917,
+            "acc_stderr,none": 0.03885425420866767
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.607084822879428,
+            "acc_stderr,none": 0.008540524382266563
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2719298245614035,
+            "acc_stderr,none": 0.04185774424022056
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.6616161616161617,
+            "acc_stderr,none": 0.03371124142626302
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7461139896373057,
+            "acc_stderr,none": 0.03141024780565319
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.47435897435897434,
+            "acc_stderr,none": 0.025317649726448663
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.5,
+            "acc_stderr,none": 0.032478490123081544
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7174311926605504,
+            "acc_stderr,none": 0.01930424349770715
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6412213740458015,
+            "acc_stderr,none": 0.04206739313864908
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5326797385620915,
+            "acc_stderr,none": 0.020184583359102202
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6272727272727273,
+            "acc_stderr,none": 0.04631381319425464
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6285714285714286,
+            "acc_stderr,none": 0.030932858792789845
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7562189054726368,
+            "acc_stderr,none": 0.03036049015401466
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.82,
+            "acc_stderr,none": 0.03861229196653694
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4186489058039962,
+            "acc_stderr,none": 0.008528121437144745
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.28,
+            "acc_stderr,none": 0.04512608598542129
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5185185185185185,
+            "acc_stderr,none": 0.04316378599511324
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.5592105263157895,
+            "acc_stderr,none": 0.04040311062490436
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.5277777777777778,
+            "acc_stderr,none": 0.04174752578923185
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.046882617226215034
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.47,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.31,
+            "acc_stderr,none": 0.04648231987117316
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.27450980392156865,
+            "acc_stderr,none": 0.04440521906179326
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.048783173121456316
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.4085106382978723,
+            "acc_stderr,none": 0.03213418026701576
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5310344827586206,
+            "acc_stderr,none": 0.04158632762097828
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.31216931216931215,
+            "acc_stderr,none": 0.023865206836972585
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.6258064516129033,
+            "acc_stderr,none": 0.027528904299845704
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.43842364532019706,
+            "acc_stderr,none": 0.03491207857486519
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.2740740740740741,
+            "acc_stderr,none": 0.027195934804085626
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.31788079470198677,
+            "acc_stderr,none": 0.038020397601079024
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.3194444444444444,
+            "acc_stderr,none": 0.03179876342176851
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.3392857142857143,
+            "acc_stderr,none": 0.04493949068613541
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/Llama-2-13B-chat-AWQ",
+        "revision": "main",
+        "private": false,
+        "params": 7.25,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "AWQ",
+        "precision": "4bit",
+        "model_params": 12.79,
+        "model_size": 7.25,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-10T07:46:17Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "quant_method": "awq",
+        "zero_point": true,
+        "group_size": 128,
+        "bits": 4,
+        "version": "gemm"
+    },
+    "versions": {
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|winogrande|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
|
562 |
+
"mmlu_virology": 0,
|
563 |
+
"mmlu_world_religions": 0,
|
564 |
+
"openbookqa": 0,
|
565 |
+
"piqa": 0,
|
566 |
+
"truthfulqa_mc1": 0,
|
567 |
+
"truthfulqa_mc2": 0,
|
568 |
+
"winogrande": 0
|
569 |
+
},
|
570 |
+
"date": 1715337875.2237763,
|
571 |
+
"config": {
|
572 |
+
"model": "hf",
|
573 |
+
"model_args": "pretrained=TheBloke/Llama-2-13B-chat-AWQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
574 |
+
"batch_size": 1,
|
575 |
+
"batch_sizes": [],
|
576 |
+
"device": "cuda",
|
577 |
+
"use_cache": null,
|
578 |
+
"limit": null,
|
579 |
+
"bootstrap_iters": 100000,
|
580 |
+
"gen_kwargs": null
|
581 |
+
}
|
582 |
+
}
|
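Note on reproducing a file like the one above: its "config" block records the lm-evaluation-harness invocation used for the run. The following is a minimal sketch of an equivalent call, not the leaderboard's own runner; the task list is read off the "n-shot" section, and the `_commit_hash=main` field in the recorded `model_args` is leaderboard bookkeeping rather than a harness argument, so it is dropped here.

import lm_eval  # EleutherAI lm-evaluation-harness (v0.4-style API)

results = lm_eval.simple_evaluate(
    model="hf",  # matches "model": "hf" in the recorded config
    model_args="pretrained=TheBloke/Llama-2-13B-chat-AWQ,trust_remote_code=True,dtype=float16",
    tasks=["arc_challenge", "arc_easy", "boolq", "hellaswag",
           "lambada_openai", "mmlu", "openbookqa", "piqa",
           "truthfulqa_mc1", "truthfulqa_mc2", "winogrande"],
    num_fewshot=0,   # every "n-shot" entry above is 0
    batch_size=1,    # "batch_size": 1
    device="cuda",   # "device": "cuda"
)
# The harness keys its output by plain task name; the "harness|...|0" form
# seen in these files is added by the leaderboard's post-processing.
print(results["results"]["mmlu"])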
TheBloke/results_2024-05-11-21-17-09.json
ADDED
@@ -0,0 +1,586 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-11-21-17-09",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 4.16,
+        "model_params": 7.04,
+        "quant_type": "GPTQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.5067319461444308,
+            "acc_stderr,none": 0.017501914492655382,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.7324388318863457,
+            "acc_stderr,none": 0.012441718456893009,
+            "alias": "winogrande"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.6537542322246565,
+            "acc_stderr,none": 0.004748003276466214,
+            "acc_norm,none": 0.8312089225253934,
+            "acc_norm_stderr,none": 0.003738017734037975,
+            "alias": "hellaswag"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8431192660550458,
+            "acc_stderr,none": 0.0063609481079962785,
+            "alias": "boolq"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.328,
+            "acc_stderr,none": 0.021017027165175495,
+            "acc_norm,none": 0.438,
+            "acc_norm_stderr,none": 0.022210326363977413,
+            "alias": "openbookqa"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5804016521862982,
+            "acc_stderr,none": 0.003961191476321839,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5341126461211477,
+            "acc_stderr,none": 0.0068607425916126594
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.3492063492063492,
+            "acc_stderr,none": 0.04263906892795133
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7151515151515152,
+            "acc_stderr,none": 0.03524390844511781
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7745098039215687,
+            "acc_stderr,none": 0.02933116229425172
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7805907172995781,
+            "acc_stderr,none": 0.026939106581553945
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7355371900826446,
+            "acc_stderr,none": 0.040261875275912046
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7129629629629629,
+            "acc_stderr,none": 0.043733130409147614
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7484662576687117,
+            "acc_stderr,none": 0.03408997886857529
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6358381502890174,
+            "acc_stderr,none": 0.025906632631016113
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.34301675977653634,
+            "acc_stderr,none": 0.015876912673057752
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6302250803858521,
+            "acc_stderr,none": 0.027417996705630995
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.6697530864197531,
+            "acc_stderr,none": 0.026168298456732842
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.4172099087353325,
+            "acc_stderr,none": 0.012593959992906424
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.8187134502923976,
+            "acc_stderr,none": 0.029547741687640038
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6462825877051819,
+            "acc_stderr,none": 0.008254745930389685
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.6830188679245283,
+            "acc_stderr,none": 0.02863723563980089
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.5722543352601156,
+            "acc_stderr,none": 0.03772446857518027
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.04725815626252606
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6053811659192825,
+            "acc_stderr,none": 0.03280400504755291
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.7475728155339806,
+            "acc_stderr,none": 0.04301250399690878
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8589743589743589,
+            "acc_stderr,none": 0.022801382534597524
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.62,
+            "acc_stderr,none": 0.04878317312145632
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7701149425287356,
+            "acc_stderr,none": 0.015046301846691807
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.6274509803921569,
+            "acc_stderr,none": 0.027684181883302895
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.44680851063829785,
+            "acc_stderr,none": 0.029658235097666907
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.625,
+            "acc_stderr,none": 0.029408372932278746
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.42771084337349397,
+            "acc_stderr,none": 0.038515976837185335
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.677608059798505,
+            "acc_stderr,none": 0.008221694733283947
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.39473684210526316,
+            "acc_stderr,none": 0.04598188057816542
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7424242424242424,
+            "acc_stderr,none": 0.031156269519646843
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7979274611398963,
+            "acc_stderr,none": 0.02897908979429673
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.558974358974359,
+            "acc_stderr,none": 0.02517404838400075
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6386554621848739,
+            "acc_stderr,none": 0.031204691225150016
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7743119266055046,
+            "acc_stderr,none": 0.017923087667803057
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7022900763358778,
+            "acc_stderr,none": 0.040103589424622034
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5915032679738562,
+            "acc_stderr,none": 0.019886221037501862
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6636363636363637,
+            "acc_stderr,none": 0.04525393596302505
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7061224489795919,
+            "acc_stderr,none": 0.029162738410249772
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8208955223880597,
+            "acc_stderr,none": 0.027113286753111837
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.82,
+            "acc_stderr,none": 0.038612291966536934
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.48969235648588644,
+            "acc_stderr,none": 0.008709716985915076
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.047609522856952365
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.5777777777777777,
+            "acc_stderr,none": 0.04266763404099582
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6052631578947368,
+            "acc_stderr,none": 0.039777499346220734
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.6388888888888888,
+            "acc_stderr,none": 0.04016660030451233
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.4,
+            "acc_stderr,none": 0.04923659639173309
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.54,
+            "acc_stderr,none": 0.05009082659620332
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.36,
+            "acc_stderr,none": 0.048241815132442176
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.43137254901960786,
+            "acc_stderr,none": 0.04928099597287534
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.66,
+            "acc_stderr,none": 0.04760952285695237
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.4978723404255319,
+            "acc_stderr,none": 0.03268572658667492
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5586206896551724,
+            "acc_stderr,none": 0.04137931034482758
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.3994708994708995,
+            "acc_stderr,none": 0.02522545028406788
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.6580645161290323,
+            "acc_stderr,none": 0.026985289576552746
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.49261083743842365,
+            "acc_stderr,none": 0.03517603540361008
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.34444444444444444,
+            "acc_stderr,none": 0.02897264888484427
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.3708609271523179,
+            "acc_stderr,none": 0.03943966699183629
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.4212962962962963,
+            "acc_stderr,none": 0.03367462138896078
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.45535714285714285,
+            "acc_stderr,none": 0.04726835553719099
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.6730758600725846,
+            "acc_stderr,none": 0.015085602828695083,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5477815699658704,
+            "acc_stderr,none": 0.014544519880633827,
+            "acc_norm,none": 0.560580204778157,
+            "acc_norm_stderr,none": 0.014503747823580129,
+            "alias": "arc_challenge"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8152356902356902,
+            "acc_stderr,none": 0.007963772171570785,
+            "acc_norm,none": 0.7609427609427609,
+            "acc_norm_stderr,none": 0.008751754723580422,
+            "alias": "arc_easy"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7970620239390642,
+            "acc_stderr,none": 0.009383679003767338,
+            "acc_norm,none": 0.8025027203482046,
+            "acc_norm_stderr,none": 0.009288578108523262,
+            "alias": "piqa"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.542202154991171,
+            "perplexity_stderr,none": 0.07532470166471553,
+            "acc,none": 0.7110421113914225,
+            "acc_stderr,none": 0.00631505317377688,
+            "alias": "lambada_openai"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ",
+        "revision": "main",
+        "private": false,
+        "params": 4.16,
+        "architectures": "MistralForCausalLM",
+        "quant_type": "GPTQ",
+        "precision": "4bit",
+        "model_params": 7.04,
+        "model_size": 4.16,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-10T05:47:33Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "damp_percent": 0.1,
+        "desc_act": true,
+        "sym": true,
+        "true_sequential": true,
+        "model_name_or_path": null,
+        "model_file_base_name": "model",
+        "quant_method": "gptq"
+    },
+    "versions": {
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|lambada:openai|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715428953.698051,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=TheBloke/Mistral-7B-Instruct-v0.2-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
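Note on the "quantization_config" recorded above: it is the GPTQ metadata shipped inside the model repo, so the checkpoint loads without re-specifying it. A minimal sketch, assuming the transformers GPTQ integration (optimum/auto-gptq installed); the GPTQConfig object at the end only illustrates how the recorded fields map onto transformers' config and is not needed for loading.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GPTQConfig

model_id = "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ"

# The repo already carries the quantization_config, so a plain load suffices.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",          # "hardware": "gpu"
    torch_dtype=torch.float16,  # "compute_dtype": "float16"
)

# Illustrative only: the recorded fields expressed as a GPTQConfig.
cfg = GPTQConfig(
    bits=4,
    group_size=128,
    damp_percent=0.1,
    desc_act=True,
    sym=True,
    true_sequential=True,
)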
TheBloke/results_2024-05-12-19-26-44.json
ADDED
@@ -0,0 +1,586 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-12-19-26-44",
+        "total_evaluation_time_secondes": "",
+        "model_name": "TheBloke/Llama-2-13B-chat-GPTQ",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 7.26,
+        "model_params": 12.8,
+        "quant_type": "GPTQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|mmlu|0": {
+            "acc,none": 0.5139581256231306,
+            "acc_stderr,none": 0.004007227446679919,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.4767268862911796,
+            "acc_stderr,none": 0.006863000718421932
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.2619047619047619,
+            "acc_stderr,none": 0.03932537680392871
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6303030303030303,
+            "acc_stderr,none": 0.03769430314512567
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7009803921568627,
+            "acc_stderr,none": 0.03213325717373616
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.7172995780590717,
+            "acc_stderr,none": 0.029312814153955914
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.6776859504132231,
+            "acc_stderr,none": 0.04266416363352168
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6851851851851852,
+            "acc_stderr,none": 0.04489931073591312
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.6809815950920245,
+            "acc_stderr,none": 0.03661997551073836
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5895953757225434,
+            "acc_stderr,none": 0.02648339204209818
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.24916201117318434,
+            "acc_stderr,none": 0.01446589382985993
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.6077170418006431,
+            "acc_stderr,none": 0.027731258647011994
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.5864197530864198,
+            "acc_stderr,none": 0.027402042040269955
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.38722294654498046,
+            "acc_stderr,none": 0.012441155326854933
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7368421052631579,
+            "acc_stderr,none": 0.03377310252209206
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.5880270357257805,
+            "acc_stderr,none": 0.008506546676113863
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.49,
+            "acc_stderr,none": 0.05024183937956911
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.5509433962264151,
+            "acc_stderr,none": 0.030612730713641095
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.43352601156069365,
+            "acc_stderr,none": 0.03778621079092055
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.04688261722621504
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6143497757847534,
+            "acc_stderr,none": 0.03266842214289202
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.6796116504854369,
+            "acc_stderr,none": 0.04620284082280041
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7649572649572649,
+            "acc_stderr,none": 0.02777883590493543
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001976
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7458492975734355,
+            "acc_stderr,none": 0.015569254692045778
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5718954248366013,
+            "acc_stderr,none": 0.028332397483664274
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.3900709219858156,
+            "acc_stderr,none": 0.02909767559946393
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.48161764705882354,
+            "acc_stderr,none": 0.030352303395351964
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.46987951807228917,
+            "acc_stderr,none": 0.03885425420866767
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.5976600584985375,
+            "acc_stderr,none": 0.008593939236131217
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2719298245614035,
+            "acc_stderr,none": 0.04185774424022056
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.6464646464646465,
+            "acc_stderr,none": 0.03406086723547153
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7305699481865285,
+            "acc_stderr,none": 0.03201867122877793
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.47435897435897434,
+            "acc_stderr,none": 0.02531764972644866
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.4831932773109244,
+            "acc_stderr,none": 0.03246013680375308
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.7009174311926606,
+            "acc_stderr,none": 0.019630417285415182
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6106870229007634,
+            "acc_stderr,none": 0.042764865428145914
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.5261437908496732,
+            "acc_stderr,none": 0.020200164564804588
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.5636363636363636,
+            "acc_stderr,none": 0.04750185058907297
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.673469387755102,
+            "acc_stderr,none": 0.030021056238440307
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7412935323383084,
+            "acc_stderr,none": 0.030965903123573026
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.79,
+            "acc_stderr,none": 0.040936018074033256
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4148430066603235,
+            "acc_stderr,none": 0.008544388493151839
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.04688261722621504
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.48148148148148145,
+            "acc_stderr,none": 0.043163785995113245
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.5657894736842105,
+            "acc_stderr,none": 0.04033565667848319
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.5416666666666666,
+            "acc_stderr,none": 0.04166666666666666
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.32,
+            "acc_stderr,none": 0.046882617226215034
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.44,
+            "acc_stderr,none": 0.04988876515698589
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.3,
+            "acc_stderr,none": 0.046056618647183814
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.3137254901960784,
+            "acc_stderr,none": 0.04617034827006718
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.65,
+            "acc_stderr,none": 0.0479372485441102
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.39574468085106385,
+            "acc_stderr,none": 0.031967586978353627
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.5103448275862069,
+            "acc_stderr,none": 0.04165774775728763
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.31746031746031744,
+            "acc_stderr,none": 0.02397386199899208
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.5903225806451613,
+            "acc_stderr,none": 0.027976054915347364
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.43842364532019706,
+            "acc_stderr,none": 0.03491207857486519
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.58,
+            "acc_stderr,none": 0.049604496374885836
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.25555555555555554,
+            "acc_stderr,none": 0.026593939101844082
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.304635761589404,
+            "acc_stderr,none": 0.03757949922943342
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.33796296296296297,
+            "acc_stderr,none": 0.03225941352631295
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.3482142857142857,
+            "acc_stderr,none": 0.04521829902833585
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.26805385556915545,
+            "acc_stderr,none": 0.015506204722834547,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.4616040955631399,
+            "acc_stderr,none": 0.014568245550296358,
+            "acc_norm,none": 0.4863481228668942,
+            "acc_norm_stderr,none": 0.014605943429860947,
+            "alias": "arc_challenge"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.41768405068814446,
+            "acc_stderr,none": 0.015583992889631661,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.7714646464646465,
+            "acc_stderr,none": 0.008615944722488472,
+            "acc_norm,none": 0.7386363636363636,
+            "acc_norm_stderr,none": 0.00901583836660819,
+            "alias": "arc_easy"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.01529524411286,
+            "perplexity_stderr,none": 0.07297248309564458,
+            "acc,none": 0.7273432951678633,
+            "acc_stderr,none": 0.0062042584889067335,
+            "alias": "lambada_openai"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8119266055045872,
+            "acc_stderr,none": 0.006834623690939715,
+            "alias": "boolq"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.6977111286503551,
+            "acc_stderr,none": 0.012907200361627541,
+            "alias": "winogrande"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5992830113523202,
+            "acc_stderr,none": 0.004890422457747264,
+            "acc_norm,none": 0.7880900219079865,
+            "acc_norm_stderr,none": 0.004078262107595482,
+            "alias": "hellaswag"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7780195865070729,
+            "acc_stderr,none": 0.009696120744661996,
+            "acc_norm,none": 0.7905331882480957,
+            "acc_norm_stderr,none": 0.009494302979819808,
+            "alias": "piqa"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.342,
+            "acc_stderr,none": 0.02123614719989926,
+            "acc_norm,none": 0.426,
+            "acc_norm_stderr,none": 0.022136577335085637,
+            "alias": "openbookqa"
+        }
+    },
+    "task_info": {
+        "model": "TheBloke/Llama-2-13B-chat-GPTQ",
+        "revision": "main",
+        "private": false,
+        "params": 7.26,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "GPTQ",
+        "precision": "4bit",
+        "model_params": 12.8,
+        "model_size": 7.26,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Waiting",
+        "submitted_time": "2024-05-10T07:50:09Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "group_size": 128,
+        "damp_percent": 0.01,
+        "desc_act": false,
+        "sym": true,
+        "true_sequential": true,
+        "model_name_or_path": null,
+        "model_file_base_name": "model",
+        "quant_method": "gptq"
+    },
+    "versions": {
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|lambada:openai|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|winogrande|0": 1.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|openbookqa|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715502446.1674964,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=TheBloke/Llama-2-13B-chat-GPTQ,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
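Note on consuming these result files: every file in this commit shares the schema shown above ("results" maps a "harness|task|n" key to a metrics dict whose "alias" starts with " - " for MMLU sub-categories). A minimal sketch that prints the zero-shot accuracy of each top-level task from one file added here:

import json

# Path is one of the files added in this commit.
with open("TheBloke/results_2024-05-12-19-26-44.json") as f:
    data = json.load(f)

for task, metrics in data["results"].items():
    if metrics.get("alias", "").startswith(" - "):
        continue  # skip MMLU sub-category rows
    acc = metrics.get("acc,none")
    if acc is not None:
        stderr = metrics.get("acc_stderr,none", 0.0)
        print(f"{task}: acc={acc:.4f} +/- {stderr:.4f}")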
alokabhishek/results_2024-05-08-02-05-18.json
ADDED
@@ -0,0 +1,589 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-08-02-05-18",
+        "total_evaluation_time_secondes": "",
+        "model_name": "alokabhishek/falcon-7b-instruct-bnb-4bit",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 4.01,
+        "model_params": 6.83,
+        "quant_type": "bitsandbytes",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|mmlu|0": {
+            "acc,none": 0.2454778521578123,
+            "acc_stderr,none": 0.0036257236339441393,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.24782146652497344,
+            "acc_stderr,none": 0.006287119040079848
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.2857142857142857,
+            "acc_stderr,none": 0.04040610178208841
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.24848484848484848,
+            "acc_stderr,none": 0.033744026441394036
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.25980392156862747,
+            "acc_stderr,none": 0.030778554678693254
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.26582278481012656,
+            "acc_stderr,none": 0.028756799629658335
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.256198347107438,
+            "acc_stderr,none": 0.03984979653302871
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.28703703703703703,
+            "acc_stderr,none": 0.043733130409147614
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.19631901840490798,
+            "acc_stderr,none": 0.031207970394709218
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.2658959537572254,
+            "acc_stderr,none": 0.023786203255508287
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23910614525139665,
+            "acc_stderr,none": 0.014265554192331154
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.20257234726688103,
+            "acc_stderr,none": 0.022827317491059675
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.2222222222222222,
+            "acc_stderr,none": 0.023132376234543325
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.24445893089960888,
+            "acc_stderr,none": 0.010976425013113902
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.3684210526315789,
+            "acc_stderr,none": 0.036996580176568775
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.261345349211458,
+            "acc_stderr,none": 0.007864211382806421
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.34,
+            "acc_stderr,none": 0.04760952285695236
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.2188679245283019,
+            "acc_stderr,none": 0.02544786382510861
+
"acc_stderr,none": 0.02544786382510861
|
110 |
+
},
|
111 |
+
"harness|mmlu_college_medicine|0": {
|
112 |
+
"alias": " - college_medicine",
|
113 |
+
"acc,none": 0.23699421965317918,
|
114 |
+
"acc_stderr,none": 0.03242414757483099
|
115 |
+
},
|
116 |
+
"harness|mmlu_global_facts|0": {
|
117 |
+
"alias": " - global_facts",
|
118 |
+
"acc,none": 0.21,
|
119 |
+
"acc_stderr,none": 0.040936018074033256
|
120 |
+
},
|
121 |
+
"harness|mmlu_human_aging|0": {
|
122 |
+
"alias": " - human_aging",
|
123 |
+
"acc,none": 0.33183856502242154,
|
124 |
+
"acc_stderr,none": 0.031602951437766785
|
125 |
+
},
|
126 |
+
"harness|mmlu_management|0": {
|
127 |
+
"alias": " - management",
|
128 |
+
"acc,none": 0.2524271844660194,
|
129 |
+
"acc_stderr,none": 0.04301250399690877
|
130 |
+
},
|
131 |
+
"harness|mmlu_marketing|0": {
|
132 |
+
"alias": " - marketing",
|
133 |
+
"acc,none": 0.29914529914529914,
|
134 |
+
"acc_stderr,none": 0.029996951858349483
|
135 |
+
},
|
136 |
+
"harness|mmlu_medical_genetics|0": {
|
137 |
+
"alias": " - medical_genetics",
|
138 |
+
"acc,none": 0.31,
|
139 |
+
"acc_stderr,none": 0.04648231987117316
|
140 |
+
},
|
141 |
+
"harness|mmlu_miscellaneous|0": {
|
142 |
+
"alias": " - miscellaneous",
|
143 |
+
"acc,none": 0.2720306513409962,
|
144 |
+
"acc_stderr,none": 0.01591336744750051
|
145 |
+
},
|
146 |
+
"harness|mmlu_nutrition|0": {
|
147 |
+
"alias": " - nutrition",
|
148 |
+
"acc,none": 0.23202614379084968,
|
149 |
+
"acc_stderr,none": 0.024170840879341016
|
150 |
+
},
|
151 |
+
"harness|mmlu_professional_accounting|0": {
|
152 |
+
"alias": " - professional_accounting",
|
153 |
+
"acc,none": 0.2553191489361702,
|
154 |
+
"acc_stderr,none": 0.026011992930902
|
155 |
+
},
|
156 |
+
"harness|mmlu_professional_medicine|0": {
|
157 |
+
"alias": " - professional_medicine",
|
158 |
+
"acc,none": 0.1875,
|
159 |
+
"acc_stderr,none": 0.023709788253811766
|
160 |
+
},
|
161 |
+
"harness|mmlu_virology|0": {
|
162 |
+
"alias": " - virology",
|
163 |
+
"acc,none": 0.30120481927710846,
|
164 |
+
"acc_stderr,none": 0.0357160923005348
|
165 |
+
},
|
166 |
+
"harness|mmlu_social_sciences|0": {
|
167 |
+
"alias": " - social_sciences",
|
168 |
+
"acc,none": 0.23951901202469938,
|
169 |
+
"acc_stderr,none": 0.00768774913583545
|
170 |
+
},
|
171 |
+
"harness|mmlu_econometrics|0": {
|
172 |
+
"alias": " - econometrics",
|
173 |
+
"acc,none": 0.2631578947368421,
|
174 |
+
"acc_stderr,none": 0.04142439719489359
|
175 |
+
},
|
176 |
+
"harness|mmlu_high_school_geography|0": {
|
177 |
+
"alias": " - high_school_geography",
|
178 |
+
"acc,none": 0.25757575757575757,
|
179 |
+
"acc_stderr,none": 0.031156269519646836
|
180 |
+
},
|
181 |
+
"harness|mmlu_high_school_government_and_politics|0": {
|
182 |
+
"alias": " - high_school_government_and_politics",
|
183 |
+
"acc,none": 0.20207253886010362,
|
184 |
+
"acc_stderr,none": 0.02897908979429673
|
185 |
+
},
|
186 |
+
"harness|mmlu_high_school_macroeconomics|0": {
|
187 |
+
"alias": " - high_school_macroeconomics",
|
188 |
+
"acc,none": 0.24102564102564103,
|
189 |
+
"acc_stderr,none": 0.021685546665333184
|
190 |
+
},
|
191 |
+
"harness|mmlu_high_school_microeconomics|0": {
|
192 |
+
"alias": " - high_school_microeconomics",
|
193 |
+
"acc,none": 0.24789915966386555,
|
194 |
+
"acc_stderr,none": 0.028047967224176892
|
195 |
+
},
|
196 |
+
"harness|mmlu_high_school_psychology|0": {
|
197 |
+
"alias": " - high_school_psychology",
|
198 |
+
"acc,none": 0.20550458715596331,
|
199 |
+
"acc_stderr,none": 0.017324352325016015
|
200 |
+
},
|
201 |
+
"harness|mmlu_human_sexuality|0": {
|
202 |
+
"alias": " - human_sexuality",
|
203 |
+
"acc,none": 0.2900763358778626,
|
204 |
+
"acc_stderr,none": 0.03980066246467765
|
205 |
+
},
|
206 |
+
"harness|mmlu_professional_psychology|0": {
|
207 |
+
"alias": " - professional_psychology",
|
208 |
+
"acc,none": 0.2549019607843137,
|
209 |
+
"acc_stderr,none": 0.017630827375148383
|
210 |
+
},
|
211 |
+
"harness|mmlu_public_relations|0": {
|
212 |
+
"alias": " - public_relations",
|
213 |
+
"acc,none": 0.3181818181818182,
|
214 |
+
"acc_stderr,none": 0.04461272175910508
|
215 |
+
},
|
216 |
+
"harness|mmlu_security_studies|0": {
|
217 |
+
"alias": " - security_studies",
|
218 |
+
"acc,none": 0.19591836734693877,
|
219 |
+
"acc_stderr,none": 0.025409301953225678
|
220 |
+
},
|
221 |
+
"harness|mmlu_sociology|0": {
|
222 |
+
"alias": " - sociology",
|
223 |
+
"acc,none": 0.22388059701492538,
|
224 |
+
"acc_stderr,none": 0.02947525023601719
|
225 |
+
},
|
226 |
+
"harness|mmlu_us_foreign_policy|0": {
|
227 |
+
"alias": " - us_foreign_policy",
|
228 |
+
"acc,none": 0.3,
|
229 |
+
"acc_stderr,none": 0.046056618647183814
|
230 |
+
},
|
231 |
+
"harness|mmlu_stem|0": {
|
232 |
+
"alias": " - stem",
|
233 |
+
"acc,none": 0.23215984776403426,
|
234 |
+
"acc_stderr,none": 0.007508345730856319
|
235 |
+
},
|
236 |
+
"harness|mmlu_abstract_algebra|0": {
|
237 |
+
"alias": " - abstract_algebra",
|
238 |
+
"acc,none": 0.22,
|
239 |
+
"acc_stderr,none": 0.04163331998932269
|
240 |
+
},
|
241 |
+
"harness|mmlu_anatomy|0": {
|
242 |
+
"alias": " - anatomy",
|
243 |
+
"acc,none": 0.2,
|
244 |
+
"acc_stderr,none": 0.03455473702325437
|
245 |
+
},
|
246 |
+
"harness|mmlu_astronomy|0": {
|
247 |
+
"alias": " - astronomy",
|
248 |
+
"acc,none": 0.18421052631578946,
|
249 |
+
"acc_stderr,none": 0.0315469804508223
|
250 |
+
},
|
251 |
+
"harness|mmlu_college_biology|0": {
|
252 |
+
"alias": " - college_biology",
|
253 |
+
"acc,none": 0.2986111111111111,
|
254 |
+
"acc_stderr,none": 0.03827052357950756
|
255 |
+
},
|
256 |
+
"harness|mmlu_college_chemistry|0": {
|
257 |
+
"alias": " - college_chemistry",
|
258 |
+
"acc,none": 0.22,
|
259 |
+
"acc_stderr,none": 0.04163331998932269
|
260 |
+
},
|
261 |
+
"harness|mmlu_college_computer_science|0": {
|
262 |
+
"alias": " - college_computer_science",
|
263 |
+
"acc,none": 0.24,
|
264 |
+
"acc_stderr,none": 0.042923469599092816
|
265 |
+
},
|
266 |
+
"harness|mmlu_college_mathematics|0": {
|
267 |
+
"alias": " - college_mathematics",
|
268 |
+
"acc,none": 0.22,
|
269 |
+
"acc_stderr,none": 0.041633319989322695
|
270 |
+
},
|
271 |
+
"harness|mmlu_college_physics|0": {
|
272 |
+
"alias": " - college_physics",
|
273 |
+
"acc,none": 0.24509803921568626,
|
274 |
+
"acc_stderr,none": 0.04280105837364397
|
275 |
+
},
|
276 |
+
"harness|mmlu_computer_security|0": {
|
277 |
+
"alias": " - computer_security",
|
278 |
+
"acc,none": 0.31,
|
279 |
+
"acc_stderr,none": 0.04648231987117316
|
280 |
+
},
|
281 |
+
"harness|mmlu_conceptual_physics|0": {
|
282 |
+
"alias": " - conceptual_physics",
|
283 |
+
"acc,none": 0.2936170212765957,
|
284 |
+
"acc_stderr,none": 0.029771642712491227
|
285 |
+
},
|
286 |
+
"harness|mmlu_electrical_engineering|0": {
|
287 |
+
"alias": " - electrical_engineering",
|
288 |
+
"acc,none": 0.23448275862068965,
|
289 |
+
"acc_stderr,none": 0.035306258743465914
|
290 |
+
},
|
291 |
+
"harness|mmlu_elementary_mathematics|0": {
|
292 |
+
"alias": " - elementary_mathematics",
|
293 |
+
"acc,none": 0.20899470899470898,
|
294 |
+
"acc_stderr,none": 0.020940481565334835
|
295 |
+
},
|
296 |
+
"harness|mmlu_high_school_biology|0": {
|
297 |
+
"alias": " - high_school_biology",
|
298 |
+
"acc,none": 0.23548387096774193,
|
299 |
+
"acc_stderr,none": 0.024137632429337724
|
300 |
+
},
|
301 |
+
"harness|mmlu_high_school_chemistry|0": {
|
302 |
+
"alias": " - high_school_chemistry",
|
303 |
+
"acc,none": 0.18226600985221675,
|
304 |
+
"acc_stderr,none": 0.02716334085964515
|
305 |
+
},
|
306 |
+
"harness|mmlu_high_school_computer_science|0": {
|
307 |
+
"alias": " - high_school_computer_science",
|
308 |
+
"acc,none": 0.29,
|
309 |
+
"acc_stderr,none": 0.045604802157206845
|
310 |
+
},
|
311 |
+
"harness|mmlu_high_school_mathematics|0": {
|
312 |
+
"alias": " - high_school_mathematics",
|
313 |
+
"acc,none": 0.22592592592592592,
|
314 |
+
"acc_stderr,none": 0.025497532639609542
|
315 |
+
},
|
316 |
+
"harness|mmlu_high_school_physics|0": {
|
317 |
+
"alias": " - high_school_physics",
|
318 |
+
"acc,none": 0.23841059602649006,
|
319 |
+
"acc_stderr,none": 0.03479185572599661
|
320 |
+
},
|
321 |
+
"harness|mmlu_high_school_statistics|0": {
|
322 |
+
"alias": " - high_school_statistics",
|
323 |
+
"acc,none": 0.1712962962962963,
|
324 |
+
"acc_stderr,none": 0.02569534164382468
|
325 |
+
},
|
326 |
+
"harness|mmlu_machine_learning|0": {
|
327 |
+
"alias": " - machine_learning",
|
328 |
+
"acc,none": 0.29464285714285715,
|
329 |
+
"acc_stderr,none": 0.043270409325787296
|
330 |
+
},
|
331 |
+
"harness|arc:easy|0": {
|
332 |
+
"acc,none": 0.7285353535353535,
|
333 |
+
"acc_stderr,none": 0.009125362970360623,
|
334 |
+
"acc_norm,none": 0.6763468013468014,
|
335 |
+
"acc_norm_stderr,none": 0.009600478182273785,
|
336 |
+
"alias": "arc_easy"
|
337 |
+
},
|
338 |
+
"harness|hellaswag|0": {
|
339 |
+
"acc,none": 0.5119498107946624,
|
340 |
+
"acc_stderr,none": 0.004988356146499022,
|
341 |
+
"acc_norm,none": 0.6914957179844653,
|
342 |
+
"acc_norm_stderr,none": 0.004609320024893916,
|
343 |
+
"alias": "hellaswag"
|
344 |
+
},
|
345 |
+
"harness|piqa|0": {
|
346 |
+
"acc,none": 0.7823721436343852,
|
347 |
+
"acc_stderr,none": 0.009627407474840874,
|
348 |
+
"acc_norm,none": 0.7861806311207835,
|
349 |
+
"acc_norm_stderr,none": 0.009565994206915592,
|
350 |
+
"alias": "piqa"
|
351 |
+
},
|
352 |
+
"harness|boolq|0": {
|
353 |
+
"acc,none": 0.7125382262996942,
|
354 |
+
"acc_stderr,none": 0.007915651663295326,
|
355 |
+
"alias": "boolq"
|
356 |
+
},
|
357 |
+
"harness|lambada:openai|0": {
|
358 |
+
"perplexity,none": 5.298423800158154,
|
359 |
+
"perplexity_stderr,none": 0.12158416023239337,
|
360 |
+
"acc,none": 0.634775858723074,
|
361 |
+
"acc_stderr,none": 0.006708138364946144,
|
362 |
+
"alias": "lambada_openai"
|
363 |
+
},
|
364 |
+
"harness|openbookqa|0": {
|
365 |
+
"acc,none": 0.3,
|
366 |
+
"acc_stderr,none": 0.020514426225628043,
|
367 |
+
"acc_norm,none": 0.408,
|
368 |
+
"acc_norm_stderr,none": 0.02200091089387719,
|
369 |
+
"alias": "openbookqa"
|
370 |
+
},
|
371 |
+
"harness|arc:challenge|0": {
|
372 |
+
"acc,none": 0.39761092150170646,
|
373 |
+
"acc_stderr,none": 0.014301752223279528,
|
374 |
+
"acc_norm,none": 0.4189419795221843,
|
375 |
+
"acc_norm_stderr,none": 0.01441810695363901,
|
376 |
+
"alias": "arc_challenge"
|
377 |
+
},
|
378 |
+
"harness|truthfulqa:mc2|0": {
|
379 |
+
"acc,none": 0.44307675898066345,
|
380 |
+
"acc_stderr,none": 0.014804938982947631,
|
381 |
+
"alias": "truthfulqa_mc2"
|
382 |
+
},
|
383 |
+
"harness|truthfulqa:mc1|0": {
|
384 |
+
"acc,none": 0.2876376988984088,
|
385 |
+
"acc_stderr,none": 0.015846315101394795,
|
386 |
+
"alias": "truthfulqa_mc1"
|
387 |
+
},
|
388 |
+
"harness|winogrande|0": {
|
389 |
+
"acc,none": 0.6558800315706393,
|
390 |
+
"acc_stderr,none": 0.013352121905005935,
|
391 |
+
"alias": "winogrande"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "alokabhishek/falcon-7b-instruct-bnb-4bit",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 14.852,
|
399 |
+
"architectures": "FalconForCausalLM",
|
400 |
+
"quant_type": "bitsandbytes",
|
401 |
+
"precision": "4bit",
|
402 |
+
"model_params": 29.704,
|
403 |
+
"model_size": 14.852,
|
404 |
+
"weight_dtype": "int4",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-05-07T08:53:20Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"_load_in_4bit": true,
|
417 |
+
"_load_in_8bit": false,
|
418 |
+
"bnb_4bit_compute_dtype": "bfloat16",
|
419 |
+
"bnb_4bit_quant_type": "nf4",
|
420 |
+
"bnb_4bit_use_double_quant": true,
|
421 |
+
"llm_int8_enable_fp32_cpu_offload": false,
|
422 |
+
"llm_int8_has_fp16_weight": false,
|
423 |
+
"llm_int8_skip_modules": null,
|
424 |
+
"llm_int8_threshold": 6.0,
|
425 |
+
"load_in_4bit": true,
|
426 |
+
"load_in_8bit": false,
|
427 |
+
"quant_method": "bitsandbytes"
|
428 |
+
},
|
429 |
+
"versions": {
|
430 |
+
"harness|mmlu|0": null,
|
431 |
+
"harness|mmlu_humanities|0": null,
|
432 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
433 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
434 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
435 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
436 |
+
"harness|mmlu_international_law|0": 0.0,
|
437 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
438 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
439 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
440 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
441 |
+
"harness|mmlu_philosophy|0": 0.0,
|
442 |
+
"harness|mmlu_prehistory|0": 0.0,
|
443 |
+
"harness|mmlu_professional_law|0": 0.0,
|
444 |
+
"harness|mmlu_world_religions|0": 0.0,
|
445 |
+
"harness|mmlu_other|0": null,
|
446 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
447 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
448 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
449 |
+
"harness|mmlu_global_facts|0": 0.0,
|
450 |
+
"harness|mmlu_human_aging|0": 0.0,
|
451 |
+
"harness|mmlu_management|0": 0.0,
|
452 |
+
"harness|mmlu_marketing|0": 0.0,
|
453 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
454 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
455 |
+
"harness|mmlu_nutrition|0": 0.0,
|
456 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
457 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
458 |
+
"harness|mmlu_virology|0": 0.0,
|
459 |
+
"harness|mmlu_social_sciences|0": null,
|
460 |
+
"harness|mmlu_econometrics|0": 0.0,
|
461 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
462 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
463 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
464 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
465 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
466 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
467 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
468 |
+
"harness|mmlu_public_relations|0": 0.0,
|
469 |
+
"harness|mmlu_security_studies|0": 0.0,
|
470 |
+
"harness|mmlu_sociology|0": 0.0,
|
471 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
472 |
+
"harness|mmlu_stem|0": null,
|
473 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
474 |
+
"harness|mmlu_anatomy|0": 0.0,
|
475 |
+
"harness|mmlu_astronomy|0": 0.0,
|
476 |
+
"harness|mmlu_college_biology|0": 0.0,
|
477 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
478 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
479 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
480 |
+
"harness|mmlu_college_physics|0": 0.0,
|
481 |
+
"harness|mmlu_computer_security|0": 0.0,
|
482 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
483 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
484 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
485 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
486 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
488 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
489 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
490 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
491 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
492 |
+
"harness|arc:easy|0": 1.0,
|
493 |
+
"harness|hellaswag|0": 1.0,
|
494 |
+
"harness|piqa|0": 1.0,
|
495 |
+
"harness|boolq|0": 2.0,
|
496 |
+
"harness|lambada:openai|0": 1.0,
|
497 |
+
"harness|openbookqa|0": 1.0,
|
498 |
+
"harness|arc:challenge|0": 1.0,
|
499 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
500 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
501 |
+
"harness|winogrande|0": 1.0
|
502 |
+
},
|
503 |
+
"n-shot": {
|
504 |
+
"arc_challenge": 0,
|
505 |
+
"arc_easy": 0,
|
506 |
+
"boolq": 0,
|
507 |
+
"hellaswag": 0,
|
508 |
+
"lambada_openai": 0,
|
509 |
+
"mmlu": 0,
|
510 |
+
"mmlu_abstract_algebra": 0,
|
511 |
+
"mmlu_anatomy": 0,
|
512 |
+
"mmlu_astronomy": 0,
|
513 |
+
"mmlu_business_ethics": 0,
|
514 |
+
"mmlu_clinical_knowledge": 0,
|
515 |
+
"mmlu_college_biology": 0,
|
516 |
+
"mmlu_college_chemistry": 0,
|
517 |
+
"mmlu_college_computer_science": 0,
|
518 |
+
"mmlu_college_mathematics": 0,
|
519 |
+
"mmlu_college_medicine": 0,
|
520 |
+
"mmlu_college_physics": 0,
|
521 |
+
"mmlu_computer_security": 0,
|
522 |
+
"mmlu_conceptual_physics": 0,
|
523 |
+
"mmlu_econometrics": 0,
|
524 |
+
"mmlu_electrical_engineering": 0,
|
525 |
+
"mmlu_elementary_mathematics": 0,
|
526 |
+
"mmlu_formal_logic": 0,
|
527 |
+
"mmlu_global_facts": 0,
|
528 |
+
"mmlu_high_school_biology": 0,
|
529 |
+
"mmlu_high_school_chemistry": 0,
|
530 |
+
"mmlu_high_school_computer_science": 0,
|
531 |
+
"mmlu_high_school_european_history": 0,
|
532 |
+
"mmlu_high_school_geography": 0,
|
533 |
+
"mmlu_high_school_government_and_politics": 0,
|
534 |
+
"mmlu_high_school_macroeconomics": 0,
|
535 |
+
"mmlu_high_school_mathematics": 0,
|
536 |
+
"mmlu_high_school_microeconomics": 0,
|
537 |
+
"mmlu_high_school_physics": 0,
|
538 |
+
"mmlu_high_school_psychology": 0,
|
539 |
+
"mmlu_high_school_statistics": 0,
|
540 |
+
"mmlu_high_school_us_history": 0,
|
541 |
+
"mmlu_high_school_world_history": 0,
|
542 |
+
"mmlu_human_aging": 0,
|
543 |
+
"mmlu_human_sexuality": 0,
|
544 |
+
"mmlu_humanities": 0,
|
545 |
+
"mmlu_international_law": 0,
|
546 |
+
"mmlu_jurisprudence": 0,
|
547 |
+
"mmlu_logical_fallacies": 0,
|
548 |
+
"mmlu_machine_learning": 0,
|
549 |
+
"mmlu_management": 0,
|
550 |
+
"mmlu_marketing": 0,
|
551 |
+
"mmlu_medical_genetics": 0,
|
552 |
+
"mmlu_miscellaneous": 0,
|
553 |
+
"mmlu_moral_disputes": 0,
|
554 |
+
"mmlu_moral_scenarios": 0,
|
555 |
+
"mmlu_nutrition": 0,
|
556 |
+
"mmlu_other": 0,
|
557 |
+
"mmlu_philosophy": 0,
|
558 |
+
"mmlu_prehistory": 0,
|
559 |
+
"mmlu_professional_accounting": 0,
|
560 |
+
"mmlu_professional_law": 0,
|
561 |
+
"mmlu_professional_medicine": 0,
|
562 |
+
"mmlu_professional_psychology": 0,
|
563 |
+
"mmlu_public_relations": 0,
|
564 |
+
"mmlu_security_studies": 0,
|
565 |
+
"mmlu_social_sciences": 0,
|
566 |
+
"mmlu_sociology": 0,
|
567 |
+
"mmlu_stem": 0,
|
568 |
+
"mmlu_us_foreign_policy": 0,
|
569 |
+
"mmlu_virology": 0,
|
570 |
+
"mmlu_world_religions": 0,
|
571 |
+
"openbookqa": 0,
|
572 |
+
"piqa": 0,
|
573 |
+
"truthfulqa_mc1": 0,
|
574 |
+
"truthfulqa_mc2": 0,
|
575 |
+
"winogrande": 0
|
576 |
+
},
|
577 |
+
"date": 1715072170.8339446,
|
578 |
+
"config": {
|
579 |
+
"model": "hf",
|
580 |
+
"model_args": "pretrained=alokabhishek/falcon-7b-instruct-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
581 |
+
"batch_size": 1,
|
582 |
+
"batch_sizes": [],
|
583 |
+
"device": "cuda",
|
584 |
+
"use_cache": null,
|
585 |
+
"limit": null,
|
586 |
+
"bootstrap_iters": 100000,
|
587 |
+
"gen_kwargs": null
|
588 |
+
}
|
589 |
+
}
|
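The "quantization_config" stored above is the standard serialized form of a transformers BitsAndBytesConfig. As an illustrative sketch only (the checkpoint itself already carries this config), the same 4-bit NF4 setup is expressed when loading a model roughly like this, assuming transformers and bitsandbytes are installed:

# Minimal sketch mirroring the serialized quantization_config above
# (nf4 quant type, double quantization, bfloat16 compute dtype).
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)
model = AutoModelForCausalLM.from_pretrained(
    "alokabhishek/falcon-7b-instruct-bnb-4bit",
    quantization_config=bnb_config,  # redundant for a pre-quantized repo, shown for clarity
    trust_remote_code=True,
    device_map="auto",
)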
astronomer/results_2024-05-13-17-16-12.json
ADDED
@@ -0,0 +1,588 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-13-17-16-12",
+        "total_evaluation_time_secondes": "",
+        "model_name": "astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.74,
+        "model_params": 7.04,
+        "quant_type": "GPTQ",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|winogrande|0": {
+            "acc,none": 0.7134964483030781,
+            "acc_stderr,none": 0.01270703013996038,
+            "alias": "winogrande"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.7698585418933623,
+            "acc_stderr,none": 0.009820832826839817,
+            "acc_norm,none": 0.7698585418933623,
+            "acc_norm_stderr,none": 0.009820832826839815,
+            "alias": "piqa"
+        },
+        "harness|boolq|0": {
+            "acc,none": 0.8201834862385321,
+            "acc_stderr,none": 0.006716806494844575,
+            "alias": "boolq"
+        },
+        "harness|hellaswag|0": {
+            "acc,none": 0.5652260505875324,
+            "acc_stderr,none": 0.004947141797384123,
+            "acc_norm,none": 0.7495518820952002,
+            "acc_norm_stderr,none": 0.004323856300539162,
+            "alias": "hellaswag"
+        },
+        "harness|arc:easy|0": {
+            "acc,none": 0.8034511784511784,
+            "acc_stderr,none": 0.008154233832067922,
+            "acc_norm,none": 0.7828282828282829,
+            "acc_norm_stderr,none": 0.008460637338999105,
+            "alias": "arc_easy"
+        },
+        "harness|openbookqa|0": {
+            "acc,none": 0.314,
+            "acc_stderr,none": 0.020776701920308997,
+            "acc_norm,none": 0.426,
+            "acc_norm_stderr,none": 0.022136577335085637,
+            "alias": "openbookqa"
+        },
+        "harness|truthfulqa:mc2|0": {
+            "acc,none": 0.5058067674960235,
+            "acc_stderr,none": 0.01513541322967859,
+            "alias": "truthfulqa_mc2"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.34149326805385555,
+            "acc_stderr,none": 0.016600688619950826,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.6135165930779092,
+            "acc_stderr,none": 0.00389230425794973,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.5621679064824655,
+            "acc_stderr,none": 0.006801371377907809
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.42063492063492064,
+            "acc_stderr,none": 0.04415438226743744
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.7393939393939394,
+            "acc_stderr,none": 0.034277431758165236
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.8137254901960784,
+            "acc_stderr,none": 0.027325470966716326
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.8143459915611815,
+            "acc_stderr,none": 0.025310495376944867
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.7520661157024794,
+            "acc_stderr,none": 0.03941897526516302
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.7962962962962963,
+            "acc_stderr,none": 0.03893542518824849
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.7239263803680982,
+            "acc_stderr,none": 0.03512385283705048
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.6763005780346821,
+            "acc_stderr,none": 0.02519018132760842
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.3329608938547486,
+            "acc_stderr,none": 0.01576171617839756
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.707395498392283,
+            "acc_stderr,none": 0.02583989833487798
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.7037037037037037,
+            "acc_stderr,none": 0.025407197798890165
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.46088657105606257,
+            "acc_stderr,none": 0.012731102790504524
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7543859649122807,
+            "acc_stderr,none": 0.03301405946987251
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.6935951078210493,
+            "acc_stderr,none": 0.008023000727075687
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.61,
+            "acc_stderr,none": 0.04902071300001974
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.690566037735849,
+            "acc_stderr,none": 0.028450154794118634
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.6416184971098265,
+            "acc_stderr,none": 0.03656343653353159
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.41,
+            "acc_stderr,none": 0.049431107042371025
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.6233183856502242,
+            "acc_stderr,none": 0.032521134899291884
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.8349514563106796,
+            "acc_stderr,none": 0.036756688322331886
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.8846153846153846,
+            "acc_stderr,none": 0.02093019318517933
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.75,
+            "acc_stderr,none": 0.04351941398892446
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.789272030651341,
+            "acc_stderr,none": 0.014583812465862546
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.7091503267973857,
+            "acc_stderr,none": 0.02600480036395213
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.5354609929078015,
+            "acc_stderr,none": 0.02975238965742705
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.6691176470588235,
+            "acc_stderr,none": 0.02858270975389844
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.5060240963855421,
+            "acc_stderr,none": 0.03892212195333045
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.7114072148196295,
+            "acc_stderr,none": 0.00803029096887605
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.45614035087719296,
+            "acc_stderr,none": 0.04685473041907789
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.7474747474747475,
+            "acc_stderr,none": 0.030954055470365907
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.8186528497409327,
+            "acc_stderr,none": 0.027807032360686088
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.6230769230769231,
+            "acc_stderr,none": 0.024570975364225995
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.6680672268907563,
+            "acc_stderr,none": 0.03058869701378364
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.8055045871559633,
+            "acc_stderr,none": 0.016970289090458057
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.7480916030534351,
+            "acc_stderr,none": 0.03807387116306086
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.6535947712418301,
+            "acc_stderr,none": 0.01924978569171721
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6818181818181818,
+            "acc_stderr,none": 0.04461272175910509
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.7061224489795919,
+            "acc_stderr,none": 0.029162738410249762
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.8009950248756219,
+            "acc_stderr,none": 0.028231365092758406
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.83,
+            "acc_stderr,none": 0.0377525168068637
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.5156993339676499,
+            "acc_stderr,none": 0.00857673801503054
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.33,
+            "acc_stderr,none": 0.047258156262526045
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.6074074074074074,
+            "acc_stderr,none": 0.04218506215368879
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.6578947368421053,
+            "acc_stderr,none": 0.03860731599316092
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.7361111111111112,
+            "acc_stderr,none": 0.03685651095897532
+        },
+        "harness|mmlu_college_chemistry|0": {
+            "alias": " - college_chemistry",
+            "acc,none": 0.39,
+            "acc_stderr,none": 0.04902071300001975
+        },
+        "harness|mmlu_college_computer_science|0": {
+            "alias": " - college_computer_science",
+            "acc,none": 0.47,
+            "acc_stderr,none": 0.05016135580465919
+        },
+        "harness|mmlu_college_mathematics|0": {
+            "alias": " - college_mathematics",
+            "acc,none": 0.26,
+            "acc_stderr,none": 0.04408440022768079
+        },
+        "harness|mmlu_college_physics|0": {
+            "alias": " - college_physics",
+            "acc,none": 0.4803921568627451,
+            "acc_stderr,none": 0.04971358884367405
+        },
+        "harness|mmlu_computer_security|0": {
+            "alias": " - computer_security",
+            "acc,none": 0.72,
+            "acc_stderr,none": 0.045126085985421296
+        },
+        "harness|mmlu_conceptual_physics|0": {
+            "alias": " - conceptual_physics",
+            "acc,none": 0.5531914893617021,
+            "acc_stderr,none": 0.032500536843658404
+        },
+        "harness|mmlu_electrical_engineering|0": {
+            "alias": " - electrical_engineering",
+            "acc,none": 0.6137931034482759,
+            "acc_stderr,none": 0.04057324734419036
+        },
+        "harness|mmlu_elementary_mathematics|0": {
+            "alias": " - elementary_mathematics",
+            "acc,none": 0.4074074074074074,
+            "acc_stderr,none": 0.025305906241590632
+        },
+        "harness|mmlu_high_school_biology|0": {
+            "alias": " - high_school_biology",
+            "acc,none": 0.7419354838709677,
+            "acc_stderr,none": 0.024892469172462843
+        },
+        "harness|mmlu_high_school_chemistry|0": {
+            "alias": " - high_school_chemistry",
+            "acc,none": 0.47783251231527096,
+            "acc_stderr,none": 0.03514528562175008
+        },
+        "harness|mmlu_high_school_computer_science|0": {
+            "alias": " - high_school_computer_science",
+            "acc,none": 0.64,
+            "acc_stderr,none": 0.04824181513244218
+        },
+        "harness|mmlu_high_school_mathematics|0": {
+            "alias": " - high_school_mathematics",
+            "acc,none": 0.3592592592592593,
+            "acc_stderr,none": 0.029252905927251976
+        },
+        "harness|mmlu_high_school_physics|0": {
+            "alias": " - high_school_physics",
+            "acc,none": 0.423841059602649,
+            "acc_stderr,none": 0.04034846678603397
+        },
+        "harness|mmlu_high_school_statistics|0": {
+            "alias": " - high_school_statistics",
+            "acc,none": 0.4583333333333333,
+            "acc_stderr,none": 0.033981108902946366
+        },
+        "harness|mmlu_machine_learning|0": {
+            "alias": " - machine_learning",
+            "acc,none": 0.42857142857142855,
+            "acc_stderr,none": 0.04697113923010212
+        },
+        "harness|arc:challenge|0": {
+            "acc,none": 0.5068259385665529,
+            "acc_stderr,none": 0.014610029151379813,
+            "acc_norm,none": 0.5392491467576792,
+            "acc_norm_stderr,none": 0.014566303676636588,
+            "alias": "arc_challenge"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 3.5080767853820873,
+            "perplexity_stderr,none": 0.09030328878014143,
+            "acc,none": 0.6993984086939646,
+            "acc_stderr,none": 0.006388075353174957,
+            "alias": "lambada_openai"
+        }
+    },
+    "task_info": {
+        "model": "astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit",
+        "revision": "main",
+        "private": false,
+        "params": 5.74,
+        "architectures": "LlamaForCausalLM",
+        "quant_type": "GPTQ",
+        "precision": "4bit",
+        "model_params": 7.04,
+        "model_size": 5.74,
+        "weight_dtype": "int4",
+        "compute_dtype": "float16",
+        "gguf_ftype": "*Q4_0.gguf",
+        "hardware": "gpu",
+        "status": "Pending",
+        "submitted_time": "2024-05-10T04:42:46Z",
+        "model_type": "quantization",
+        "job_id": -1,
+        "job_start_time": null,
+        "scripts": "ITREX"
+    },
+    "quantization_config": {
+        "bits": 4,
+        "damp_percent": 0.1,
+        "desc_act": true,
+        "group_size": 128,
+        "is_marlin_format": false,
+        "model_file_base_name": null,
+        "model_name_or_path": null,
+        "quant_method": "gptq",
+        "static_groups": false,
+        "sym": true,
+        "true_sequential": true
+    },
+    "versions": {
+        "harness|winogrande|0": 1.0,
+        "harness|piqa|0": 1.0,
+        "harness|boolq|0": 2.0,
+        "harness|hellaswag|0": 1.0,
+        "harness|arc:easy|0": 1.0,
+        "harness|openbookqa|0": 1.0,
+        "harness|truthfulqa:mc2|0": 2.0,
+        "harness|truthfulqa:mc1|0": 2.0,
+        "harness|mmlu|0": null,
+        "harness|mmlu_humanities|0": null,
+        "harness|mmlu_formal_logic|0": 0.0,
+        "harness|mmlu_high_school_european_history|0": 0.0,
+        "harness|mmlu_high_school_us_history|0": 0.0,
+        "harness|mmlu_high_school_world_history|0": 0.0,
+        "harness|mmlu_international_law|0": 0.0,
+        "harness|mmlu_jurisprudence|0": 0.0,
+        "harness|mmlu_logical_fallacies|0": 0.0,
+        "harness|mmlu_moral_disputes|0": 0.0,
+        "harness|mmlu_moral_scenarios|0": 0.0,
+        "harness|mmlu_philosophy|0": 0.0,
+        "harness|mmlu_prehistory|0": 0.0,
+        "harness|mmlu_professional_law|0": 0.0,
+        "harness|mmlu_world_religions|0": 0.0,
+        "harness|mmlu_other|0": null,
+        "harness|mmlu_business_ethics|0": 0.0,
+        "harness|mmlu_clinical_knowledge|0": 0.0,
+        "harness|mmlu_college_medicine|0": 0.0,
+        "harness|mmlu_global_facts|0": 0.0,
+        "harness|mmlu_human_aging|0": 0.0,
+        "harness|mmlu_management|0": 0.0,
+        "harness|mmlu_marketing|0": 0.0,
+        "harness|mmlu_medical_genetics|0": 0.0,
+        "harness|mmlu_miscellaneous|0": 0.0,
+        "harness|mmlu_nutrition|0": 0.0,
+        "harness|mmlu_professional_accounting|0": 0.0,
+        "harness|mmlu_professional_medicine|0": 0.0,
+        "harness|mmlu_virology|0": 0.0,
+        "harness|mmlu_social_sciences|0": null,
+        "harness|mmlu_econometrics|0": 0.0,
+        "harness|mmlu_high_school_geography|0": 0.0,
+        "harness|mmlu_high_school_government_and_politics|0": 0.0,
+        "harness|mmlu_high_school_macroeconomics|0": 0.0,
+        "harness|mmlu_high_school_microeconomics|0": 0.0,
+        "harness|mmlu_high_school_psychology|0": 0.0,
+        "harness|mmlu_human_sexuality|0": 0.0,
+        "harness|mmlu_professional_psychology|0": 0.0,
+        "harness|mmlu_public_relations|0": 0.0,
+        "harness|mmlu_security_studies|0": 0.0,
+        "harness|mmlu_sociology|0": 0.0,
+        "harness|mmlu_us_foreign_policy|0": 0.0,
+        "harness|mmlu_stem|0": null,
+        "harness|mmlu_abstract_algebra|0": 0.0,
+        "harness|mmlu_anatomy|0": 0.0,
+        "harness|mmlu_astronomy|0": 0.0,
+        "harness|mmlu_college_biology|0": 0.0,
+        "harness|mmlu_college_chemistry|0": 0.0,
+        "harness|mmlu_college_computer_science|0": 0.0,
+        "harness|mmlu_college_mathematics|0": 0.0,
+        "harness|mmlu_college_physics|0": 0.0,
+        "harness|mmlu_computer_security|0": 0.0,
+        "harness|mmlu_conceptual_physics|0": 0.0,
+        "harness|mmlu_electrical_engineering|0": 0.0,
+        "harness|mmlu_elementary_mathematics|0": 0.0,
+        "harness|mmlu_high_school_biology|0": 0.0,
+        "harness|mmlu_high_school_chemistry|0": 0.0,
+        "harness|mmlu_high_school_computer_science|0": 0.0,
+        "harness|mmlu_high_school_mathematics|0": 0.0,
+        "harness|mmlu_high_school_physics|0": 0.0,
+        "harness|mmlu_high_school_statistics|0": 0.0,
+        "harness|mmlu_machine_learning|0": 0.0,
+        "harness|arc:challenge|0": 1.0,
+        "harness|lambada:openai|0": 1.0
+    },
+    "n-shot": {
+        "arc_challenge": 0,
+        "arc_easy": 0,
+        "boolq": 0,
+        "hellaswag": 0,
+        "lambada_openai": 0,
+        "mmlu": 0,
+        "mmlu_abstract_algebra": 0,
+        "mmlu_anatomy": 0,
+        "mmlu_astronomy": 0,
+        "mmlu_business_ethics": 0,
+        "mmlu_clinical_knowledge": 0,
+        "mmlu_college_biology": 0,
+        "mmlu_college_chemistry": 0,
+        "mmlu_college_computer_science": 0,
+        "mmlu_college_mathematics": 0,
+        "mmlu_college_medicine": 0,
+        "mmlu_college_physics": 0,
+        "mmlu_computer_security": 0,
+        "mmlu_conceptual_physics": 0,
+        "mmlu_econometrics": 0,
+        "mmlu_electrical_engineering": 0,
+        "mmlu_elementary_mathematics": 0,
+        "mmlu_formal_logic": 0,
+        "mmlu_global_facts": 0,
+        "mmlu_high_school_biology": 0,
+        "mmlu_high_school_chemistry": 0,
+        "mmlu_high_school_computer_science": 0,
+        "mmlu_high_school_european_history": 0,
+        "mmlu_high_school_geography": 0,
+        "mmlu_high_school_government_and_politics": 0,
+        "mmlu_high_school_macroeconomics": 0,
+        "mmlu_high_school_mathematics": 0,
+        "mmlu_high_school_microeconomics": 0,
+        "mmlu_high_school_physics": 0,
+        "mmlu_high_school_psychology": 0,
+        "mmlu_high_school_statistics": 0,
+        "mmlu_high_school_us_history": 0,
+        "mmlu_high_school_world_history": 0,
+        "mmlu_human_aging": 0,
+        "mmlu_human_sexuality": 0,
+        "mmlu_humanities": 0,
+        "mmlu_international_law": 0,
+        "mmlu_jurisprudence": 0,
+        "mmlu_logical_fallacies": 0,
+        "mmlu_machine_learning": 0,
+        "mmlu_management": 0,
+        "mmlu_marketing": 0,
+        "mmlu_medical_genetics": 0,
+        "mmlu_miscellaneous": 0,
+        "mmlu_moral_disputes": 0,
+        "mmlu_moral_scenarios": 0,
+        "mmlu_nutrition": 0,
+        "mmlu_other": 0,
+        "mmlu_philosophy": 0,
+        "mmlu_prehistory": 0,
+        "mmlu_professional_accounting": 0,
+        "mmlu_professional_law": 0,
+        "mmlu_professional_medicine": 0,
+        "mmlu_professional_psychology": 0,
+        "mmlu_public_relations": 0,
+        "mmlu_security_studies": 0,
+        "mmlu_social_sciences": 0,
+        "mmlu_sociology": 0,
+        "mmlu_stem": 0,
+        "mmlu_us_foreign_policy": 0,
+        "mmlu_virology": 0,
+        "mmlu_world_religions": 0,
+        "openbookqa": 0,
+        "piqa": 0,
+        "truthfulqa_mc1": 0,
+        "truthfulqa_mc2": 0,
+        "winogrande": 0
+    },
+    "date": 1715587328.967321,
+    "config": {
+        "model": "hf",
+        "model_args": "pretrained=astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit,trust_remote_code=True,dtype=float16,_commit_hash=main",
+        "batch_size": 2,
+        "batch_sizes": [],
+        "device": "cuda",
+        "use_cache": null,
+        "limit": null,
+        "bootstrap_iters": 100000,
+        "gen_kwargs": null
+    }
+}
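The GPTQ "quantization_config" above (bits=4, group_size=128, desc_act=true) ships inside the checkpoint, so transformers reads it automatically at load time. A minimal loading sketch, assuming a GPTQ backend such as optimum with auto-gptq is installed alongside transformers:

# Minimal sketch; the GPTQ quantization_config stored in the repo is
# picked up automatically from the checkpoint, no explicit config needed.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "astronomer/Llama-3-8B-Instruct-GPTQ-4-Bit"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")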
baichuan-inc/results_2024-05-13-19-42-01.json
ADDED
@@ -0,0 +1,586 @@
+{
+    "config_general": {
+        "lighteval_sha": "1.4",
+        "num_few_shot_default": null,
+        "num_fewshot_seeds": null,
+        "override_batch_size": null,
+        "max_samples": null,
+        "job_id": -1,
+        "start_time": null,
+        "end_time": "2024-05-13-19-42-01",
+        "total_evaluation_time_secondes": "",
+        "model_name": "baichuan-inc/Baichuan2-7B-Chat-4bits",
+        "model_sha": "",
+        "model_dtype": "4bit",
+        "model_size": 5.4,
+        "model_params": 7.0,
+        "quant_type": "bitsandbytes",
+        "precision": "4bit"
+    },
+    "results": {
+        "harness|arc:easy|0": {
+            "acc,none": 0.726010101010101,
+            "acc_stderr,none": 0.009151805901544022,
+            "acc_norm,none": 0.678030303030303,
+            "acc_norm_stderr,none": 0.009587386696300385,
+            "alias": "arc_easy"
+        },
+        "harness|truthfulqa:mc1|0": {
+            "acc,none": 0.3023255813953488,
+            "acc_stderr,none": 0.016077509266133026,
+            "alias": "truthfulqa_mc1"
+        },
+        "harness|lambada:openai|0": {
+            "perplexity,none": 4.038756542051814,
+            "perplexity_stderr,none": 0.11540847819628168,
+            "acc,none": 0.672811954201436,
+            "acc_stderr,none": 0.006536686193974627,
+            "alias": "lambada_openai"
+        },
+        "harness|winogrande|0": {
+            "acc,none": 0.67008681925809,
+            "acc_stderr,none": 0.013214432542517553,
+            "alias": "winogrande"
+        },
+        "harness|piqa|0": {
+            "acc,none": 0.735038084874864,
+            "acc_stderr,none": 0.010296557993316075,
+            "acc_norm,none": 0.7383025027203483,
+            "acc_norm_stderr,none": 0.010255630772708229,
+            "alias": "piqa"
+        },
+        "harness|mmlu|0": {
+            "acc,none": 0.5022076627261074,
+            "acc_stderr,none": 0.004038480145464961,
+            "alias": "mmlu"
+        },
+        "harness|mmlu_humanities|0": {
+            "alias": " - humanities",
+            "acc,none": 0.46099893730074387,
+            "acc_stderr,none": 0.006891577589106509
+        },
+        "harness|mmlu_formal_logic|0": {
+            "alias": " - formal_logic",
+            "acc,none": 0.373015873015873,
+            "acc_stderr,none": 0.04325506042017086
+        },
+        "harness|mmlu_high_school_european_history|0": {
+            "alias": " - high_school_european_history",
+            "acc,none": 0.6484848484848484,
+            "acc_stderr,none": 0.037282069986826503
+        },
+        "harness|mmlu_high_school_us_history|0": {
+            "alias": " - high_school_us_history",
+            "acc,none": 0.7009803921568627,
+            "acc_stderr,none": 0.03213325717373617
+        },
+        "harness|mmlu_high_school_world_history|0": {
+            "alias": " - high_school_world_history",
+            "acc,none": 0.70042194092827,
+            "acc_stderr,none": 0.029818024749753095
+        },
+        "harness|mmlu_international_law|0": {
+            "alias": " - international_law",
+            "acc,none": 0.628099173553719,
+            "acc_stderr,none": 0.044120158066245044
+        },
+        "harness|mmlu_jurisprudence|0": {
+            "alias": " - jurisprudence",
+            "acc,none": 0.6111111111111112,
+            "acc_stderr,none": 0.0471282125742677
+        },
+        "harness|mmlu_logical_fallacies|0": {
+            "alias": " - logical_fallacies",
+            "acc,none": 0.5705521472392638,
+            "acc_stderr,none": 0.03889066619112722
+        },
+        "harness|mmlu_moral_disputes|0": {
+            "alias": " - moral_disputes",
+            "acc,none": 0.5317919075144508,
+            "acc_stderr,none": 0.02686462436675666
+        },
+        "harness|mmlu_moral_scenarios|0": {
+            "alias": " - moral_scenarios",
+            "acc,none": 0.23575418994413408,
+            "acc_stderr,none": 0.014196375686290804
+        },
+        "harness|mmlu_philosophy|0": {
+            "alias": " - philosophy",
+            "acc,none": 0.5819935691318328,
+            "acc_stderr,none": 0.028013651891995076
+        },
+        "harness|mmlu_prehistory|0": {
+            "alias": " - prehistory",
+            "acc,none": 0.5771604938271605,
+            "acc_stderr,none": 0.027487472980871595
+        },
+        "harness|mmlu_professional_law|0": {
+            "alias": " - professional_law",
+            "acc,none": 0.37809647979139505,
+            "acc_stderr,none": 0.012384878406798095
+        },
+        "harness|mmlu_world_religions|0": {
+            "alias": " - world_religions",
+            "acc,none": 0.7485380116959064,
+            "acc_stderr,none": 0.033275044238468436
+        },
+        "harness|mmlu_other|0": {
+            "alias": " - other",
+            "acc,none": 0.5806243965239781,
+            "acc_stderr,none": 0.008620571172520262
+        },
+        "harness|mmlu_business_ethics|0": {
+            "alias": " - business_ethics",
+            "acc,none": 0.57,
+            "acc_stderr,none": 0.04975698519562428
+        },
+        "harness|mmlu_clinical_knowledge|0": {
+            "alias": " - clinical_knowledge",
+            "acc,none": 0.5547169811320755,
+            "acc_stderr,none": 0.030588052974270655
+        },
+        "harness|mmlu_college_medicine|0": {
+            "alias": " - college_medicine",
+            "acc,none": 0.4913294797687861,
+            "acc_stderr,none": 0.038118909889404105
+        },
+        "harness|mmlu_global_facts|0": {
+            "alias": " - global_facts",
+            "acc,none": 0.35,
+            "acc_stderr,none": 0.04793724854411022
+        },
+        "harness|mmlu_human_aging|0": {
+            "alias": " - human_aging",
+            "acc,none": 0.5695067264573991,
+            "acc_stderr,none": 0.0332319730294294
+        },
+        "harness|mmlu_management|0": {
+            "alias": " - management",
+            "acc,none": 0.6213592233009708,
+            "acc_stderr,none": 0.048026946982589726
+        },
+        "harness|mmlu_marketing|0": {
+            "alias": " - marketing",
+            "acc,none": 0.7564102564102564,
+            "acc_stderr,none": 0.028120966503914394
+        },
+        "harness|mmlu_medical_genetics|0": {
+            "alias": " - medical_genetics",
+            "acc,none": 0.54,
+            "acc_stderr,none": 0.05009082659620332
+        },
+        "harness|mmlu_miscellaneous|0": {
+            "alias": " - miscellaneous",
+            "acc,none": 0.7037037037037037,
+            "acc_stderr,none": 0.016328814422102052
+        },
+        "harness|mmlu_nutrition|0": {
+            "alias": " - nutrition",
+            "acc,none": 0.5849673202614379,
+            "acc_stderr,none": 0.028213504177824093
+        },
+        "harness|mmlu_professional_accounting|0": {
+            "alias": " - professional_accounting",
+            "acc,none": 0.35815602836879434,
+            "acc_stderr,none": 0.02860208586275942
+        },
+        "harness|mmlu_professional_medicine|0": {
+            "alias": " - professional_medicine",
+            "acc,none": 0.5330882352941176,
+            "acc_stderr,none": 0.030306257722468317
+        },
+        "harness|mmlu_virology|0": {
+            "alias": " - virology",
+            "acc,none": 0.4939759036144578,
+            "acc_stderr,none": 0.03892212195333045
+        },
+        "harness|mmlu_social_sciences|0": {
+            "alias": " - social_sciences",
+            "acc,none": 0.5794605134871629,
+            "acc_stderr,none": 0.008692685039668366
+        },
+        "harness|mmlu_econometrics|0": {
+            "alias": " - econometrics",
+            "acc,none": 0.2807017543859649,
+            "acc_stderr,none": 0.04227054451232199
+        },
+        "harness|mmlu_high_school_geography|0": {
+            "alias": " - high_school_geography",
+            "acc,none": 0.6464646464646465,
+            "acc_stderr,none": 0.03406086723547153
+        },
+        "harness|mmlu_high_school_government_and_politics|0": {
+            "alias": " - high_school_government_and_politics",
+            "acc,none": 0.7202072538860104,
+            "acc_stderr,none": 0.032396370467357036
+        },
+        "harness|mmlu_high_school_macroeconomics|0": {
+            "alias": " - high_school_macroeconomics",
+            "acc,none": 0.46923076923076923,
+            "acc_stderr,none": 0.025302958890850154
+        },
+        "harness|mmlu_high_school_microeconomics|0": {
+            "alias": " - high_school_microeconomics",
+            "acc,none": 0.48739495798319327,
+            "acc_stderr,none": 0.03246816765752174
+        },
+        "harness|mmlu_high_school_psychology|0": {
+            "alias": " - high_school_psychology",
+            "acc,none": 0.6844036697247706,
+            "acc_stderr,none": 0.019926117513869662
+        },
+        "harness|mmlu_human_sexuality|0": {
+            "alias": " - human_sexuality",
+            "acc,none": 0.6183206106870229,
+            "acc_stderr,none": 0.04260735157644561
+        },
+        "harness|mmlu_professional_psychology|0": {
+            "alias": " - professional_psychology",
+            "acc,none": 0.4950980392156863,
+            "acc_stderr,none": 0.020226862710039463
+        },
+        "harness|mmlu_public_relations|0": {
+            "alias": " - public_relations",
+            "acc,none": 0.6272727272727273,
+            "acc_stderr,none": 0.04631381319425465
+        },
+        "harness|mmlu_security_studies|0": {
+            "alias": " - security_studies",
+            "acc,none": 0.6040816326530613,
+            "acc_stderr,none": 0.03130802899065685
+        },
+        "harness|mmlu_sociology|0": {
+            "alias": " - sociology",
+            "acc,none": 0.7164179104477612,
+            "acc_stderr,none": 0.031871875379197966
+        },
+        "harness|mmlu_us_foreign_policy|0": {
+            "alias": " - us_foreign_policy",
+            "acc,none": 0.67,
+            "acc_stderr,none": 0.04725815626252609
+        },
+        "harness|mmlu_stem|0": {
+            "alias": " - stem",
+            "acc,none": 0.4110371075166508,
+            "acc_stderr,none": 0.008578848650999856
+        },
+        "harness|mmlu_abstract_algebra|0": {
+            "alias": " - abstract_algebra",
+            "acc,none": 0.29,
+            "acc_stderr,none": 0.04560480215720683
+        },
+        "harness|mmlu_anatomy|0": {
+            "alias": " - anatomy",
+            "acc,none": 0.4962962962962963,
+            "acc_stderr,none": 0.04319223625811331
+        },
+        "harness|mmlu_astronomy|0": {
+            "alias": " - astronomy",
+            "acc,none": 0.5328947368421053,
+            "acc_stderr,none": 0.040601270352363966
+        },
+        "harness|mmlu_college_biology|0": {
+            "alias": " - college_biology",
+            "acc,none": 0.4861111111111111,
+            "acc_stderr,none": 0.041795966175810016
+        },
+
"harness|mmlu_college_chemistry|0": {
|
288 |
+
"alias": " - college_chemistry",
|
289 |
+
"acc,none": 0.38,
|
290 |
+
"acc_stderr,none": 0.04878317312145632
|
291 |
+
},
|
292 |
+
"harness|mmlu_college_computer_science|0": {
|
293 |
+
"alias": " - college_computer_science",
|
294 |
+
"acc,none": 0.46,
|
295 |
+
"acc_stderr,none": 0.05009082659620333
|
296 |
+
},
|
297 |
+
"harness|mmlu_college_mathematics|0": {
|
298 |
+
"alias": " - college_mathematics",
|
299 |
+
"acc,none": 0.31,
|
300 |
+
"acc_stderr,none": 0.04648231987117316
|
301 |
+
},
|
302 |
+
"harness|mmlu_college_physics|0": {
|
303 |
+
"alias": " - college_physics",
|
304 |
+
"acc,none": 0.2549019607843137,
|
305 |
+
"acc_stderr,none": 0.04336432707993176
|
306 |
+
},
|
307 |
+
"harness|mmlu_computer_security|0": {
|
308 |
+
"alias": " - computer_security",
|
309 |
+
"acc,none": 0.63,
|
310 |
+
"acc_stderr,none": 0.048523658709391
|
311 |
+
},
|
312 |
+
"harness|mmlu_conceptual_physics|0": {
|
313 |
+
"alias": " - conceptual_physics",
|
314 |
+
"acc,none": 0.41702127659574467,
|
315 |
+
"acc_stderr,none": 0.03223276266711712
|
316 |
+
},
|
317 |
+
"harness|mmlu_electrical_engineering|0": {
|
318 |
+
"alias": " - electrical_engineering",
|
319 |
+
"acc,none": 0.45517241379310347,
|
320 |
+
"acc_stderr,none": 0.04149886942192118
|
321 |
+
},
|
322 |
+
"harness|mmlu_elementary_mathematics|0": {
|
323 |
+
"alias": " - elementary_mathematics",
|
324 |
+
"acc,none": 0.29894179894179895,
|
325 |
+
"acc_stderr,none": 0.023577604791655795
|
326 |
+
},
|
327 |
+
"harness|mmlu_high_school_biology|0": {
|
328 |
+
"alias": " - high_school_biology",
|
329 |
+
"acc,none": 0.5806451612903226,
|
330 |
+
"acc_stderr,none": 0.02807158890109184
|
331 |
+
},
|
332 |
+
"harness|mmlu_high_school_chemistry|0": {
|
333 |
+
"alias": " - high_school_chemistry",
|
334 |
+
"acc,none": 0.39408866995073893,
|
335 |
+
"acc_stderr,none": 0.03438157967036543
|
336 |
+
},
|
337 |
+
"harness|mmlu_high_school_computer_science|0": {
|
338 |
+
"alias": " - high_school_computer_science",
|
339 |
+
"acc,none": 0.48,
|
340 |
+
"acc_stderr,none": 0.050211673156867795
|
341 |
+
},
|
342 |
+
"harness|mmlu_high_school_mathematics|0": {
|
343 |
+
"alias": " - high_school_mathematics",
|
344 |
+
"acc,none": 0.25925925925925924,
|
345 |
+
"acc_stderr,none": 0.02671924078371218
|
346 |
+
},
|
347 |
+
"harness|mmlu_high_school_physics|0": {
|
348 |
+
"alias": " - high_school_physics",
|
349 |
+
"acc,none": 0.3576158940397351,
|
350 |
+
"acc_stderr,none": 0.03913453431177258
|
351 |
+
},
|
352 |
+
"harness|mmlu_high_school_statistics|0": {
|
353 |
+
"alias": " - high_school_statistics",
|
354 |
+
"acc,none": 0.44907407407407407,
|
355 |
+
"acc_stderr,none": 0.03392238405321617
|
356 |
+
},
|
357 |
+
"harness|mmlu_machine_learning|0": {
|
358 |
+
"alias": " - machine_learning",
|
359 |
+
"acc,none": 0.3482142857142857,
|
360 |
+
"acc_stderr,none": 0.04521829902833585
|
361 |
+
},
|
362 |
+
"harness|boolq|0": {
|
363 |
+
"acc,none": 0.7880733944954128,
|
364 |
+
"acc_stderr,none": 0.007147737811541546,
|
365 |
+
"alias": "boolq"
|
366 |
+
},
|
367 |
+
"harness|truthfulqa:mc2|0": {
|
368 |
+
"acc,none": 0.4629458144430423,
|
369 |
+
"acc_stderr,none": 0.015589156879983877,
|
370 |
+
"alias": "truthfulqa_mc2"
|
371 |
+
},
|
372 |
+
"harness|arc:challenge|0": {
|
373 |
+
"acc,none": 0.39078498293515357,
|
374 |
+
"acc_stderr,none": 0.014258563880513778,
|
375 |
+
"acc_norm,none": 0.4206484641638225,
|
376 |
+
"acc_norm_stderr,none": 0.014426211252508403,
|
377 |
+
"alias": "arc_challenge"
|
378 |
+
},
|
379 |
+
"harness|openbookqa|0": {
|
380 |
+
"acc,none": 0.288,
|
381 |
+
"acc_stderr,none": 0.02027150383507522,
|
382 |
+
"acc_norm,none": 0.388,
|
383 |
+
"acc_norm_stderr,none": 0.021814300984787635,
|
384 |
+
"alias": "openbookqa"
|
385 |
+
},
|
386 |
+
"harness|hellaswag|0": {
|
387 |
+
"acc,none": 0.5295757817167894,
|
388 |
+
"acc_stderr,none": 0.004981044370530789,
|
389 |
+
"acc_norm,none": 0.7046405098585939,
|
390 |
+
"acc_norm_stderr,none": 0.004552718360513105,
|
391 |
+
"alias": "hellaswag"
|
392 |
+
}
|
393 |
+
},
|
394 |
+
"task_info": {
|
395 |
+
"model": "./Baichuan2-7B-Chat-4bits",
|
396 |
+
"revision": "main",
|
397 |
+
"private": false,
|
398 |
+
"params": 7.0,
|
399 |
+
"architectures": "BaichuanForCausalLM",
|
400 |
+
"quant_type": "bitsandbytes",
|
401 |
+
"precision": "4bit",
|
402 |
+
"model_params": 7.0,
|
403 |
+
"model_size": 5.4,
|
404 |
+
"weight_dtype": "int4",
|
405 |
+
"compute_dtype": "float16",
|
406 |
+
"gguf_ftype": "*Q4_0.gguf",
|
407 |
+
"hardware": "gpu",
|
408 |
+
"status": "Pending",
|
409 |
+
"submitted_time": "2024-04-29T07:30:28Z",
|
410 |
+
"model_type": "quantization",
|
411 |
+
"job_id": -1,
|
412 |
+
"job_start_time": null,
|
413 |
+
"scripts": "ITREX"
|
414 |
+
},
|
415 |
+
"quantization_config": {
|
416 |
+
"bnb_4bit_compute_dtype": "bfloat16",
|
417 |
+
"bnb_4bit_quant_type": "nf4",
|
418 |
+
"bnb_4bit_use_double_quant": true,
|
419 |
+
"llm_int8_enable_fp32_cpu_offload": false,
|
420 |
+
"llm_int8_has_fp16_weight": false,
|
421 |
+
"llm_int8_skip_modules": null,
|
422 |
+
"llm_int8_threshold": 6.0,
|
423 |
+
"load_in_4bit": true,
|
424 |
+
"load_in_8bit": false
|
425 |
+
},
|
426 |
+
"versions": {
|
427 |
+
"harness|arc:easy|0": 1.0,
|
428 |
+
"harness|truthfulqa:mc1|0": 2.0,
|
429 |
+
"harness|lambada:openai|0": 1.0,
|
430 |
+
"harness|winogrande|0": 1.0,
|
431 |
+
"harness|piqa|0": 1.0,
|
432 |
+
"harness|mmlu|0": null,
|
433 |
+
"harness|mmlu_humanities|0": null,
|
434 |
+
"harness|mmlu_formal_logic|0": 0.0,
|
435 |
+
"harness|mmlu_high_school_european_history|0": 0.0,
|
436 |
+
"harness|mmlu_high_school_us_history|0": 0.0,
|
437 |
+
"harness|mmlu_high_school_world_history|0": 0.0,
|
438 |
+
"harness|mmlu_international_law|0": 0.0,
|
439 |
+
"harness|mmlu_jurisprudence|0": 0.0,
|
440 |
+
"harness|mmlu_logical_fallacies|0": 0.0,
|
441 |
+
"harness|mmlu_moral_disputes|0": 0.0,
|
442 |
+
"harness|mmlu_moral_scenarios|0": 0.0,
|
443 |
+
"harness|mmlu_philosophy|0": 0.0,
|
444 |
+
"harness|mmlu_prehistory|0": 0.0,
|
445 |
+
"harness|mmlu_professional_law|0": 0.0,
|
446 |
+
"harness|mmlu_world_religions|0": 0.0,
|
447 |
+
"harness|mmlu_other|0": null,
|
448 |
+
"harness|mmlu_business_ethics|0": 0.0,
|
449 |
+
"harness|mmlu_clinical_knowledge|0": 0.0,
|
450 |
+
"harness|mmlu_college_medicine|0": 0.0,
|
451 |
+
"harness|mmlu_global_facts|0": 0.0,
|
452 |
+
"harness|mmlu_human_aging|0": 0.0,
|
453 |
+
"harness|mmlu_management|0": 0.0,
|
454 |
+
"harness|mmlu_marketing|0": 0.0,
|
455 |
+
"harness|mmlu_medical_genetics|0": 0.0,
|
456 |
+
"harness|mmlu_miscellaneous|0": 0.0,
|
457 |
+
"harness|mmlu_nutrition|0": 0.0,
|
458 |
+
"harness|mmlu_professional_accounting|0": 0.0,
|
459 |
+
"harness|mmlu_professional_medicine|0": 0.0,
|
460 |
+
"harness|mmlu_virology|0": 0.0,
|
461 |
+
"harness|mmlu_social_sciences|0": null,
|
462 |
+
"harness|mmlu_econometrics|0": 0.0,
|
463 |
+
"harness|mmlu_high_school_geography|0": 0.0,
|
464 |
+
"harness|mmlu_high_school_government_and_politics|0": 0.0,
|
465 |
+
"harness|mmlu_high_school_macroeconomics|0": 0.0,
|
466 |
+
"harness|mmlu_high_school_microeconomics|0": 0.0,
|
467 |
+
"harness|mmlu_high_school_psychology|0": 0.0,
|
468 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
469 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
470 |
+
"harness|mmlu_public_relations|0": 0.0,
|
471 |
+
"harness|mmlu_security_studies|0": 0.0,
|
472 |
+
"harness|mmlu_sociology|0": 0.0,
|
473 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
474 |
+
"harness|mmlu_stem|0": null,
|
475 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
476 |
+
"harness|mmlu_anatomy|0": 0.0,
|
477 |
+
"harness|mmlu_astronomy|0": 0.0,
|
478 |
+
"harness|mmlu_college_biology|0": 0.0,
|
479 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
480 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
481 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
482 |
+
"harness|mmlu_college_physics|0": 0.0,
|
483 |
+
"harness|mmlu_computer_security|0": 0.0,
|
484 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
485 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
486 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
488 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
489 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
490 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
491 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
492 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
493 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
494 |
+
"harness|boolq|0": 2.0,
|
495 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
496 |
+
"harness|arc:challenge|0": 1.0,
|
497 |
+
"harness|openbookqa|0": 1.0,
|
498 |
+
"harness|hellaswag|0": 1.0
|
499 |
+
},
|
500 |
+
"n-shot": {
|
501 |
+
"arc_challenge": 0,
|
502 |
+
"arc_easy": 0,
|
503 |
+
"boolq": 0,
|
504 |
+
"hellaswag": 0,
|
505 |
+
"lambada_openai": 0,
|
506 |
+
"mmlu": 0,
|
507 |
+
"mmlu_abstract_algebra": 0,
|
508 |
+
"mmlu_anatomy": 0,
|
509 |
+
"mmlu_astronomy": 0,
|
510 |
+
"mmlu_business_ethics": 0,
|
511 |
+
"mmlu_clinical_knowledge": 0,
|
512 |
+
"mmlu_college_biology": 0,
|
513 |
+
"mmlu_college_chemistry": 0,
|
514 |
+
"mmlu_college_computer_science": 0,
|
515 |
+
"mmlu_college_mathematics": 0,
|
516 |
+
"mmlu_college_medicine": 0,
|
517 |
+
"mmlu_college_physics": 0,
|
518 |
+
"mmlu_computer_security": 0,
|
519 |
+
"mmlu_conceptual_physics": 0,
|
520 |
+
"mmlu_econometrics": 0,
|
521 |
+
"mmlu_electrical_engineering": 0,
|
522 |
+
"mmlu_elementary_mathematics": 0,
|
523 |
+
"mmlu_formal_logic": 0,
|
524 |
+
"mmlu_global_facts": 0,
|
525 |
+
"mmlu_high_school_biology": 0,
|
526 |
+
"mmlu_high_school_chemistry": 0,
|
527 |
+
"mmlu_high_school_computer_science": 0,
|
528 |
+
"mmlu_high_school_european_history": 0,
|
529 |
+
"mmlu_high_school_geography": 0,
|
530 |
+
"mmlu_high_school_government_and_politics": 0,
|
531 |
+
"mmlu_high_school_macroeconomics": 0,
|
532 |
+
"mmlu_high_school_mathematics": 0,
|
533 |
+
"mmlu_high_school_microeconomics": 0,
|
534 |
+
"mmlu_high_school_physics": 0,
|
535 |
+
"mmlu_high_school_psychology": 0,
|
536 |
+
"mmlu_high_school_statistics": 0,
|
537 |
+
"mmlu_high_school_us_history": 0,
|
538 |
+
"mmlu_high_school_world_history": 0,
|
539 |
+
"mmlu_human_aging": 0,
|
540 |
+
"mmlu_human_sexuality": 0,
|
541 |
+
"mmlu_humanities": 0,
|
542 |
+
"mmlu_international_law": 0,
|
543 |
+
"mmlu_jurisprudence": 0,
|
544 |
+
"mmlu_logical_fallacies": 0,
|
545 |
+
"mmlu_machine_learning": 0,
|
546 |
+
"mmlu_management": 0,
|
547 |
+
"mmlu_marketing": 0,
|
548 |
+
"mmlu_medical_genetics": 0,
|
549 |
+
"mmlu_miscellaneous": 0,
|
550 |
+
"mmlu_moral_disputes": 0,
|
551 |
+
"mmlu_moral_scenarios": 0,
|
552 |
+
"mmlu_nutrition": 0,
|
553 |
+
"mmlu_other": 0,
|
554 |
+
"mmlu_philosophy": 0,
|
555 |
+
"mmlu_prehistory": 0,
|
556 |
+
"mmlu_professional_accounting": 0,
|
557 |
+
"mmlu_professional_law": 0,
|
558 |
+
"mmlu_professional_medicine": 0,
|
559 |
+
"mmlu_professional_psychology": 0,
|
560 |
+
"mmlu_public_relations": 0,
|
561 |
+
"mmlu_security_studies": 0,
|
562 |
+
"mmlu_social_sciences": 0,
|
563 |
+
"mmlu_sociology": 0,
|
564 |
+
"mmlu_stem": 0,
|
565 |
+
"mmlu_us_foreign_policy": 0,
|
566 |
+
"mmlu_virology": 0,
|
567 |
+
"mmlu_world_religions": 0,
|
568 |
+
"openbookqa": 0,
|
569 |
+
"piqa": 0,
|
570 |
+
"truthfulqa_mc1": 0,
|
571 |
+
"truthfulqa_mc2": 0,
|
572 |
+
"winogrande": 0
|
573 |
+
},
|
574 |
+
"date": 1715594337.0446994,
|
575 |
+
"config": {
|
576 |
+
"model": "hf",
|
577 |
+
"model_args": "pretrained=./Baichuan2-7B-Chat-4bits,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
578 |
+
"batch_size": 2,
|
579 |
+
"batch_sizes": [],
|
580 |
+
"device": "cuda",
|
581 |
+
"use_cache": null,
|
582 |
+
"limit": null,
|
583 |
+
"bootstrap_iters": 100000,
|
584 |
+
"gen_kwargs": null
|
585 |
+
}
|
586 |
+
}
|
baichuan-inc/results_2024-05-14-00-28-25.json
ADDED
@@ -0,0 +1,586 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-14-00-28-25",
+    "total_evaluation_time_secondes": "",
+    "model_name": "baichuan-inc/Baichuan2-13B-Chat-4bits",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 9.08,
+    "model_params": 13.0,
+    "quant_type": "bitsandbytes",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|piqa|0": {
+      "acc,none": 0.7568008705114254,
+      "acc_stderr,none": 0.010009611953858943,
+      "acc_norm,none": 0.764961915125136,
+      "acc_norm_stderr,none": 0.00989314668880533,
+      "alias": "piqa"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.7470538720538721,
+      "acc_stderr,none": 0.008919862739165623,
+      "acc_norm,none": 0.6948653198653199,
+      "acc_norm_stderr,none": 0.009448531094163909,
+      "alias": "arc_easy"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.4684300341296928,
+      "acc_stderr,none": 0.014582236460866977,
+      "acc_norm,none": 0.47440273037542663,
+      "acc_norm_stderr,none": 0.014592230885298964,
+      "alias": "arc_challenge"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.32,
+      "acc_stderr,none": 0.020882340488761805,
+      "acc_norm,none": 0.428,
+      "acc_norm_stderr,none": 0.022149790663861923,
+      "alias": "openbookqa"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.4993847866633565,
+      "acc_stderr,none": 0.01580566629514981,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.7063930544593529,
+      "acc_stderr,none": 0.012799397296204173,
+      "alias": "winogrande"
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5663214499103765,
+      "acc_stderr,none": 0.004945691164810072,
+      "acc_norm,none": 0.7527384983071101,
+      "acc_norm_stderr,none": 0.004305383398710274,
+      "alias": "hellaswag"
+    },
+    "harness|boolq|0": {
+      "acc,none": 0.8342507645259939,
+      "acc_stderr,none": 0.006503791548089842,
+      "alias": "boolq"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 3.2218982936112495,
+      "perplexity_stderr,none": 0.0814591097593229,
+      "acc,none": 0.7106539879681739,
+      "acc_stderr,none": 0.00631756795443543,
+      "alias": "lambada_openai"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.5554052129326307,
+      "acc_stderr,none": 0.0039702777358831525,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.5090329436769394,
+      "acc_stderr,none": 0.006845596956692873
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.35714285714285715,
+      "acc_stderr,none": 0.04285714285714281
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.7272727272727273,
+      "acc_stderr,none": 0.03477691162163659
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.7598039215686274,
+      "acc_stderr,none": 0.02998373305591361
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.759493670886076,
+      "acc_stderr,none": 0.027820781981149678
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.6942148760330579,
+      "acc_stderr,none": 0.04205953933884124
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.6759259259259259,
+      "acc_stderr,none": 0.04524596007030048
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.6993865030674846,
+      "acc_stderr,none": 0.0360251131880677
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.6040462427745664,
+      "acc_stderr,none": 0.02632981334194624
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.2558659217877095,
+      "acc_stderr,none": 0.014593620923210737
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.5819935691318328,
+      "acc_stderr,none": 0.028013651891995072
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.6419753086419753,
+      "acc_stderr,none": 0.026675611926037082
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.4322033898305085,
+      "acc_stderr,none": 0.012652297777114968
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.783625730994152,
+      "acc_stderr,none": 0.031581495393387324
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.6337302864499518,
+      "acc_stderr,none": 0.00836658956166198
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.58,
+      "acc_stderr,none": 0.049604496374885836
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.6188679245283019,
+      "acc_stderr,none": 0.02989060968628664
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.5722543352601156,
+      "acc_stderr,none": 0.037724468575180276
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.35,
+      "acc_stderr,none": 0.047937248544110196
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.6143497757847534,
+      "acc_stderr,none": 0.03266842214289201
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.7572815533980582,
+      "acc_stderr,none": 0.04245022486384495
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.8290598290598291,
+      "acc_stderr,none": 0.024662496845209818
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.62,
+      "acc_stderr,none": 0.04878317312145632
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.7650063856960408,
+      "acc_stderr,none": 0.015162024152278443
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.5947712418300654,
+      "acc_stderr,none": 0.02811092849280907
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.46099290780141844,
+      "acc_stderr,none": 0.02973659252642444
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.5698529411764706,
+      "acc_stderr,none": 0.030074971917302875
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.4578313253012048,
+      "acc_stderr,none": 0.0387862677100236
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.6421839454013649,
+      "acc_stderr,none": 0.008410443495026124
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.39473684210526316,
+      "acc_stderr,none": 0.045981880578165414
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.7121212121212122,
+      "acc_stderr,none": 0.03225883512300992
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.7772020725388601,
+      "acc_stderr,none": 0.03003114797764154
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.5358974358974359,
+      "acc_stderr,none": 0.025285585990017848
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.5,
+      "acc_stderr,none": 0.032478490123081544
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.7651376146788991,
+      "acc_stderr,none": 0.018175110510343578
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.6717557251908397,
+      "acc_stderr,none": 0.04118438565806298
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.5604575163398693,
+      "acc_stderr,none": 0.02007942040808792
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.6,
+      "acc_stderr,none": 0.0469237132203465
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.6612244897959184,
+      "acc_stderr,none": 0.030299506562154188
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.7562189054726368,
+      "acc_stderr,none": 0.03036049015401465
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.84,
+      "acc_stderr,none": 0.03684529491774708
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.4627339042182049,
+      "acc_stderr,none": 0.008541429032898602
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.32,
+      "acc_stderr,none": 0.04688261722621505
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.5407407407407407,
+      "acc_stderr,none": 0.04304979692464243
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.625,
+      "acc_stderr,none": 0.039397364351956274
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.6666666666666666,
+      "acc_stderr,none": 0.03942082639927214
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.42,
+      "acc_stderr,none": 0.049604496374885836
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.45,
+      "acc_stderr,none": 0.05
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.32,
+      "acc_stderr,none": 0.046882617226215034
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.3627450980392157,
+      "acc_stderr,none": 0.04784060704105654
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.72,
+      "acc_stderr,none": 0.045126085985421296
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.502127659574468,
+      "acc_stderr,none": 0.03268572658667492
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.503448275862069,
+      "acc_stderr,none": 0.0416656757710158
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.373015873015873,
+      "acc_stderr,none": 0.02490699045899257
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.7096774193548387,
+      "acc_stderr,none": 0.02582210611941589
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.43842364532019706,
+      "acc_stderr,none": 0.03491207857486519
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.56,
+      "acc_stderr,none": 0.04988876515698589
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.28888888888888886,
+      "acc_stderr,none": 0.027634907264178544
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.304635761589404,
+      "acc_stderr,none": 0.03757949922943342
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.37037037037037035,
+      "acc_stderr,none": 0.03293377139415191
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.30357142857142855,
+      "acc_stderr,none": 0.04364226155841044
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.35006119951040393,
+      "acc_stderr,none": 0.01669794942015103,
+      "alias": "truthfulqa_mc1"
+    }
+  },
+  "task_info": {
+    "model": "./Baichuan2-13B-Chat-4bits",
+    "revision": "main",
+    "private": false,
+    "params": 13.0,
+    "architectures": "BaichuanForCausalLM",
+    "quant_type": "bitsandbytes",
+    "precision": "4bit",
+    "model_params": 13.0,
+    "model_size": 9.08,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-04-29T07:32:08Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "bnb_4bit_compute_dtype": "bfloat16",
+    "bnb_4bit_quant_type": "nf4",
+    "bnb_4bit_use_double_quant": true,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_has_fp16_weight": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_4bit": true,
+    "load_in_8bit": false
+  },
+  "versions": {
+    "harness|piqa|0": 1.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|arc:challenge|0": 1.0,
+    "harness|openbookqa|0": 1.0,
+    "harness|truthfulqa:mc2|0": 2.0,
+    "harness|winogrande|0": 1.0,
+    "harness|hellaswag|0": 1.0,
+    "harness|boolq|0": 2.0,
+    "harness|lambada:openai|0": 1.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
+    "harness|mmlu_human_sexuality|0": 0.0,
+    "harness|mmlu_professional_psychology|0": 0.0,
+    "harness|mmlu_public_relations|0": 0.0,
+    "harness|mmlu_security_studies|0": 0.0,
+    "harness|mmlu_sociology|0": 0.0,
+    "harness|mmlu_us_foreign_policy|0": 0.0,
+    "harness|mmlu_stem|0": null,
+    "harness|mmlu_abstract_algebra|0": 0.0,
+    "harness|mmlu_anatomy|0": 0.0,
+    "harness|mmlu_astronomy|0": 0.0,
+    "harness|mmlu_college_biology|0": 0.0,
+    "harness|mmlu_college_chemistry|0": 0.0,
+    "harness|mmlu_college_computer_science|0": 0.0,
+    "harness|mmlu_college_mathematics|0": 0.0,
+    "harness|mmlu_college_physics|0": 0.0,
+    "harness|mmlu_computer_security|0": 0.0,
+    "harness|mmlu_conceptual_physics|0": 0.0,
+    "harness|mmlu_electrical_engineering|0": 0.0,
+    "harness|mmlu_elementary_mathematics|0": 0.0,
+    "harness|mmlu_high_school_biology|0": 0.0,
+    "harness|mmlu_high_school_chemistry|0": 0.0,
+    "harness|mmlu_high_school_computer_science|0": 0.0,
+    "harness|mmlu_high_school_mathematics|0": 0.0,
+    "harness|mmlu_high_school_physics|0": 0.0,
+    "harness|mmlu_high_school_statistics|0": 0.0,
+    "harness|mmlu_machine_learning|0": 0.0,
+    "harness|truthfulqa:mc1|0": 2.0
+  },
+  "n-shot": {
+    "arc_challenge": 0,
+    "arc_easy": 0,
+    "boolq": 0,
+    "hellaswag": 0,
+    "lambada_openai": 0,
+    "mmlu": 0,
+    "mmlu_abstract_algebra": 0,
+    "mmlu_anatomy": 0,
+    "mmlu_astronomy": 0,
+    "mmlu_business_ethics": 0,
+    "mmlu_clinical_knowledge": 0,
+    "mmlu_college_biology": 0,
+    "mmlu_college_chemistry": 0,
+    "mmlu_college_computer_science": 0,
+    "mmlu_college_mathematics": 0,
+    "mmlu_college_medicine": 0,
+    "mmlu_college_physics": 0,
+    "mmlu_computer_security": 0,
+    "mmlu_conceptual_physics": 0,
+    "mmlu_econometrics": 0,
+    "mmlu_electrical_engineering": 0,
+    "mmlu_elementary_mathematics": 0,
+    "mmlu_formal_logic": 0,
+    "mmlu_global_facts": 0,
+    "mmlu_high_school_biology": 0,
+    "mmlu_high_school_chemistry": 0,
+    "mmlu_high_school_computer_science": 0,
+    "mmlu_high_school_european_history": 0,
+    "mmlu_high_school_geography": 0,
+    "mmlu_high_school_government_and_politics": 0,
+    "mmlu_high_school_macroeconomics": 0,
+    "mmlu_high_school_mathematics": 0,
+    "mmlu_high_school_microeconomics": 0,
+    "mmlu_high_school_physics": 0,
+    "mmlu_high_school_psychology": 0,
+    "mmlu_high_school_statistics": 0,
+    "mmlu_high_school_us_history": 0,
+    "mmlu_high_school_world_history": 0,
+    "mmlu_human_aging": 0,
+    "mmlu_human_sexuality": 0,
+    "mmlu_humanities": 0,
+    "mmlu_international_law": 0,
+    "mmlu_jurisprudence": 0,
+    "mmlu_logical_fallacies": 0,
+    "mmlu_machine_learning": 0,
+    "mmlu_management": 0,
+    "mmlu_marketing": 0,
+    "mmlu_medical_genetics": 0,
+    "mmlu_miscellaneous": 0,
+    "mmlu_moral_disputes": 0,
+    "mmlu_moral_scenarios": 0,
+    "mmlu_nutrition": 0,
+    "mmlu_other": 0,
+    "mmlu_philosophy": 0,
+    "mmlu_prehistory": 0,
+    "mmlu_professional_accounting": 0,
+    "mmlu_professional_law": 0,
+    "mmlu_professional_medicine": 0,
+    "mmlu_professional_psychology": 0,
+    "mmlu_public_relations": 0,
+    "mmlu_security_studies": 0,
+    "mmlu_social_sciences": 0,
+    "mmlu_sociology": 0,
+    "mmlu_stem": 0,
+    "mmlu_us_foreign_policy": 0,
+    "mmlu_virology": 0,
+    "mmlu_world_religions": 0,
+    "openbookqa": 0,
+    "piqa": 0,
+    "truthfulqa_mc1": 0,
+    "truthfulqa_mc2": 0,
+    "winogrande": 0
+  },
+  "date": 1715600850.9519372,
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=./Baichuan2-13B-Chat-4bits,trust_remote_code=True,dtype=float16,_commit_hash=main",
+    "batch_size": 1,
+    "batch_sizes": [],
+    "device": "cuda",
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  }
+}
casperhansen/results_2024-05-08-20-07-49.json
ADDED
@@ -0,0 +1,582 @@
+{
+  "config_general": {
+    "lighteval_sha": "1.4",
+    "num_few_shot_default": null,
+    "num_fewshot_seeds": null,
+    "override_batch_size": null,
+    "max_samples": null,
+    "job_id": -1,
+    "start_time": null,
+    "end_time": "2024-05-08-20-07-49",
+    "total_evaluation_time_secondes": "",
+    "model_name": "casperhansen/falcon-7b-awq",
+    "model_sha": "",
+    "model_dtype": "4bit",
+    "model_size": 4.16,
+    "model_params": 8.33,
+    "quant_type": "AWQ",
+    "precision": "4bit"
+  },
+  "results": {
+    "harness|boolq|0": {
+      "acc,none": 0.726605504587156,
+      "acc_stderr,none": 0.0077953705600891975,
+      "alias": "boolq"
+    },
+    "harness|winogrande|0": {
+      "acc,none": 0.6890292028413575,
+      "acc_stderr,none": 0.013009534736286068,
+      "alias": "winogrande"
+    },
+    "harness|truthfulqa:mc1|0": {
+      "acc,none": 0.21909424724602203,
+      "acc_stderr,none": 0.014480038578757447,
+      "alias": "truthfulqa_mc1"
+    },
+    "harness|arc:easy|0": {
+      "acc,none": 0.7428451178451179,
+      "acc_stderr,none": 0.008968394768971991,
+      "acc_norm,none": 0.7003367003367004,
+      "acc_norm_stderr,none": 0.009400228586205973,
+      "alias": "arc_easy"
+    },
+    "harness|mmlu|0": {
+      "acc,none": 0.2680529839054266,
+      "acc_stderr,none": 0.0037312184112828226,
+      "alias": "mmlu"
+    },
+    "harness|mmlu_humanities|0": {
+      "alias": " - humanities",
+      "acc,none": 0.27396386822529223,
+      "acc_stderr,none": 0.006496081637998288
+    },
+    "harness|mmlu_formal_logic|0": {
+      "alias": " - formal_logic",
+      "acc,none": 0.25396825396825395,
+      "acc_stderr,none": 0.03893259610604673
+    },
+    "harness|mmlu_high_school_european_history|0": {
+      "alias": " - high_school_european_history",
+      "acc,none": 0.2606060606060606,
+      "acc_stderr,none": 0.03427743175816524
+    },
+    "harness|mmlu_high_school_us_history|0": {
+      "alias": " - high_school_us_history",
+      "acc,none": 0.22058823529411764,
+      "acc_stderr,none": 0.02910225438967409
+    },
+    "harness|mmlu_high_school_world_history|0": {
+      "alias": " - high_school_world_history",
+      "acc,none": 0.2616033755274262,
+      "acc_stderr,none": 0.028609516716994934
+    },
+    "harness|mmlu_international_law|0": {
+      "alias": " - international_law",
+      "acc,none": 0.33884297520661155,
+      "acc_stderr,none": 0.043207678075366705
+    },
+    "harness|mmlu_jurisprudence|0": {
+      "alias": " - jurisprudence",
+      "acc,none": 0.35185185185185186,
+      "acc_stderr,none": 0.04616631111801715
+    },
+    "harness|mmlu_logical_fallacies|0": {
+      "alias": " - logical_fallacies",
+      "acc,none": 0.25153374233128833,
+      "acc_stderr,none": 0.03408997886857529
+    },
+    "harness|mmlu_moral_disputes|0": {
+      "alias": " - moral_disputes",
+      "acc,none": 0.3236994219653179,
+      "acc_stderr,none": 0.025190181327608422
+    },
+    "harness|mmlu_moral_scenarios|0": {
+      "alias": " - moral_scenarios",
+      "acc,none": 0.23798882681564246,
+      "acc_stderr,none": 0.014242630070574885
+    },
+    "harness|mmlu_philosophy|0": {
+      "alias": " - philosophy",
+      "acc,none": 0.2861736334405145,
+      "acc_stderr,none": 0.025670259242188936
+    },
+    "harness|mmlu_prehistory|0": {
+      "alias": " - prehistory",
+      "acc,none": 0.2993827160493827,
+      "acc_stderr,none": 0.02548311560119546
+    },
+    "harness|mmlu_professional_law|0": {
+      "alias": " - professional_law",
+      "acc,none": 0.2803129074315515,
+      "acc_stderr,none": 0.011471555944958616
+    },
+    "harness|mmlu_world_religions|0": {
+      "alias": " - world_religions",
+      "acc,none": 0.26900584795321636,
+      "acc_stderr,none": 0.03401052620104089
+    },
+    "harness|mmlu_other|0": {
+      "alias": " - other",
+      "acc,none": 0.2648857418731896,
+      "acc_stderr,none": 0.007902788160794474
+    },
+    "harness|mmlu_business_ethics|0": {
+      "alias": " - business_ethics",
+      "acc,none": 0.25,
+      "acc_stderr,none": 0.04351941398892446
+    },
+    "harness|mmlu_clinical_knowledge|0": {
+      "alias": " - clinical_knowledge",
+      "acc,none": 0.23773584905660378,
+      "acc_stderr,none": 0.02619980880756192
+    },
+    "harness|mmlu_college_medicine|0": {
+      "alias": " - college_medicine",
+      "acc,none": 0.31213872832369943,
+      "acc_stderr,none": 0.03533133389323657
+    },
+    "harness|mmlu_global_facts|0": {
+      "alias": " - global_facts",
+      "acc,none": 0.28,
+      "acc_stderr,none": 0.04512608598542129
+    },
+    "harness|mmlu_human_aging|0": {
+      "alias": " - human_aging",
+      "acc,none": 0.29596412556053814,
+      "acc_stderr,none": 0.03063659134869982
+    },
+    "harness|mmlu_management|0": {
+      "alias": " - management",
+      "acc,none": 0.27184466019417475,
+      "acc_stderr,none": 0.044052680241409216
+    },
+    "harness|mmlu_marketing|0": {
+      "alias": " - marketing",
+      "acc,none": 0.2606837606837607,
+      "acc_stderr,none": 0.028760348956523414
+    },
+    "harness|mmlu_medical_genetics|0": {
+      "alias": " - medical_genetics",
+      "acc,none": 0.28,
+      "acc_stderr,none": 0.04512608598542127
+    },
+    "harness|mmlu_miscellaneous|0": {
+      "alias": " - miscellaneous",
+      "acc,none": 0.2886334610472541,
+      "acc_stderr,none": 0.01620379270319779
+    },
+    "harness|mmlu_nutrition|0": {
+      "alias": " - nutrition",
+      "acc,none": 0.28431372549019607,
+      "acc_stderr,none": 0.02582916327275748
+    },
+    "harness|mmlu_professional_accounting|0": {
+      "alias": " - professional_accounting",
+      "acc,none": 0.22695035460992907,
+      "acc_stderr,none": 0.024987106365642973
+    },
+    "harness|mmlu_professional_medicine|0": {
+      "alias": " - professional_medicine",
+      "acc,none": 0.16544117647058823,
+      "acc_stderr,none": 0.022571771025494757
+    },
+    "harness|mmlu_virology|0": {
+      "alias": " - virology",
+      "acc,none": 0.2891566265060241,
+      "acc_stderr,none": 0.03529486801511115
+    },
+    "harness|mmlu_social_sciences|0": {
+      "alias": " - social_sciences",
+      "acc,none": 0.26616834579135523,
+      "acc_stderr,none": 0.007948025982070027
+    },
+    "harness|mmlu_econometrics|0": {
+      "alias": " - econometrics",
+      "acc,none": 0.3333333333333333,
+      "acc_stderr,none": 0.04434600701584925
+    },
+    "harness|mmlu_high_school_geography|0": {
+      "alias": " - high_school_geography",
+      "acc,none": 0.2878787878787879,
+      "acc_stderr,none": 0.03225883512300993
+    },
+    "harness|mmlu_high_school_government_and_politics|0": {
+      "alias": " - high_school_government_and_politics",
+      "acc,none": 0.23316062176165803,
+      "acc_stderr,none": 0.030516111371476008
+    },
+    "harness|mmlu_high_school_macroeconomics|0": {
+      "alias": " - high_school_macroeconomics",
+      "acc,none": 0.19743589743589743,
+      "acc_stderr,none": 0.02018264696867483
+    },
+    "harness|mmlu_high_school_microeconomics|0": {
+      "alias": " - high_school_microeconomics",
+      "acc,none": 0.23529411764705882,
+      "acc_stderr,none": 0.027553614467863797
+    },
+    "harness|mmlu_high_school_psychology|0": {
+      "alias": " - high_school_psychology",
+      "acc,none": 0.25504587155963304,
+      "acc_stderr,none": 0.01868850085653585
+    },
+    "harness|mmlu_human_sexuality|0": {
+      "alias": " - human_sexuality",
+      "acc,none": 0.3435114503816794,
+      "acc_stderr,none": 0.041649760719448786
+    },
+    "harness|mmlu_professional_psychology|0": {
+      "alias": " - professional_psychology",
+      "acc,none": 0.26143790849673204,
+      "acc_stderr,none": 0.017776947157528054
+    },
+    "harness|mmlu_public_relations|0": {
+      "alias": " - public_relations",
+      "acc,none": 0.34545454545454546,
+      "acc_stderr,none": 0.04554619617541054
+    },
+    "harness|mmlu_security_studies|0": {
+      "alias": " - security_studies",
+      "acc,none": 0.2979591836734694,
+      "acc_stderr,none": 0.029279567411065674
+    },
+    "harness|mmlu_sociology|0": {
+      "alias": " - sociology",
+      "acc,none": 0.31840796019900497,
+      "acc_stderr,none": 0.03294118479054096
+    },
+    "harness|mmlu_us_foreign_policy|0": {
+      "alias": " - us_foreign_policy",
+      "acc,none": 0.27,
+      "acc_stderr,none": 0.044619604333847394
+    },
+    "harness|mmlu_stem|0": {
+      "alias": " - stem",
+      "acc,none": 0.26419283222327944,
+      "acc_stderr,none": 0.007832889101339188
+    },
+    "harness|mmlu_abstract_algebra|0": {
+      "alias": " - abstract_algebra",
+      "acc,none": 0.23,
+      "acc_stderr,none": 0.04229525846816506
+    },
+    "harness|mmlu_anatomy|0": {
+      "alias": " - anatomy",
+      "acc,none": 0.28888888888888886,
+      "acc_stderr,none": 0.0391545063041425
+    },
+    "harness|mmlu_astronomy|0": {
+      "alias": " - astronomy",
+      "acc,none": 0.29605263157894735,
+      "acc_stderr,none": 0.03715062154998904
+    },
+    "harness|mmlu_college_biology|0": {
+      "alias": " - college_biology",
+      "acc,none": 0.20833333333333334,
+      "acc_stderr,none": 0.033961162058453336
+    },
+    "harness|mmlu_college_chemistry|0": {
+      "alias": " - college_chemistry",
+      "acc,none": 0.18,
+      "acc_stderr,none": 0.03861229196653694
+    },
+    "harness|mmlu_college_computer_science|0": {
+      "alias": " - college_computer_science",
+      "acc,none": 0.19,
+      "acc_stderr,none": 0.039427724440366234
+    },
+    "harness|mmlu_college_mathematics|0": {
+      "alias": " - college_mathematics",
+      "acc,none": 0.27,
+      "acc_stderr,none": 0.0446196043338474
+    },
+    "harness|mmlu_college_physics|0": {
+      "alias": " - college_physics",
+      "acc,none": 0.30392156862745096,
+      "acc_stderr,none": 0.04576665403207762
+    },
+    "harness|mmlu_computer_security|0": {
+      "alias": " - computer_security",
+      "acc,none": 0.31,
+      "acc_stderr,none": 0.04648231987117316
+    },
+    "harness|mmlu_conceptual_physics|0": {
+      "alias": " - conceptual_physics",
+      "acc,none": 0.35319148936170214,
+      "acc_stderr,none": 0.031245325202761926
+    },
+    "harness|mmlu_electrical_engineering|0": {
+      "alias": " - electrical_engineering",
+      "acc,none": 0.27586206896551724,
+      "acc_stderr,none": 0.03724563619774632
+    },
+    "harness|mmlu_elementary_mathematics|0": {
+      "alias": " - elementary_mathematics",
+      "acc,none": 0.22486772486772486,
+      "acc_stderr,none": 0.02150209607822914
+    },
+    "harness|mmlu_high_school_biology|0": {
+      "alias": " - high_school_biology",
+      "acc,none": 0.24838709677419354,
+      "acc_stderr,none": 0.024580028921481003
+    },
+    "harness|mmlu_high_school_chemistry|0": {
+      "alias": " - high_school_chemistry",
+      "acc,none": 0.32019704433497537,
+      "acc_stderr,none": 0.032826493853041504
+    },
+    "harness|mmlu_high_school_computer_science|0": {
+      "alias": " - high_school_computer_science",
+      "acc,none": 0.3,
+      "acc_stderr,none": 0.046056618647183814
+    },
+    "harness|mmlu_high_school_mathematics|0": {
+      "alias": " - high_school_mathematics",
+      "acc,none": 0.22962962962962963,
+      "acc_stderr,none": 0.025644108639267624
+    },
+    "harness|mmlu_high_school_physics|0": {
+      "alias": " - high_school_physics",
+      "acc,none": 0.24503311258278146,
+      "acc_stderr,none": 0.03511807571804724
+    },
+    "harness|mmlu_high_school_statistics|0": {
+      "alias": " - high_school_statistics",
+      "acc,none": 0.25,
+      "acc_stderr,none": 0.029531221160930918
+    },
+    "harness|mmlu_machine_learning|0": {
+      "alias": " - machine_learning",
+      "acc,none": 0.33035714285714285,
+      "acc_stderr,none": 0.04464285714285714
+    },
+    "harness|hellaswag|0": {
+      "acc,none": 0.5702051384186417,
+      "acc_stderr,none": 0.004940349676769315,
+      "acc_norm,none": 0.7559251145190201,
+      "acc_norm_stderr,none": 0.004286594977390944,
+      "alias": "hellaswag"
+    },
+    "harness|openbookqa|0": {
+      "acc,none": 0.308,
+      "acc_stderr,none": 0.0206670329874661,
+      "acc_norm,none": 0.438,
+      "acc_norm_stderr,none": 0.022210326363977417,
+      "alias": "openbookqa"
+    },
+    "harness|truthfulqa:mc2|0": {
+      "acc,none": 0.34705041069354287,
+      "acc_stderr,none": 0.013414222993514864,
+      "alias": "truthfulqa_mc2"
+    },
+    "harness|piqa|0": {
+      "acc,none": 0.7959738846572362,
+      "acc_stderr,none": 0.009402378102942638,
+      "acc_norm,none": 0.8008705114254625,
+      "acc_norm_stderr,none": 0.009317391893706877,
+      "alias": "piqa"
+    },
+    "harness|arc:challenge|0": {
+      "acc,none": 0.3924914675767918,
+      "acc_stderr,none": 0.014269634635670691,
+      "acc_norm,none": 0.42662116040955633,
+      "acc_norm_stderr,none": 0.014453185592920293,
+      "alias": "arc_challenge"
+    },
+    "harness|lambada:openai|0": {
+      "perplexity,none": 3.247753848970227,
+      "perplexity_stderr,none": 0.06320226910987307,
+      "acc,none": 0.7490782068697845,
+      "acc_stderr,none": 0.006040109961800763,
+      "alias": "lambada_openai"
+    }
+  },
+  "task_info": {
+    "model": "casperhansen/falcon-7b-awq",
+    "revision": "main",
+    "private": false,
+    "params": 28.0,
+    "architectures": "RWForCausalLM",
+    "quant_type": "AWQ",
+    "precision": "4bit",
+    "model_params": 56.0,
+    "model_size": 28.0,
+    "weight_dtype": "int4",
+    "compute_dtype": "float16",
+    "gguf_ftype": "*Q4_0.gguf",
+    "hardware": "gpu",
+    "status": "Pending",
+    "submitted_time": "2024-05-08T00:55:39Z",
+    "model_type": "quantization",
+    "job_id": -1,
+    "job_start_time": null,
+    "scripts": "ITREX"
+  },
+  "quantization_config": {
+    "bits": 4,
+    "group_size": 64,
+    "quant_method": "awq",
+    "version": "gemm",
+    "zero_point": true
+  },
+  "versions": {
+    "harness|boolq|0": 2.0,
+    "harness|winogrande|0": 1.0,
+    "harness|truthfulqa:mc1|0": 2.0,
+    "harness|arc:easy|0": 1.0,
+    "harness|mmlu|0": null,
+    "harness|mmlu_humanities|0": null,
+    "harness|mmlu_formal_logic|0": 0.0,
+    "harness|mmlu_high_school_european_history|0": 0.0,
+    "harness|mmlu_high_school_us_history|0": 0.0,
+    "harness|mmlu_high_school_world_history|0": 0.0,
+    "harness|mmlu_international_law|0": 0.0,
+    "harness|mmlu_jurisprudence|0": 0.0,
+    "harness|mmlu_logical_fallacies|0": 0.0,
+    "harness|mmlu_moral_disputes|0": 0.0,
+    "harness|mmlu_moral_scenarios|0": 0.0,
+    "harness|mmlu_philosophy|0": 0.0,
+    "harness|mmlu_prehistory|0": 0.0,
+    "harness|mmlu_professional_law|0": 0.0,
+    "harness|mmlu_world_religions|0": 0.0,
+    "harness|mmlu_other|0": null,
+    "harness|mmlu_business_ethics|0": 0.0,
+    "harness|mmlu_clinical_knowledge|0": 0.0,
+    "harness|mmlu_college_medicine|0": 0.0,
+    "harness|mmlu_global_facts|0": 0.0,
+    "harness|mmlu_human_aging|0": 0.0,
+    "harness|mmlu_management|0": 0.0,
+    "harness|mmlu_marketing|0": 0.0,
+    "harness|mmlu_medical_genetics|0": 0.0,
+    "harness|mmlu_miscellaneous|0": 0.0,
+    "harness|mmlu_nutrition|0": 0.0,
+    "harness|mmlu_professional_accounting|0": 0.0,
+    "harness|mmlu_professional_medicine|0": 0.0,
+    "harness|mmlu_virology|0": 0.0,
+    "harness|mmlu_social_sciences|0": null,
+    "harness|mmlu_econometrics|0": 0.0,
+    "harness|mmlu_high_school_geography|0": 0.0,
+    "harness|mmlu_high_school_government_and_politics|0": 0.0,
+    "harness|mmlu_high_school_macroeconomics|0": 0.0,
+    "harness|mmlu_high_school_microeconomics|0": 0.0,
+    "harness|mmlu_high_school_psychology|0": 0.0,
|
463 |
+
"harness|mmlu_human_sexuality|0": 0.0,
|
464 |
+
"harness|mmlu_professional_psychology|0": 0.0,
|
465 |
+
"harness|mmlu_public_relations|0": 0.0,
|
466 |
+
"harness|mmlu_security_studies|0": 0.0,
|
467 |
+
"harness|mmlu_sociology|0": 0.0,
|
468 |
+
"harness|mmlu_us_foreign_policy|0": 0.0,
|
469 |
+
"harness|mmlu_stem|0": null,
|
470 |
+
"harness|mmlu_abstract_algebra|0": 0.0,
|
471 |
+
"harness|mmlu_anatomy|0": 0.0,
|
472 |
+
"harness|mmlu_astronomy|0": 0.0,
|
473 |
+
"harness|mmlu_college_biology|0": 0.0,
|
474 |
+
"harness|mmlu_college_chemistry|0": 0.0,
|
475 |
+
"harness|mmlu_college_computer_science|0": 0.0,
|
476 |
+
"harness|mmlu_college_mathematics|0": 0.0,
|
477 |
+
"harness|mmlu_college_physics|0": 0.0,
|
478 |
+
"harness|mmlu_computer_security|0": 0.0,
|
479 |
+
"harness|mmlu_conceptual_physics|0": 0.0,
|
480 |
+
"harness|mmlu_electrical_engineering|0": 0.0,
|
481 |
+
"harness|mmlu_elementary_mathematics|0": 0.0,
|
482 |
+
"harness|mmlu_high_school_biology|0": 0.0,
|
483 |
+
"harness|mmlu_high_school_chemistry|0": 0.0,
|
484 |
+
"harness|mmlu_high_school_computer_science|0": 0.0,
|
485 |
+
"harness|mmlu_high_school_mathematics|0": 0.0,
|
486 |
+
"harness|mmlu_high_school_physics|0": 0.0,
|
487 |
+
"harness|mmlu_high_school_statistics|0": 0.0,
|
488 |
+
"harness|mmlu_machine_learning|0": 0.0,
|
489 |
+
"harness|hellaswag|0": 1.0,
|
490 |
+
"harness|openbookqa|0": 1.0,
|
491 |
+
"harness|truthfulqa:mc2|0": 2.0,
|
492 |
+
"harness|piqa|0": 1.0,
|
493 |
+
"harness|arc:challenge|0": 1.0,
|
494 |
+
"harness|lambada:openai|0": 1.0
|
495 |
+
},
|
496 |
+
"n-shot": {
|
497 |
+
"arc_challenge": 0,
|
498 |
+
"arc_easy": 0,
|
499 |
+
"boolq": 0,
|
500 |
+
"hellaswag": 0,
|
501 |
+
"lambada_openai": 0,
|
502 |
+
"mmlu": 0,
|
503 |
+
"mmlu_abstract_algebra": 0,
|
504 |
+
"mmlu_anatomy": 0,
|
505 |
+
"mmlu_astronomy": 0,
|
506 |
+
"mmlu_business_ethics": 0,
|
507 |
+
"mmlu_clinical_knowledge": 0,
|
508 |
+
"mmlu_college_biology": 0,
|
509 |
+
"mmlu_college_chemistry": 0,
|
510 |
+
"mmlu_college_computer_science": 0,
|
511 |
+
"mmlu_college_mathematics": 0,
|
512 |
+
"mmlu_college_medicine": 0,
|
513 |
+
"mmlu_college_physics": 0,
|
514 |
+
"mmlu_computer_security": 0,
|
515 |
+
"mmlu_conceptual_physics": 0,
|
516 |
+
"mmlu_econometrics": 0,
|
517 |
+
"mmlu_electrical_engineering": 0,
|
518 |
+
"mmlu_elementary_mathematics": 0,
|
519 |
+
"mmlu_formal_logic": 0,
|
520 |
+
"mmlu_global_facts": 0,
|
521 |
+
"mmlu_high_school_biology": 0,
|
522 |
+
"mmlu_high_school_chemistry": 0,
|
523 |
+
"mmlu_high_school_computer_science": 0,
|
524 |
+
"mmlu_high_school_european_history": 0,
|
525 |
+
"mmlu_high_school_geography": 0,
|
526 |
+
"mmlu_high_school_government_and_politics": 0,
|
527 |
+
"mmlu_high_school_macroeconomics": 0,
|
528 |
+
"mmlu_high_school_mathematics": 0,
|
529 |
+
"mmlu_high_school_microeconomics": 0,
|
530 |
+
"mmlu_high_school_physics": 0,
|
531 |
+
"mmlu_high_school_psychology": 0,
|
532 |
+
"mmlu_high_school_statistics": 0,
|
533 |
+
"mmlu_high_school_us_history": 0,
|
534 |
+
"mmlu_high_school_world_history": 0,
|
535 |
+
"mmlu_human_aging": 0,
|
536 |
+
"mmlu_human_sexuality": 0,
|
537 |
+
"mmlu_humanities": 0,
|
538 |
+
"mmlu_international_law": 0,
|
539 |
+
"mmlu_jurisprudence": 0,
|
540 |
+
"mmlu_logical_fallacies": 0,
|
541 |
+
"mmlu_machine_learning": 0,
|
542 |
+
"mmlu_management": 0,
|
543 |
+
"mmlu_marketing": 0,
|
544 |
+
"mmlu_medical_genetics": 0,
|
545 |
+
"mmlu_miscellaneous": 0,
|
546 |
+
"mmlu_moral_disputes": 0,
|
547 |
+
"mmlu_moral_scenarios": 0,
|
548 |
+
"mmlu_nutrition": 0,
|
549 |
+
"mmlu_other": 0,
|
550 |
+
"mmlu_philosophy": 0,
|
551 |
+
"mmlu_prehistory": 0,
|
552 |
+
"mmlu_professional_accounting": 0,
|
553 |
+
"mmlu_professional_law": 0,
|
554 |
+
"mmlu_professional_medicine": 0,
|
555 |
+
"mmlu_professional_psychology": 0,
|
556 |
+
"mmlu_public_relations": 0,
|
557 |
+
"mmlu_security_studies": 0,
|
558 |
+
"mmlu_social_sciences": 0,
|
559 |
+
"mmlu_sociology": 0,
|
560 |
+
"mmlu_stem": 0,
|
561 |
+
"mmlu_us_foreign_policy": 0,
|
562 |
+
"mmlu_virology": 0,
|
563 |
+
"mmlu_world_religions": 0,
|
564 |
+
"openbookqa": 0,
|
565 |
+
"piqa": 0,
|
566 |
+
"truthfulqa_mc1": 0,
|
567 |
+
"truthfulqa_mc2": 0,
|
568 |
+
"winogrande": 0
|
569 |
+
},
|
570 |
+
"date": 1715129862.4682531,
|
571 |
+
"config": {
|
572 |
+
"model": "hf",
|
573 |
+
"model_args": "pretrained=casperhansen/falcon-7b-awq,trust_remote_code=True,dtype=float16,_commit_hash=main",
|
574 |
+
"batch_size": 1,
|
575 |
+
"batch_sizes": [],
|
576 |
+
"device": "cuda",
|
577 |
+
"use_cache": null,
|
578 |
+
"limit": null,
|
579 |
+
"bootstrap_iters": 100000,
|
580 |
+
"gen_kwargs": null
|
581 |
+
}
|
582 |
+
}
|