|
{
  "results": {
    "hendrycksTest-abstract_algebra": {
      "acc": 0.27,
      "acc_stderr": 0.044619604333847415,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.044619604333847415
    },
    "hendrycksTest-anatomy": {
      "acc": 0.22962962962962963,
      "acc_stderr": 0.03633384414073465,
      "acc_norm": 0.22962962962962963,
      "acc_norm_stderr": 0.03633384414073465
    },
    "hendrycksTest-astronomy": {
      "acc": 0.23684210526315788,
      "acc_stderr": 0.03459777606810535,
      "acc_norm": 0.23684210526315788,
      "acc_norm_stderr": 0.03459777606810535
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252604,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252604
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.35094339622641507,
      "acc_stderr": 0.02937364625323469,
      "acc_norm": 0.35094339622641507,
      "acc_norm_stderr": 0.02937364625323469
    },
    "hendrycksTest-college_biology": {
      "acc": 0.2847222222222222,
      "acc_stderr": 0.03773809990686933,
      "acc_norm": 0.2847222222222222,
      "acc_norm_stderr": 0.03773809990686933
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720683,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720683
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.32,
      "acc_stderr": 0.046882617226215034,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.046882617226215034
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.29,
      "acc_stderr": 0.045604802157206845,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.045604802157206845
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.3063583815028902,
      "acc_stderr": 0.03514942551267437,
      "acc_norm": 0.3063583815028902,
      "acc_norm_stderr": 0.03514942551267437
    },
    "hendrycksTest-college_physics": {
      "acc": 0.17647058823529413,
      "acc_stderr": 0.03793281185307812,
      "acc_norm": 0.17647058823529413,
      "acc_norm_stderr": 0.03793281185307812
    },
    "hendrycksTest-computer_security": {
      "acc": 0.32,
      "acc_stderr": 0.04688261722621505,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.04688261722621505
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.33617021276595743,
      "acc_stderr": 0.030881618520676942,
      "acc_norm": 0.33617021276595743,
      "acc_norm_stderr": 0.030881618520676942
    },
    "hendrycksTest-econometrics": {
      "acc": 0.2719298245614035,
      "acc_stderr": 0.04185774424022056,
      "acc_norm": 0.2719298245614035,
      "acc_norm_stderr": 0.04185774424022056
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.30344827586206896,
      "acc_stderr": 0.038312260488503336,
      "acc_norm": 0.30344827586206896,
      "acc_norm_stderr": 0.038312260488503336
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.24603174603174602,
      "acc_stderr": 0.022182037202948365,
      "acc_norm": 0.24603174603174602,
      "acc_norm_stderr": 0.022182037202948365
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.29365079365079366,
      "acc_stderr": 0.04073524322147127,
      "acc_norm": 0.29365079365079366,
      "acc_norm_stderr": 0.04073524322147127
    },
    "hendrycksTest-global_facts": {
      "acc": 0.4,
      "acc_stderr": 0.04923659639173309,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.04923659639173309
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.2806451612903226,
      "acc_stderr": 0.025560604721022902,
      "acc_norm": 0.2806451612903226,
      "acc_norm_stderr": 0.025560604721022902
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.270935960591133,
      "acc_stderr": 0.031270907132976984,
      "acc_norm": 0.270935960591133,
      "acc_norm_stderr": 0.031270907132976984
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.23,
      "acc_stderr": 0.04229525846816505,
      "acc_norm": 0.23,
      "acc_norm_stderr": 0.04229525846816505
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.30303030303030304,
      "acc_stderr": 0.03588624800091707,
      "acc_norm": 0.30303030303030304,
      "acc_norm_stderr": 0.03588624800091707
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.2777777777777778,
      "acc_stderr": 0.03191178226713549,
      "acc_norm": 0.2777777777777778,
      "acc_norm_stderr": 0.03191178226713549
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.22279792746113988,
      "acc_stderr": 0.03003114797764155,
      "acc_norm": 0.22279792746113988,
      "acc_norm_stderr": 0.03003114797764155
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.24102564102564103,
      "acc_stderr": 0.021685546665333195,
      "acc_norm": 0.24102564102564103,
      "acc_norm_stderr": 0.021685546665333195
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.24814814814814815,
      "acc_stderr": 0.026335739404055803,
      "acc_norm": 0.24814814814814815,
      "acc_norm_stderr": 0.026335739404055803
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.31092436974789917,
      "acc_stderr": 0.03006676158297794,
      "acc_norm": 0.31092436974789917,
      "acc_norm_stderr": 0.03006676158297794
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.33774834437086093,
      "acc_stderr": 0.038615575462551684,
      "acc_norm": 0.33774834437086093,
      "acc_norm_stderr": 0.038615575462551684
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.3155963302752294,
      "acc_stderr": 0.019926117513869666,
      "acc_norm": 0.3155963302752294,
      "acc_norm_stderr": 0.019926117513869666
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.35648148148148145,
      "acc_stderr": 0.032664783315272714,
      "acc_norm": 0.35648148148148145,
      "acc_norm_stderr": 0.032664783315272714
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.30392156862745096,
      "acc_stderr": 0.032282103870378935,
      "acc_norm": 0.30392156862745096,
      "acc_norm_stderr": 0.032282103870378935
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.31645569620253167,
      "acc_stderr": 0.030274974880218977,
      "acc_norm": 0.31645569620253167,
      "acc_norm_stderr": 0.030274974880218977
    },
    "hendrycksTest-human_aging": {
      "acc": 0.24663677130044842,
      "acc_stderr": 0.028930413120910877,
      "acc_norm": 0.24663677130044842,
      "acc_norm_stderr": 0.028930413120910877
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.2595419847328244,
      "acc_stderr": 0.03844876139785271,
      "acc_norm": 0.2595419847328244,
      "acc_norm_stderr": 0.03844876139785271
    },
    "hendrycksTest-international_law": {
      "acc": 0.2892561983471074,
      "acc_stderr": 0.04139112727635465,
      "acc_norm": 0.2892561983471074,
      "acc_norm_stderr": 0.04139112727635465
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.26851851851851855,
      "acc_stderr": 0.04284467968052191,
      "acc_norm": 0.26851851851851855,
      "acc_norm_stderr": 0.04284467968052191
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.2147239263803681,
      "acc_stderr": 0.03226219377286774,
      "acc_norm": 0.2147239263803681,
      "acc_norm_stderr": 0.03226219377286774
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.25892857142857145,
      "acc_stderr": 0.04157751539865629,
      "acc_norm": 0.25892857142857145,
      "acc_norm_stderr": 0.04157751539865629
    },
    "hendrycksTest-management": {
      "acc": 0.27184466019417475,
      "acc_stderr": 0.044052680241409216,
      "acc_norm": 0.27184466019417475,
      "acc_norm_stderr": 0.044052680241409216
    },
    "hendrycksTest-marketing": {
      "acc": 0.3247863247863248,
      "acc_stderr": 0.03067902276549883,
      "acc_norm": 0.3247863247863248,
      "acc_norm_stderr": 0.03067902276549883
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252604,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252604
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.31417624521072796,
      "acc_stderr": 0.016599291735884907,
      "acc_norm": 0.31417624521072796,
      "acc_norm_stderr": 0.016599291735884907
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.2976878612716763,
      "acc_stderr": 0.02461705538867701,
      "acc_norm": 0.2976878612716763,
      "acc_norm_stderr": 0.02461705538867701
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.2636871508379888,
      "acc_stderr": 0.01473692638376196,
      "acc_norm": 0.2636871508379888,
      "acc_norm_stderr": 0.01473692638376196
    },
    "hendrycksTest-nutrition": {
      "acc": 0.30392156862745096,
      "acc_stderr": 0.026336613469046647,
      "acc_norm": 0.30392156862745096,
      "acc_norm_stderr": 0.026336613469046647
    },
    "hendrycksTest-philosophy": {
      "acc": 0.2829581993569132,
      "acc_stderr": 0.025583062489984824,
      "acc_norm": 0.2829581993569132,
      "acc_norm_stderr": 0.025583062489984824
    },
    "hendrycksTest-prehistory": {
      "acc": 0.2932098765432099,
      "acc_stderr": 0.02532988817190092,
      "acc_norm": 0.2932098765432099,
      "acc_norm_stderr": 0.02532988817190092
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.24822695035460993,
      "acc_stderr": 0.025770015644290396,
      "acc_norm": 0.24822695035460993,
      "acc_norm_stderr": 0.025770015644290396
    },
    "hendrycksTest-professional_law": {
      "acc": 0.2646675358539765,
      "acc_stderr": 0.011267332992845517,
      "acc_norm": 0.2646675358539765,
      "acc_norm_stderr": 0.011267332992845517
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.2426470588235294,
      "acc_stderr": 0.02604066247420126,
      "acc_norm": 0.2426470588235294,
      "acc_norm_stderr": 0.02604066247420126
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.272875816993464,
      "acc_stderr": 0.01802047414839358,
      "acc_norm": 0.272875816993464,
      "acc_norm_stderr": 0.01802047414839358
    },
    "hendrycksTest-public_relations": {
      "acc": 0.3090909090909091,
      "acc_stderr": 0.044262946482000985,
      "acc_norm": 0.3090909090909091,
      "acc_norm_stderr": 0.044262946482000985
    },
    "hendrycksTest-security_studies": {
      "acc": 0.23673469387755103,
      "acc_stderr": 0.027212835884073167,
      "acc_norm": 0.23673469387755103,
      "acc_norm_stderr": 0.027212835884073167
    },
    "hendrycksTest-sociology": {
      "acc": 0.29850746268656714,
      "acc_stderr": 0.032357437893550424,
      "acc_norm": 0.29850746268656714,
      "acc_norm_stderr": 0.032357437893550424
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.31,
      "acc_stderr": 0.04648231987117316,
      "acc_norm": 0.31,
      "acc_norm_stderr": 0.04648231987117316
    },
    "hendrycksTest-virology": {
      "acc": 0.3192771084337349,
      "acc_stderr": 0.03629335329947859,
      "acc_norm": 0.3192771084337349,
      "acc_norm_stderr": 0.03629335329947859
    },
    "hendrycksTest-world_religions": {
      "acc": 0.3742690058479532,
      "acc_stderr": 0.037116011853894806,
      "acc_norm": 0.3742690058479532,
      "acc_norm_stderr": 0.037116011853894806
    }
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 1,
    "hendrycksTest-anatomy": 1,
    "hendrycksTest-astronomy": 1,
    "hendrycksTest-business_ethics": 1,
    "hendrycksTest-clinical_knowledge": 1,
    "hendrycksTest-college_biology": 1,
    "hendrycksTest-college_chemistry": 1,
    "hendrycksTest-college_computer_science": 1,
    "hendrycksTest-college_mathematics": 1,
    "hendrycksTest-college_medicine": 1,
    "hendrycksTest-college_physics": 1,
    "hendrycksTest-computer_security": 1,
    "hendrycksTest-conceptual_physics": 1,
    "hendrycksTest-econometrics": 1,
    "hendrycksTest-electrical_engineering": 1,
    "hendrycksTest-elementary_mathematics": 1,
    "hendrycksTest-formal_logic": 1,
    "hendrycksTest-global_facts": 1,
    "hendrycksTest-high_school_biology": 1,
    "hendrycksTest-high_school_chemistry": 1,
    "hendrycksTest-high_school_computer_science": 1,
    "hendrycksTest-high_school_european_history": 1,
    "hendrycksTest-high_school_geography": 1,
    "hendrycksTest-high_school_government_and_politics": 1,
    "hendrycksTest-high_school_macroeconomics": 1,
    "hendrycksTest-high_school_mathematics": 1,
    "hendrycksTest-high_school_microeconomics": 1,
    "hendrycksTest-high_school_physics": 1,
    "hendrycksTest-high_school_psychology": 1,
    "hendrycksTest-high_school_statistics": 1,
    "hendrycksTest-high_school_us_history": 1,
    "hendrycksTest-high_school_world_history": 1,
    "hendrycksTest-human_aging": 1,
    "hendrycksTest-human_sexuality": 1,
    "hendrycksTest-international_law": 1,
    "hendrycksTest-jurisprudence": 1,
    "hendrycksTest-logical_fallacies": 1,
    "hendrycksTest-machine_learning": 1,
    "hendrycksTest-management": 1,
    "hendrycksTest-marketing": 1,
    "hendrycksTest-medical_genetics": 1,
    "hendrycksTest-miscellaneous": 1,
    "hendrycksTest-moral_disputes": 1,
    "hendrycksTest-moral_scenarios": 1,
    "hendrycksTest-nutrition": 1,
    "hendrycksTest-philosophy": 1,
    "hendrycksTest-prehistory": 1,
    "hendrycksTest-professional_accounting": 1,
    "hendrycksTest-professional_law": 1,
    "hendrycksTest-professional_medicine": 1,
    "hendrycksTest-professional_psychology": 1,
    "hendrycksTest-public_relations": 1,
    "hendrycksTest-security_studies": 1,
    "hendrycksTest-sociology": 1,
    "hendrycksTest-us_foreign_policy": 1,
    "hendrycksTest-virology": 1,
    "hendrycksTest-world_religions": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=./workdir_7b_16mix/ckpt_357",
    "num_fewshot": 5,
    "batch_size": "1",
    "batch_sizes": [],
    "device": null,
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
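
Note: the file above is raw per-task lm-evaluation-harness output and carries no aggregate score. A minimal sketch for computing one, assuming the JSON is saved as results.json (a placeholder path, not part of the harness output). It takes the unweighted mean of "acc" across the 57 hendrycksTest (MMLU) subtasks; some reports instead weight by per-subtask question count.

import json

# Load the harness results file shown above (filename is an assumption).
with open("results.json") as f:
    data = json.load(f)

# Unweighted macro-average: one weight per subtask, regardless of size.
accs = [task["acc"] for task in data["results"].values()]
print(f"macro-avg acc over {len(accs)} tasks: {sum(accs) / len(accs):.4f}")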