{
  "results": {
    "hendrycksTest-abstract_algebra": {
      "acc": 0.27,
      "acc_stderr": 0.044619604333847415,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.044619604333847415
    },
    "hendrycksTest-anatomy": {
      "acc": 0.24444444444444444,
      "acc_stderr": 0.03712537833614865,
      "acc_norm": 0.24444444444444444,
      "acc_norm_stderr": 0.03712537833614865
    },
    "hendrycksTest-astronomy": {
      "acc": 0.2565789473684211,
      "acc_stderr": 0.0355418036802569,
      "acc_norm": 0.2565789473684211,
      "acc_norm_stderr": 0.0355418036802569
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.33,
      "acc_stderr": 0.04725815626252604,
      "acc_norm": 0.33,
      "acc_norm_stderr": 0.04725815626252604
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.33962264150943394,
      "acc_stderr": 0.029146904747798335,
      "acc_norm": 0.33962264150943394,
      "acc_norm_stderr": 0.029146904747798335
    },
    "hendrycksTest-college_biology": {
      "acc": 0.3194444444444444,
      "acc_stderr": 0.038990736873573344,
      "acc_norm": 0.3194444444444444,
      "acc_norm_stderr": 0.038990736873573344
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.27,
      "acc_stderr": 0.04461960433384741,
      "acc_norm": 0.27,
      "acc_norm_stderr": 0.04461960433384741
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.38,
      "acc_stderr": 0.048783173121456316,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.048783173121456316
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720684,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720684
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.27167630057803466,
      "acc_stderr": 0.03391750322321659,
      "acc_norm": 0.27167630057803466,
      "acc_norm_stderr": 0.03391750322321659
    },
    "hendrycksTest-college_physics": {
      "acc": 0.20588235294117646,
      "acc_stderr": 0.04023382273617748,
      "acc_norm": 0.20588235294117646,
      "acc_norm_stderr": 0.04023382273617748
    },
    "hendrycksTest-computer_security": {
      "acc": 0.35,
      "acc_stderr": 0.04793724854411018,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.04793724854411018
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.3404255319148936,
      "acc_stderr": 0.030976692998534436,
      "acc_norm": 0.3404255319148936,
      "acc_norm_stderr": 0.030976692998534436
    },
    "hendrycksTest-econometrics": {
      "acc": 0.2631578947368421,
      "acc_stderr": 0.041424397194893624,
      "acc_norm": 0.2631578947368421,
      "acc_norm_stderr": 0.041424397194893624
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.2896551724137931,
      "acc_stderr": 0.037800192304380135,
      "acc_norm": 0.2896551724137931,
      "acc_norm_stderr": 0.037800192304380135
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.2671957671957672,
      "acc_stderr": 0.02278967314577656,
      "acc_norm": 0.2671957671957672,
      "acc_norm_stderr": 0.02278967314577656
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.29365079365079366,
      "acc_stderr": 0.040735243221471276,
      "acc_norm": 0.29365079365079366,
      "acc_norm_stderr": 0.040735243221471276
    },
    "hendrycksTest-global_facts": {
      "acc": 0.35,
      "acc_stderr": 0.0479372485441102,
      "acc_norm": 0.35,
      "acc_norm_stderr": 0.0479372485441102
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.26129032258064516,
      "acc_stderr": 0.024993053397764826,
      "acc_norm": 0.26129032258064516,
      "acc_norm_stderr": 0.024993053397764826
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.2857142857142857,
      "acc_stderr": 0.03178529710642749,
      "acc_norm": 0.2857142857142857,
      "acc_norm_stderr": 0.03178529710642749
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.21,
      "acc_stderr": 0.040936018074033256,
      "acc_norm": 0.21,
      "acc_norm_stderr": 0.040936018074033256
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.3333333333333333,
      "acc_stderr": 0.03681050869161549,
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.03681050869161549
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.2828282828282828,
      "acc_stderr": 0.0320877955878675,
      "acc_norm": 0.2828282828282828,
      "acc_norm_stderr": 0.0320877955878675
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.22279792746113988,
      "acc_stderr": 0.030031147977641545,
      "acc_norm": 0.22279792746113988,
      "acc_norm_stderr": 0.030031147977641545
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.23846153846153847,
      "acc_stderr": 0.021606294494647727,
      "acc_norm": 0.23846153846153847,
      "acc_norm_stderr": 0.021606294494647727
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.25925925925925924,
      "acc_stderr": 0.026719240783712173,
      "acc_norm": 0.25925925925925924,
      "acc_norm_stderr": 0.026719240783712173
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.27310924369747897,
      "acc_stderr": 0.028942004040998167,
      "acc_norm": 0.27310924369747897,
      "acc_norm_stderr": 0.028942004040998167
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.2913907284768212,
      "acc_stderr": 0.037101857261199946,
      "acc_norm": 0.2913907284768212,
      "acc_norm_stderr": 0.037101857261199946
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.3321100917431193,
      "acc_stderr": 0.02019268298542334,
      "acc_norm": 0.3321100917431193,
      "acc_norm_stderr": 0.02019268298542334
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.24074074074074073,
      "acc_stderr": 0.029157522184605603,
      "acc_norm": 0.24074074074074073,
      "acc_norm_stderr": 0.029157522184605603
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.2696078431372549,
      "acc_stderr": 0.03114557065948678,
      "acc_norm": 0.2696078431372549,
      "acc_norm_stderr": 0.03114557065948678
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.34177215189873417,
      "acc_stderr": 0.030874537537553617,
      "acc_norm": 0.34177215189873417,
      "acc_norm_stderr": 0.030874537537553617
    },
    "hendrycksTest-human_aging": {
      "acc": 0.2645739910313901,
      "acc_stderr": 0.02960510321703831,
      "acc_norm": 0.2645739910313901,
      "acc_norm_stderr": 0.02960510321703831
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.26717557251908397,
      "acc_stderr": 0.03880848301082396,
      "acc_norm": 0.26717557251908397,
      "acc_norm_stderr": 0.03880848301082396
    },
    "hendrycksTest-international_law": {
      "acc": 0.3305785123966942,
      "acc_stderr": 0.04294340845212095,
      "acc_norm": 0.3305785123966942,
      "acc_norm_stderr": 0.04294340845212095
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.2962962962962963,
      "acc_stderr": 0.044143436668549335,
      "acc_norm": 0.2962962962962963,
      "acc_norm_stderr": 0.044143436668549335
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.19631901840490798,
      "acc_stderr": 0.031207970394709215,
      "acc_norm": 0.19631901840490798,
      "acc_norm_stderr": 0.031207970394709215
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.26785714285714285,
      "acc_stderr": 0.04203277291467764,
      "acc_norm": 0.26785714285714285,
      "acc_norm_stderr": 0.04203277291467764
    },
    "hendrycksTest-management": {
      "acc": 0.2815533980582524,
      "acc_stderr": 0.04453254836326469,
      "acc_norm": 0.2815533980582524,
      "acc_norm_stderr": 0.04453254836326469
    },
    "hendrycksTest-marketing": {
      "acc": 0.36752136752136755,
      "acc_stderr": 0.03158539157745635,
      "acc_norm": 0.36752136752136755,
      "acc_norm_stderr": 0.03158539157745635
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.3,
      "acc_stderr": 0.046056618647183814,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.046056618647183814
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.33205619412515963,
      "acc_stderr": 0.016841174655295735,
      "acc_norm": 0.33205619412515963,
      "acc_norm_stderr": 0.016841174655295735
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.33236994219653176,
      "acc_stderr": 0.025361168749688225,
      "acc_norm": 0.33236994219653176,
      "acc_norm_stderr": 0.025361168749688225
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.23575418994413408,
      "acc_stderr": 0.014196375686290804,
      "acc_norm": 0.23575418994413408,
      "acc_norm_stderr": 0.014196375686290804
    },
    "hendrycksTest-nutrition": {
      "acc": 0.30392156862745096,
      "acc_stderr": 0.026336613469046647,
      "acc_norm": 0.30392156862745096,
      "acc_norm_stderr": 0.026336613469046647
    },
    "hendrycksTest-philosophy": {
      "acc": 0.3022508038585209,
      "acc_stderr": 0.026082700695399662,
      "acc_norm": 0.3022508038585209,
      "acc_norm_stderr": 0.026082700695399662
    },
    "hendrycksTest-prehistory": {
      "acc": 0.2716049382716049,
      "acc_stderr": 0.02474862449053737,
      "acc_norm": 0.2716049382716049,
      "acc_norm_stderr": 0.02474862449053737
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.2765957446808511,
      "acc_stderr": 0.026684564340461004,
      "acc_norm": 0.2765957446808511,
      "acc_norm_stderr": 0.026684564340461004
    },
    "hendrycksTest-professional_law": {
      "acc": 0.26792698826597133,
      "acc_stderr": 0.01131134769063386,
      "acc_norm": 0.26792698826597133,
      "acc_norm_stderr": 0.01131134769063386
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.1875,
      "acc_stderr": 0.023709788253811766,
      "acc_norm": 0.1875,
      "acc_norm_stderr": 0.023709788253811766
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.3088235294117647,
      "acc_stderr": 0.018690850273595284,
      "acc_norm": 0.3088235294117647,
      "acc_norm_stderr": 0.018690850273595284
    },
    "hendrycksTest-public_relations": {
      "acc": 0.35454545454545455,
      "acc_stderr": 0.04582004841505416,
      "acc_norm": 0.35454545454545455,
      "acc_norm_stderr": 0.04582004841505416
    },
    "hendrycksTest-security_studies": {
      "acc": 0.20408163265306123,
      "acc_stderr": 0.02580128347509051,
      "acc_norm": 0.20408163265306123,
      "acc_norm_stderr": 0.02580128347509051
    },
    "hendrycksTest-sociology": {
      "acc": 0.2736318407960199,
      "acc_stderr": 0.03152439186555401,
      "acc_norm": 0.2736318407960199,
      "acc_norm_stderr": 0.03152439186555401
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.28,
      "acc_stderr": 0.04512608598542127,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.04512608598542127
    },
    "hendrycksTest-virology": {
      "acc": 0.3493975903614458,
      "acc_stderr": 0.03711725190740748,
      "acc_norm": 0.3493975903614458,
      "acc_norm_stderr": 0.03711725190740748
    },
    "hendrycksTest-world_religions": {
      "acc": 0.3508771929824561,
      "acc_stderr": 0.036602988340491624,
      "acc_norm": 0.3508771929824561,
      "acc_norm_stderr": 0.036602988340491624
    }
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 1,
    "hendrycksTest-anatomy": 1,
    "hendrycksTest-astronomy": 1,
    "hendrycksTest-business_ethics": 1,
    "hendrycksTest-clinical_knowledge": 1,
    "hendrycksTest-college_biology": 1,
    "hendrycksTest-college_chemistry": 1,
    "hendrycksTest-college_computer_science": 1,
    "hendrycksTest-college_mathematics": 1,
    "hendrycksTest-college_medicine": 1,
    "hendrycksTest-college_physics": 1,
    "hendrycksTest-computer_security": 1,
    "hendrycksTest-conceptual_physics": 1,
    "hendrycksTest-econometrics": 1,
    "hendrycksTest-electrical_engineering": 1,
    "hendrycksTest-elementary_mathematics": 1,
    "hendrycksTest-formal_logic": 1,
    "hendrycksTest-global_facts": 1,
    "hendrycksTest-high_school_biology": 1,
    "hendrycksTest-high_school_chemistry": 1,
    "hendrycksTest-high_school_computer_science": 1,
    "hendrycksTest-high_school_european_history": 1,
    "hendrycksTest-high_school_geography": 1,
    "hendrycksTest-high_school_government_and_politics": 1,
    "hendrycksTest-high_school_macroeconomics": 1,
    "hendrycksTest-high_school_mathematics": 1,
    "hendrycksTest-high_school_microeconomics": 1,
    "hendrycksTest-high_school_physics": 1,
    "hendrycksTest-high_school_psychology": 1,
    "hendrycksTest-high_school_statistics": 1,
    "hendrycksTest-high_school_us_history": 1,
    "hendrycksTest-high_school_world_history": 1,
    "hendrycksTest-human_aging": 1,
    "hendrycksTest-human_sexuality": 1,
    "hendrycksTest-international_law": 1,
    "hendrycksTest-jurisprudence": 1,
    "hendrycksTest-logical_fallacies": 1,
    "hendrycksTest-machine_learning": 1,
    "hendrycksTest-management": 1,
    "hendrycksTest-marketing": 1,
    "hendrycksTest-medical_genetics": 1,
    "hendrycksTest-miscellaneous": 1,
    "hendrycksTest-moral_disputes": 1,
    "hendrycksTest-moral_scenarios": 1,
    "hendrycksTest-nutrition": 1,
    "hendrycksTest-philosophy": 1,
    "hendrycksTest-prehistory": 1,
    "hendrycksTest-professional_accounting": 1,
    "hendrycksTest-professional_law": 1,
    "hendrycksTest-professional_medicine": 1,
    "hendrycksTest-professional_psychology": 1,
    "hendrycksTest-public_relations": 1,
    "hendrycksTest-security_studies": 1,
    "hendrycksTest-sociology": 1,
    "hendrycksTest-us_foreign_policy": 1,
    "hendrycksTest-virology": 1,
    "hendrycksTest-world_religions": 1
  },
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=./workdir_7b_16mix/ckpt_358",
    "num_fewshot": 5,
    "batch_size": "1",
    "batch_sizes": [],
    "device": null,
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
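The JSON above reports per-task scores only; lm-evaluation-harness does not emit an overall MMLU number in this output format. Below is a minimal sketch of how the 57 per-task accuracies can be aggregated into an unweighted (macro) average. The filename `results.json` is an assumption; adjust it to wherever this output is saved.

```python
import json

# Load the evaluation output shown above (the filename is an assumption).
with open("results.json") as f:
    data = json.load(f)

tasks = data["results"]

# Unweighted (macro) average across the MMLU subjects; note this does not
# weight subjects by their number of questions.
avg_acc = sum(t["acc"] for t in tasks.values()) / len(tasks)
avg_acc_norm = sum(t["acc_norm"] for t in tasks.values()) / len(tasks)

print(f"tasks: {len(tasks)}")
print(f"macro-average acc:      {avg_acc:.4f}")
print(f"macro-average acc_norm: {avg_acc_norm:.4f}")
```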