{ "results": { "hendrycksTest-abstract_algebra": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "hendrycksTest-anatomy": { "acc": 0.3037037037037037, "acc_stderr": 0.039725528847851375, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.039725528847851375 }, "hendrycksTest-astronomy": { "acc": 0.2565789473684211, "acc_stderr": 0.0355418036802569, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.0355418036802569 }, "hendrycksTest-business_ethics": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "hendrycksTest-clinical_knowledge": { "acc": 0.37735849056603776, "acc_stderr": 0.029832808114796, "acc_norm": 0.37735849056603776, "acc_norm_stderr": 0.029832808114796 }, "hendrycksTest-college_biology": { "acc": 0.2708333333333333, "acc_stderr": 0.03716177437566017, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566017 }, "hendrycksTest-college_chemistry": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "hendrycksTest-college_computer_science": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "hendrycksTest-college_mathematics": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "hendrycksTest-college_medicine": { "acc": 0.28901734104046245, "acc_stderr": 0.034564257450870016, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.034564257450870016 }, "hendrycksTest-college_physics": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237656, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237656 }, "hendrycksTest-computer_security": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "hendrycksTest-conceptual_physics": { "acc": 0.34893617021276596, "acc_stderr": 0.031158522131357783, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357783 }, "hendrycksTest-econometrics": { "acc": 0.24561403508771928, "acc_stderr": 0.040493392977481425, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.040493392977481425 }, "hendrycksTest-electrical_engineering": { "acc": 0.2827586206896552, "acc_stderr": 0.03752833958003336, "acc_norm": 0.2827586206896552, "acc_norm_stderr": 0.03752833958003336 }, "hendrycksTest-elementary_mathematics": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484865, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484865 }, "hendrycksTest-formal_logic": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795132, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795132 }, "hendrycksTest-global_facts": { "acc": 0.32, "acc_stderr": 0.04688261722621503, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621503 }, "hendrycksTest-high_school_biology": { "acc": 0.2870967741935484, "acc_stderr": 0.025736542745594525, "acc_norm": 0.2870967741935484, "acc_norm_stderr": 0.025736542745594525 }, "hendrycksTest-high_school_chemistry": { "acc": 0.270935960591133, "acc_stderr": 0.031270907132976984, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.031270907132976984 }, "hendrycksTest-high_school_computer_science": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "hendrycksTest-high_school_european_history": { "acc": 0.296969696969697, "acc_stderr": 0.03567969772268049, 
"acc_norm": 0.296969696969697, "acc_norm_stderr": 0.03567969772268049 }, "hendrycksTest-high_school_geography": { "acc": 0.41919191919191917, "acc_stderr": 0.035155207286704175, "acc_norm": 0.41919191919191917, "acc_norm_stderr": 0.035155207286704175 }, "hendrycksTest-high_school_government_and_politics": { "acc": 0.3160621761658031, "acc_stderr": 0.03355397369686173, "acc_norm": 0.3160621761658031, "acc_norm_stderr": 0.03355397369686173 }, "hendrycksTest-high_school_macroeconomics": { "acc": 0.28717948717948716, "acc_stderr": 0.022939925418530616, "acc_norm": 0.28717948717948716, "acc_norm_stderr": 0.022939925418530616 }, "hendrycksTest-high_school_mathematics": { "acc": 0.23333333333333334, "acc_stderr": 0.02578787422095931, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.02578787422095931 }, "hendrycksTest-high_school_microeconomics": { "acc": 0.3487394957983193, "acc_stderr": 0.03095663632856654, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.03095663632856654 }, "hendrycksTest-high_school_physics": { "acc": 0.2913907284768212, "acc_stderr": 0.037101857261199946, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.037101857261199946 }, "hendrycksTest-high_school_psychology": { "acc": 0.3577981651376147, "acc_stderr": 0.020552060784827818, "acc_norm": 0.3577981651376147, "acc_norm_stderr": 0.020552060784827818 }, "hendrycksTest-high_school_statistics": { "acc": 0.4027777777777778, "acc_stderr": 0.03344887382997866, "acc_norm": 0.4027777777777778, "acc_norm_stderr": 0.03344887382997866 }, "hendrycksTest-high_school_us_history": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591362, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591362 }, "hendrycksTest-high_school_world_history": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036413, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036413 }, "hendrycksTest-human_aging": { "acc": 0.13901345291479822, "acc_stderr": 0.0232193528344745, "acc_norm": 0.13901345291479822, "acc_norm_stderr": 0.0232193528344745 }, "hendrycksTest-human_sexuality": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "hendrycksTest-international_law": { "acc": 0.4132231404958678, "acc_stderr": 0.04495087843548408, "acc_norm": 0.4132231404958678, "acc_norm_stderr": 0.04495087843548408 }, "hendrycksTest-jurisprudence": { "acc": 0.32407407407407407, "acc_stderr": 0.04524596007030048, "acc_norm": 0.32407407407407407, "acc_norm_stderr": 0.04524596007030048 }, "hendrycksTest-logical_fallacies": { "acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414 }, "hendrycksTest-machine_learning": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755806, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755806 }, "hendrycksTest-management": { "acc": 0.3106796116504854, "acc_stderr": 0.04582124160161551, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.04582124160161551 }, "hendrycksTest-marketing": { "acc": 0.32905982905982906, "acc_stderr": 0.030782321577688166, "acc_norm": 0.32905982905982906, "acc_norm_stderr": 0.030782321577688166 }, "hendrycksTest-medical_genetics": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "hendrycksTest-miscellaneous": { "acc": 0.2886334610472541, "acc_stderr": 0.016203792703197797, "acc_norm": 0.2886334610472541, "acc_norm_stderr": 
0.016203792703197797 }, "hendrycksTest-moral_disputes": { "acc": 0.3208092485549133, "acc_stderr": 0.025131000233647897, "acc_norm": 0.3208092485549133, "acc_norm_stderr": 0.025131000233647897 }, "hendrycksTest-moral_scenarios": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225592, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225592 }, "hendrycksTest-nutrition": { "acc": 0.3627450980392157, "acc_stderr": 0.027530078447110317, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.027530078447110317 }, "hendrycksTest-philosophy": { "acc": 0.3247588424437299, "acc_stderr": 0.026596782287697043, "acc_norm": 0.3247588424437299, "acc_norm_stderr": 0.026596782287697043 }, "hendrycksTest-prehistory": { "acc": 0.3487654320987654, "acc_stderr": 0.02651759772446501, "acc_norm": 0.3487654320987654, "acc_norm_stderr": 0.02651759772446501 }, "hendrycksTest-professional_accounting": { "acc": 0.3120567375886525, "acc_stderr": 0.02764012054516992, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.02764012054516992 }, "hendrycksTest-professional_law": { "acc": 0.2692307692307692, "acc_stderr": 0.01132873440314031, "acc_norm": 0.2692307692307692, "acc_norm_stderr": 0.01132873440314031 }, "hendrycksTest-professional_medicine": { "acc": 0.30514705882352944, "acc_stderr": 0.027971541370170598, "acc_norm": 0.30514705882352944, "acc_norm_stderr": 0.027971541370170598 }, "hendrycksTest-professional_psychology": { "acc": 0.26143790849673204, "acc_stderr": 0.01777694715752805, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.01777694715752805 }, "hendrycksTest-public_relations": { "acc": 0.32727272727272727, "acc_stderr": 0.04494290866252089, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252089 }, "hendrycksTest-security_studies": { "acc": 0.35918367346938773, "acc_stderr": 0.030713560455108493, "acc_norm": 0.35918367346938773, "acc_norm_stderr": 0.030713560455108493 }, "hendrycksTest-sociology": { "acc": 0.3383084577114428, "acc_stderr": 0.033455630703391935, "acc_norm": 0.3383084577114428, "acc_norm_stderr": 0.033455630703391935 }, "hendrycksTest-us_foreign_policy": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "hendrycksTest-virology": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "hendrycksTest-world_religions": { "acc": 0.3508771929824561, "acc_stderr": 0.03660298834049162, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.03660298834049162 } }, "versions": { "hendrycksTest-abstract_algebra": 1, "hendrycksTest-anatomy": 1, "hendrycksTest-astronomy": 1, "hendrycksTest-business_ethics": 1, "hendrycksTest-clinical_knowledge": 1, "hendrycksTest-college_biology": 1, "hendrycksTest-college_chemistry": 1, "hendrycksTest-college_computer_science": 1, "hendrycksTest-college_mathematics": 1, "hendrycksTest-college_medicine": 1, "hendrycksTest-college_physics": 1, "hendrycksTest-computer_security": 1, "hendrycksTest-conceptual_physics": 1, "hendrycksTest-econometrics": 1, "hendrycksTest-electrical_engineering": 1, "hendrycksTest-elementary_mathematics": 1, "hendrycksTest-formal_logic": 1, "hendrycksTest-global_facts": 1, "hendrycksTest-high_school_biology": 1, "hendrycksTest-high_school_chemistry": 1, "hendrycksTest-high_school_computer_science": 1, "hendrycksTest-high_school_european_history": 1, "hendrycksTest-high_school_geography": 1, "hendrycksTest-high_school_government_and_politics": 1, 
"hendrycksTest-high_school_macroeconomics": 1, "hendrycksTest-high_school_mathematics": 1, "hendrycksTest-high_school_microeconomics": 1, "hendrycksTest-high_school_physics": 1, "hendrycksTest-high_school_psychology": 1, "hendrycksTest-high_school_statistics": 1, "hendrycksTest-high_school_us_history": 1, "hendrycksTest-high_school_world_history": 1, "hendrycksTest-human_aging": 1, "hendrycksTest-human_sexuality": 1, "hendrycksTest-international_law": 1, "hendrycksTest-jurisprudence": 1, "hendrycksTest-logical_fallacies": 1, "hendrycksTest-machine_learning": 1, "hendrycksTest-management": 1, "hendrycksTest-marketing": 1, "hendrycksTest-medical_genetics": 1, "hendrycksTest-miscellaneous": 1, "hendrycksTest-moral_disputes": 1, "hendrycksTest-moral_scenarios": 1, "hendrycksTest-nutrition": 1, "hendrycksTest-philosophy": 1, "hendrycksTest-prehistory": 1, "hendrycksTest-professional_accounting": 1, "hendrycksTest-professional_law": 1, "hendrycksTest-professional_medicine": 1, "hendrycksTest-professional_psychology": 1, "hendrycksTest-public_relations": 1, "hendrycksTest-security_studies": 1, "hendrycksTest-sociology": 1, "hendrycksTest-us_foreign_policy": 1, "hendrycksTest-virology": 1, "hendrycksTest-world_religions": 1 }, "config": { "model": "hf-causal", "model_args": "pretrained=workdir_7b/ckpt_353", "num_fewshot": 5, "batch_size": "8", "batch_sizes": [], "device": null, "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {} } }