## AGIEval

```json
{
"agieval_aqua_rat": {
"acc": 0.21653543307086615,
"acc_stderr": 0.02589488017640766,
"acc_norm": 0.2283464566929134,
"acc_norm_stderr": 0.02639052653782214
},
"agieval_logiqa_en": {
"acc": 0.3824884792626728,
"acc_stderr": 0.01906228828357591,
"acc_norm": 0.3978494623655914,
"acc_norm_stderr": 0.01919796734677121
},
"agieval_lsat_ar": {
"acc": 0.2391304347826087,
"acc_stderr": 0.028187385293933952,
"acc_norm": 0.23478260869565218,
"acc_norm_stderr": 0.028009647070930115
},
"agieval_lsat_lr": {
"acc": 0.5098039215686274,
"acc_stderr": 0.022157849643610006,
"acc_norm": 0.5137254901960784,
"acc_norm_stderr": 0.022153758586005032
},
"agieval_lsat_rc": {
"acc": 0.5799256505576208,
"acc_stderr": 0.030149620080212,
"acc_norm": 0.5650557620817844,
"acc_norm_stderr": 0.030282731632881116
},
"agieval_sat_en": {
"acc": 0.7281553398058253,
"acc_stderr": 0.03107388056324747,
"acc_norm": 0.7184466019417476,
"acc_norm_stderr": 0.03141236994965782
},
"agieval_sat_en_without_passage": {
"acc": 0.42718446601941745,
"acc_stderr": 0.03454921537431906,
"acc_norm": 0.4029126213592233,
"acc_norm_stderr": 0.034256851969664756
},
"agieval_sat_math": {
"acc": 0.35,
"acc_stderr": 0.032230618755899304,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.031473852941718845
}
}
```

## GPT4All

```json
{
"arc_challenge": {
"acc": 0.5486348122866894,
"acc_stderr": 0.014542104569955267,
"acc_norm": 0.5742320819112628,
"acc_norm_stderr": 0.014449464278868807
},
"arc_easy": {
"acc": 0.8253367003367004,
"acc_stderr": 0.007790845678413371,
"acc_norm": 0.8068181818181818,
"acc_norm_stderr": 0.008101009769568695
},
"boolq": {
"acc": 0.8681957186544342,
"acc_stderr": 0.005916517022827898
},
"hellaswag": {
"acc": 0.6168094005178252,
"acc_stderr": 0.0048517055047904425,
"acc_norm": 0.8050189205337582,
"acc_norm_stderr": 0.003953764481098414
},
"openbookqa": {
"acc": 0.322,
"acc_stderr": 0.020916668330019882,
"acc_norm": 0.438,
"acc_norm_stderr": 0.022210326363977417
},
"piqa": {
"acc": 0.8106637649619152,
"acc_stderr": 0.009140767676615017,
"acc_norm": 0.8199129488574538,
"acc_norm_stderr": 0.00896541789890321
},
"winogrande": {
"acc": 0.7419100236779794,
"acc_stderr": 0.012298278833972389
}
}
```

## TruthfulQA

```json
{
"truthfulqa_mc": {
"mc1": 0.36107711138310894,
"mc1_stderr": 0.016814312844836882,
"mc2": 0.5219368112449279,
"mc2_stderr": 0.015178997290064866
}
}
```
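The per-task dictionaries above match the output format of EleutherAI's lm-evaluation-harness (`acc`/`acc_norm` with standard errors, `mc1`/`mc2` for TruthfulQA). As a rough aid, below is a minimal Python sketch for turning such blocks into per-suite averages. The filenames in `SUITES` are hypothetical (it assumes each JSON block above has been saved to a file), and preferring `acc_norm` over `acc`, with `mc2` for TruthfulQA, is only one common reporting convention, not a method stated in this card.

```python
import json
from statistics import mean

# Hypothetical filenames; assumes each JSON block above was saved verbatim to disk.
SUITES = {
    "AGIEval": "agieval.json",
    "GPT4All": "gpt4all.json",
    "TruthfulQA": "truthfulqa.json",
}


def suite_average(path: str) -> float:
    """Average the headline metric across tasks in one results file.

    Prefers acc_norm when present, falls back to acc, and uses mc2 for
    the TruthfulQA multiple-choice task.
    """
    with open(path) as f:
        results = json.load(f)
    scores = []
    for task, metrics in results.items():
        for key in ("acc_norm", "acc", "mc2"):
            if key in metrics:
                scores.append(metrics[key])
                break
    return mean(scores)


if __name__ == "__main__":
    for name, path in SUITES.items():
        print(f"{name}: {100 * suite_average(path):.2f}")
```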