{
  "results": {
    "anli_r1": {
      "acc": 0.348,
      "acc_stderr": 0.01507060460376841
    },
    "anli_r2": {
      "acc": 0.342,
      "acc_stderr": 0.015008706182121738
    },
    "anli_r3": {
      "acc": 0.33,
      "acc_stderr": 0.013579531277800918
    },
    "cb": {
      "acc": 0.26785714285714285,
      "acc_stderr": 0.05971290310957636,
      "f1": 0.2511904761904762
    },
    "copa": {
      "acc": 0.81,
      "acc_stderr": 0.03942772444036623
    },
    "hellaswag": {
      "acc": 0.4565823541127266,
      "acc_stderr": 0.0049709334202319285,
      "acc_norm": 0.6061541525592511,
      "acc_norm_stderr": 0.0048760280379419405
    },
    "rte": {
      "acc": 0.5379061371841155,
      "acc_stderr": 0.030009848912529117
    },
    "winogrande": {
      "acc": 0.6037884767166535,
      "acc_stderr": 0.013746404157154946
    },
    "storycloze_2016": {
      "acc": 0.7354355959380011,
      "acc_stderr": 0.01020040054171416
    },
    "boolq": {
      "acc": 0.6201834862385321,
      "acc_stderr": 0.008488668235778613
    },
    "arc_easy": {
      "acc": 0.5900673400673401,
      "acc_stderr": 0.010091953527506246,
      "acc_norm": 0.5791245791245792,
      "acc_norm_stderr": 0.01013050216406634
    },
    "arc_challenge": {
      "acc": 0.28754266211604096,
      "acc_stderr": 0.01322671905626613,
      "acc_norm": 0.31313993174061433,
      "acc_norm_stderr": 0.013552671543623504
    },
    "sciq": {
      "acc": 0.918,
      "acc_stderr": 0.008680515615523746,
      "acc_norm": 0.917,
      "acc_norm_stderr": 0.00872852720607479
    },
    "piqa": {
      "acc": 0.7317736670293797,
      "acc_stderr": 0.010336761992404485,
      "acc_norm": 0.7448313384113167,
      "acc_norm_stderr": 0.010171571592521828
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}