{
  "results": {
    "anli_r1": {
      "acc": 0.332,
      "acc_stderr": 0.014899597242811487
    },
    "anli_r2": {
      "acc": 0.329,
      "acc_stderr": 0.014865395385928357
    },
    "anli_r3": {
      "acc": 0.3541666666666667,
      "acc_stderr": 0.013811933499570954
    },
    "cb": {
      "acc": 0.5535714285714286,
      "acc_stderr": 0.06703189227942395,
      "f1": 0.38376730002345766
    },
    "copa": {
      "acc": 0.81,
      "acc_stderr": 0.03942772444036623
    },
    "hellaswag": {
      "acc": 0.47400916152160927,
      "acc_stderr": 0.004983035420235716,
      "acc_norm": 0.619896434973113,
      "acc_norm_stderr": 0.004844199910173026
    },
    "rte": {
      "acc": 0.516245487364621,
      "acc_stderr": 0.030080573208738064
    },
    "winogrande": {
      "acc": 0.5722178374112076,
      "acc_stderr": 0.013905134013839944
    },
    "storycloze_2016": {
      "acc": 0.7177979690005345,
      "acc_stderr": 0.010407834479647675
    },
    "boolq": {
      "acc": 0.5648318042813456,
      "acc_stderr": 0.008671229580582118
    },
    "arc_easy": {
      "acc": 0.5997474747474747,
      "acc_stderr": 0.010053550119896127,
      "acc_norm": 0.569023569023569,
      "acc_norm_stderr": 0.010161552863493746
    },
    "arc_challenge": {
      "acc": 0.27559726962457337,
      "acc_stderr": 0.01305716965576184,
      "acc_norm": 0.31569965870307165,
      "acc_norm_stderr": 0.013582571095815291
    },
    "sciq": {
      "acc": 0.844,
      "acc_stderr": 0.01148023500612236,
      "acc_norm": 0.794,
      "acc_norm_stderr": 0.012795613612786551
    },
    "piqa": {
      "acc": 0.7399347116430903,
      "acc_stderr": 0.0102348932490613,
      "acc_norm": 0.7595212187159956,
      "acc_norm_stderr": 0.009971345364651064
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}