{
"results": {
"anli_r1": {
"acc": 0.34,
"acc_stderr": 0.014987482264363937
},
"anli_r2": {
"acc": 0.321,
"acc_stderr": 0.014770821817934644
},
"anli_r3": {
"acc": 0.34,
"acc_stderr": 0.013680495725767803
},
"cb": {
"acc": 0.375,
"acc_stderr": 0.06527912098338669,
"f1": 0.32099491681373216
},
"copa": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506
},
"hellaswag": {
"acc": 0.48078072097191793,
"acc_stderr": 0.004986093791041653,
"acc_norm": 0.6337382991435969,
"acc_norm_stderr": 0.004807975515446487
},
"rte": {
"acc": 0.5740072202166066,
"acc_stderr": 0.029764956741777645
},
"winogrande": {
"acc": 0.590370955011839,
"acc_stderr": 0.013821049109655453
},
"storycloze_2016": {
"acc": 0.7204703367183325,
"acc_stderr": 0.01037770209970486
},
"boolq": {
"acc": 0.5948012232415902,
"acc_stderr": 0.008586427929715515
},
"arc_easy": {
"acc": 0.6262626262626263,
"acc_stderr": 0.009927267058259628,
"acc_norm": 0.5917508417508418,
"acc_norm_stderr": 0.01008556619579125
},
"arc_challenge": {
"acc": 0.29266211604095566,
"acc_stderr": 0.013295916103619417,
"acc_norm": 0.32337883959044367,
"acc_norm_stderr": 0.013669421630012132
},
"sciq": {
"acc": 0.904,
"acc_stderr": 0.009320454434783227,
"acc_norm": 0.885,
"acc_norm_stderr": 0.01009340759490462
},
"piqa": {
"acc": 0.7622415669205659,
"acc_stderr": 0.009932525779525489,
"acc_norm": 0.763873775843308,
"acc_norm_stderr": 0.009908965890558218
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}