{
"results": {
"anli_r1": {
"acc": 0.334,
"acc_stderr": 0.01492201952373296
},
"anli_r2": {
"acc": 0.334,
"acc_stderr": 0.014922019523732965
},
"anli_r3": {
"acc": 0.3566666666666667,
"acc_stderr": 0.013833742805050715
},
"cb": {
"acc": 0.2857142857142857,
"acc_stderr": 0.06091449038731725,
"f1": 0.2883181980926342
},
"copa": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684
},
"hellaswag": {
"acc": 0.43905596494722166,
"acc_stderr": 0.0049525768633152155,
"acc_norm": 0.5635331607249552,
"acc_norm_stderr": 0.004949335356881862
},
"rte": {
"acc": 0.5379061371841155,
"acc_stderr": 0.030009848912529117
},
"winogrande": {
"acc": 0.5445935280189423,
"acc_stderr": 0.013996485037729793
},
"storycloze_2016": {
"acc": 0.703901656867985,
"acc_stderr": 0.010557307688475126
},
"boolq": {
"acc": 0.5758409785932722,
"acc_stderr": 0.00864386902338812
},
"arc_easy": {
"acc": 0.5387205387205387,
"acc_stderr": 0.010228972678389611,
"acc_norm": 0.48947811447811446,
"acc_norm_stderr": 0.010257511546488232
},
"arc_challenge": {
"acc": 0.26109215017064846,
"acc_stderr": 0.012835523909473848,
"acc_norm": 0.26706484641638223,
"acc_norm_stderr": 0.012928933196496344
},
"sciq": {
"acc": 0.777,
"acc_stderr": 0.013169830843425677,
"acc_norm": 0.686,
"acc_norm_stderr": 0.01468399195108796
},
"piqa": {
"acc": 0.7154515778019587,
"acc_stderr": 0.010527218464130614,
"acc_norm": 0.735038084874864,
"acc_norm_stderr": 0.010296557993316038
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}