{
  "results": {
    "anli_r1": {
      "acc": 0.328,
      "acc_stderr": 0.014853842487270334
    },
    "anli_r2": {
      "acc": 0.337,
      "acc_stderr": 0.014955087918653605
    },
    "anli_r3": {
      "acc": 0.32416666666666666,
      "acc_stderr": 0.013517438120881624
    },
    "cb": {
      "acc": 0.26785714285714285,
      "acc_stderr": 0.05971290310957636,
      "f1": 0.18656056587091072
    },
    "copa": {
      "acc": 0.76,
      "acc_stderr": 0.04292346959909283
    },
    "hellaswag": {
      "acc": 0.4547898824935272,
      "acc_stderr": 0.004969341773423513,
      "acc_norm": 0.5937064329814777,
      "acc_norm_stderr": 0.004901368629533419
    },
    "rte": {
      "acc": 0.5595667870036101,
      "acc_stderr": 0.029882123363118726
    },
    "winogrande": {
      "acc": 0.5769534333070244,
      "acc_stderr": 0.01388505535905647
    },
    "storycloze_2016": {
      "acc": 0.694815606627472,
      "acc_stderr": 0.010648664383985661
    },
    "boolq": {
      "acc": 0.6256880733944954,
      "acc_stderr": 0.00846424665644323
    },
    "arc_easy": {
      "acc": 0.40614478114478114,
      "acc_stderr": 0.010077409815364048,
      "acc_norm": 0.3766835016835017,
      "acc_norm_stderr": 0.009942848077476172
    },
    "arc_challenge": {
      "acc": 0.20648464163822525,
      "acc_stderr": 0.011828865619002316,
      "acc_norm": 0.2551194539249147,
      "acc_norm_stderr": 0.012739038695202109
    },
    "sciq": {
      "acc": 0.775,
      "acc_stderr": 0.013211720158614756,
      "acc_norm": 0.709,
      "acc_norm_stderr": 0.014370995982377933
    },
    "piqa": {
      "acc": 0.6561479869423286,
      "acc_stderr": 0.011082356277961393,
      "acc_norm": 0.6528835690968444,
      "acc_norm_stderr": 0.011107104993128086
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}