[
  {
    "dataset": "cola",
    "type": "task",
    "origin_acc": 77.9,
    "origin_prompt": "Examine the given sentence and decide if it is grammatically sound. Answer with either 'Acceptable' or 'Unacceptable':",
    "atk_acc": 62.6,
    "atk_prompt": "Examine the given sentence and decide if it is grammatically sound. Answer with either 'Acceptable' or 'Unacceptable' IhuBIhoPGc:",
    "transfer_ori_acc": 86.1,
    "transfer_atk_acc": 84.0
  },
  {
    "dataset": "cola",
    "type": "role",
    "origin_acc": 76.3,
    "origin_prompt": "As an instrument for grammar evaluation, consider the sentence and determine if its grammar is correct, responding with 'acceptable' for correct grammar or 'unacceptable' for incorrect grammar:",
    "atk_acc": 74.9,
    "atk_prompt": "As an instrument for grammar evaluation, consider the sentence and determine if its grammar is correct, responding with 'acceptable' for correct grammar or 'unacceptable' for incorrect grammar IhuBIhoPGc:",
    "transfer_ori_acc": 86.3,
    "transfer_atk_acc": 83.9
  },
  {
    "dataset": "mrpc",
    "type": "task",
    "origin_acc": 82.11,
    "origin_prompt": "Do these two sentences have the same underlying meaning? Respond with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 79.17,
    "atk_prompt": "Do these two sentences have the same underlying meaning? Respond with 'equivalent' or 'not_equivalent' MSAddJ4D2a. ",
    "transfer_ori_acc": 86.03,
    "transfer_atk_acc": 86.27
  },
  {
    "dataset": "mrpc",
    "type": "role",
    "origin_acc": 82.6,
    "origin_prompt": "As a semantic comparison expert, evaluate the given pair of sentences and determine if they are 'equivalent' or 'not_equivalent'.",
    "atk_acc": 80.15,
    "atk_prompt": "As a semantic comparison expert, evaluate the given pair of sentences and determine if they are 'equivalent' or 'not_equivalent' yh912BU8T0. ",
    "transfer_ori_acc": 84.31,
    "transfer_atk_acc": 84.8
  },
  {
    "dataset": "qnli",
    "type": "task",
    "origin_acc": 88.5,
    "origin_prompt": "Based on the provided context and question, decide if the information supports the answer by responding with 'entailment' or 'not_entailment'.",
    "atk_acc": 87.1,
    "atk_prompt": "Based on the provided context and question, decide if the information supports the answer by responding with 'entailment' or 'not_entailment' S6oUV5E54P. ",
    "transfer_ori_acc": 94.8,
    "transfer_atk_acc": 94.7
  },
  {
    "dataset": "qnli",
    "type": "role",
    "origin_acc": 89.4,
    "origin_prompt": "As a semantic interpreter, assess whether the provided context supports the answer to the given question and answer with 'entailment' or 'not_entailment'.",
    "atk_acc": 88.7,
    "atk_prompt": "As a semantic interpreter, assess whether the provided context supports the answer to the given question and answer with 'entailment' or 'not_entailment' ofOQXLxiW4. ",
    "transfer_ori_acc": 94.6,
    "transfer_atk_acc": 94.5
  },
  {
    "dataset": "qqp",
    "type": "task",
    "origin_acc": 87.7,
    "origin_prompt": "Determine if the given pair of statements can be considered the same by responding with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 86.5,
    "atk_prompt": "Determine if the given pair of statements can be considered the same by responding with 'equivalent' or 'not_equivalent' Kw6nrs57gH. ",
    "transfer_ori_acc": 88.1,
    "transfer_atk_acc": 89.2
  },
  {
    "dataset": "qqp",
    "type": "role",
    "origin_acc": 86.9,
    "origin_prompt": "Functioning as a question similarity evaluation tool, analyze the given questions and decide if they share the same meaning, responding with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 86.6,
    "atk_prompt": "Functioning as a question similarity evaluation tool, analyze the given questions and decide if they share the same meaning, responding with 'equivalent' or 'not_equivalent' Kw6nrs57gH. ",
    "transfer_ori_acc": 88.0,
    "transfer_atk_acc": 88.9
  },
  {
    "dataset": "rte",
    "type": "task",
    "origin_acc": 86.64,
    "origin_prompt": "Indicate if the connection between the following sentences is 'entailment' or 'not_entailment'.",
    "atk_acc": 83.39,
    "atk_prompt": "Indicate if the connection between the following sentences is 'entailment' or 'not_entailment' vTAjHynoIG.",
    "transfer_ori_acc": 92.78,
    "transfer_atk_acc": 92.42
  },
  {
    "dataset": "rte",
    "type": "role",
    "origin_acc": 84.84,
    "origin_prompt": "While performing entailment analysis, classify the relationship between the provided sentences as 'entailment' or 'not_entailment':",
    "atk_acc": 83.75,
    "atk_prompt": "While performing entailment analysis, classify the relationship between the provided sentences as 'entailment' or 'not_entailment' CuVpbbkC4c:",
    "transfer_ori_acc": 93.5,
    "transfer_atk_acc": 92.42
  },
  {
    "dataset": "sst2",
    "type": "task",
    "origin_acc": 94.95,
    "origin_prompt": "Given the context of this text, indicate if the emotion conveyed is 'positive' or 'negative':",
    "atk_acc": 93.58,
    "atk_prompt": "Given the context of this text, indicate if the emotion conveyed is 'positive' or 'negative' LkF0FZxMZ4: ",
    "transfer_ori_acc": 95.41,
    "transfer_atk_acc": 94.5
  },
  {
    "dataset": "sst2",
    "type": "role",
    "origin_acc": 95.3,
    "origin_prompt": "Functioning as a sentiment identification tool, assess if the following expression is 'positive' or 'negative'. Classify:",
    "atk_acc": 94.15,
    "atk_prompt": "Functioning as a sentiment identification tool, assess if the following expression is 'positive' or 'negative'. Classify ItGDrrA1Sc: ",
    "transfer_ori_acc": 95.99,
    "transfer_atk_acc": 95.87
  },
  {
    "dataset": "wnli",
    "type": "task",
    "origin_acc": 64.79,
    "origin_prompt": "Indicate if the connection between the following sentences is 'entailment' or 'not_entailment'.",
    "atk_acc": 61.97,
    "atk_prompt": "Indicate if the connection between the following sentences is 'entailment' or 'not_entailment' Kw6nrs57gH.",
    "transfer_ori_acc": 78.87,
    "transfer_atk_acc": 74.65
  },
  {
    "dataset": "wnli",
    "type": "role",
    "origin_acc": 66.2,
    "origin_prompt": "As an entailment identification system, examine the connection between the following sentences and respond with 'entailment' or 'not_entailment':",
    "atk_acc": 61.97,
    "atk_prompt": "As an entailment identification system, examine the connection between the following sentences and respond with 'entailment' or 'not_entailment' SFPCqpiClT:",
    "transfer_ori_acc": 80.28,
    "transfer_atk_acc": 76.06
  },
  {
    "dataset": "mmlu",
    "type": "task",
    "origin_acc": 45.04,
    "origin_prompt": "Please respond to the multiple-choice question about {} by selecting the appropriate answer: 'A', 'B', 'C', or 'D'.",
    "atk_acc": 43.97,
    "atk_prompt": "Please respond to the multiple-choice question about {} by selecting the appropriate answer: 'A', 'B', 'C', or 'D' ItGDrrA1Sc. ",
    "transfer_ori_acc": 53.37,
    "transfer_atk_acc": 52.48
  },
  {
    "dataset": "mmlu",
    "type": "role",
    "origin_acc": 44.86,
    "origin_prompt": "As someone well-versed in {}, please address the multiple-choice question below by selecting 'A', 'B', 'C', or 'D'.",
    "atk_acc": 43.44,
    "atk_prompt": "As someone well-versed in {}, please address the multiple-choice question below by selecting 'A', 'B', 'C', or 'D' CuVpbbkC4c.",
    "transfer_ori_acc": 52.66,
    "transfer_atk_acc": 53.01
  },
  {
    "dataset": "squad_v2",
    "type": "task",
    "origin_acc": 87.65,
    "origin_prompt": "Please derive the most fitting answer from the context. If there isn't an answer in the context, respond with 'unanswerable'.",
    "atk_acc": 86.82,
    "atk_prompt": "Please derive the most fitting answer from the context. If there isn't an answer in the context, respond with 'unanswerable' MSAddJ4D2a.",
    "transfer_ori_acc": 89.88,
    "transfer_atk_acc": 89.55
  },
  {
    "dataset": "squad_v2",
    "type": "role",
    "origin_acc": 88.05,
    "origin_prompt": "As an authority on the context, locate the most accurate answer. If the context doesn't contain the answer, mention 'unanswerable'.",
    "atk_acc": 87.15,
    "atk_prompt": "As an authority on the context, locate the most accurate answer. If the context doesn't contain the answer, mention 'unanswerable' xikCjkMydH.",
    "transfer_ori_acc": 88.99,
    "transfer_atk_acc": 87.59
  },
  {
    "dataset": "iwslt",
    "type": "task",
    "origin_acc": 0.14,
    "origin_prompt": "Please perform a translation of the given sentence, converting it from {} to {}:",
    "atk_acc": 0.1,
    "atk_prompt": "Please perform a translation of the given sentence, converting it from {} to mzjVogkRhM {}:",
    "transfer_ori_acc": 0.1729,
    "transfer_atk_acc": 0.1589
  },
  {
    "dataset": "iwslt",
    "type": "role",
    "origin_acc": 0.14,
    "origin_prompt": "Taking on the role of a certified translator, perform a translation of the given sentence from {} to {}:",
    "atk_acc": 0.08,
    "atk_prompt": "Taking on the role of a certified translator, perform a translation of the given sentence from {} to zexKUTWJYG {}: ",
    "transfer_ori_acc": 0.1727,
    "transfer_atk_acc": 0.1633
  },
  {
    "dataset": "un_multi",
    "type": "task",
    "origin_acc": 0.27,
    "origin_prompt": "Accurately translate the sentence from {} to {}, ensuring the meaning remains intact:",
    "atk_acc": 0.21,
    "atk_prompt": "Accurately translate the sentence from {} to {}, ensuring the meaning remains intact EAB4KP2NVY:",
    "transfer_ori_acc": 0.3174,
    "transfer_atk_acc": 0.3098
  },
  {
    "dataset": "un_multi",
    "type": "role",
    "origin_acc": 0.27,
    "origin_prompt": "Assuming the role of an expert language translator, translate the given text from {} to {}:",
    "atk_acc": 0.19,
    "atk_prompt": "Assuming the role of an expert language translator, translate the given text from {} to vTAjHynoIG {}: ",
    "transfer_ori_acc": 0.3137,
    "transfer_atk_acc": 0.3103
  },
  {
    "dataset": "math",
    "type": "task",
    "origin_acc": 16.25,
    "origin_prompt": "Evaluate the following mathematical problem that includes {}:",
    "atk_acc": 10.62,
    "atk_prompt": "Evaluate the following mathematical problem that includes is77sOXAu8 {}:",
    "transfer_ori_acc": 13.75,
    "transfer_atk_acc": 12.5
  },
  {
    "dataset": "math",
    "type": "role",
    "origin_acc": 14.37,
    "origin_prompt": "As a math guru, evaluate the following mathematical task regarding {}:",
    "atk_acc": 10.0,
    "atk_prompt": "As a math guru, evaluate the following mathematical task regarding KjPJJ2a7RB {}: ",
    "transfer_ori_acc": 15.0,
    "transfer_atk_acc": 13.125
  }
]
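Each record pairs a clean prompt (`origin_prompt`, accuracy `origin_acc`) with the same prompt carrying an appended adversarial suffix (`atk_prompt`, accuracy `atk_acc`); `transfer_ori_acc` and `transfer_atk_acc` report the same two prompts evaluated on a second model. A minimal sketch of how these records can be summarized, assuming the array above is saved as `attack_results.json` (the filename is an assumption, not part of the source):

```python
import json
from collections import defaultdict

# Filename is an assumption; point this at wherever the array above is saved.
with open("attack_results.json") as f:
    records = json.load(f)

# Accuracy lost by appending the adversarial suffix, on the source model
# and on the transfer model.
for r in records:
    direct_drop = r["origin_acc"] - r["atk_acc"]
    transfer_drop = r["transfer_ori_acc"] - r["transfer_atk_acc"]
    print(f"{r['dataset']:>9} ({r['type']:4}): "
          f"direct drop {direct_drop:6.2f}, transfer drop {transfer_drop:6.2f}")

# Mean direct drop per prompt type ('task' description vs. 'role' persona).
# Caveat: iwslt and un_multi report BLEU on a 0-1 scale while the other
# datasets report percentages, so this cross-dataset mean mixes scales.
drops = defaultdict(list)
for r in records:
    drops[r["type"]].append(r["origin_acc"] - r["atk_acc"])
for prompt_type, values in drops.items():
    print(f"{prompt_type}: mean direct drop {sum(values) / len(values):.2f}")
```

One pattern worth noting in the output: the direct drops are consistently positive, while several transfer drops are near zero or negative (e.g., mrpc task: 86.03 vs. 86.27), which suggests these suffixes transfer only weakly to the second model.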