[
  {
    "dataset": "cola",
    "type": "task",
    "origin_acc": 81.0,
    "origin_prompt": "Examine the sentence and decide if its grammar is 'Acceptable' or 'Unacceptable'.",
    "atk_acc": 78.0,
    "atk_prompt": "Examine the sentence and decide if its grammar is 'Acceptable' or 'Unacceptable' 9Q3S1F94fE. ",
    "transfer_ori_acc": 76.3,
    "transfer_atk_acc": 76.6
  },
  {
    "dataset": "cola",
    "type": "role",
    "origin_acc": 81.5,
    "origin_prompt": "Functioning as a grammar evaluation tool, analyze the given sentence and decide if it is grammatically correct, responding with 'acceptable' or 'unacceptable'.",
    "atk_acc": 79.5,
    "atk_prompt": "Functioning as a grammar evaluation tool, analyze the given sentence and decide if it is grammatically correct, responding with 'acceptable' or 'unacceptable' 6a4Yn3RGVc. ",
    "transfer_ori_acc": 75.2,
    "transfer_atk_acc": 76.3
  },
  {
    "dataset": "mrpc",
    "type": "task",
    "origin_acc": 74.0,
    "origin_prompt": "Assess if the two given sentences have equivalent meanings by selecting 'equivalent' or 'not_equivalent'.",
    "atk_acc": 68.5,
    "atk_prompt": "Assess if the two given sentences have equivalent meanings by selecting 'equivalent' or 'not_equivalent' LkF0FZxMZ4. ",
    "transfer_ori_acc": 82.84313725490196,
    "transfer_atk_acc": 82.59803921568627
  },
  {
    "dataset": "mrpc",
    "type": "role",
    "origin_acc": 72.0,
    "origin_prompt": "As a linguistic comparator, review the following pair of sentences and determine their semantic equivalence by choosing 'equivalent' or 'not_equivalent'.",
    "atk_acc": 68.0,
    "atk_prompt": "As a linguistic comparator, review the following pair of sentences and determine their semantic equivalence by choosing 'equivalent' or 'not_equivalent' bhrRSokrfa. ",
    "transfer_ori_acc": 79.90196078431373,
    "transfer_atk_acc": 80.63725490196079
  },
  {
    "dataset": "qnli",
    "type": "task",
    "origin_acc": 84.5,
    "origin_prompt": "Examine the context and question, and determine if the context logically implies the answer by selecting 'entailment' or 'not_entailment'.",
    "atk_acc": 80.5,
    "atk_prompt": "Examine the context and question, and determine if the context logically implies the answer by selecting 'entailment' or 'not_entailment' 9Q3S1F94fE. ",
    "transfer_ori_acc": 86.5,
    "transfer_atk_acc": 86.5
  },
  {
    "dataset": "qnli",
    "type": "role",
    "origin_acc": 84.5,
    "origin_prompt": "As a textual inference expert, analyze if the answer to the question can be deduced from the provided context and select 'entailment' or 'not_entailment'.",
    "atk_acc": 81.0,
    "atk_prompt": "As a textual inference expert, analyze if the answer to the question can be deduced from the provided context and select 'entailment' or 'not_entailment' YHQiDfFhw4. ",
    "transfer_ori_acc": 77.2,
    "transfer_atk_acc": 74.2
  },
  {
    "dataset": "qqp",
    "type": "task",
    "origin_acc": 82.0,
    "origin_prompt": "Can these two statements be considered equal in meaning? Answer with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 79.0,
    "atk_prompt": "Can these two statements be considered equal in meaning? Answer with 'equivalent' or 'not_equivalent' xikCjkMydH. ",
    "transfer_ori_acc": 87.6,
    "transfer_atk_acc": 87.8
  },
  {
    "dataset": "qqp",
    "type": "role",
    "origin_acc": 83.0,
    "origin_prompt": "In the capacity of a question assessment system, indicate if the meaning of the provided questions is the same, responding with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 81.5,
    "atk_prompt": "In the capacity of a question assessment system, indicate if the meaning of the provided questions is the same, responding with 'equivalent' or 'not_equivalent' 5XeflW1ZJc. ",
    "transfer_ori_acc": 86.1,
    "transfer_atk_acc": 85.9
  },
  {
    "dataset": "rte",
    "type": "task",
    "origin_acc": 84.5,
    "origin_prompt": "Review the two statements and categorize their relationship as either 'entailment' or 'not_entailment'.",
    "atk_acc": 80.5,
    "atk_prompt": "Review the two statements and categorize their relationship as either 'entailment' or 'not_entailment' 5XeflW1ZJc. ",
    "transfer_ori_acc": 84.83754512635379,
    "transfer_atk_acc": 85.5595667870036
  },
  {
    "dataset": "rte",
    "type": "role",
    "origin_acc": 84.0,
    "origin_prompt": "In your role as an entailment analysis tool, assess the relationship between the given sentences and classify it as 'entailment' or 'not_entailment'.",
    "atk_acc": 80.5,
    "atk_prompt": "In your role as an entailment analysis tool, assess the relationship between the given sentences and classify it as 'entailment' or 'not_entailment' bhrRSokrfa. ",
    "transfer_ori_acc": 83.03249097472924,
    "transfer_atk_acc": 83.03249097472924
  },
  {
    "dataset": "sst2",
    "type": "task",
    "origin_acc": 96.0,
    "origin_prompt": "Read the provided excerpt and choose between 'positive' and 'negative' to describe its sentiment.",
    "atk_acc": 77.5,
    "atk_prompt": "go the read excerpt and be between 'positive' and 'negative' to describing its mood. ",
    "transfer_ori_acc": 94.72477064220183,
    "transfer_atk_acc": 94.61009174311926
  },
  {
    "dataset": "sst2",
    "type": "role",
    "origin_acc": 97.5,
    "origin_prompt": "In the capacity of a sentiment classifier, decide whether the given quote is 'positive' or 'negative'.",
    "atk_acc": 13.5,
    "atk_prompt": "in the capacity of a sentiment classizer, chose neither the said quotes is 'positive' or 'negative'. ",
    "transfer_ori_acc": 94.95412844036697,
    "transfer_atk_acc": 94.95412844036697
  },
  {
    "dataset": "wnli",
    "type": "task",
    "origin_acc": 69.01,
    "origin_prompt": "Indicate if the connection between the following sentences is 'entailment' or 'not_entailment'.",
    "atk_acc": 61.97,
    "atk_prompt": "Indicate if the connection between the followping senZences is 'entailment' or 'not_entailment'. ",
    "transfer_ori_acc": 64.7887323943662,
    "transfer_atk_acc": 69.01408450704226
  },
  {
    "dataset": "wnli",
    "type": "role",
    "origin_acc": 66.2,
    "origin_prompt": "In your role as an entailment analysis tool, assess the relationship between the given sentences and classify it as 'entailment' or 'not_entailment'.",
    "atk_acc": 57.75,
    "atk_prompt": "In your role as an entailment analysis tool, asress the rlationship between the given sentences and classify it as 'entailment' or 'not_entailment'. ",
    "transfer_ori_acc": 57.74647887323944,
    "transfer_atk_acc": 54.929577464788736
  },
  {
    "dataset": "mmlu",
    "type": "task",
    "origin_acc": 66.0,
    "origin_prompt": "Please respond to the multiple-choice question about {} by selecting the appropriate answer: 'A', 'B', 'C', or 'D'.",
    "atk_acc": 63.0,
    "atk_prompt": "Please respond to the multiple-choice question about {} by selecting the appropriate answer: 'A', 'B', 'C', or 'D' MQnugHcaoy. ",
    "transfer_ori_acc": 45.21276595744681,
    "transfer_atk_acc": 45.21276595744681
  },
  {
    "dataset": "mmlu",
    "type": "role",
    "origin_acc": 65.5,
    "origin_prompt": "As someone well-versed in {}, please address the multiple-choice question below by selecting 'A', 'B', 'C', or 'D'.",
    "atk_acc": 63.0,
    "atk_prompt": "As someone well-versed in {}, please address the multiple-choice question below by selecting 'A', 'B', 'C', or 'D' mOJ9nEwT4f.",
    "transfer_ori_acc": 46.63120567375886,
    "transfer_atk_acc": 46.98581560283688
  },
  {
    "dataset": "squad_v2",
    "type": "task",
    "origin_acc": 73.25,
    "origin_prompt": "Based on the given context, provide the best possible answer. If there's no answer available in the context, reply with 'unanswerable'.",
    "atk_acc": 71.09,
    "atk_prompt": "Based on the given context, provide the best possible answer. If there's no answer available in the context, reply with 'unanswerable' ItGDrrA1Sc.",
    "transfer_ori_acc": 87.04992770387507,
    "transfer_atk_acc": 87.04992770387507
  },
  {
    "dataset": "squad_v2",
    "type": "role",
    "origin_acc": 72.63,
    "origin_prompt": "As a proficient expert in the given context, search for the most relevant answer. If the answer cannot be found, respond by saying 'unanswerable'.",
    "atk_acc": 63.07,
    "atk_prompt": "As a proficient expert in the given context, search for the most relevant answer. If the answer cannot be found, respond by saying 'unanswerable' XStsD2pUzu.",
    "transfer_ori_acc": 87.31659437054175,
    "transfer_atk_acc": 87.31659437054175
  },
  {
    "dataset": "iwslt",
    "type": "task",
    "origin_acc": 0.24,
    "origin_prompt": "Convert the following sentence from its original {} language to the target language {}.",
    "atk_acc": 0.22,
    "atk_prompt": "Convert the following sentence from its original {} language to the target language 5XeflW1ZJc {}. ",
    "transfer_ori_acc": 0.2240211126734209,
    "transfer_atk_acc": 0.22156249809369105
  },
  {
    "dataset": "iwslt",
    "type": "role",
    "origin_acc": 0.24,
    "origin_prompt": "In the role of a language expert, perform a machine translation for the provided sentence, changing it from {} to {}.",
    "atk_acc": 0.23,
    "atk_prompt": "In the role of a language expert, perform a machine translation for the provided sentence, changing it from {} to MQnugHcaoy {}. ",
    "transfer_ori_acc": 0.22070346732323043,
    "transfer_atk_acc": 0.2112357399871226
  },
  {
    "dataset": "un_multi",
    "type": "task",
    "origin_acc": 0.35,
    "origin_prompt": "Perform a machine translation for the provided sentence, changing it from {} to {}.",
    "atk_acc": 0.34,
    "atk_prompt": "Perform a machine translation for the provided sentence, changing it from {} to Kw6nrs57gH {}. ",
    "transfer_ori_acc": 0.30870734220870066,
    "transfer_atk_acc": 0.3125050772206132
  },
  {
    "dataset": "un_multi",
    "type": "role",
    "origin_acc": 0.35,
    "origin_prompt": "As a qualified translator, convert the given sentence from its original {} language to the target language {}.",
    "atk_acc": 0.31,
    "atk_prompt": "As a qualified translator, convert the given sentence from its original {} language to the target language 5XeflW1ZJc {}. ",
    "transfer_ori_acc": 0.31127691224033316,
    "transfer_atk_acc": 0.31337641480593154
  },
  {
    "dataset": "math",
    "type": "task",
    "origin_acc": 23.12,
    "origin_prompt": "Calculate the answer to the following math query about {}.",
    "atk_acc": 19.38,
    "atk_prompt": "Calculate the answer to the following math query about 5XeflW1ZJc {}. ",
    "transfer_ori_acc": 16.25,
    "transfer_atk_acc": 17.5
  },
  {
    "dataset": "math",
    "type": "role",
    "origin_acc": 22.5,
    "origin_prompt": "As a mathematics instructor, calculate the answer to the following problem related to {}.",
    "atk_acc": 21.25,
    "atk_prompt": "As a mathematics instructor, calculate the answer to the following problem related to AMsRIKZniY {}. ",
    "transfer_ori_acc": 17.5,
    "transfer_atk_acc": 17.5
  }
]