[
  {
    "dataset": "cola",
    "type": "task",
    "origin_acc": 86.4,
    "origin_prompt": "Review the sentence below and identify whether its grammar is 'Acceptable' or 'Unacceptable'.",
    "atk_acc": 85.6,
    "atk_prompt": "Review the sentence below and identify whether its grammar is 'Acceptable' or 'Unacceptable' Q47Jmd4lMV. ",
    "transfer_ori_acc": 80.5,
    "transfer_atk_acc": 80.5
  },
  {
    "dataset": "cola",
    "type": "role",
    "origin_acc": 86.6,
    "origin_prompt": "While performing grammar analysis, classify the grammar of the following sentence as 'acceptable' for correct grammar or 'unacceptable' for incorrect grammar.",
    "atk_acc": 86.2,
    "atk_prompt": "While performing grammar analysis, classify the grammar of the following sentence as 'acceptable' for correct grammar or 'unacceptable' for incorrect grammar EAB4KP2NVY. ",
    "transfer_ori_acc": 81.5,
    "transfer_atk_acc": 81.5
  },
  {
    "dataset": "mrpc",
    "type": "task",
    "origin_acc": 86.27,
    "origin_prompt": "Do these two sentences have the same underlying meaning? Respond with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 85.78,
    "atk_prompt": "Do these two sentences have the same underlying meaning? Respond with 'equivalent' or 'not_equivalent' 6a4Yn3RGVc. ",
    "transfer_ori_acc": 68.5,
    "transfer_atk_acc": 72.0
  },
  {
    "dataset": "mrpc",
    "type": "role",
    "origin_acc": 87.5,
    "origin_prompt": "As a textual similarity evaluator, analyze the following pair of sentences and determine if they are semantically 'equivalent' or 'not_equivalent'.",
    "atk_acc": 87.25,
    "atk_prompt": "As a textual similarity evaluator, analyze the following pair of sentences and determine if they are semantically 'equivalent' or 'not_equivalent' ofw9fEkN5R. ",
    "transfer_ori_acc": 73.0,
    "transfer_atk_acc": 71.0
  },
  {
    "dataset": "qnli",
    "type": "task",
    "origin_acc": 94.0,
    "origin_prompt": "Analyze the context and question, and indicate if the context entails the answer by choosing 'entailment' or 'not_entailment'.",
    "atk_acc": 93.4,
    "atk_prompt": "Analyze the context and question, and indicate if the context entails the answer by choosing 'entailment' or 'not_entailment' lf8wBa2yEm. ",
    "transfer_ori_acc": 80.0,
    "transfer_atk_acc": 79.0
  },
  {
    "dataset": "qnli",
    "type": "role",
    "origin_acc": 93.7,
    "origin_prompt": "In your role as a semantic evaluator, determine if the provided context justifies the answer to the question and answer with 'entailment' or 'not_entailment'.",
    "atk_acc": 93.5,
    "atk_prompt": "In your role as a semantic evaluator, determine if the provided context justifies the answer to the question and answer with 'entailment' or 'not_entailment' zq0DcZ5dnI. ",
    "transfer_ori_acc": 81.0,
    "transfer_atk_acc": 83.0
  },
  {
    "dataset": "qqp",
    "type": "task",
    "origin_acc": 88.5,
    "origin_prompt": "Are the following two questions equivalent or not? Answer me with \"equivalent\" or \"not_equivalent\".",
    "atk_acc": 88.4,
    "atk_prompt": "Are the following two questions equivalent or not? Answer me with \"equivalent\" or \"not_equivalent Cev0s49fbe\". ",
    "transfer_ori_acc": 83.0,
    "transfer_atk_acc": 82.0
  },
  {
    "dataset": "qqp",
    "type": "role",
    "origin_acc": 89.1,
    "origin_prompt": "Functioning as a question similarity evaluation tool, analyze the given questions and decide if they share the same meaning, responding with 'equivalent' or 'not_equivalent'.",
    "atk_acc": 88.7,
    "atk_prompt": "Functioning as a question similarity evaluation tool, analyze the given questions and decide if they share the same meaning, responding with 'equivalent' or 'not_equivalent' KjPJJ2a7RB. ",
    "transfer_ori_acc": 82.5,
    "transfer_atk_acc": 83.0
  },
  {
    "dataset": "rte",
    "type": "task",
    "origin_acc": 93.5,
    "origin_prompt": "Identify whether the given pair of sentences demonstrates entailment or not_entailment. Answer with 'entailment' or 'not_entailment'.",
    "atk_acc": 92.78,
    "atk_prompt": "Identify whether the given pair of sentences demonstrates entailment or not_entailment. Answer with 'entailment' or 'not_entailment' d6ZQ3u0GBQ. ",
    "transfer_ori_acc": 82.0,
    "transfer_atk_acc": 81.5
  },
  {
    "dataset": "rte",
    "type": "role",
    "origin_acc": 93.5,
    "origin_prompt": "Acting as an entailment detection instrument, determine if the given pair of sentences demonstrates entailment or not_entailment. Answer with 'entailment' or 'not_entailment'.",
    "atk_acc": 93.14,
    "atk_prompt": "Acting as an entailment detection instrument, determine if the given pair of sentences demonstrates entailment or not_entailment. Answer with 'entailment' or 'not_entailment' BWmsLx8xOA. ",
    "transfer_ori_acc": 83.0,
    "transfer_atk_acc": 82.0
  },
  {
    "dataset": "sst2",
    "type": "task",
    "origin_acc": 96.79,
    "origin_prompt": "Evaluate the sentiment of the given text and classify it as 'positive' or 'negative'.",
    "atk_acc": 96.56,
    "atk_prompt": "Evaluate the sentiment of the given text and classify it as 'positive' or 'negative' Cev0s49fbe. ",
    "transfer_ori_acc": 95.5,
    "transfer_atk_acc": 97.0
  },
  {
    "dataset": "sst2",
    "type": "role",
    "origin_acc": 96.56,
    "origin_prompt": "As a sentiment classifier, determine whether the following text is 'positive' or 'negative'.",
    "atk_acc": 96.44,
    "atk_prompt": "As a sentiment classifier, determine whether the following text is 'positive' or 'negative' 6a4Yn3RGVc. ",
    "transfer_ori_acc": 97.0,
    "transfer_atk_acc": 97.5
  },
  {
    "dataset": "wnli",
    "type": "task",
    "origin_acc": 77.46,
    "origin_prompt": "Assess the connection between the following sentences and classify it as 'entailment' or 'not_entailment'.",
    "atk_acc": 76.06,
    "atk_prompt": "Assess the connection between the following sentences and classify it as 'entailment' or 'not_entailment' 5yWbBXztUY. ",
    "transfer_ori_acc": 64.7887323943662,
    "transfer_atk_acc": 64.7887323943662
  },
  {
    "dataset": "wnli",
    "type": "role",
    "origin_acc": 76.06,
    "origin_prompt": "In your role as an entailment analysis tool, assess the relationship between the given sentences and classify it as 'entailment' or 'not_entailment'.",
    "atk_acc": 76.06,
    "atk_prompt": "In your role as an entailment analysis tool, assess the relationship between the given sentences and classify it as 'entailment' or 'not_entailment' 5XeflW1ZJc. ",
    "transfer_ori_acc": 61.97183098591549,
    "transfer_atk_acc": 61.97183098591549
  },
  {
    "dataset": "mmlu",
    "type": "task",
    "origin_acc": 53.9,
    "origin_prompt": "Below are multiple-choice question concerning {}. Indicate your response with 'A', 'B', 'C', or 'D'.",
    "atk_acc": 52.84,
    "atk_prompt": "Below are multiple-choice question concerning {}. Indicate your response with 'A', 'B', 'C', or 'D' RcRneWo6Iv. ",
    "transfer_ori_acc": 65.5,
    "transfer_atk_acc": 66.0
  },
  {
    "dataset": "mmlu",
    "type": "role",
    "origin_acc": 54.08,
    "origin_prompt": "Considering your familiarity with {}, attend to the following multiple-choice question by picking 'A', 'B', 'C', or 'D'.",
    "atk_acc": 53.01,
    "atk_prompt": "Considering your familiarity with {}, attend to the following multiple-choice question by picking 'A', 'B', 'C', or 'D' Q47Jmd4lMV.",
    "transfer_ori_acc": 66.0,
    "transfer_atk_acc": 64.5
  },
  {
    "dataset": "squad_v2",
    "type": "task",
    "origin_acc": 91.05,
    "origin_prompt": "Identify the most relevant answer from the context. If it's not possible to find an answer, state 'unanswerable'.",
    "atk_acc": 90.05,
    "atk_prompt": "Identify the most relevant answer from the context. If it's not possible to find an answer, state 'unanswerable' 5yWbBXztUY.",
    "transfer_ori_acc": 71.9741019997885,
    "transfer_atk_acc": 71.16240427722121
  },
  {
    "dataset": "squad_v2",
    "type": "role",
    "origin_acc": 90.55,
    "origin_prompt": "Drawing upon your expertise in the context, determine the most suitable answer. If an answer isn't available, state 'unanswerable'.",
    "atk_acc": 89.38,
    "atk_prompt": "Drawing upon your expertise in the context, determine the most suitable answer. If an answer isn't available, state 'unanswerable' SvAp8RlOFn.",
    "transfer_ori_acc": 72.70309414291108,
    "transfer_atk_acc": 73.24261281137713
  },
  {
    "dataset": "iwslt",
    "type": "task",
    "origin_acc": 0.24,
    "origin_prompt": "Convert the following sentence from its original {} language to the target language {}.",
    "atk_acc": 0.19,
    "atk_prompt": "Convert the following sentence from its original {} language to the target language lWJoGGRdjv {}. ",
    "transfer_ori_acc": 0.23975721537241335,
    "transfer_atk_acc": 0.2340884710610463
  },
  {
    "dataset": "iwslt",
    "type": "role",
    "origin_acc": 0.27,
    "origin_prompt": "Taking on the role of a certified translator, perform a translation of the given sentence from {} to {}.",
    "atk_acc": 0.23,
    "atk_prompt": "Taking on the role of a certified translator, perform a translation of the given sentence from {} to njCuciQJpB {}. ",
    "transfer_ori_acc": 0.24270720862925335,
    "transfer_atk_acc": 0.24291215074105674
  },
  {
    "dataset": "un_multi",
    "type": "task",
    "origin_acc": 0.36,
    "origin_prompt": "Perform a machine translation for the provided sentence, changing it from {} to {}.",
    "atk_acc": 0.35,
    "atk_prompt": "Perform a machine translation for the provided sentence, changing it from {} to EAB4KP2NVY {}. ",
    "transfer_ori_acc": 0.3515156886358837,
    "transfer_atk_acc": 0.3412957690438725
  },
  {
    "dataset": "un_multi",
    "type": "role",
    "origin_acc": 0.36,
    "origin_prompt": "As a skilled linguist, please convert the following sentence from {} to {}.",
    "atk_acc": 0.35,
    "atk_prompt": "As a skilled linguist, please convert the following sentence from {} to AMsRIKZniY {}. ",
    "transfer_ori_acc": 0.3494041756455571,
    "transfer_atk_acc": 0.33220750639550883
  },
  {
    "dataset": "math",
    "type": "task",
    "origin_acc": 16.88,
    "origin_prompt": "Figure out the solution for the following mathematical task with {}.",
    "atk_acc": 15.0,
    "atk_prompt": "Figure out the solution for the following mathematical task with LkF0FZxMZ4 {}. ",
    "transfer_ori_acc": 21.25,
    "transfer_atk_acc": 22.5
  },
  {
    "dataset": "math",
    "type": "role",
    "origin_acc": 16.25,
    "origin_prompt": "Act as a math expert and compute the result of the following mathematical task about {}.",
    "atk_acc": 15.0,
    "atk_prompt": "Act as a math expert and compute the result of the following mathematical task about 5XeflW1ZJc {}. ",
    "transfer_ori_acc": 19.375,
    "transfer_atk_acc": 20.0
  }
] |