File size: 1,580 Bytes
6d7d1e3
9b970f9
72b6360
9b970f9
6d7d1e3
aae7167
e2828b6
0fa8793
6d7d1e3
 
c6f1559
6d7d1e3
 
 
32cfbbc
 
 
 
 
 
 
 
26647e7
 
 
 
 
 
 
 
 
32cfbbc
 
 
 
 
aac9ec9
 
 
 
 
 
 
 
 
32cfbbc
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
from datasets.features import Features, Sequence, Value
from evaluate.module import EvaluationModuleInfo
from evaluate.evaluation_suite import SubTask
import evaluate


class Suite(evaluate.EvaluationSuite):
    """Dummy evaluation suite used in tests.

    Declares two text-classification sub-tasks: accuracy over the full IMDB
    test split, and the first 10 rows of the SST-2 test split (the latter
    currently without task arguments — see the note in ``setup``).
    """

    def _info(self):
        """Return suite metadata; ``features`` declares the prediction/reference
        schema consumed by the underlying metric."""
        return EvaluationModuleInfo(
            module_type="evaluation_suite",
            description="dummy metric for tests",
            citation="insert citation here",
            features=Features({"predictions": Value("int64"), "references": Value("int64")}),
        )

    def setup(self):
        """Build ``self.suite``, the list of SubTasks this suite evaluates."""
        # No preprocessing is applied. Supply a callable here (e.g. one that
        # lower-cases x["text"]) to transform examples before evaluation.
        self.preprocessor = None
        self.suite = [
            SubTask(
                data="imdb",
                split="test",
                data_preprocessor=self.preprocessor,
                args_for_task={
                    "metric": "accuracy",
                    "input_column": "text",
                    "label_column": "label",
                    # Map pipeline string labels to the dataset's integer labels.
                    "label_mapping": {
                        "LABEL_0": 0.0,
                        "LABEL_1": 1.0,
                    },
                },
            ),
            # NOTE(review): this SubTask is intentionally left without
            # args_for_task to preserve existing test behavior. The previously
            # commented-out configuration mirrored the IMDB task but with
            # input_column="sentence" — confirm whether it should be enabled.
            SubTask(
                data="sst2",
                split="test[:10]",
                data_preprocessor=self.preprocessor,
            ),
        ]