yilunzhao committed
Commit 5732810
1 Parent(s): 6fad5a0

Create knowledgemath.py

Files changed (1)
  1. knowledgemath.py +87 -0
knowledgemath.py ADDED
@@ -0,0 +1,87 @@
+ # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ import json
+
+ import datasets
+
+ _LICENSE = "MIT License"
+
+ _HOMEPAGE = "https://huggingface.co/datasets/yale-nlp/KnowledgeMath"
+
+ # The Hugging Face Datasets library doesn't host the dataset itself but only points to the original files.
+ # The KnowledgeMath splits are stored as JSON files in the dataset repository on the Hub.
+ _VALIDATION_URL = "https://huggingface.co/datasets/yale-nlp/KnowledgeMath/raw/main/validation.json"
+ _TEST_URL = "https://huggingface.co/datasets/yale-nlp/KnowledgeMath/raw/main/test.json"
+
+
+ class KnowledgeMath(datasets.GeneratorBasedBuilder):
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name="main",
+         )
+     ]
+
+     # It's not mandatory to have a default configuration; use one only if it makes sense.
+     DEFAULT_CONFIG_NAME = "main"
+
+     def _info(self):
+         features = datasets.Features(
+             {
+                 "question_id": datasets.Value("string"),
+                 "question": datasets.Value("string"),
+                 "tables": datasets.features.Sequence(datasets.Value("string")),
+                 "topic": datasets.Value("string"),
+                 "ground_truth": datasets.Value("float64"),
+                 "python_solution": datasets.Value("string"),
+             }
+         )
+         return datasets.DatasetInfo(
+             features=features,
+             homepage=_HOMEPAGE,
+             license=_LICENSE,
+         )
+
+     def _split_generators(self, dl_manager):
+         # `dl_manager.download` fetches the remote JSON files and returns local cached paths.
+         validation_path = dl_manager.download(_VALIDATION_URL)
+         test_path = dl_manager.download(_TEST_URL)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.VALIDATION,
+                 # These kwargs will be passed to `_generate_examples`
+                 gen_kwargs={"filepath": validation_path},
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 # These kwargs will be passed to `_generate_examples`
+                 gen_kwargs={"filepath": test_path},
+             ),
+         ]
+
+     # Method parameters are unpacked from `gen_kwargs` as given in `_split_generators`.
+     def _generate_examples(self, filepath):
+         # The key is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
+         with open(filepath, encoding="utf-8") as f:
+             qa_data = json.load(f)
+         for idx, example in enumerate(qa_data):
+             yield idx, {
+                 "question_id": example["question_id"],
+                 "question": example["question"],
+                 "tables": example["tables"],
+                 "topic": example["topic"],
+                 "ground_truth": example["ground_truth"],
+                 "python_solution": example["python_solution"],
+             }