albertvillanova HF staff committed on
Commit
69bd48c
1 Parent(s): 33216bd

Convert dataset to Parquet (#3)


- Convert dataset to Parquet (a618859d5c7a7aed0c495d4b9578fd64b77475b4)
- Delete loading script (e5bc9f04f53ed043139d789b7a7490a503bb7763)
- Delete loading script auxiliary file (99c7ac386ba24b7411832d80e9eb9c5897ae9a9a)
- Delete loading script auxiliary file (6f91e2cd21a741b56f5f80fed63079c98c27f1f5)
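With the loading script removed, the dataset is now served directly from the Parquet shards added below. A minimal sketch (not part of this commit) of loading it after the conversion, assuming the Hub dataset id matches the card name `code_x_glue_cc_defect_detection`:

```python
from datasets import load_dataset

# Assumed repo id; adjust to the actual namespace if the dataset lives under an organization.
ds = load_dataset("code_x_glue_cc_defect_detection")

print(ds)                     # train / validation / test splits (21854 / 2732 / 2732 rows per the README)
example = ds["train"][0]
print(example["func"][:200])  # source code of the function
print(example["target"])      # True for insecure code, False for secure code
```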

README.md CHANGED
@@ -32,16 +32,25 @@ dataset_info:
     dtype: string
   splits:
   - name: train
-    num_bytes: 45723487
+    num_bytes: 45723451
     num_examples: 21854
   - name: validation
-    num_bytes: 5582545
+    num_bytes: 5582533
     num_examples: 2732
   - name: test
-    num_bytes: 5646752
+    num_bytes: 5646740
     num_examples: 2732
-  download_size: 61685715
-  dataset_size: 56952784
+  download_size: 22289955
+  dataset_size: 56952724
+configs:
+- config_name: default
+  data_files:
+  - split: train
+    path: data/train-*
+  - split: validation
+    path: data/validation-*
+  - split: test
+    path: data/test-*
 ---
 # Dataset Card for "code_x_glue_cc_defect_detection"

code_x_glue_cc_defect_detection.py DELETED
@@ -1,78 +0,0 @@
-from typing import List
-
-import datasets
-
-from .common import TrainValidTestChild
-from .generated_definitions import DEFINITIONS
-
-
-_DESCRIPTION = """Given a source code, the task is to identify whether it is an insecure code that may attack software systems, such as resource leaks, use-after-free vulnerabilities and DoS attack. We treat the task as binary classification (0/1), where 1 stands for insecure code and 0 for secure code.
-The dataset we use comes from the paper Devign: Effective Vulnerability Identification by Learning Comprehensive Program Semantics via Graph Neural Networks. We combine all projects and split 80%/10%/10% for training/dev/test."""
-_CITATION = """@inproceedings{zhou2019devign,
-title={Devign: Effective vulnerability identification by learning comprehensive program semantics via graph neural networks},
-author={Zhou, Yaqin and Liu, Shangqing and Siow, Jingkai and Du, Xiaoning and Liu, Yang},
-booktitle={Advances in Neural Information Processing Systems},
-pages={10197--10207}, year={2019}"""
-
-
-class CodeXGlueCcDefectDetectionImpl(TrainValidTestChild):
-    _DESCRIPTION = _DESCRIPTION
-    _CITATION = _CITATION
-
-    _FEATURES = {
-        "id": datasets.Value("int32"),  # Index of the sample
-        "func": datasets.Value("string"),  # The source code
-        "target": datasets.Value("bool"),  # 0 or 1 (vulnerability or not)
-        "project": datasets.Value("string"),  # Original project that contains this code
-        "commit_id": datasets.Value("string"),  # Commit identifier in the original project
-    }
-    _SUPERVISED_KEYS = ["target"]
-
-    def generate_urls(self, split_name):
-        yield "index", f"{split_name}.txt"
-        yield "data", "function.json"
-
-    def _generate_examples(self, split_name, file_paths):
-        import json
-
-        js_all = json.load(open(file_paths["data"], encoding="utf-8"))
-
-        index = set()
-        with open(file_paths["index"], encoding="utf-8") as f:
-            for line in f:
-                line = line.strip()
-                index.add(int(line))
-
-        for idx, js in enumerate(js_all):
-            if idx in index:
-                js["id"] = idx
-                js["target"] = int(js["target"]) == 1
-                yield idx, js
-
-
-CLASS_MAPPING = {
-    "CodeXGlueCcDefectDetection": CodeXGlueCcDefectDetectionImpl,
-}
-
-
-class CodeXGlueCcDefectDetection(datasets.GeneratorBasedBuilder):
-    BUILDER_CONFIG_CLASS = datasets.BuilderConfig
-    BUILDER_CONFIGS = [
-        datasets.BuilderConfig(name=name, description=info["description"]) for name, info in DEFINITIONS.items()
-    ]
-
-    def _info(self):
-        name = self.config.name
-        info = DEFINITIONS[name]
-        if info["class_name"] in CLASS_MAPPING:
-            self.child = CLASS_MAPPING[info["class_name"]](info)
-        else:
-            raise RuntimeError(f"Unknown python class for dataset configuration {name}")
-        ret = self.child._info()
-        return ret
-
-    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
-        return self.child._split_generators(dl_manager=dl_manager)
-
-    def _generate_examples(self, split_name, file_paths):
-        return self.child._generate_examples(split_name, file_paths)

common.py DELETED
@@ -1,75 +0,0 @@
-from typing import List
-
-import datasets
-
-
-# Citation, taken from https://github.com/microsoft/CodeXGLUE
-_DEFAULT_CITATION = """@article{CodeXGLUE,
-title={CodeXGLUE: A Benchmark Dataset and Open Challenge for Code Intelligence},
-year={2020},}"""
-
-
-class Child:
-    _DESCRIPTION = None
-    _FEATURES = None
-    _CITATION = None
-    SPLITS = {"train": datasets.Split.TRAIN}
-    _SUPERVISED_KEYS = None
-
-    def __init__(self, info):
-        self.info = info
-
-    def homepage(self):
-        return self.info["project_url"]
-
-    def _info(self):
-        # This is the description that will appear on the datasets page.
-        return datasets.DatasetInfo(
-            description=self.info["description"] + "\n\n" + self._DESCRIPTION,
-            features=datasets.Features(self._FEATURES),
-            homepage=self.homepage(),
-            citation=self._CITATION or _DEFAULT_CITATION,
-            supervised_keys=self._SUPERVISED_KEYS,
-        )
-
-    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
-        SPLITS = self.SPLITS
-        _URL = self.info["raw_url"]
-        urls_to_download = {}
-        for split in SPLITS:
-            if split not in urls_to_download:
-                urls_to_download[split] = {}
-
-            for key, url in self.generate_urls(split):
-                if not url.startswith("http"):
-                    url = _URL + "/" + url
-                urls_to_download[split][key] = url
-
-        downloaded_files = {}
-        for k, v in urls_to_download.items():
-            downloaded_files[k] = dl_manager.download_and_extract(v)
-
-        return [
-            datasets.SplitGenerator(
-                name=SPLITS[k],
-                gen_kwargs={"split_name": k, "file_paths": downloaded_files[k]},
-            )
-            for k in SPLITS
-        ]
-
-    def check_empty(self, entries):
-        all_empty = all([v == "" for v in entries.values()])
-        all_non_empty = all([v != "" for v in entries.values()])
-
-        if not all_non_empty and not all_empty:
-            raise RuntimeError("Parallel data files should have the same number of lines.")
-
-        return all_empty
-
-
-class TrainValidTestChild(Child):
-    SPLITS = {
-        "train": datasets.Split.TRAIN,
-        "valid": datasets.Split.VALIDATION,
-        "test": datasets.Split.TEST,
-    }

data/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:259c944eb4f8689e83895de476a00fe4cadf4839636769e725d3744fc36cfcd5
+size 2227970
data/train-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e319c83e2e816a10aeeebe78668aa95b757b04e555700bf5f826766b0e80bb06
+size 17847670
data/validation-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a17a76ed040d7f8657d1ff741f967b44704c008101ac958e2990ad468203cfa4
+size 2214315
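
The three shards above are ordinary Parquet files, so they can also be read outside the `datasets` library. A minimal sketch, assuming the repository has been cloned locally with Git LFS so the pointer files above resolve to the actual data:

```python
import pandas as pd

# Path exactly as added in this commit.
train_df = pd.read_parquet("data/train-00000-of-00001.parquet")

print(len(train_df))     # 21854 rows, matching the README split metadata
print(train_df.columns)  # expected per the deleted loading script: id, func, target, project, commit_id
```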
generated_definitions.py DELETED
@@ -1,12 +0,0 @@
1
- DEFINITIONS = {
2
- "default": {
3
- "class_name": "CodeXGlueCcDefectDetection",
4
- "dataset_type": "Code-Code",
5
- "description": "CodeXGLUE Defect-detection dataset, available at https://github.com/microsoft/CodeXGLUE/tree/main/Code-Code/Defect-detection",
6
- "dir_name": "Defect-detection",
7
- "name": "default",
8
- "project_url": "https://github.com/madlag/CodeXGLUE/tree/main/Code-Code/Defect-detection",
9
- "raw_url": "https://raw.githubusercontent.com/madlag/CodeXGLUE/main/Code-Code/Defect-detection/dataset",
10
- "sizes": {"test": 2732, "train": 21854, "validation": 2732},
11
- }
12
- }