Languages: code
Size Categories: 10K<n<100K
Language Creators: found
Annotations Creators: found
Source Datasets: original
albertvillanova (HF staff) committed
Commit: e5bc9f0
Parent: a618859

Delete loading script

Files changed (1):
  1. code_x_glue_cc_defect_detection.py +0 -78
code_x_glue_cc_defect_detection.py DELETED
@@ -1,78 +0,0 @@
-from typing import List
-
-import datasets
-
-from .common import TrainValidTestChild
-from .generated_definitions import DEFINITIONS
-
-
-_DESCRIPTION = """Given a source code, the task is to identify whether it is an insecure code that may attack software systems, such as resource leaks, use-after-free vulnerabilities and DoS attack. We treat the task as binary classification (0/1), where 1 stands for insecure code and 0 for secure code.
-The dataset we use comes from the paper Devign: Effective Vulnerability Identification by Learning Comprehensive Program Semantics via Graph Neural Networks. We combine all projects and split 80%/10%/10% for training/dev/test."""
-_CITATION = """@inproceedings{zhou2019devign,
-title={Devign: Effective vulnerability identification by learning comprehensive program semantics via graph neural networks},
-author={Zhou, Yaqin and Liu, Shangqing and Siow, Jingkai and Du, Xiaoning and Liu, Yang},
-booktitle={Advances in Neural Information Processing Systems},
-pages={10197--10207}, year={2019}}"""
-
-
-class CodeXGlueCcDefectDetectionImpl(TrainValidTestChild):
-    _DESCRIPTION = _DESCRIPTION
-    _CITATION = _CITATION
-
-    _FEATURES = {
-        "id": datasets.Value("int32"),  # Index of the sample
-        "func": datasets.Value("string"),  # The source code
-        "target": datasets.Value("bool"),  # 0 or 1 (vulnerability or not)
-        "project": datasets.Value("string"),  # Original project that contains this code
-        "commit_id": datasets.Value("string"),  # Commit identifier in the original project
-    }
-    _SUPERVISED_KEYS = ["target"]
-
-    def generate_urls(self, split_name):
-        yield "index", f"{split_name}.txt"
-        yield "data", "function.json"
-
-    def _generate_examples(self, split_name, file_paths):
-        import json
-
-        js_all = json.load(open(file_paths["data"], encoding="utf-8"))
-
-        index = set()
-        with open(file_paths["index"], encoding="utf-8") as f:
-            for line in f:
-                line = line.strip()
-                index.add(int(line))
-
-        for idx, js in enumerate(js_all):
-            if idx in index:
-                js["id"] = idx
-                js["target"] = int(js["target"]) == 1
-                yield idx, js
-
-
-CLASS_MAPPING = {
-    "CodeXGlueCcDefectDetection": CodeXGlueCcDefectDetectionImpl,
-}
-
-
-class CodeXGlueCcDefectDetection(datasets.GeneratorBasedBuilder):
-    BUILDER_CONFIG_CLASS = datasets.BuilderConfig
-    BUILDER_CONFIGS = [
-        datasets.BuilderConfig(name=name, description=info["description"]) for name, info in DEFINITIONS.items()
-    ]
-
-    def _info(self):
-        name = self.config.name
-        info = DEFINITIONS[name]
-        if info["class_name"] in CLASS_MAPPING:
-            self.child = CLASS_MAPPING[info["class_name"]](info)
-        else:
-            raise RuntimeError(f"Unknown python class for dataset configuration {name}")
-        ret = self.child._info()
-        return ret
-
-    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
-        return self.child._split_generators(dl_manager=dl_manager)
-
-    def _generate_examples(self, split_name, file_paths):
-        return self.child._generate_examples(split_name, file_paths)
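
With the loading script removed, the data is expected to be served directly from data files hosted in the repository, so the dataset can still be loaded by name. Below is a minimal sketch, assuming the dataset id code_x_glue_cc_defect_detection still resolves on the Hub and that the train/validation/test splits and the features declared in the deleted script (id, func, target, project, commit_id) are unchanged.

from datasets import load_dataset

# Assumption: this dataset id remains valid after the script-to-data-files
# conversion; adjust it if the repository was moved or renamed.
ds = load_dataset("code_x_glue_cc_defect_detection", split="train")

# Fields declared in the deleted script's _FEATURES:
# id (int32), func (string), target (bool), project (string), commit_id (string)
example = ds[0]
print(example["id"], example["project"], example["target"])
print(example["func"][:200])  # first 200 characters of the source code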