AtlasUnified committed on
Commit
936e2d0
1 Parent(s): e2106f5

Delete atlas-preprocessed-code.py

Browse files
Files changed (1) hide show
  1. atlas-preprocessed-code.py +0 -125
atlas-preprocessed-code.py DELETED
@@ -1,125 +0,0 @@
1
- import json
2
- import datasets
3
- import traceback
4
- import os
5
-
6
- logger = datasets.logging.get_logger(__name__)
7
-
8
- _DESCRIPTION = """\
9
- AtlasCode is a clean-room, fully open-source implementation of the LLaMa dataset.
10
- """
11
-
12
- # Define a list of programming languages for the subsets
13
- _LANGUAGE_SUBSETS = ["c#", "c++", "c", "go", "html", "haskell",
14
- "java", "javascript", "jupyter", "kotlin", "php", "perl",
15
- "python", "ruby", "rust", "shell", "swift", "typescript", "v"]
16
-
17
- # Generate URLs for the subsets
18
- _URL_LISTS = {language: f"https://huggingface.co/datasets/AtlasUnified/atlas-preprocessed-code/blob/main/{language}.jsonl"
19
- for language in _LANGUAGE_SUBSETS}
20
-
21
- _URL_BASE = 'https://huggingface.co/datasets/AtlasUnified/'
22
-
23
- _DATA_DIR = os.environ.get('ATLAS_CODE_DATA_DIR', None)
24
-
25
class AtlasCodeConfig(datasets.BuilderConfig):
    """BuilderConfig for AtlasCode sample."""

    def __init__(self, *args, subsets, **kwargs):
        """BuilderConfig for AtlasCode.

        Args:
            subsets: list of language subset names this config loads.
            *args: positional arguments forwarded to super.
            **kwargs: keyword arguments forwarded to super.
        """
        # Bug fix: positional args were accepted but silently dropped
        # (super() previously received only **kwargs); forward them as
        # the docstring promises.
        super().__init__(*args, **kwargs)
        self.subsets = subsets
35
-
36
class AtlasCode(datasets.GeneratorBasedBuilder):
    """AtlasCode: Reproducing the LLaMA training dataset of over 1.2 trillion tokens. Version 1.0.0."""

    # One config per language subset; each config loads exactly its own subset.
    BUILDER_CONFIGS = [AtlasCodeConfig(name=language,
                                       subsets=[language],
                                       version=datasets.Version("1.0.0", ""),
                                       description=f"AtlasCode {language} subset")
                       for language in _LANGUAGE_SUBSETS]

    def _info(self):
        # Every example carries the raw text, its metadata serialized as a
        # string, and the name of the subset it came from.
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "text": datasets.Value("string"),
                    "meta": datasets.Value("string"),
                    "atlas_code_subset": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
        )

    def _split_generators(self, dl_manager):
        # Download the per-subset listing file for each configured subset.
        # NOTE(review): _URL_LISTS points at `{language}.jsonl` data files,
        # yet each line of the downloaded file is treated below as a URL to
        # a shard — confirm the hosted files really are URL lists and not
        # the data itself.
        url_lists = dl_manager.download_and_extract({
            subset: _URL_LISTS[subset] for subset in self.config.subsets
        })

        urls = {}

        # Each listing file is read line-by-line; every stripped line is
        # taken to be one shard URL.
        for subset, url_list in url_lists.items():
            with open(url_list, encoding="utf-8") as f:
                urls[subset] = [line.strip() for line in f]

        if _DATA_DIR is not None:
            # Local-mirror mode: map each remote URL onto a path under
            # _DATA_DIR by dropping the _URL_BASE prefix components and
            # joining the remaining path segments.
            print(f'Reading data from {_DATA_DIR}')
            url_prefix_slashes = len(_URL_BASE.split('/'))
            downloaded_files = {
                subset: [
                    os.path.join(_DATA_DIR, *url.split('/')[url_prefix_slashes:])
                    for url in url_list
                ]
                for subset, url_list in urls.items()
            }
        else:
            # Remote mode: let the download manager fetch every shard URL.
            downloaded_files = dl_manager.download(urls)

        # Single TRAIN split; _generate_examples receives a dict mapping
        # subset name -> list of local shard paths.
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs = {
                    "files": {
                        subset: downloaded_files[subset]
                        for subset in self.config.subsets
                    }
                }
            )
        ]

    def _generate_examples(self, files):
        """This function returns the examples in the raw (text) form."""
        # `key` is a single monotonically increasing example id across all
        # subsets and shards; it is only advanced after a successful yield.
        key = 0
        for subset in files:
            for path in files[subset]:
                with open(path, encoding="utf-8") as f:
                    for i, row in enumerate(f):
                        try:
                            data = json.loads(row)
                            if "meta" not in data:
                                # No explicit meta: everything except "text"
                                # is re-serialized as the meta string.
                                text = data["text"]
                                del data["text"]
                                yield key, {
                                    "text": text,
                                    "meta": json.dumps(data),
                                    "atlas_code_subset": subset,
                                }
                            else:
                                # Explicit meta field is passed through as-is.
                                # NOTE(review): the "meta" feature is declared
                                # as a string; if a row's "meta" is a JSON
                                # object this will not match — confirm the
                                # data always stores meta as a string.
                                yield key, {
                                    "text": data["text"],
                                    "meta": data["meta"],
                                    "atlas_code_subset": subset,
                                }
                            key += 1
                        except Exception as e:
                            # Log where the bad row came from, then abort the
                            # whole generation (deliberate: corrupt data is
                            # not silently skipped).
                            print(f'Subset: {subset}')
                            print(f'Path: {path}')
                            print(f'Row: {row}')
                            traceback.print_exc()

                            raise e