andstor committed on
Commit
8c469fe
·
verified ·
1 Parent(s): 450cff2

Upload process.py

Browse files
Files changed (1) hide show
  1. process.py +219 -0
process.py ADDED
@@ -0,0 +1,219 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+ """Build source/target training pairs from andstor/defects4j_fixed."""
3
+
4
+ import argparse
5
+ import json
6
+ from collections import defaultdict
7
+ from pathlib import Path
8
+ from typing import Any, Dict, Iterable, List, Tuple
9
+
10
+ from datasets import Dataset, load_dataset
11
+
12
+
13
+ Record = Dict[str, Any]
14
+
15
+
16
def normalize_line_endings(text: str) -> str:
    """Return *text* with CRLF and lone CR endings converted to LF."""
    # Handle CRLF first so the CR pass does not double-convert it.
    unified = text.replace("\r\n", "\n")
    return unified.replace("\r", "\n")
18
+
19
+
20
def collapse_whitespace(text: str) -> str:
    """Collapse whitespace runs in *text* to single spaces; None yields ""."""
    tokens = (text or "").split()
    return " ".join(tokens)
22
+
23
+
24
def class_declaration(class_info: Dict[str, Any]) -> str:
    """Render a Java-style class declaration header ending in an opening brace.

    Reads ``identifier``, ``superclass`` and ``interfaces`` from *class_info*;
    missing or empty entries are simply omitted from the header.
    """
    name = class_info.get("identifier", "UnknownClass")
    parent = (class_info.get("superclass") or "").strip()
    # Collapse whitespace runs in the interface list to single spaces.
    implemented = " ".join((class_info.get("interfaces", "") or "").split())

    header = [f"public class {name}"]
    if parent:
        header.append(f"extends {parent}")
    if implemented:
        header.append(f"implements {implemented}")

    return " ".join(header) + " {"
36
+
37
+
38
def field_lines(class_info: Dict[str, Any]) -> List[str]:
    """Return the non-empty, stripped field declaration strings of the class."""
    return [
        text
        for field in (class_info.get("fields") or [])
        if (text := (field.get("original_string") or "").strip())
    ]
46
+
47
+
48
def method_signature(method_info: Dict[str, Any]) -> str:
    """Return a single-line signature, preferring the dataset's full_signature.

    Falls back to assembling ``modifiers return identifier(parameters)`` from
    the individual fields when ``full_signature`` is absent or blank.
    """

    def squash(text: str) -> str:
        # Collapse any whitespace runs to single spaces; None becomes "".
        return " ".join((text or "").split())

    explicit = (method_info.get("full_signature") or "").strip()
    if explicit:
        return squash(explicit)

    name_and_params = (
        f'{method_info.get("identifier", "unknownMethod")}'
        f'{squash(method_info.get("parameters", "()"))}'
    )
    parts = [
        squash(method_info.get("modifiers", "")),
        squash(method_info.get("return", "")),
        name_and_params,
    ]
    return " ".join(p for p in parts if p)
60
+
61
+
62
def class_key(record: Record) -> Tuple[str, str, str]:
    """Grouping key for a record: (project_id, bug_id, class file-or-identifier)."""
    cls_info = record.get("class", {})
    # Fall back to the class identifier when no file path is recorded.
    file_or_name = cls_info.get("file", cls_info.get("identifier", ""))
    project = record.get("project_id", "")
    bug = record.get("bug_id", "")
    return str(project), str(bug), str(file_or_name)
69
+
70
+
71
def method_key(method_info: Dict[str, Any]) -> str:
    """Deduplication key for a method: its normalized signature string."""
    signature = method_signature(method_info)
    return signature
73
+
74
+
75
def strip_first_signature_line(method_body: str) -> str:
    """Drop everything up to and including the first '{' of a method body.

    Line endings are normalized to LF first; leading/trailing blank lines of
    the remaining body are trimmed. Bodies without a brace are returned
    (normalized and trimmed) unchanged.
    """
    # Inline newline normalization: CRLF first, then lone CR.
    text = method_body.replace("\r\n", "\n").replace("\r", "\n")
    try:
        cut = text.index("{")
    except ValueError:
        return text.strip("\n")
    return text[cut + 1 :].strip("\n")
82
+
83
+
84
def indent_lines(lines: Iterable[str], spaces: int = 4) -> List[str]:
    """Prefix each non-empty line with *spaces* spaces; empty lines stay empty."""
    pad = spaces * " "
    indented: List[str] = []
    for line in lines:
        indented.append(pad + line if line else "")
    return indented
87
+
88
+
89
def with_final_class_brace(target: str) -> str:
    """Append the class's closing brace after trimming trailing newlines."""
    body = target.rstrip("\n")
    # An all-newline (or empty) body collapses to just the brace.
    return f"{body}\n}}" if body else "}"
95
+
96
+
97
def build_source(
    class_info: Dict[str, Any],
    constructors: Iterable[str],
    other_methods: Iterable[str],
    focal_signature: str,
) -> str:
    """Assemble the class-skeleton prompt, ending at the focal method's '{'.

    Sections (declaration, fields, constructor stubs, sibling-method stubs,
    focal signature) are joined by blank lines; empty sections are skipped.
    """

    def stub_section(signatures: Iterable[str]) -> str:
        # Render non-empty signatures as indented ';'-terminated stubs.
        stubs = [f"{sig};" for sig in signatures if sig]
        return "\n".join(indent_lines(stubs)) if stubs else ""

    sections = [class_declaration(class_info)]

    declared_fields = field_lines(class_info)
    if declared_fields:
        sections.append("\n".join(indent_lines(declared_fields)))

    for signature_group in (constructors, other_methods):
        rendered = stub_section(signature_group)
        if rendered:
            sections.append(rendered)

    # NOTE(review): the focal line is indented by a single space as displayed;
    # the original indent may have been wider (HTML collapses space runs) — confirm.
    sections.append(f" {focal_signature} {{")
    return "\n\n".join(sections)
119
+
120
+
121
def build_examples(records: List[Record]) -> List[Dict[str, Any]]:
    """Turn raw dataset records into source/target training pairs.

    For each record, ``source`` is the class skeleton (declaration, fields,
    constructor and sibling-method stubs) opened at the focal method's
    signature, and ``target`` is the focal method body with its signature
    line stripped and the class's closing brace appended.

    Returns one example dict per input record, in input order.
    """
    grouped: Dict[Tuple[str, str, str], List[Record]] = defaultdict(list)
    for record in records:
        grouped[class_key(record)].append(record)

    # Precompute the per-class signature partitions ONCE per class.  The
    # previous version rebuilt both sets for every record of the same class,
    # which was accidentally O(records * class size).  Output is unchanged.
    ctor_sigs: Dict[Tuple[str, str, str], List[str]] = {}
    plain_sigs: Dict[Tuple[str, str, str], set] = {}
    for key, cls_records in grouped.items():
        ctor_sigs[key] = sorted(
            {
                method_signature(r.get("method", {}))
                for r in cls_records
                if r.get("method", {}).get("constructor")
            }
        )
        plain_sigs[key] = {
            method_signature(r.get("method", {}))
            for r in cls_records
            if not r.get("method", {}).get("constructor")
        }

    examples: List[Dict[str, Any]] = []

    for record in records:
        method_info = record.get("method", {})
        class_info = record.get("class", {})
        body = method_info.get("body", "")
        focal_signature = method_signature(method_info)
        key = class_key(record)

        # Exclude the focal method itself from the sibling-method stubs.
        focal_sig_key = method_key(method_info)
        other_method_sigs = sorted(plain_sigs[key] - {focal_sig_key})

        source = build_source(class_info, ctor_sigs[key], other_method_sigs, focal_signature)
        target = with_final_class_brace(strip_first_signature_line(body))

        examples.append(
            {
                "id": record.get("id"),
                "project_id": record.get("project_id"),
                "bug_id": record.get("bug_id"),
                "class_file": class_info.get("file"),
                "method": method_info.get("identifier"),
                "source": source,
                "target": target,
            }
        )

    return examples
170
+
171
+
172
def main() -> None:
    """CLI entry point: load the split, build examples, push to the Hub,
    and optionally dump the examples as local JSONL."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--split", default="train", help="Dataset split to load (default: train).")
    parser.add_argument("--hub-repo", default="defects4j_fixed_runnable", help="Hugging Face dataset repo to upload to.")
    parser.add_argument("--output", default=None, help="Optional output JSONL file path.")
    parser.add_argument("--max-records", type=int, default=None, help="Optional limit for quick smoke tests.")
    args = parser.parse_args()

    dataset = load_dataset("andstor/defects4j_fixed", split=args.split)
    records: List[Record] = list(dataset)
    if args.max_records is not None:
        records = records[: args.max_records]

    examples = build_examples(records)

    # Upload first, then (optionally) write the local JSONL copy.
    Dataset.from_list(examples).push_to_hub(args.hub_repo)
    print(f"Uploaded {len(examples)} examples to {args.hub_repo}")

    if args.output:
        output_path = Path(args.output)
        with output_path.open("w", encoding="utf-8") as handle:
            handle.writelines(
                json.dumps(example, ensure_ascii=False) + "\n" for example in examples
            )
        print(f"Wrote {len(examples)} examples to {output_path}")
216
+
217
+
218
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()