# parser.py
import ast


def get_category(node, parent=None):
    """Determine the category of an AST node or variable context, including variable roles."""
    if isinstance(node, (ast.Import, ast.ImportFrom)):
        return 'import'
    elif isinstance(node, ast.AsyncFunctionDef):
        return 'async_function'
    elif isinstance(node, ast.FunctionDef):
        return 'function'
    elif isinstance(node, ast.ClassDef):
        return 'class'
    elif isinstance(node, ast.If):
        return 'if'
    elif isinstance(node, ast.While):
        return 'while'
    elif isinstance(node, ast.For):
        return 'for'
    elif isinstance(node, ast.Try):
        return 'try'
    elif isinstance(node, ast.Return):
        return 'return'
    elif isinstance(node, ast.Expr):
        return 'expression'
    elif isinstance(node, ast.ExceptHandler):
        return 'except'
    elif isinstance(node, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
        # Assignments count as variables only inside a function, branch, or loop body.
        if parent is not None and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef, ast.If, ast.Try, ast.While, ast.For)):
            return 'assigned_variable'
        return 'other'
    elif isinstance(node, ast.arg):  # Input variables in function definitions
        if parent is not None and isinstance(parent, (ast.FunctionDef, ast.AsyncFunctionDef)):
            return 'input_variable'
        return 'other'
    elif isinstance(node, ast.Name):  # Returned variables in return statements
        if parent is not None and isinstance(parent, ast.Return):
            return 'returned_variable'
        return 'other'
    else:
        return 'other'  # Default to 'other' for unrecognized nodes
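
# Example calls (illustrative only; the parsed snippets are assumptions, not part of this module):
#   get_category(ast.parse("import os").body[0])   # -> 'import'
#   get_category(ast.parse("x = 1").body[0])       # -> 'other' (no enclosing function/branch passed as parent)

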
def create_vector(category, level, location, total_lines, parent_path):
    """Create a 6D vector optimized for role similarity, integrating variable roles into category_id."""
    category_map = {
        'import': 1, 'function': 2, 'async_function': 3, 'class': 4,
        'if': 5, 'while': 6, 'for': 7, 'try': 8, 'expression': 9, 'spacer': 10,
        'other': 11, 'elif': 12, 'else': 13, 'except': 14, 'finally': 15, 'return': 16,
        'assigned_variable': 17, 'input_variable': 18, 'returned_variable': 19
    }
    category_id = category_map.get(category, 0)  # Default to 0 for unknown categories
    start_line, end_line = location
    span = (end_line - start_line + 1) / total_lines
    center_pos = ((start_line + end_line) / 2) / total_lines
    parent_depth = len(parent_path)
    # Weight ancestors by their category id, discounted by distance along the parent path.
    parent_weight = sum(category_map.get(parent.split('[')[0].lower(), 0) * (1 / (i + 1))
                        for i, parent in enumerate(parent_path)) / max(1, len(category_map))
    return [category_id, level, center_pos, span, parent_depth, parent_weight]
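
# Worked example (hand-computed, for illustration only): a top-level function spanning
# lines 1-10 of a 100-line file, with an empty parent_path, yields
#   create_vector('function', 0, (1, 10), 100, [])  # -> [2, 0, 0.055, 0.1, 0, 0.0]
# i.e. category_id 2, level 0, center 5.5/100, span 10/100, depth 0, parent weight 0.0.

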
def is_blank_or_comment(line):
    """Check if a line is blank or a comment."""
    stripped = line.strip()
    return not stripped or stripped.startswith('#')


def parse_node(node, lines, prev_end, level=0, total_lines=None, parent_path=None, counters=None, processed_lines=None):
    if total_lines is None:
        total_lines = len(lines)
    if parent_path is None:
        parent_path = []
    if counters is None:
        counters = {cat: 0 for cat in [
            'import', 'function', 'async_function', 'class', 'if', 'while', 'for', 'try',
            'return', 'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally',
            'assigned_variable', 'input_variable', 'returned_variable']}
    if processed_lines is None:
        processed_lines = set()
    parts = []
    start_line = getattr(node, 'lineno', prev_end + 1)
    end_line = getattr(node, 'end_lineno', start_line)
    # Skip only if every line of this node is already processed; a partial overlap
    # (e.g. a header already emitted by the parent) must not swallow the node's body.
    if all(line in processed_lines for line in range(start_line, end_line + 1)):
        return parts, []
    # Get category, falling back to 'other' (parent_path holds node IDs, not AST nodes)
    category = get_category(node, parent_path[-1] if parent_path else None) or 'other'
    if category not in counters:
        category = 'other'
    counters[category] += 1
    node_id = f"{category.capitalize()}[{counters[category]}]"
    # Spacer before node (only for blank lines or comments)
    if start_line > prev_end + 1:
        spacer_lines = lines[prev_end:start_line - 1]
        spacer_lines_set = set(range(prev_end + 1, start_line))
        if not spacer_lines_set.issubset(processed_lines):
            for i, line in enumerate(spacer_lines, prev_end + 1):
                if i not in processed_lines and is_blank_or_comment(line):
                    counters['spacer'] += 1
                    spacer_node_id = f"Spacer[{counters['spacer']}]"
                    parts.append({
                        'category': 'spacer',
                        'source': line,
                        'location': (i, i),
                        'level': level,
                        'vector': create_vector('spacer', level, (i, i), total_lines, parent_path),
                        'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                        'node_id': spacer_node_id
                    })
                    processed_lines.add(i)
    # Current node's header (e.g., 'def', 'if', 'try')
    current_path = parent_path + [node_id]
    if start_line not in processed_lines and not is_blank_or_comment(lines[start_line - 1]):
        parts.append({
            'category': category,
            'source': lines[start_line - 1],
            'location': (start_line, start_line),
            'level': level,
            'vector': create_vector(category, level, (start_line, start_line), total_lines, current_path),
            'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
            'node_id': node_id
        })
        processed_lines.add(start_line)
    # Handle variables in function definitions (input variables). Arguments usually sit
    # on the 'def' header line, which is already processed, so each argument is emitted
    # unconditionally as its own part.
    category_sequence = [category]
    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and node.args.args:
        for arg in node.args.args:
            var_start = getattr(arg, 'lineno', start_line)  # Usually the same line as the function def
            arg_category = get_category(arg, node) or 'input_variable'
            if arg_category not in counters:
                arg_category = 'input_variable'
            counters[arg_category] += 1
            var_node_id = f"InputVariable[{counters[arg_category]}]"
            parts.append({
                'category': arg_category,
                'source': f" {arg.arg},",  # Indented as part of function
                'location': (var_start, var_start),
                'level': level + 1,
                'vector': create_vector(arg_category, level + 1, (var_start, var_start), total_lines, current_path),
                'parent_path': f"{current_path[0]} -> {var_node_id}",
                'node_id': var_node_id
            })
            processed_lines.add(var_start)
            category_sequence.append(arg_category)
    # Process nested bodies
    nested_prev_end = start_line
    for attr in ('body', 'orelse', 'handlers', 'finalbody'):
        if hasattr(node, attr) and getattr(node, attr):
            for child in getattr(node, attr):
                child_start = getattr(child, 'lineno', nested_prev_end + 1)
                child_end = getattr(child, 'end_lineno', child_start)
                if not any(line in processed_lines for line in range(child_start, child_end + 1)):
                    if attr == 'orelse' and isinstance(node, ast.If) and child_start != start_line:
                        # 'elif' branches appear as nested ast.If nodes in orelse; plain 'else'
                        # bodies start directly with their first statement.
                        sub_category = 'elif' if 'elif' in lines[child_start - 1] else 'else'
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters[sub_category] += 1
                            sub_node_id = f"{sub_category.capitalize()}[{counters[sub_category]}]"
                            parts.append({
                                'category': sub_category,
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector(sub_category, level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                            category_sequence.append(sub_category)
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        category_sequence.extend(child_seq)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    elif attr == 'handlers' and isinstance(child, ast.ExceptHandler):
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters['except'] += 1
                            sub_node_id = f"Except[{counters['except']}]"
                            parts.append({
                                'category': 'except',
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector('except', level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                            category_sequence.append('except')
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        category_sequence.extend(child_seq)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    elif attr == 'finalbody':
                        if child_start not in processed_lines and not is_blank_or_comment(lines[child_start - 1]):
                            counters['finally'] += 1
                            sub_node_id = f"Finally[{counters['finally']}]"
                            parts.append({
                                'category': 'finally',
                                'source': lines[child_start - 1],
                                'location': (child_start, child_start),
                                'level': level,
                                'vector': create_vector('finally', level, (child_start, child_start), total_lines, current_path),
                                'parent_path': ' -> '.join(parent_path) if parent_path else 'Top-Level',
                                'node_id': sub_node_id
                            })
                            processed_lines.add(child_start)
                            category_sequence.append('finally')
                        child_parts, child_seq = parse_node(child, lines, child_start, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        category_sequence.extend(child_seq)
                        nested_prev_end = max(nested_prev_end, child_parts[-1]['location'][1] if child_parts else child_start)
                    else:
                        # Handle assignments and returns for variable detection
                        if isinstance(child, (ast.Assign, ast.AnnAssign, ast.AugAssign)):
                            # ast.Assign has a list of targets; AnnAssign/AugAssign have a single target.
                            targets = child.targets if isinstance(child, ast.Assign) else [child.target]
                            for target in targets:
                                if isinstance(target, ast.Name):
                                    var_start = child.lineno
                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
                                        counters['assigned_variable'] += 1
                                        var_node_id = f"AssignedVariable[{counters['assigned_variable']}]"
                                        parts.append({
                                            'category': 'assigned_variable',
                                            'source': lines[var_start - 1],
                                            'location': (var_start, var_start),
                                            'level': level + 1,
                                            'vector': create_vector('assigned_variable', level + 1, (var_start, var_start), total_lines, current_path),
                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
                                            'node_id': var_node_id
                                        })
                                        processed_lines.add(var_start)
                                        category_sequence.append('assigned_variable')
                        elif isinstance(child, ast.Return):
                            for value in ast.walk(child):
                                if isinstance(value, ast.Name):
                                    var_start = child.lineno
                                    if var_start not in processed_lines and not is_blank_or_comment(lines[var_start - 1]):
                                        counters['returned_variable'] += 1
                                        var_node_id = f"ReturnedVariable[{counters['returned_variable']}]"
                                        parts.append({
                                            'category': 'returned_variable',
                                            'source': lines[var_start - 1],
                                            'location': (var_start, var_start),
                                            'level': level + 1,
                                            'vector': create_vector('returned_variable', level + 1, (var_start, var_start), total_lines, current_path),
                                            'parent_path': f"{current_path[0]} -> {var_node_id}",
                                            'node_id': var_node_id
                                        })
                                        processed_lines.add(var_start)
                                        category_sequence.append('returned_variable')
                        child_parts, child_seq = parse_node(child, lines, nested_prev_end, level + 1, total_lines, current_path, counters, processed_lines)
                        parts.extend(child_parts)
                        category_sequence.extend(child_seq)
                        nested_prev_end = child_parts[-1]['location'][1] if child_parts else nested_prev_end
    # Update end_line and source of the parent node if its body extends it
    if nested_prev_end > start_line and start_line not in processed_lines:
        final_end = nested_prev_end
        parts[-1]['location'] = (start_line, final_end)
        parts[-1]['source'] = ''.join(lines[start_line - 1:final_end])
        parts[-1]['vector'] = create_vector(category, level, (start_line, final_end), total_lines, current_path)
        processed_lines.update(range(start_line, final_end + 1))
    return parts, category_sequence


def parse_python_code(code):
    lines = code.splitlines(keepends=True)
    total_lines = len(lines)
    try:
        tree = ast.parse(code)
    except SyntaxError:
        return ([{'category': 'error', 'source': 'Invalid Python code', 'location': (1, 1), 'level': 0,
                  'vector': [0, 0, 1.0, 0.0, 0, 0], 'parent_path': 'Top-Level', 'node_id': 'Error[1]'}], ['error'])
    parts = []
    prev_end = 0
    processed_lines = set()
    category_sequence = []
    # Share one counters dict across all top-level statements so node IDs stay unique module-wide.
    counters = {cat: 0 for cat in [
        'import', 'function', 'async_function', 'class', 'if', 'while', 'for', 'try',
        'return', 'expression', 'other', 'spacer', 'elif', 'else', 'except', 'finally',
        'assigned_variable', 'input_variable', 'returned_variable']}
    for stmt in tree.body:
        stmt_parts, stmt_seq = parse_node(stmt, lines, prev_end, total_lines=total_lines,
                                          counters=counters, processed_lines=processed_lines)
        parts.extend(stmt_parts)
        category_sequence.extend(stmt_seq)
        prev_end = stmt_parts[-1]['location'][1] if stmt_parts else prev_end
    # Trailing blank lines and comments after the last statement become spacers.
    if prev_end < total_lines:
        remaining_lines = lines[prev_end:]
        remaining_lines_set = set(range(prev_end + 1, total_lines + 1))
        if not remaining_lines_set.issubset(processed_lines):
            for i, line in enumerate(remaining_lines, prev_end + 1):
                if i not in processed_lines and is_blank_or_comment(line):
                    counters['spacer'] += 1
                    spacer_node_id = f"Spacer[{counters['spacer']}]"
                    parts.append({
                        'category': 'spacer',
                        'source': line,
                        'location': (i, i),
                        'level': 0,
                        'vector': create_vector('spacer', 0, (i, i), total_lines, []),
                        'parent_path': 'Top-Level',
                        'node_id': spacer_node_id
                    })
                    processed_lines.add(i)
                    category_sequence.append('spacer')
    return parts, category_sequence
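

# Minimal usage sketch (not part of the original module): runs an assumed sample
# snippet through parse_python_code and prints one line per extracted part, so the
# part layout (node_id, category, location, vector) can be inspected quickly.
if __name__ == "__main__":
    sample = (
        "import os\n"
        "\n"
        "def greet(name):\n"
        "    message = 'hello ' + name\n"
        "    return message\n"
    )
    parts, sequence = parse_python_code(sample)
    for part in parts:
        print(part['node_id'], part['category'], part['location'], part['vector'])
    print(sequence)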