"""
Function Classifier Module.

Intelligent classification system that distinguishes between local and library functions,
identifies entry points, and categorizes functions by purpose and scope across multiple
programming languages.
"""

import ast
import re
import json
from typing import Dict, List, Any, Optional, Set, Tuple, Union
from pathlib import Path
from dataclasses import dataclass, field
from collections import defaultdict
import logging

from .error_handler import AnalysisError, log_info, log_debug
from .project_scanner import ProjectStructure, ProjectFile
from .entry_discovery import EntryPoint, EntryPointDiscovery


@dataclass
class ClassificationConfig:
    """Configuration for function classification.

    Bundles the tunable options for a classification run; every field has a
    default so ``ClassificationConfig()`` is usable as-is.

    NOTE(review): this config is not consumed anywhere in this module —
    confirm whether ``FunctionClassifier`` was meant to accept it.
    """
    analysis_depth: int = 2  # How many levels deep to analyze (presumably call depth — TODO confirm)
    include_library_functions: bool = False  # Whether library/external functions appear in results
    analyze_dependencies: bool = True  # Whether to consult build-system dependency lists
    analyze_complexity: bool = True  # Whether to compute complexity scores
    min_confidence: float = 0.6  # Minimum confidence for reported classifications
    find_orphaned_functions: bool = False  # Whether to flag functions that are never called
    detect_entry_candidates: bool = True  # Whether to look for likely entry points
    classify_test_functions: bool = True  # Whether to apply test-function patterns
    classify_utility_functions: bool = True  # Whether to apply utility-function patterns

@dataclass
class FunctionClassification:
    """Classification result for a single function.

    NOTE(review): the pipeline in this module emits ``FunctionInfo`` records;
    this dataclass appears to exist for API compatibility — confirm callers.
    """
    function_name: str  # Name of the classified function
    file_path: str  # Path of the file defining the function
    line_number: int  # 1-based line of the definition
    language: str  # Source language label ('python', 'cpp', ...)
    scope: str  # 'local', 'library', 'unknown'
    function_type: str  # 'entry_point', 'utility', 'test', 'callback', etc.
    confidence: float  # Classification confidence (presumably in [0, 1] — TODO confirm)
    complexity: Dict[str, Any] = field(default_factory=dict)  # Complexity metrics, analyzer-specific
    calls_made: List[str] = field(default_factory=list)  # Functions this one calls
    called_by: List[str] = field(default_factory=list)  # Functions that call this one
    is_orphaned: bool = False  # True when never referenced in the project
    metadata: Dict[str, Any] = field(default_factory=dict)  # Extra analyzer-specific data


@dataclass
class FunctionInfo:
    """Detailed information about a function.

    Produced by the per-language analyzers with ``scope``/``classification``/
    ``purpose`` set to 'unknown'; those fields are filled in by the later
    classification passes, and ``call_count``/``calls_made`` by the call-graph
    step.
    """
    name: str  # Function name as written in source
    file_path: str  # Absolute path (str(file.path)) of the defining file
    line_number: int  # 1-based line of the definition
    language: str  # 'python', 'cpp', 'typescript', or 'fortran'
    signature: str  # Reconstructed signature string
    scope: str  # 'local', 'external', 'builtin', 'unknown'
    classification: str  # 'entry_point', 'utility', 'test', 'callback', 'handler', 'api'
    purpose: str  # 'main', 'cli', 'service', 'helper', 'test', 'config', 'data_processing'
    complexity_score: float  # 0.0 to 1.0
    call_count: int  # How many times it's called within the project (regex-approximated)
    calls_made: List[str]  # Functions this function calls (regex-approximated)
    parameters: List[str]  # Parameter names
    return_type: Optional[str] = None  # Declared return type where the language exposes one
    docstring: Optional[str] = None  # Raw docstring (Python only)
    decorators: List[str] = field(default_factory=list)  # Decorator names (Python only)
    is_async: bool = False  # True for async defs / async arrow functions
    is_static: bool = False  # True when marked static (decorator or keyword)
    is_private: bool = False  # True when the name starts with '_'
    module_path: Optional[str] = None  # Project-relative path of the defining file

@dataclass
class ClassificationResult:
    """Results of function classification for a project.

    Aggregates everything ``classify_project_functions`` computes: raw
    per-function records, discovered entry points, and summary tallies.
    """
    project_path: str  # Root of the analyzed project
    total_functions: int  # Count of all analyzed functions
    local_functions: int  # Count with scope == 'local'
    external_functions: int  # Count with scope == 'external'
    entry_points: List[EntryPoint]  # Entry points found by EntryPointDiscovery
    function_details: List[FunctionInfo]  # One record per analyzed function
    classification_summary: Dict[str, int]  # classification label -> count
    purpose_summary: Dict[str, int]  # purpose label -> count
    language_breakdown: Dict[str, Dict[str, int]]  # language -> per-label counts

class FunctionClassifier:
    """Advanced function classifier for multi-language projects."""
    
    def __init__(self, project_scanner_result: Optional[ProjectStructure] = None):
        """Initialize the function classifier.

        Args:
            project_scanner_result: Optional pre-computed project structure.
                When omitted, ``classify_project_functions`` scans the project
                itself on first use.
        """
        self.project_structure = project_scanner_result
        self.entry_discovery = EntryPointDiscovery()
        
        # Known standard library names by language; used by
        # _classify_function_scope to mark functions as 'builtin'.
        # Membership is checked across every inner set regardless of its key
        # ('modules', 'functions', ...).
        self.builtin_patterns = {
            'python': {
                'modules': {
                    'os', 'sys', 'json', 'datetime', 'collections', 'itertools',
                    'functools', 'operator', 'math', 'random', 're', 'string',
                    'pathlib', 'urllib', 'http', 'socket', 'threading', 'asyncio',
                    'logging', 'typing', 'dataclasses', 'enum', 'abc', 'io',
                    'time', 'calendar', 'hashlib', 'base64', 'pickle', 'csv',
                    'sqlite3', 'unittest', 'pytest'
                },
                'functions': {
                    'print', 'len', 'range', 'enumerate', 'zip', 'map', 'filter',
                    'sum', 'max', 'min', 'sorted', 'reversed', 'all', 'any',
                    'abs', 'round', 'pow', 'divmod', 'isinstance', 'issubclass',
                    'hasattr', 'getattr', 'setattr', 'delattr', 'vars', 'dir',
                    'id', 'hash', 'repr', 'str', 'int', 'float', 'bool', 'list',
                    'dict', 'set', 'tuple', 'frozenset', 'bytearray', 'bytes',
                    'open', 'input', 'exec', 'eval', 'compile', '__import__'
                }
            },
            'cpp': {
                'headers': {
                    'iostream', 'vector', 'string', 'algorithm', 'memory',
                    'functional', 'utility', 'iterator', 'numeric', 'limits',
                    'cstdlib', 'cstdio', 'cstring', 'cmath', 'ctime', 'cassert',
                    'fstream', 'sstream', 'iomanip', 'regex', 'thread', 'mutex',
                    'chrono', 'random', 'exception', 'stdexcept', 'typeinfo',
                    'map', 'set', 'unordered_map', 'unordered_set', 'array',
                    'deque', 'list', 'forward_list', 'stack', 'queue'
                },
                'functions': {
                    'printf', 'scanf', 'malloc', 'free', 'strlen', 'strcpy',
                    'strcmp', 'strcat', 'memcpy', 'memset', 'exit', 'abort',
                    'atoi', 'atof', 'rand', 'srand', 'time'
                }
            },
            'typescript': {
                'globals': {
                    'console', 'window', 'document', 'process', 'global',
                    'Buffer', 'setTimeout', 'setInterval', 'clearTimeout',
                    'clearInterval', 'require', 'module', 'exports', '__dirname',
                    '__filename', 'JSON', 'Math', 'Date', 'RegExp', 'Array',
                    'Object', 'String', 'Number', 'Boolean', 'Promise'
                },
                'node_modules': {
                    'express', 'lodash', 'react', 'angular', 'vue', 'axios',
                    'moment', 'chalk', 'commander', 'inquirer', 'jest', 'mocha'
                }
            },
            'fortran': {
                'intrinsics': {
                    'write', 'read', 'open', 'close', 'allocate', 'deallocate',
                    'size', 'shape', 'ubound', 'lbound', 'present', 'associated',
                    'sin', 'cos', 'tan', 'exp', 'log', 'sqrt', 'abs', 'min', 'max',
                    'sum', 'product', 'count', 'any', 'all', 'minval', 'maxval'
                }
            }
        }
        
        # Regex fragments searched against lowercased function names by
        # _classify_function_purpose.  Dict order matters: the first category
        # with a matching pattern wins.
        self.classification_patterns = {
            'entry_point': [
                r'main', r'__main__', r'cli', r'run', r'start', r'execute',
                r'app', r'server', r'daemon', r'service'
            ],
            'utility': [
                r'util', r'helper', r'tool', r'common', r'shared', r'base'
            ],
            'test': [
                r'test_', r'_test', r'spec_', r'_spec', r'mock', r'stub',
                r'fixture', r'setup', r'teardown'
            ],
            'callback': [
                r'callback', r'handler', r'listener', r'on_', r'_callback',
                r'event_', r'_event'
            ],
            'api': [
                r'api_', r'endpoint', r'route', r'controller', r'view',
                r'resource', r'service'
            ]
        }
        
        log_debug("Initialized FunctionClassifier")
    
    def classify_project_functions(self, project_path: Union[str, Path],
                                 project_structure: Optional[ProjectStructure] = None) -> ClassificationResult:
        """
        Classify all functions in a project.
        
        Orchestrates the full pipeline: entry-point discovery, per-file
        function extraction, call-graph construction, scope and purpose
        classification, and summary generation.
        
        Args:
            project_path: Path to project root
            project_structure: Optional pre-scanned project structure
            
        Returns:
            Complete classification results
        """
        project_path = Path(project_path)
        
        # Prefer an explicitly supplied structure; otherwise reuse the one
        # given at construction time, or scan the project from scratch.
        if project_structure:
            self.project_structure = project_structure
        elif not self.project_structure:
            from .project_scanner import ProjectScanner
            scanner = ProjectScanner()
            self.project_structure = scanner.scan_project(project_path)
        
        log_info(f"Classifying functions in project: {project_path}")
        
        # Discover entry points
        entry_points = self.entry_discovery.discover_entry_points(project_path)
        
        # Analyze all functions in supported source files only.
        all_functions = []
        for file in self.project_structure.files:
            if file.file_type == 'source' and file.language in {'python', 'cpp', 'typescript', 'fortran'}:
                functions = self._analyze_file_functions(file)
                all_functions.extend(functions)
        
        # Build call graph for better classification
        call_graph = self._build_call_graph(all_functions)
        
        # Enhance function info with call graph data.  NOTE(review): the call
        # graph is built from a regex scan over whole files, so these are
        # approximate counts, not exact per-function call counts.
        for func in all_functions:
            func.call_count = call_graph.get(func.name, {}).get('called_by_count', 0)
            func.calls_made = call_graph.get(func.name, {}).get('calls', [])
        
        # Classify functions by scope (local vs external)
        self._classify_function_scope(all_functions)
        
        # Classify by purpose and type
        self._classify_function_purpose(all_functions, entry_points)
        
        # Generate summaries
        classification_summary = self._generate_classification_summary(all_functions)
        purpose_summary = self._generate_purpose_summary(all_functions)
        language_breakdown = self._generate_language_breakdown(all_functions)
        
        # Count statistics
        total_functions = len(all_functions)
        local_functions = len([f for f in all_functions if f.scope == 'local'])
        external_functions = len([f for f in all_functions if f.scope == 'external'])
        
        result = ClassificationResult(
            project_path=str(project_path),
            total_functions=total_functions,
            local_functions=local_functions,
            external_functions=external_functions,
            entry_points=entry_points,
            function_details=all_functions,
            classification_summary=classification_summary,
            purpose_summary=purpose_summary,
            language_breakdown=language_breakdown
        )
        
        log_info(f"Classification complete: {total_functions} functions analyzed, "
                f"{local_functions} local, {external_functions} external")
        
        return result
    
    def _analyze_file_functions(self, file: ProjectFile) -> List[FunctionInfo]:
        """Analyze functions in a single file."""
        if file.language == 'python':
            return self._analyze_python_functions(file)
        elif file.language == 'cpp':
            return self._analyze_cpp_functions(file)
        elif file.language == 'typescript':
            return self._analyze_typescript_functions(file)
        elif file.language == 'fortran':
            return self._analyze_fortran_functions(file)
        else:
            return []
    
    def _analyze_python_functions(self, file: ProjectFile) -> List[FunctionInfo]:
        """Analyze Python functions using AST."""
        functions = []
        
        try:
            with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                content = f.read()
            
            # Parse AST
            tree = ast.parse(content)
            
            for node in ast.walk(tree):
                if isinstance(node, ast.FunctionDef):
                    func_info = self._extract_python_function_info(node, file)
                    functions.append(func_info)
                elif isinstance(node, ast.AsyncFunctionDef):
                    func_info = self._extract_python_function_info(node, file)
                    func_info.is_async = True
                    functions.append(func_info)
        
        except Exception as e:
            log_debug(f"Error analyzing Python file {file.path}: {e}")
        
        return functions
    
    def _extract_python_function_info(self, node: ast.FunctionDef, file: ProjectFile) -> FunctionInfo:
        """Extract detailed function information from a Python AST node.

        Args:
            node: A ``FunctionDef`` or ``AsyncFunctionDef`` node.
            file: The project file the node was parsed from.

        Returns:
            A ``FunctionInfo`` whose scope/classification/purpose are left as
            'unknown' and whose call data is zeroed; later passes fill those.
        """
        # Collect parameter names, including positional-only and keyword-only
        # parameters (previously only plain positional args were captured).
        arg_nodes = list(node.args.posonlyargs) + list(node.args.args) + list(node.args.kwonlyargs)
        args = [arg.arg for arg in arg_nodes]
        
        signature = f"def {node.name}({', '.join(args)}):"
        
        # Extract decorator names.  Handles plain names (@foo), dotted
        # attributes of any depth (@a.b.c), and decorator calls
        # (@app.route("/x") records "app.route").  The previous version
        # assumed ``decorator.value`` was always a Name and raised
        # AttributeError on nested attributes, which aborted analysis of the
        # entire file via the caller's broad except.  Unrecognized decorator
        # expressions are now skipped instead.
        decorators = []
        for decorator in node.decorator_list:
            target = decorator.func if isinstance(decorator, ast.Call) else decorator
            parts = []
            while isinstance(target, ast.Attribute):
                parts.append(target.attr)
                target = target.value
            if isinstance(target, ast.Name):
                parts.append(target.id)
                decorators.append('.'.join(reversed(parts)))
        
        # Raw (uncleaned) docstring, if the body starts with a string literal;
        # clean=False matches the previous behavior of returning the literal
        # value verbatim.
        docstring = ast.get_docstring(node, clean=False)
        
        # Leading underscore marks the function private by convention.
        is_private = node.name.startswith('_')
        
        # Static methods are detected via their decorator.
        is_static = 'staticmethod' in decorators
        
        # Basic structural complexity on a 0-1 scale.
        complexity_score = self._calculate_complexity(node)
        
        return FunctionInfo(
            name=node.name,
            file_path=str(file.path),
            line_number=node.lineno,
            language=file.language,
            signature=signature,
            scope='unknown',  # Will be determined later
            classification='unknown',  # Will be determined later
            purpose='unknown',  # Will be determined later
            complexity_score=complexity_score,
            call_count=0,  # Will be filled by call graph analysis
            calls_made=[],  # Will be filled by call graph analysis
            parameters=args,
            docstring=docstring,
            decorators=decorators,
            is_async=isinstance(node, ast.AsyncFunctionDef),
            is_static=is_static,
            is_private=is_private,
            module_path=file.relative_path
        )
    
    def _analyze_cpp_functions(self, file: ProjectFile) -> List[FunctionInfo]:
        """Analyze C++ functions using regex parsing."""
        functions = []
        
        try:
            with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                content = f.read()
            
            # Find function definitions using regex
            # This is a simplified approach - for production, use libclang
            function_pattern = re.compile(
                r'(?:^|\n)\s*(?:(?:static|inline|virtual|explicit)\s+)*'
                r'(?:(?:const|unsigned|signed)\s+)*'
                r'([a-zA-Z_]\w*(?:\s*\*)*)\s+'  # Return type
                r'([a-zA-Z_]\w*)\s*'            # Function name
                r'\(([^)]*)\)\s*'               # Parameters
                r'(?:const\s*)?(?:override\s*)?'
                r'(?:\s*:\s*[^{]*)?'            # Constructor initializer list
                r'\s*{',                        # Opening brace
                re.MULTILINE
            )
            
            for match in function_pattern.finditer(content):
                return_type, func_name, params = match.groups()
                line_number = content[:match.start()].count('\n') + 1
                
                # Skip constructors/destructors for now
                if func_name == file.path.stem or func_name.startswith('~'):
                    continue
                
                signature = f"{return_type.strip()} {func_name}({params.strip()})"
                
                # Parse parameters
                param_list = []
                if params.strip():
                    param_parts = [p.strip().split()[-1] for p in params.split(',')]
                    param_list = [p for p in param_parts if p and not p.startswith('//')]
                
                func_info = FunctionInfo(
                    name=func_name,
                    file_path=str(file.path),
                    line_number=line_number,
                    language=file.language,
                    signature=signature,
                    scope='unknown',
                    classification='unknown',
                    purpose='unknown',
                    complexity_score=0.5,  # Default complexity
                    call_count=0,
                    calls_made=[],
                    parameters=param_list,
                    return_type=return_type.strip(),
                    is_static='static' in match.group(0),
                    is_private=func_name.startswith('_'),
                    module_path=file.relative_path
                )
                
                functions.append(func_info)
        
        except Exception as e:
            log_debug(f"Error analyzing C++ file {file.path}: {e}")
        
        return functions
    
    def _analyze_typescript_functions(self, file: ProjectFile) -> List[FunctionInfo]:
        """Analyze TypeScript functions using regex parsing."""
        functions = []
        
        try:
            with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                content = f.read()
            
            # Function declaration patterns
            patterns = [
                # function declaration
                r'(?:export\s+)?(?:async\s+)?function\s+([a-zA-Z_$]\w*)\s*\(([^)]*)\)(?:\s*:\s*([^{]+))?\s*{',
                # arrow function assignment
                r'(?:const|let|var)\s+([a-zA-Z_$]\w*)\s*=\s*(?:async\s+)?\(([^)]*)\)(?:\s*:\s*([^=>]+))?\s*=>\s*{',
                # method definition
                r'(?:async\s+)?([a-zA-Z_$]\w*)\s*\(([^)]*)\)(?:\s*:\s*([^{]+))?\s*{'
            ]
            
            for pattern in patterns:
                for match in re.finditer(pattern, content, re.MULTILINE):
                    func_name = match.group(1)
                    params = match.group(2) if len(match.groups()) > 1 else ''
                    return_type = match.group(3) if len(match.groups()) > 2 else None
                    
                    line_number = content[:match.start()].count('\n') + 1
                    
                    # Parse parameters
                    param_list = []
                    if params.strip():
                        param_parts = [p.strip().split(':')[0] for p in params.split(',')]
                        param_list = [p for p in param_parts if p]
                    
                    signature = f"function {func_name}({params.strip()})"
                    if return_type:
                        signature += f": {return_type.strip()}"
                    
                    func_info = FunctionInfo(
                        name=func_name,
                        file_path=str(file.path),
                        line_number=line_number,
                        language=file.language,
                        signature=signature,
                        scope='unknown',
                        classification='unknown',
                        purpose='unknown',
                        complexity_score=0.5,
                        call_count=0,
                        calls_made=[],
                        parameters=param_list,
                        return_type=return_type.strip() if return_type else None,
                        is_async='async' in match.group(0),
                        is_private=func_name.startswith('_'),
                        module_path=file.relative_path
                    )
                    
                    functions.append(func_info)
        
        except Exception as e:
            log_debug(f"Error analyzing TypeScript file {file.path}: {e}")
        
        return functions
    
    def _analyze_fortran_functions(self, file: ProjectFile) -> List[FunctionInfo]:
        """Analyze Fortran functions and subroutines."""
        functions = []
        
        try:
            with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                content = f.read()
            
            # Function and subroutine patterns
            patterns = [
                r'^\s*(?:recursive\s+)?function\s+([a-zA-Z_]\w*)\s*\(([^)]*)\)(?:\s+result\s*\([^)]*\))?\s*$',
                r'^\s*(?:recursive\s+)?subroutine\s+([a-zA-Z_]\w*)\s*\(([^)]*)\)\s*$'
            ]
            
            for pattern in patterns:
                for match in re.finditer(pattern, content, re.MULTILINE | re.IGNORECASE):
                    func_name = match.group(1)
                    params = match.group(2) if len(match.groups()) > 1 else ''
                    
                    line_number = content[:match.start()].count('\n') + 1
                    
                    # Parse parameters
                    param_list = []
                    if params.strip():
                        param_parts = [p.strip() for p in params.split(',')]
                        param_list = [p for p in param_parts if p]
                    
                    func_type = 'function' if 'function' in match.group(0).lower() else 'subroutine'
                    signature = f"{func_type} {func_name}({params.strip()})"
                    
                    func_info = FunctionInfo(
                        name=func_name,
                        file_path=str(file.path),
                        line_number=line_number,
                        language=file.language,
                        signature=signature,
                        scope='unknown',
                        classification='unknown',
                        purpose='unknown',
                        complexity_score=0.5,
                        call_count=0,
                        calls_made=[],
                        parameters=param_list,
                        is_private=func_name.startswith('_'),
                        module_path=file.relative_path
                    )
                    
                    functions.append(func_info)
        
        except Exception as e:
            log_debug(f"Error analyzing Fortran file {file.path}: {e}")
        
        return functions
    
    def _calculate_complexity(self, node: ast.AST) -> float:
        """Calculate basic complexity score for Python AST node."""
        complexity = 0
        
        for child in ast.walk(node):
            if isinstance(child, (ast.If, ast.For, ast.While, ast.With)):
                complexity += 1
            elif isinstance(child, (ast.Try, ast.ExceptHandler)):
                complexity += 0.5
            elif isinstance(child, ast.Call):
                complexity += 0.1
        
        # Normalize to 0-1 scale
        return min(1.0, complexity / 20.0)
    
    def _build_call_graph(self, functions: List[FunctionInfo]) -> Dict[str, Dict[str, Any]]:
        """Build a call graph from function analysis."""
        call_graph = {}
        
        # Create function name to info mapping
        func_map = {f.name: f for f in functions}
        
        # Analyze calls for each function
        for func in functions:
            calls = []
            called_by_count = 0
            
            try:
                with open(func.file_path, 'r') as f:
                    content = f.read()
                
                # Extract function calls (basic regex approach)
                if func.language == 'python':
                    call_pattern = r'([a-zA-Z_]\w*)\s*\('
                elif func.language in ['cpp', 'typescript']:
                    call_pattern = r'([a-zA-Z_]\w*)\s*\('
                elif func.language == 'fortran':
                    call_pattern = r'call\s+([a-zA-Z_]\w*)|([a-zA-Z_]\w*)\s*\('
                else:
                    call_pattern = r'([a-zA-Z_]\w*)\s*\('
                
                matches = re.findall(call_pattern, content, re.IGNORECASE)
                for match in matches:
                    called_func = match if isinstance(match, str) else next(m for m in match if m)
                    if called_func in func_map:
                        calls.append(called_func)
                        called_by_count += 1
            
            except Exception:
                pass
            
            call_graph[func.name] = {
                'calls': calls,
                'called_by_count': called_by_count
            }
        
        return call_graph
    
    def _classify_function_scope(self, functions: List[FunctionInfo]):
        """Classify functions as local, external, or builtin.

        Mutates ``func.scope`` in place.  Requires ``self.project_structure``;
        returns silently (leaving scopes untouched) when it is missing.

        NOTE(review): ``local_function_names`` is built from the very same
        ``functions`` list being classified, so the membership test below is
        always true for any function not already marked builtin — the
        'external'/'unknown' branches are currently unreachable.  Confirm
        whether call-site names (rather than definitions) were meant to be
        classified here.
        """
        if not self.project_structure:
            return
        
        # Build set of local function names (every definition passed in).
        local_function_names = {f.name for f in functions}
        
        # Get builtin patterns for each language
        for func in functions:
            language = func.language
            
            # Builtin check: the name appears in any of the language's known
            # builtin/stdlib name sets (all inner sets are consulted).
            if language in self.builtin_patterns:
                patterns = self.builtin_patterns[language]
                
                is_builtin = False
                for pattern_type, pattern_set in patterns.items():
                    if func.name in pattern_set:
                        func.scope = 'builtin'
                        is_builtin = True
                        break
                
                if is_builtin:
                    continue
            
            # Check if it's local (defined in project)
            if func.name in local_function_names:
                func.scope = 'local'
            else:
                # Check if the name matches a dependency declared by one of
                # the project's build systems.
                is_external = False
                for build_system in self.project_structure.build_systems:
                    if func.name in build_system.dependencies:
                        func.scope = 'external'
                        is_external = True
                        break
                
                if not is_external:
                    func.scope = 'unknown'
    def _classify_function_purpose(self, functions: List[FunctionInfo], entry_points: List[EntryPoint]):
        """Classify functions by purpose and type."""
        entry_point_names = {ep.name for ep in entry_points}
        
        for func in functions:
            func_name_lower = func.name.lower()
            
            # Check if it's an entry point
            if func.name in entry_point_names:
                func.classification = 'entry_point'
                func.purpose = 'main'
                continue
            
            # Apply pattern matching for classification
            classified = False
            for classification, patterns in self.classification_patterns.items():
                for pattern in patterns:
                    if re.search(pattern, func_name_lower):
                        func.classification = classification
                        classified = True
                        break
                if classified:
                    break
            
            if not classified:
                func.classification = 'utility'
            
            # Determine purpose based on classification and other factors
            if func.classification == 'entry_point':
                func.purpose = 'main'
            elif func.classification == 'test':
                func.purpose = 'test'
            elif func.classification == 'api':
                func.purpose = 'service'
            elif func.classification == 'callback':
                func.purpose = 'helper'
            elif func.complexity_score > 0.7:
                func.purpose = 'data_processing'
            elif func.is_private:
                func.purpose = 'helper'
            else:
                func.purpose = 'utility'
    
    def _generate_classification_summary(self, functions: List[FunctionInfo]) -> Dict[str, int]:
        """Generate summary of function classifications."""
        summary = defaultdict(int)
        for func in functions:
            summary[func.classification] += 1
        return dict(summary)
    
    def _generate_purpose_summary(self, functions: List[FunctionInfo]) -> Dict[str, int]:
        """Generate summary of function purposes."""
        summary = defaultdict(int)
        for func in functions:
            summary[func.purpose] += 1
        return dict(summary)
    
    def _generate_language_breakdown(self, functions: List[FunctionInfo]) -> Dict[str, Dict[str, int]]:
        """Generate language-specific breakdowns."""
        breakdown = defaultdict(lambda: defaultdict(int))
        
        for func in functions:
            breakdown[func.language]['total'] += 1
            breakdown[func.language][func.scope] += 1
            breakdown[func.language][func.classification] += 1
        
        return {lang: dict(stats) for lang, stats in breakdown.items()}
    
    def get_entry_point_recommendations(self, classification_result: ClassificationResult) -> List[Dict[str, Any]]:
        """Recommend the best entry points for analysis."""
        recommendations = []
        
        # Get functions classified as entry points
        entry_functions = [f for f in classification_result.function_details 
                          if f.classification == 'entry_point']
        
        # Sort by various criteria
        entry_functions.sort(key=lambda f: (
            -f.call_count,  # Most called
            f.purpose == 'main',  # Main functions first
            -f.complexity_score,  # More complex first
            f.name == 'main'  # Specifically main function
        ))
        
        for func in entry_functions[:10]:  # Top 10 recommendations
            recommendations.append({
                'function_name': func.name,
                'file_path': func.file_path,
                'line_number': func.line_number,
                'language': func.language,
                'purpose': func.purpose,
                'call_count': func.call_count,
                'complexity_score': func.complexity_score,
                'signature': func.signature,
                'recommendation_reason': self._get_recommendation_reason(func)
            })
        
        return recommendations
    
    def _get_recommendation_reason(self, func: FunctionInfo) -> str:
        """Generate a reason for recommending this entry point."""
        reasons = []
        
        if func.name == 'main':
            reasons.append("Standard main function")
        if func.purpose == 'main':
            reasons.append("Identified as main entry point")
        if func.call_count > 0:
            reasons.append(f"Called {func.call_count} times")
        if func.complexity_score > 0.5:
            reasons.append("High complexity suggests important functionality")
        if func.classification == 'entry_point':
            reasons.append("Classified as entry point")
        
        return '; '.join(reasons) if reasons else "Function analysis suggests entry point"
    
    def filter_functions_by_criteria(self, classification_result: ClassificationResult,
                                   scope: Optional[str] = None,
                                   classification: Optional[str] = None,
                                   purpose: Optional[str] = None,
                                   language: Optional[str] = None,
                                   min_complexity: float = 0.0,
                                   max_complexity: float = 1.0) -> List[FunctionInfo]:
        """Filter functions by specified criteria."""
        filtered = classification_result.function_details
        
        if scope:
            filtered = [f for f in filtered if f.scope == scope]
        
        if classification:
            filtered = [f for f in filtered if f.classification == classification]
        
        if purpose:
            filtered = [f for f in filtered if f.purpose == purpose]
        
        if language:
            filtered = [f for f in filtered if f.language == language]
        
        filtered = [f for f in filtered if min_complexity <= f.complexity_score <= max_complexity]
        
        return filtered


# API Compatibility Exports - for test compatibility
from enum import Enum

class FunctionType(Enum):
    """Enumeration of function types.

    Mirrors the string labels used elsewhere in this module for scope and
    classification, for callers that prefer an enum over raw strings.
    """
    LOCAL = "local"  # Defined within the analyzed project
    LIBRARY = "library"  # Provided by an external library
    BUILTIN = "builtin"  # Language or standard-library builtin
    ENTRY_POINT = "entry_point"  # Program entry point
    UTILITY = "utility"  # General-purpose helper
    TEST = "test"  # Test code
    CALLBACK = "callback"  # Callback / event handler
    UNKNOWN = "unknown"  # Could not be determined

class FunctionScope(Enum):
    """Enumeration of function scopes.

    NOTE(review): these values describe lexical/definition context and do not
    match the scope strings ('local'/'external'/'builtin'/'unknown') assigned
    by ``_classify_function_scope`` — confirm which callers consume this enum.
    """
    MODULE = "module"  # Defined at module level
    CLASS = "class"  # Defined inside a class body (a method)
    GLOBAL = "global"  # Global scope
    LOCAL = "local"  # Local scope
    NESTED = "nested"  # Defined inside another function
    LAMBDA = "lambda"  # Anonymous lambda function
    GENERATOR = "generator"  # Generator function
    ASYNC = "async"  # Async (coroutine) function