"""
Project Scanner Module.

Comprehensive project discovery and organization system that analyzes
multi-language codebases to identify project structure, build systems,
and file relationships.
"""

import os
import json
import re
from typing import Dict, List, Any, Optional, Set, Tuple, Union
from pathlib import Path
from dataclasses import dataclass, field
from collections import defaultdict
import logging
import yaml
import toml

from .error_handler import AnalysisError, log_info, log_debug

@dataclass
class EntryPoint:
    """Represents an entry point in the project.

    Plain data record; instances are carried in
    ProjectStructure.entry_points and serialized by its to_dict().
    """
    # Identifier of the entry point (e.g. a script or function name).
    name: str
    # Path to the file containing the entry point.
    file_path: str
    entry_type: str  # 'main', 'cli', 'service', etc.
    # Line where the entry point appears; 0 when unknown.
    line_number: int = 0
    # Language of the containing file; 'unknown' when not detected.
    language: str = 'unknown'


@dataclass
class ProjectScanConfig:
    """Configuration for project scanning.

    NOTE(review): several of these flags (deep_scan, include_dependencies,
    max_depth, ...) are not consulted by the scanner code visible in this
    module -- confirm they are read elsewhere before relying on them.
    """
    # Request a detailed/recursive scan.
    deep_scan: bool = True
    # Include dependency analysis in scan results.
    include_dependencies: bool = False
    # Skip build output directories (build/, dist/, ...).
    ignore_build_dirs: bool = True
    # Skip cache directories (__pycache__, .pytest_cache, ...).
    ignore_cache_dirs: bool = True
    # Maximum directory depth to descend during scanning.
    max_depth: int = 10
    include_hidden_files: bool = False
    ignore_hidden: bool = True  # Alias for include_hidden_files (inverted)
    # Whether binary files should be scanned at all.
    scan_binary_files: bool = False
    include_tests: bool = True  # For test compatibility
    file_size_limit: int = 10 * 1024 * 1024  # 10MB limit
    # Directory names excluded from scanning.
    excluded_dirs: List[str] = field(default_factory=lambda: ['__pycache__', '.git', 'node_modules', 'build', 'dist'])
    # File extensions excluded from scanning.
    excluded_extensions: List[str] = field(default_factory=lambda: ['.pyc', '.pyo', '.so', '.dll'])
    # Additional regex patterns for files to ignore.
    custom_ignore_patterns: List[str] = field(default_factory=list)
    # Extra language -> extension-set mappings for language detection.
    custom_language_extensions: Dict[str, Set[str]] = field(default_factory=dict)


@dataclass
class ProjectFile:
    """Represents a single file in the project.

    Produced by ProjectScanner._analyze_file for each non-ignored,
    non-binary file discovered under the project root.
    """
    # Absolute path to the file.
    path: Path
    # Detected language (see ProjectScanner.language_extensions), or 'unknown'.
    language: str
    size_bytes: int
    # Modification time as a POSIX timestamp (st_mtime).
    last_modified: float
    file_type: str  # 'source', 'header', 'config', 'build', 'test', 'doc'
    # Path relative to the project root, as a string.
    relative_path: str
    # Always False for files that survive the binary filter in discovery.
    is_binary: bool = False
    # Detected text encoding, or None when undetectable.
    encoding: Optional[str] = None


@dataclass
class BuildSystem:
    """Represents a detected build system.

    Equality and hashing are deliberately loose so instances compare equal
    to BuildSystemType enum members and plain strings (test compatibility).
    """
    name: str  # 'cmake', 'npm', 'setuptools', 'cargo', 'make', 'gradle'
    # Directory of the config file closest to the project root.
    root_path: Path
    # All config files that identified this build system.
    config_files: List[Path]
    # Expected build output directories for this system.
    target_directories: List[Path]
    dependencies: List[str] = field(default_factory=list)
    # Script name -> command mapping (populated for npm from package.json).
    scripts: Dict[str, str] = field(default_factory=dict)
    
    def __eq__(self, other) -> bool:
        """Support comparison with BuildSystemType enums."""
        # Duck-typed enum check: enum members expose both .value and .name.
        if hasattr(other, 'value') and hasattr(other, 'name'):  # It's an enum
            # Convert our name to uppercase to match enum name
            result = self.name.upper() == other.name or self.name == other.value
            return result
        if isinstance(other, str):
            return self.name == other
        if isinstance(other, BuildSystem):
            return self.name == other.name and self.root_path == other.root_path
        return False
    
    def __hash__(self) -> int:
        """Make BuildSystem hashable so it can be used in sets."""
        # Try to hash like the corresponding enum
        # NOTE(review): assumes BuildSystemType is defined elsewhere in this
        # module; if it were undefined, the lookup would raise NameError (not
        # AttributeError) and escape this handler -- confirm.
        try:
            corresponding_enum = getattr(BuildSystemType, self.name.upper())
            return hash(corresponding_enum)
        except AttributeError:
            # Fallback to name-based hash
            return hash(self.name)


@dataclass
class ProjectStructure:
    """Represents the overall project structure.

    Aggregate result assembled by ProjectScanner.scan_project; to_dict()
    renders a JSON-friendly view.

    NOTE(review): ProjectType and BuildSystemType are not defined in the
    code visible in this module -- assumed declared elsewhere in the package.
    """
    root_path: Path
    name: str
    # Languages detected across all scanned files.
    languages: Set[str]
    build_systems: List[BuildSystem]
    source_directories: List[Path]
    test_directories: List[Path]
    documentation_directories: List[Path]
    config_directories: List[Path]
    files: List[ProjectFile]
    total_size_bytes: int
    file_count_by_language: Dict[str, int]
    # Key (file relative path or "build_system:<name>") -> dependency names.
    dependency_graph: Dict[str, List[str]]
    project_type: Optional['ProjectType'] = None
    statistics: Optional[Dict[str, Any]] = None
    entry_points: List[EntryPoint] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)
    
    @property
    def build_system_types(self) -> List['BuildSystemType']:
        """Get build system types as enums for compatibility."""
        # Unknown names fall back to BuildSystemType.UNKNOWN.
        return [getattr(BuildSystemType, bs.name.upper(), BuildSystemType.UNKNOWN) 
                for bs in self.build_systems]
    
    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for serialization."""
        return {
            'root_path': str(self.root_path),
            'name': self.name,
            'languages': list(self.languages),
            # build_systems may hold BuildSystem records or enum-like values;
            # serialize each form accordingly.
            'build_systems': [
                {'name': bs.name, 'root_path': str(bs.root_path)} if hasattr(bs, 'root_path')
                else str(bs.value) if hasattr(bs, 'value') else str(bs)
                for bs in self.build_systems
            ],
            'source_directories': [str(d) for d in self.source_directories],
            'test_directories': [str(d) for d in self.test_directories],
            'documentation_directories': [str(d) for d in self.documentation_directories],
            'config_directories': [str(d) for d in self.config_directories],
            'total_size_bytes': self.total_size_bytes,
            'file_count_by_language': self.file_count_by_language,
            'project_type': self.project_type.value if self.project_type else None,
            'statistics': self.statistics or {},
            'entry_points': [{'name': ep.name, 'file_path': ep.file_path, 'entry_type': ep.entry_type} for ep in self.entry_points],
            # Flatten dependencies across all detected build systems.
            'dependencies': sum([bs.dependencies for bs in self.build_systems], [])
        }


class ProjectScanner:
    """Comprehensive project scanner for multi-language codebases."""
    
    def __init__(self, config: Optional[ProjectScanConfig] = None):
        """Initialize the project scanner.

        Args:
            config: Optional scan configuration. Its ``excluded_dirs``,
                ``excluded_extensions``, ``custom_ignore_patterns`` and
                ``custom_language_extensions`` are merged into the built-in
                defaults below (fix: previously the config was stored but
                never consulted, so custom exclusions had no effect).
        """
        self.config = config or ProjectScanConfig()
        
        # Language detection patterns (language -> set of file extensions)
        self.language_extensions = {
            'python': {'.py', '.pyx', '.pyi', '.pyw'},
            'cpp': {'.cpp', '.cc', '.cxx', '.c', '.h', '.hpp', '.hxx'},
            'typescript': {'.ts', '.tsx', '.js', '.jsx'},
            'fortran': {'.f90', '.f95', '.f03', '.f08', '.f', '.for'},
            'rust': {'.rs'},
            'java': {'.java'},
            'go': {'.go'},
            'c#': {'.cs'},
            'php': {'.php'},
            'ruby': {'.rb'},
            'shell': {'.sh', '.bash', '.zsh', '.fish'},
            'yaml': {'.yaml', '.yml'},
            'json': {'.json'},
            'xml': {'.xml'},
            'markdown': {'.md', '.markdown'},
            'latex': {'.tex', '.sty', '.cls'},
        }
        # Merge user-supplied language extensions (extends the defaults).
        for language, extensions in self.config.custom_language_extensions.items():
            self.language_extensions.setdefault(language, set()).update(extensions)
        
        # Build system detection patterns (system -> indicator filenames)
        self.build_system_patterns = {
            'cmake': {'CMakeLists.txt', 'cmake'},
            'npm': {'package.json', 'package-lock.json', 'yarn.lock', 'node_modules'},
            'setuptools': {'setup.py', 'setup.cfg', 'pyproject.toml', 'requirements.txt'},
            'cargo': {'Cargo.toml', 'Cargo.lock'},
            'make': {'Makefile', 'makefile', 'GNUmakefile'},
            'gradle': {'build.gradle', 'build.gradle.kts', 'gradle.properties'},
            'maven': {'pom.xml'},
            'meson': {'meson.build'},
            'bazel': {'BUILD', 'BUILD.bazel', 'WORKSPACE'},
            'autotools': {'configure.ac', 'configure.in', 'Makefile.am', 'Makefile.in'},
        }
        
        # For test compatibility
        self.supported_languages = list(self.language_extensions.keys())
        self.build_system_detectors = list(self.build_system_patterns.keys())
        
        # Directory names to skip during traversal
        self.ignore_directories = {
            # Version control
            '.git', '.svn', '.hg', '.bzr',
            # Build outputs
            'build', 'dist', 'out', 'target', 'bin', 'obj',
            # Dependencies
            'node_modules', '__pycache__', '.pytest_cache',
            'vendor', 'third_party', 'external',
            # IDE/Editor
            '.vscode', '.idea', '.vs', '.eclipse',
            # Temporary
            'tmp', 'temp', '.tmp',
            # Coverage/profiling
            '.coverage', '.nyc_output', 'coverage',
        }
        # Honor configured directory exclusions (set dedupes the defaults).
        self.ignore_directories.update(self.config.excluded_dirs)
        
        # File regex patterns to ignore (matched against lowercased names)
        self.ignore_file_patterns = {
            # Compiled/Generated
            r'.*\.pyc$', r'.*\.pyo$', r'.*\.o$', r'.*\.obj$',
            r'.*\.so$', r'.*\.dll$', r'.*\.dylib$',
            r'.*\.class$', r'.*\.jar$', r'.*\.exe$',
            # Lock files (except important ones)
            r'.*\.lock$', r'.*\.pid$',
            # Logs
            r'.*\.log$', r'.*\.out$',
            # Backups
            r'.*~$', r'.*\.bak$', r'.*\.orig$',
            # OS files
            r'\.DS_Store$', r'Thumbs\.db$',
        }
        # Honor configured extension exclusions and custom ignore regexes.
        for extension in self.config.excluded_extensions:
            self.ignore_file_patterns.add(r'.*' + re.escape(extension) + r'$')
        self.ignore_file_patterns.update(self.config.custom_ignore_patterns)
        
        log_debug("Initialized ProjectScanner")
    
    def scan_project(self, project_path: Union[str, Path]) -> ProjectStructure:
        """
        Perform comprehensive scan of a project directory.
        
        Args:
            project_path: Path to the project root directory
            
        Returns:
            ProjectStructure with complete project analysis

        Raises:
            AnalysisError: If the path is missing, not a directory, or
                unreadable.

        NOTE(review): despite the annotation, this actually returns the
        ProjectInfo produced by _convert_to_project_info (kept for test
        compatibility) -- confirm callers expect that type.
        """
        project_path = Path(project_path).resolve()
        
        if not project_path.exists() or not project_path.is_dir():
            raise AnalysisError(f"Project path does not exist or is not a directory: {project_path}")
        
        log_info(f"Scanning project: {project_path}")
        
        # Initialize project structure
        project_name = project_path.name
        languages = set()
        files = []
        total_size = 0
        file_count_by_language = defaultdict(int)
        
        # Scan all files
        try:
            project_files = self._find_project_files(project_path)
        except (PermissionError, OSError) as e:
            raise AnalysisError(f"Cannot access project directory: {e}")
        
        # Per-file analysis; individual failures are logged and skipped so
        # one unreadable file does not abort the whole scan.
        for file_path in project_files:
            try:
                project_file = self._analyze_file(file_path, project_path)
                files.append(project_file)
                languages.add(project_file.language)
                total_size += project_file.size_bytes
                file_count_by_language[project_file.language] += 1
            except Exception as e:
                log_debug(f"Error analyzing file {file_path}: {e}")
                continue
        
        # Detect build systems
        build_systems = self._detect_build_systems(project_path, files)
        
        # Categorize directories
        source_dirs = self._find_source_directories(project_path, files)
        test_dirs = self._find_test_directories(project_path, files)
        doc_dirs = self._find_documentation_directories(project_path, files)
        config_dirs = self._find_config_directories(project_path, files)
        
        # Build dependency graph
        dependency_graph = self._build_dependency_graph(files, build_systems)
        
        # Determine project type and generate statistics
        # (these helpers are defined further down in this module)
        project_type = self._determine_project_type(languages, build_systems)
        statistics = self._generate_project_statistics(files, languages, build_systems)
        
        # Detect workspace structure and create metadata
        metadata = self._detect_workspace_structure(project_path, files, build_systems)
        
        project_structure = ProjectStructure(
            root_path=project_path,
            name=project_name,
            languages=languages,
            build_systems=build_systems,
            source_directories=source_dirs,
            test_directories=test_dirs,
            documentation_directories=doc_dirs,
            config_directories=config_dirs,
            files=files,
            total_size_bytes=total_size,
            file_count_by_language=dict(file_count_by_language),
            dependency_graph=dependency_graph,
            project_type=project_type,
            statistics=statistics,
            # entry_points are expected inside the generated statistics dict
            entry_points=statistics.get('entry_points', []),
            metadata=metadata
        )
        
        log_info(f"Project scan complete: {len(files)} files, "
                f"{len(languages)} languages, {len(build_systems)} build systems")
        
        # For backward compatibility, also create and store ProjectInfo version
        self._last_project_info = self._convert_to_project_info(project_structure)
        
        return self._last_project_info
    
    def _convert_to_project_info(self, project_structure: ProjectStructure) -> 'ProjectInfo':
        """Convert ProjectStructure to ProjectInfo for test compatibility.

        Paths are stringified and BuildSystem records are mapped to their
        BuildSystemType enum members (UNKNOWN when no member matches).

        NOTE(review): ProjectInfo and BuildSystemType are not defined in the
        code visible in this module -- assumed declared elsewhere.
        """
        return ProjectInfo(
            root_path=str(project_structure.root_path),
            project_type=project_structure.project_type,
            languages=list(project_structure.languages),
            build_systems=[getattr(BuildSystemType, bs.name.upper(), BuildSystemType.UNKNOWN) for bs in project_structure.build_systems],
            name=project_structure.name,
            source_directories=[str(d) for d in project_structure.source_directories],
            test_directories=[str(d) for d in project_structure.test_directories],
            documentation_directories=[str(d) for d in project_structure.documentation_directories],
            config_directories=[str(d) for d in project_structure.config_directories],
            files=[f.relative_path for f in project_structure.files],
            total_size_bytes=project_structure.total_size_bytes,
            file_count_by_language=project_structure.file_count_by_language,
            dependency_graph=project_structure.dependency_graph,
            statistics=project_structure.statistics or {},
            entry_points=project_structure.entry_points,
            metadata=project_structure.metadata,
            # Flattened union of every build system's dependency list.
            dependencies=sum([bs.dependencies for bs in project_structure.build_systems], [])
        )
    
    def _find_project_files(self, project_path: Path) -> List[Path]:
        """Find all relevant files in the project, respecting ignore patterns."""
        files = []
        
        for root, dirs, filenames in os.walk(project_path):
            root_path = Path(root)
            
            # Filter out ignored directories
            dirs[:] = [d for d in dirs if not self._should_ignore_directory(d)]
            
            for filename in filenames:
                file_path = root_path / filename
                
                # Skip ignored files
                if self._should_ignore_file(filename):
                    continue
                
                # Skip binary files (basic check)
                if self._is_likely_binary(file_path):
                    continue
                
                files.append(file_path)
        
        return files
    
    def _should_ignore_directory(self, dirname: str) -> bool:
        """Check if directory should be ignored."""
        return dirname in self.ignore_directories
    
    def _should_ignore_file(self, filename: str) -> bool:
        """Check if file should be ignored based on patterns."""
        filename_lower = filename.lower()
        
        for pattern in self.ignore_file_patterns:
            if re.match(pattern, filename_lower):
                return True
        
        return False
    
    def _is_likely_binary(self, file_path: Path) -> bool:
        """Basic check if file is likely binary."""
        try:
            # Check file size first (very large files might be binary)
            if file_path.stat().st_size > 10 * 1024 * 1024:  # 10MB
                return True
            
            # Try to read a small portion as text
            with open(file_path, 'rb') as f:
                chunk = f.read(1024)
                
                # If chunk contains null bytes, likely binary
                if b'\x00' in chunk:
                    return True
                
                # Try to decode as UTF-8
                try:
                    chunk.decode('utf-8')
                    return False
                except UnicodeDecodeError:
                    # Try other encodings
                    for encoding in ['latin1', 'cp1252']:
                        try:
                            chunk.decode(encoding)
                            return False
                        except UnicodeDecodeError:
                            continue
                    return True
        
        except (OSError, PermissionError):
            return True  # Assume binary if can't read
        
        return False
    
    def _analyze_file(self, file_path: Path, project_root: Path) -> ProjectFile:
        """Build a ProjectFile record describing one on-disk file.

        Raises OSError if the file cannot be stat'ed; callers skip such
        files.
        """
        info = file_path.stat()
        rel = str(file_path.relative_to(project_root))
        
        return ProjectFile(
            path=file_path,
            language=self._detect_file_language(file_path),
            size_bytes=info.st_size,
            last_modified=info.st_mtime,
            file_type=self._classify_file_type(file_path, rel),
            relative_path=rel,
            is_binary=False,  # binaries were already filtered out in discovery
            encoding=self._detect_encoding(file_path),
        )
    
    def _detect_file_language(self, file_path: Path) -> str:
        """Detect the programming language of a file."""
        suffix = file_path.suffix.lower()
        
        for language, extensions in self.language_extensions.items():
            if suffix in extensions:
                return language
        
        # Special cases based on filename
        filename = file_path.name.lower()
        if filename in {'dockerfile', 'dockerfile.prod', 'dockerfile.dev'}:
            return 'docker'
        elif filename in {'vagrantfile'}:
            return 'ruby'
        elif filename.startswith('makefile'):
            return 'make'
        
        return 'unknown'
    
    def _classify_file_type(self, file_path: Path, relative_path: str) -> str:
        """Classify file type based on path and name."""
        path_lower = relative_path.lower()
        filename = file_path.name.lower()
        
        # Test files
        if ('test' in path_lower or 'spec' in path_lower or
            filename.startswith('test_') or filename.endswith('_test.py') or
            filename.endswith('.test.js') or filename.endswith('.spec.js')):
            return 'test'
        
        # Configuration files
        config_patterns = [
            'config', '.env', 'settings', 'pyproject.toml',
            'package.json', 'cargo.toml', 'cmake', 'makefile'
        ]
        for pattern in config_patterns:
            if pattern in path_lower or pattern in filename:
                return 'config'
        
        # Build files
        build_patterns = [
            'build', 'dist', 'setup.py', 'configure',
            'cmakelists.txt', 'build.gradle'
        ]
        for pattern in build_patterns:
            if pattern in path_lower or pattern in filename:
                return 'build'
        
        # Documentation
        doc_patterns = ['doc', 'readme', '.md', 'man', 'help']
        for pattern in doc_patterns:
            if pattern in path_lower:
                return 'doc'
        
        # Header files
        if file_path.suffix.lower() in {'.h', '.hpp', '.hxx'}:
            return 'header'
        
        # Default to source
        return 'source'
    
    def _detect_encoding(self, file_path: Path) -> Optional[str]:
        """Detect file encoding."""
        try:
            import chardet
            
            with open(file_path, 'rb') as f:
                raw_data = f.read(4096)  # Read first 4KB
                result = chardet.detect(raw_data)
                return result.get('encoding')
        
        except (ImportError, Exception):
            # Fallback to basic detection
            try:
                with open(file_path, 'r', encoding='utf-8') as f:
                    f.read(1024)
                return 'utf-8'
            except UnicodeDecodeError:
                return 'latin1'
    
    def _detect_build_systems(self, project_path: Path, files: Optional[List[ProjectFile]] = None) -> List[BuildSystem]:
        """Detect build systems present in the project.

        Args:
            project_path: Project root; used to discover files when
                ``files`` is None.
            files: Pre-analyzed project files, if the caller already has
                them.

        Returns:
            One BuildSystem per detected system, with dependencies and
            scripts parsed from its configuration files where supported
            (npm, setuptools, cmake).

        Fix: matched config files are now deduplicated -- the exact-name
        lookup and the substring scan previously appended the same file
        multiple times to ``config_files``.
        """
        build_systems = []
        
        # If files not provided, scan the project path for files
        if files is None:
            files = []
            for file_path in self._find_project_files(project_path):
                try:
                    files.append(self._analyze_file(file_path, project_path))
                except Exception:
                    continue
        
        # Map lowercased filenames to files for quick exact-name lookups
        file_map = {f.path.name.lower(): f for f in files}
        
        for build_system_name, patterns in self.build_system_patterns.items():
            # Dict keyed by path dedupes while preserving discovery order.
            matched: Dict[Path, None] = {}
            
            for pattern in patterns:
                needle = pattern.lower()
                # Exact filename match
                if needle in file_map:
                    matched.setdefault(file_map[needle].path, None)
                # Substring match (e.g. 'cmake' inside 'CMakeLists.txt')
                for candidate in files:
                    if needle in candidate.path.name.lower():
                        matched.setdefault(candidate.path, None)
            
            config_files = list(matched)
            
            if config_files:
                # Root is the config file closest to the project root
                root_path = min(config_files, key=lambda p: len(p.parts)).parent
                
                # Extract additional info based on build system type
                dependencies = []
                scripts = {}
                target_dirs = []
                
                try:
                    if build_system_name == 'npm':
                        package_json = next((f for f in config_files 
                                           if f.name == 'package.json'), None)
                        if package_json:
                            deps, scr = self._parse_package_json(package_json)
                            dependencies.extend(deps)
                            scripts.update(scr)
                            target_dirs = [root_path / 'node_modules', root_path / 'dist']
                    
                    elif build_system_name == 'setuptools':
                        setup_py = next((f for f in config_files 
                                       if f.name == 'setup.py'), None)
                        pyproject_toml = next((f for f in config_files 
                                             if f.name == 'pyproject.toml'), None)
                        if setup_py:
                            dependencies.extend(self._parse_setup_py(setup_py))
                        if pyproject_toml:
                            dependencies.extend(self._parse_pyproject_toml(pyproject_toml))
                        target_dirs = [root_path / 'build', root_path / 'dist']
                    
                    elif build_system_name == 'cmake':
                        cmake_file = next((f for f in config_files 
                                         if 'cmake' in f.name.lower()), None)
                        if cmake_file:
                            dependencies.extend(self._parse_cmake(cmake_file))
                        target_dirs = [root_path / 'build', root_path / 'cmake-build-debug']
                
                except Exception as e:
                    # Parsing problems downgrade to a debug log; detection stands.
                    log_debug(f"Error parsing build system {build_system_name}: {e}")
                
                build_systems.append(BuildSystem(
                    name=build_system_name,
                    root_path=root_path,
                    config_files=config_files,
                    target_directories=target_dirs,
                    dependencies=dependencies,
                    scripts=scripts
                ))
        
        return build_systems
    
    def _parse_package_json(self, package_json_path: Path) -> Tuple[List[str], Dict[str, str]]:
        """Parse package.json for dependencies and scripts."""
        try:
            with open(package_json_path, 'r') as f:
                data = json.load(f)
            
            dependencies = []
            dependencies.extend(data.get('dependencies', {}).keys())
            dependencies.extend(data.get('devDependencies', {}).keys())
            
            scripts = data.get('scripts', {})
            
            return dependencies, scripts
        except Exception:
            return [], {}
    
    def _parse_setup_py(self, setup_py_path: Path) -> List[str]:
        """Extract dependencies from setup.py (basic parsing)."""
        try:
            with open(setup_py_path, 'r') as f:
                content = f.read()
            
            # Look for install_requires pattern
            import re
            match = re.search(r'install_requires\s*=\s*\[(.*?)\]', content, re.DOTALL)
            if match:
                deps_str = match.group(1)
                # Extract quoted dependencies
                deps = re.findall(r'["\']([^"\']+)["\']', deps_str)
                return [dep.split('>=')[0].split('==')[0].strip() for dep in deps]
        except Exception:
            pass
        
        return []
    
    def _parse_pyproject_toml(self, pyproject_path: Path) -> List[str]:
        """Extract dependencies from pyproject.toml."""
        try:
            with open(pyproject_path, 'r') as f:
                data = toml.load(f)
            
            dependencies = []
            
            # Poetry format
            poetry_deps = data.get('tool', {}).get('poetry', {}).get('dependencies', {})
            dependencies.extend([dep for dep in poetry_deps.keys() if dep != 'python'])
            
            # PEP 621 format
            project_deps = data.get('project', {}).get('dependencies', [])
            dependencies.extend([dep.split('>=')[0].split('==')[0].strip() for dep in project_deps])
            
            return dependencies
        except Exception:
            return []
    
    def _parse_cmake(self, cmake_path: Path) -> List[str]:
        """Extract dependencies from CMake files (basic parsing)."""
        try:
            with open(cmake_path, 'r') as f:
                content = f.read()
            
            # Look for find_package calls
            import re
            packages = re.findall(r'find_package\s*\(\s*(\w+)', content, re.IGNORECASE)
            return packages
        except Exception:
            return []
    
    def _find_source_directories(self, project_path: Path, files: List[ProjectFile]) -> List[Path]:
        """Identify source code directories."""
        source_dirs = set()
        
        for file in files:
            if file.file_type == 'source' and file.language in {'python', 'cpp', 'typescript', 'fortran'}:
                source_dirs.add(file.path.parent)
        
        # Common source directory patterns
        common_source_dirs = ['src', 'lib', 'source', 'code']
        for dirname in common_source_dirs:
            src_dir = project_path / dirname
            if src_dir.exists() and src_dir.is_dir():
                source_dirs.add(src_dir)
        
        return sorted(list(source_dirs))
    
    def _find_test_directories(self, project_path: Path, files: List[ProjectFile]) -> List[Path]:
        """Identify test directories."""
        test_dirs = set()
        
        for file in files:
            if file.file_type == 'test':
                test_dirs.add(file.path.parent)
        
        # Common test directory patterns
        common_test_dirs = ['test', 'tests', 'spec', 'specs', '__tests__']
        for dirname in common_test_dirs:
            test_dir = project_path / dirname
            if test_dir.exists() and test_dir.is_dir():
                test_dirs.add(test_dir)
        
        return sorted(list(test_dirs))
    
    def _find_documentation_directories(self, project_path: Path, files: List[ProjectFile]) -> List[Path]:
        """Identify documentation directories."""
        doc_dirs = set()
        
        for file in files:
            if file.file_type == 'doc':
                doc_dirs.add(file.path.parent)
        
        # Common documentation directory patterns
        common_doc_dirs = ['doc', 'docs', 'documentation', 'man', 'help']
        for dirname in common_doc_dirs:
            doc_dir = project_path / dirname
            if doc_dir.exists() and doc_dir.is_dir():
                doc_dirs.add(doc_dir)
        
        return sorted(list(doc_dirs))
    
    def _find_config_directories(self, project_path: Path, files: List[ProjectFile]) -> List[Path]:
        """Identify configuration directories."""
        config_dirs = set()
        
        for file in files:
            if file.file_type in {'config', 'build'}:
                config_dirs.add(file.path.parent)
        
        # Common config directory patterns
        common_config_dirs = ['config', 'conf', 'cfg', 'settings', '.config']
        for dirname in common_config_dirs:
            config_dir = project_path / dirname
            if config_dir.exists() and config_dir.is_dir():
                config_dirs.add(config_dir)
        
        return sorted(list(config_dirs))
    
    def _build_dependency_graph(self, files: List[ProjectFile], 
                               build_systems: List[BuildSystem]) -> Dict[str, List[str]]:
        """Build a basic dependency graph from build systems and file analysis."""
        dependency_graph = {}
        
        # Add build system dependencies
        for build_system in build_systems:
            build_system_key = f"build_system:{build_system.name}"
            dependency_graph[build_system_key] = build_system.dependencies
        
        # Add language-specific dependencies (basic analysis)
        language_deps = defaultdict(list)
        
        for file in files:
            if file.language == 'python':
                # Extract import statements (basic)
                try:
                    with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                        content = f.read()
                    
                    import re
                    imports = re.findall(r'^\s*(?:from\s+(\S+)\s+)?import\s+(\S+)', content, re.MULTILINE)
                    for from_module, import_name in imports:
                        if from_module:
                            language_deps[file.relative_path].append(from_module)
                        else:
                            language_deps[file.relative_path].append(import_name.split('.')[0])
                
                except Exception:
                    pass
            
            elif file.language == 'cpp':
                # Extract #include statements (basic)
                try:
                    with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                        content = f.read()
                    
                    import re
                    includes = re.findall(r'#include\s+[<"]([^>"]+)[>"]', content)
                    language_deps[file.relative_path].extend(includes)
                
                except Exception:
                    pass
        
        # Merge language dependencies
        dependency_graph.update(dict(language_deps))
        
        return dependency_graph
    
    def get_project_statistics(self, project_structure: ProjectStructure) -> Dict[str, Any]:
        """Summarize a scanned project as a nested statistics dictionary.

        Sections: overview, per-language counts, build systems, directory
        counts, file-type tallies, the ten largest files, and a dependency
        summary.
        """
        files = project_structure.files
        
        stats: Dict[str, Any] = {}
        
        stats['overview'] = {
            'name': project_structure.name,
            'root_path': str(project_structure.root_path),
            'total_files': len(files),
            'total_size_mb': round(project_structure.total_size_bytes / (1024 * 1024), 2),
            'languages_count': len(project_structure.languages),
            'build_systems_count': len(project_structure.build_systems)
        }
        stats['languages'] = dict(project_structure.file_count_by_language)
        stats['build_systems'] = [bs.name for bs in project_structure.build_systems]
        stats['directories'] = {
            'source': len(project_structure.source_directories),
            'test': len(project_structure.test_directories),
            'documentation': len(project_structure.documentation_directories),
            'config': len(project_structure.config_directories)
        }
        stats['file_types'] = self._count_file_types(files)
        stats['largest_files'] = self._get_largest_files(files, 10)
        stats['dependency_summary'] = {
            'total_dependencies': len(project_structure.dependency_graph),
            'external_packages': self._count_external_dependencies(project_structure)
        }
        
        return stats
    
    def _count_file_types(self, files: List[ProjectFile]) -> Dict[str, int]:
        """Count files by type."""
        type_counts = defaultdict(int)
        for file in files:
            type_counts[file.file_type] += 1
        return dict(type_counts)
    
    def _get_largest_files(self, files: List[ProjectFile], count: int) -> List[Dict[str, Any]]:
        """Get the largest files in the project."""
        sorted_files = sorted(files, key=lambda f: f.size_bytes, reverse=True)[:count]
        return [
            {
                'path': f.relative_path,
                'size_kb': round(f.size_bytes / 1024, 1),
                'language': f.language,
                'type': f.file_type
            }
            for f in sorted_files
        ]
    
    def _count_external_dependencies(self, project_structure: ProjectStructure) -> int:
        """Count external package dependencies across all build systems."""
        external_deps = set()
        
        for build_system in project_structure.build_systems:
            external_deps.update(build_system.dependencies)
        
        return len(external_deps)
    
    def _determine_project_type(self, languages: Set[str], build_systems: List[BuildSystem]) -> 'ProjectType':
        """Classify the project from its primary languages.

        Falls back to build-system hints when only config/markup languages
        are present; multiple primary languages yield MIXED.
        """
        # Configuration and markup languages never define what kind of
        # project this is, so drop them before classifying.
        non_primary = {'unknown', 'json', 'yaml', 'xml', 'markdown', 'latex'}
        primary = {lang for lang in languages if lang not in non_primary}
        
        if not primary:
            # No real source languages: infer from the first recognized
            # build system, in the order they were detected.
            hints = {
                'npm': ProjectType.TYPESCRIPT,  # npm projects default to TypeScript
                'setuptools': ProjectType.PYTHON,
                'cmake': ProjectType.CPP,
                'cargo': ProjectType.RUST,
                'maven': ProjectType.JAVA,
                'gradle': ProjectType.JAVA,
            }
            for bs in build_systems:
                if bs.name in hints:
                    return hints[bs.name]
            return ProjectType.UNKNOWN
        
        if len(primary) == 1:
            by_language = {
                'python': ProjectType.PYTHON,
                'cpp': ProjectType.CPP,
                'typescript': ProjectType.TYPESCRIPT,
                'fortran': ProjectType.FORTRAN,
                'rust': ProjectType.RUST,
                'java': ProjectType.JAVA,
            }
            sole = next(iter(primary))
            # A single language we have no dedicated type for maps to UNKNOWN.
            return by_language.get(sole, ProjectType.UNKNOWN)
        
        # Two or more primary languages.
        return ProjectType.MIXED
    
    def _generate_project_statistics(self, files: List[ProjectFile], languages: Set[str], build_systems: List[BuildSystem]) -> Dict[str, Any]:
        """Generate project statistics for test compatibility."""
        stats = {}
        
        # File statistics by language
        for lang in languages:
            lang_files = [f for f in files if f.language == lang]
            stats[f"{lang}_files"] = [f.relative_path for f in lang_files]
        
        # Build system statistics
        stats['build_systems'] = [bs.name for bs in build_systems]
        
        # Size statistics
        stats['total_files'] = len(files)
        stats['total_size_bytes'] = sum(f.size_bytes for f in files)
        
        # Entry points detection - scan files for entry point patterns
        entry_points = []
        for file in files:
            if file.language in {'python', 'cpp', 'typescript', 'fortran'}:
                detected_entry_points = self._detect_file_entry_points(file)
                entry_points.extend(detected_entry_points)
        
        stats['entry_points'] = entry_points
        
        return stats

    def _detect_file_entry_points(self, file: ProjectFile) -> List[EntryPoint]:
        """Detect entry points in a single file."""
        entry_points = []
        
        try:
            with open(file.path, 'r', encoding=file.encoding or 'utf-8') as f:
                content = f.read()
                lines = content.split('\n')
            
            if file.language == 'python':
                # Look for if __name__ == '__main__':
                for i, line in enumerate(lines):
                    if "__name__" in line and "__main__" in line:
                        entry_points.append(EntryPoint(
                            name="__main__",
                            file_path=str(file.path),
                            entry_type="main",
                            line_number=i + 1,
                            language=file.language
                        ))
                
                # Look for common entry point function names
                for pattern in ['def main(', 'def cli(', 'def run(', 'def app(', 'def start(']:
                    for i, line in enumerate(lines):
                        if pattern in line:
                            func_name = pattern.split('(')[0].replace('def ', '').strip()
                            entry_points.append(EntryPoint(
                                name=func_name,
                                file_path=str(file.path),
                                entry_type="main",
                                line_number=i + 1,
                                language=file.language
                            ))
            
            elif file.language == 'cpp':
                # Look for main function
                import re
                for i, line in enumerate(lines):
                    if re.search(r'int\s+main\s*\(', line):
                        entry_points.append(EntryPoint(
                            name="main",
                            file_path=str(file.path),
                            entry_type="main",
                            line_number=i + 1,
                            language=file.language
                        ))
            
            elif file.language == 'typescript':
                # Look for common entry patterns
                for i, line in enumerate(lines):
                    if 'function main(' in line or 'const main =' in line:
                        entry_points.append(EntryPoint(
                            name="main",
                            file_path=str(file.path),
                            entry_type="main",
                            line_number=i + 1,
                            language=file.language
                        ))
                    if 'export default' in line:
                        entry_points.append(EntryPoint(
                            name="default",
                            file_path=str(file.path),
                            entry_type="main",
                            line_number=i + 1,
                            language=file.language
                        ))
            
            elif file.language == 'fortran':
                # Look for program statement
                import re
                for i, line in enumerate(lines):
                    match = re.search(r'^\s*program\s+(\w+)', line, re.IGNORECASE)
                    if match:
                        entry_points.append(EntryPoint(
                            name=match.group(1),
                            file_path=str(file.path),
                            entry_type="main",
                            line_number=i + 1,
                            language=file.language
                        ))
        
        except Exception:
            pass
        
        return entry_points

    def _detect_workspace_structure(self, project_path: Path, files: List[ProjectFile], build_systems: List[BuildSystem]) -> Dict[str, Any]:
        """Detect workspace/monorepo structure."""
        metadata = {}
        
        # Check for common workspace patterns
        workspace_indicators = [
            'lerna.json', 'nx.json', 'pnpm-workspace.yaml', 'rush.json'
        ]
        
        for file in files:
            if file.path.name in workspace_indicators:
                metadata['workspace'] = True
                metadata['workspace_type'] = file.path.name.split('.')[0]  # lerna, nx, etc.
                break
        
        # Check for package.json with workspaces
        for file in files:
            if file.path.name == 'package.json':
                try:
                    with open(file.path, 'r') as f:
                        content = f.read()
                        if '"workspaces"' in content:
                            metadata['workspace'] = True
                            metadata['workspace_type'] = 'npm_workspaces'
                except Exception:
                    pass
        
        # Check for multiple related projects (simple heuristic)
        if not metadata.get('workspace'):
            # Look for multiple package.json or setup.py files
            package_files = [f for f in files if f.path.name in ['package.json', 'setup.py', 'Cargo.toml', 'go.mod']]
            if len(package_files) > 1:
                metadata['multi_project'] = True
                metadata['project_count'] = len(package_files)
        
        return metadata

    # Compatibility methods for test expectations
    def _find_entry_points(self, project_path: Path, languages: List[str]) -> List[EntryPoint]:
        """Locate entry points across the project for the given languages.

        Files that fail analysis are skipped silently (best-effort scan).
        """
        found = []
        for candidate in self._find_project_files(project_path):
            try:
                analyzed = self._analyze_file(candidate, project_path)
                if analyzed.language not in languages:
                    continue
                found.extend(self._detect_file_entry_points(analyzed))
            except Exception:
                continue
        return found
    
    def _analyze_dependencies(self, project_path: Path, languages: List[str]) -> List[str]:
        """Analyze project dependencies."""
        dependencies = []
        
        # Check for common dependency files
        dependency_files = {
            'python': ['requirements.txt', 'setup.py', 'pyproject.toml'],
            'typescript': ['package.json'],
            'cpp': ['CMakeLists.txt', 'conanfile.txt'],
            'fortran': ['CMakeLists.txt']
        }
        
        for language in languages:
            if language in dependency_files:
                for dep_file in dependency_files[language]:
                    dep_path = project_path / dep_file
                    if dep_path.exists():
                        deps = self._extract_dependencies_from_file(dep_path, language)
                        dependencies.extend(deps)
        
        return dependencies
    
    def _extract_dependencies_from_file(self, file_path: Path, language: str) -> List[str]:
        """Extract dependencies from a specific file."""
        dependencies = []
        
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            
            if language == 'python':
                if file_path.name == 'requirements.txt':
                    # Parse requirements.txt
                    for line in content.split('\n'):
                        line = line.strip()
                        if line and not line.startswith('#'):
                            dep_name = line.split('>=')[0].split('==')[0].split('<')[0].strip()
                            if dep_name:
                                dependencies.append(dep_name)
                
                elif file_path.name == 'setup.py':
                    # Extract from setup.py install_requires
                    import re
                    match = re.search(r'install_requires\s*=\s*\[(.*?)\]', content, re.DOTALL)
                    if match:
                        deps_str = match.group(1)
                        deps = re.findall(r'["\']([^"\']+)["\']', deps_str)
                        dependencies.extend([d.split('>=')[0].split('==')[0].strip() for d in deps])
                
                elif file_path.name == 'pyproject.toml':
                    # Basic TOML parsing for dependencies
                    if 'dependencies' in content:
                        import re
                        deps = re.findall(r'"([^"]+)"', content)
                        dependencies.extend([d.split('>=')[0].split('==')[0].strip() for d in deps if not d.startswith('python')])
            
            elif language == 'typescript' and file_path.name == 'package.json':
                import json
                try:
                    data = json.loads(content)
                    dependencies.extend(data.get('dependencies', {}).keys())
                    dependencies.extend(data.get('devDependencies', {}).keys())
                except json.JSONDecodeError:
                    pass
            
            elif language == 'cpp' and 'cmake' in file_path.name.lower():
                # Extract find_package calls from CMake
                import re
                packages = re.findall(r'find_package\s*\(\s*(\w+)', content, re.IGNORECASE)
                dependencies.extend(packages)
        
        except Exception:
            pass
        
        return dependencies
    
    def _get_file_statistics(self, project_path: Path) -> Dict[str, Any]:
        """Tally per-language file counts and total line count for a project.

        Best-effort: unreadable files still count toward total_files but
        contribute no lines; scan failures return whatever was tallied.
        """
        tracked = ('python', 'cpp', 'typescript', 'fortran')
        stats: Dict[str, Any] = {
            'total_files': 0,
            'python_files': 0,
            'cpp_files': 0,
            'typescript_files': 0,
            'fortran_files': 0,
            'total_lines': 0
        }
        
        try:
            for path in self._find_project_files(project_path):
                analyzed = self._analyze_file(path, project_path)
                stats['total_files'] += 1
                
                # Per-language counters share the "<language>_files" naming.
                if analyzed.language in tracked:
                    stats[f"{analyzed.language}_files"] += 1
                
                try:
                    with open(path, 'r', encoding=analyzed.encoding or 'utf-8') as f:
                        stats['total_lines'] += sum(1 for _ in f)
                except Exception:
                    # Unreadable files still count toward total_files.
                    pass
        except Exception:
            # Return whatever was tallied before the failure.
            pass
        
        return stats
    
    def _filter_files_by_extension(self, project_path: Path, extensions: List[str]) -> List[Path]:
        """Return project files whose path ends with any of the given extensions."""
        matches: List[Path] = []
        # str.endswith accepts a tuple of suffixes — one call per file.
        suffixes = tuple(extensions)
        
        try:
            for candidate in self._find_project_files(project_path):
                if str(candidate).endswith(suffixes):
                    matches.append(candidate)
        except Exception:
            # Best-effort: an unscannable project yields whatever matched so far.
            pass
        
        return matches


# API Compatibility Exports - for test compatibility
from enum import Enum

class BuildSystemType(Enum):
    """Enumeration of supported build systems.

    Values are the lower-case identifiers used as build-system names
    elsewhere in this module (e.g. the name strings checked in
    _determine_project_type).
    """
    CMAKE = "cmake"
    NPM = "npm"
    SETUPTOOLS = "setuptools"
    CARGO = "cargo"
    MAKE = "make"
    GRADLE = "gradle"
    MAVEN = "maven"
    UNKNOWN = "unknown"  # fallback when no build system is recognized

class ProjectType(Enum):
    """Enumeration of project types.

    The language-specific members, MIXED and UNKNOWN are produced by
    _determine_project_type; the remaining members exist for API/test
    compatibility (see the compatibility-exports section of this module).
    """
    PYTHON = "python"
    CPP = "cpp"
    TYPESCRIPT = "typescript"
    FORTRAN = "fortran"
    RUST = "rust"
    JAVA = "java"
    LIBRARY = "library"
    APPLICATION = "application"
    SERVICE = "service"
    FRAMEWORK = "framework"
    TOOL = "tool"
    MIXED = "mixed"        # more than one primary language detected
    UNKNOWN = "unknown"    # no primary language or build-system hint

# Compatibility alias: older tests import ScanConfig instead of ProjectScanConfig.
ScanConfig = ProjectScanConfig

@dataclass
class ProjectInfo:
    """Compatibility class for test expectations.

    Mirrors the scanner's result shape with plain, serializable fields so
    tests can construct results directly. `name` defaults to the final
    component of `root_path`.
    """
    root_path: str
    project_type: Optional['ProjectType'] = None
    languages: List[str] = field(default_factory=list)
    build_systems: List['BuildSystemType'] = field(default_factory=list)
    name: str = ""
    source_directories: List[str] = field(default_factory=list)
    test_directories: List[str] = field(default_factory=list)
    documentation_directories: List[str] = field(default_factory=list)
    config_directories: List[str] = field(default_factory=list)
    files: List[str] = field(default_factory=list)
    total_size_bytes: int = 0
    file_count_by_language: Dict[str, int] = field(default_factory=dict)
    dependency_graph: Dict[str, List[str]] = field(default_factory=dict)
    statistics: Dict[str, Any] = field(default_factory=dict)
    # Forward reference quoted for consistency with the other annotations above.
    entry_points: List['EntryPoint'] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)
    dependencies: List[str] = field(default_factory=list)
    
    def __post_init__(self):
        """Derive `name` from `root_path` when not explicitly supplied."""
        if not self.name:
            self.name = Path(self.root_path).name
    
    def to_dict(self) -> Dict[str, Any]:
        """Convert to a JSON-serializable dictionary.

        Enum members are flattened to upper-case strings; entry points keep
        only their name/path/type fields.
        """
        return {
            'root_path': self.root_path,
            'name': self.name,
            'languages': list(self.languages),
            # Accept both enum members and raw strings in build_systems.
            'build_systems': [str(bs.value).upper() if hasattr(bs, 'value') else str(bs).upper() for bs in self.build_systems],
            'source_directories': self.source_directories,
            'test_directories': self.test_directories,
            'documentation_directories': self.documentation_directories,
            'config_directories': self.config_directories,
            'total_size_bytes': self.total_size_bytes,
            'file_count_by_language': self.file_count_by_language,
            'project_type': self.project_type.value.upper() if self.project_type else None,
            'statistics': self.statistics or {},
            'entry_points': [{'name': ep.name, 'file_path': ep.file_path, 'entry_type': ep.entry_type} for ep in self.entry_points],
            # Bug fix: was hard-coded to []; now reflects the dataclass field.
            'dependencies': list(self.dependencies)
        }