"""
TypeScript/JavaScript module resolver.

Provides module resolution logic for ES6 imports, CommonJS requires,
TypeScript path mapping, and NPM package resolution.
"""

import json
import os
from pathlib import Path
from typing import Dict, List, Any, Optional, Set, Union, Tuple
import re

from ...core.error_handler import AnalysisError, log_debug, log_info


class ModuleResolver:
    """Resolver for TypeScript/JavaScript modules and dependencies."""
    
    def __init__(self, project_root: Optional[str] = None):
        """Initialize module resolver."""
        self.project_root = Path(project_root) if project_root else Path.cwd()
        self.tsconfig = None
        self.package_json = None
        self.node_modules_paths = []
        self.path_mappings = {}
        self.base_url = None
        
        self._load_project_config()
        self._find_node_modules()
    
    def _load_project_config(self):
        """Load tsconfig/jsconfig and package.json from the project root.

        Populates ``self.tsconfig``, ``self.base_url``, ``self.path_mappings``
        and ``self.package_json``.  Missing or malformed files are logged and
        skipped rather than raised.
        """
        # Load tsconfig.json (jsconfig.json is the JS-project equivalent).
        for config_name in ['tsconfig.json', 'jsconfig.json']:
            config_path = self.project_root / config_name
            if config_path.exists():
                try:
                    with open(config_path, 'r') as f:
                        self.tsconfig = json.load(f)
                    log_debug(f"Loaded TypeScript config: {config_path}")
                    break
                except (json.JSONDecodeError, OSError) as e:
                    # NOTE: tsconfig files may legally contain comments (JSONC),
                    # which strict json.load rejects; such configs are skipped.
                    log_debug(f"Error parsing {config_path}: {e}")

        # Extract compiler options.
        if self.tsconfig:
            compiler_options = self.tsconfig.get('compilerOptions', {})
            self.base_url = compiler_options.get('baseUrl')
            if self.base_url:
                self.base_url = self.project_root / self.base_url

            # Path-mapping targets resolve relative to baseUrl when set,
            # otherwise relative to the project root.  self.base_url is
            # already anchored at project_root above, so it must NOT be
            # prefixed with project_root a second time (doing so broke
            # resolution whenever project_root was a relative path).
            paths = compiler_options.get('paths', {})
            mapping_root = self.base_url if self.base_url else self.project_root
            for pattern, targets in paths.items():
                self.path_mappings[pattern] = [
                    mapping_root / target for target in targets
                ]

        # Load package.json for project metadata.
        package_path = self.project_root / 'package.json'
        if package_path.exists():
            try:
                with open(package_path, 'r') as f:
                    self.package_json = json.load(f)
                log_debug(f"Loaded package.json: {package_path}")
            except (json.JSONDecodeError, OSError) as e:
                log_debug(f"Error parsing package.json: {e}")
    
    def _find_node_modules(self):
        """Find all node_modules directories in the hierarchy."""
        current = self.project_root
        
        while current.parent != current:  # Until we reach the root
            node_modules = current / 'node_modules'
            if node_modules.exists() and node_modules.is_dir():
                self.node_modules_paths.append(node_modules)
            current = current.parent
        
        # Add global node_modules if available
        global_paths = [
            Path.home() / '.npm' / 'lib' / 'node_modules',
            Path('/usr/local/lib/node_modules'),
            Path('/usr/lib/node_modules')
        ]
        
        for global_path in global_paths:
            if global_path.exists() and global_path.is_dir():
                self.node_modules_paths.append(global_path)
    
    def resolve_import(self, import_path: str, from_file: str) -> Dict[str, Any]:
        """Resolve an import specifier to a file on disk.

        Dispatches on the specifier's shape: relative ('./x' or '../x'),
        absolute ('/x'), or bare package name ('pkg', '@scope/pkg').
        Returns a result dict with at least 'success' and 'import_type'.
        """
        source_file = Path(from_file).resolve()
        source_dir = source_file.parent

        log_debug(f"Resolving import '{import_path}' from {source_file}")

        if self._is_relative_import(import_path):
            return self._resolve_relative_import(import_path, source_dir)
        if self._is_absolute_import(import_path):
            return self._resolve_absolute_import(import_path, source_dir)
        return self._resolve_node_modules_import(import_path, source_dir)
    
    def _is_relative_import(self, import_path: str) -> bool:
        """Return True for specifiers starting with './' or '../'."""
        return import_path.startswith(('./', '../'))
    
    def _is_absolute_import(self, import_path: str) -> bool:
        """Return True for root-anchored specifiers (leading '/')."""
        return import_path[:1] == '/'
    
    def _resolve_relative_import(self, import_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve relative import path."""
        try:
            # Resolve the relative path
            target_path = (from_dir / import_path).resolve()
            
            # Try different file extensions
            resolved_file = self._try_file_extensions(target_path)
            if resolved_file:
                return {
                    'success': True,
                    'resolved_path': str(resolved_file),
                    'import_type': 'relative',
                    'original_path': import_path,
                    'exists': True
                }
            
            # Try as directory with index file
            index_file = self._try_index_files(target_path)
            if index_file:
                return {
                    'success': True,
                    'resolved_path': str(index_file),
                    'import_type': 'relative',
                    'original_path': import_path,
                    'exists': True,
                    'is_index': True
                }
            
            return {
                'success': False,
                'error': f'Could not resolve relative import: {import_path}',
                'import_type': 'relative',
                'original_path': import_path,
                'tried_paths': self._get_tried_paths(target_path)
            }
            
        except Exception as e:
            return {
                'success': False,
                'error': f'Error resolving relative import: {str(e)}',
                'import_type': 'relative',
                'original_path': import_path
            }
    
    def _resolve_absolute_import(self, import_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve absolute import path."""
        try:
            # Check if baseUrl is configured
            if self.base_url:
                target_path = self.base_url / import_path.lstrip('/')
            else:
                target_path = self.project_root / import_path.lstrip('/')
            
            resolved_file = self._try_file_extensions(target_path)
            if resolved_file:
                return {
                    'success': True,
                    'resolved_path': str(resolved_file),
                    'import_type': 'absolute',
                    'original_path': import_path,
                    'exists': True
                }
            
            index_file = self._try_index_files(target_path)
            if index_file:
                return {
                    'success': True,
                    'resolved_path': str(index_file),
                    'import_type': 'absolute',
                    'original_path': import_path,
                    'exists': True,
                    'is_index': True
                }
            
            return {
                'success': False,
                'error': f'Could not resolve absolute import: {import_path}',
                'import_type': 'absolute',
                'original_path': import_path
            }
            
        except Exception as e:
            return {
                'success': False,
                'error': f'Error resolving absolute import: {str(e)}',
                'import_type': 'absolute',
                'original_path': import_path
            }
    
    def _resolve_node_modules_import(self, import_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve node_modules import (package or scoped package)."""
        try:
            # Check path mappings first
            mapped_path = self._check_path_mappings(import_path)
            if mapped_path:
                return mapped_path
            
            # Parse package name and subpath
            package_name, subpath = self._parse_package_import(import_path)
            
            # Search in node_modules directories
            for node_modules in self.node_modules_paths:
                package_dir = node_modules / package_name
                if package_dir.exists():
                    resolved = self._resolve_package_import(package_dir, subpath, import_path)
                    if resolved['success']:
                        return resolved
            
            return {
                'success': False,
                'error': f'Package not found: {package_name}',
                'import_type': 'node_modules',
                'original_path': import_path,
                'package_name': package_name,
                'subpath': subpath,
                'searched_paths': [str(nm) for nm in self.node_modules_paths]
            }
            
        except Exception as e:
            return {
                'success': False,
                'error': f'Error resolving node_modules import: {str(e)}',
                'import_type': 'node_modules',
                'original_path': import_path
            }
    
    def _check_path_mappings(self, import_path: str) -> Optional[Dict[str, Any]]:
        """Check TypeScript path mappings."""
        for pattern, targets in self.path_mappings.items():
            if self._matches_pattern(import_path, pattern):
                # Replace wildcard in pattern with actual path
                if '*' in pattern:
                    wildcard_match = import_path.replace(pattern.replace('*', ''), '')
                    for target in targets:
                        target_path = Path(str(target).replace('*', wildcard_match))
                        resolved_file = self._try_file_extensions(target_path)
                        if resolved_file:
                            return {
                                'success': True,
                                'resolved_path': str(resolved_file),
                                'import_type': 'path_mapping',
                                'original_path': import_path,
                                'pattern': pattern,
                                'target': str(target)
                            }
                else:
                    # Exact match
                    for target in targets:
                        resolved_file = self._try_file_extensions(target)
                        if resolved_file:
                            return {
                                'success': True,
                                'resolved_path': str(resolved_file),
                                'import_type': 'path_mapping',
                                'original_path': import_path,
                                'pattern': pattern,
                                'target': str(target)
                            }
        
        return None
    
    def _matches_pattern(self, path: str, pattern: str) -> bool:
        """Check whether *path* matches a tsconfig-style ``paths`` pattern.

        Patterns consist of literal text plus '*' wildcards; everything else
        must match literally.
        """
        if '*' not in pattern:
            return path == pattern

        # Escape the literal pieces so regex metacharacters in the pattern
        # ('.', '+', ...) do not over-match, and anchor the whole string —
        # the old re.match left the end unanchored and did not escape.
        regex_pattern = '(.*)'.join(re.escape(part) for part in pattern.split('*'))
        return re.fullmatch(regex_pattern, path) is not None
    
    def _parse_package_import(self, import_path: str) -> Tuple[str, Optional[str]]:
        """Split an import specifier into (package_name, subpath).

        Scoped packages keep their '@scope/name' prefix intact; subpath is
        None when the specifier names the package root.
        """
        segments = import_path.split('/')
        # A scoped package name ('@scope/pkg') spans two segments,
        # a regular package name only one.
        name_len = 2 if import_path.startswith('@') and len(segments) >= 2 else 1
        package_name = '/'.join(segments[:name_len])
        rest = segments[name_len:]
        subpath = '/'.join(rest) if rest else None
        return package_name, subpath
    
    def _resolve_package_import(self, package_dir: Path, subpath: Optional[str], 
                              original_path: str) -> Dict[str, Any]:
        """Resolve import within a package directory."""
        try:
            # Load package.json
            package_json_path = package_dir / 'package.json'
            package_json = None
            
            if package_json_path.exists():
                try:
                    with open(package_json_path, 'r') as f:
                        package_json = json.load(f)
                except json.JSONDecodeError:
                    pass
            
            if subpath:
                # Import specific file/directory within package
                target_path = package_dir / subpath
                
                resolved_file = self._try_file_extensions(target_path)
                if resolved_file:
                    return {
                        'success': True,
                        'resolved_path': str(resolved_file),
                        'import_type': 'node_modules',
                        'original_path': original_path,
                        'package_dir': str(package_dir),
                        'subpath': subpath
                    }
                
                index_file = self._try_index_files(target_path)
                if index_file:
                    return {
                        'success': True,
                        'resolved_path': str(index_file),
                        'import_type': 'node_modules',
                        'original_path': original_path,
                        'package_dir': str(package_dir),
                        'subpath': subpath,
                        'is_index': True
                    }
            else:
                # Import package root - check package.json main/types fields
                entry_points = []
                
                if package_json:
                    # Check different entry point fields in order of preference
                    for field in ['types', 'typings', 'main', 'module', 'browser']:
                        if field in package_json:
                            entry_points.append(package_json[field])
                
                # Add default entry points
                entry_points.extend(['index.ts', 'index.tsx', 'index.js', 'index.jsx'])
                
                for entry_point in entry_points:
                    if not entry_point:
                        continue
                    
                    entry_path = package_dir / entry_point
                    if entry_path.exists() and entry_path.is_file():
                        return {
                            'success': True,
                            'resolved_path': str(entry_path),
                            'import_type': 'node_modules',
                            'original_path': original_path,
                            'package_dir': str(package_dir),
                            'entry_point': entry_point,
                            'package_json': package_json
                        }
            
            return {
                'success': False,
                'error': f'Could not resolve package import: {original_path}',
                'package_dir': str(package_dir),
                'package_json': package_json
            }
            
        except Exception as e:
            return {
                'success': False,
                'error': f'Error resolving package import: {str(e)}',
                'package_dir': str(package_dir)
            }
    
    def _try_file_extensions(self, base_path: Path) -> Optional[Path]:
        """Try different file extensions for a path."""
        extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts', '.json']
        
        # First try without extension
        if base_path.exists() and base_path.is_file():
            return base_path
        
        # Try with extensions
        for ext in extensions:
            file_path = base_path.with_suffix(ext)
            if file_path.exists() and file_path.is_file():
                return file_path
        
        return None
    
    def _try_index_files(self, dir_path: Path) -> Optional[Path]:
        """Try index files in a directory."""
        if not dir_path.exists() or not dir_path.is_dir():
            return None
        
        index_names = ['index.ts', 'index.tsx', 'index.js', 'index.jsx', 'index.d.ts']
        
        for index_name in index_names:
            index_path = dir_path / index_name
            if index_path.exists() and index_path.is_file():
                return index_path
        
        return None
    
    def _get_tried_paths(self, base_path: Path) -> List[str]:
        """Get list of paths that were tried for debugging."""
        tried = [str(base_path)]
        
        extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts', '.json']
        for ext in extensions:
            tried.append(str(base_path.with_suffix(ext)))
        
        # Add index file attempts
        if base_path.is_dir() or not base_path.exists():
            index_names = ['index.ts', 'index.tsx', 'index.js', 'index.jsx']
            for index_name in index_names:
                tried.append(str(base_path / index_name))
        
        return tried
    
    def resolve_all_imports(self, file_path: str, imports: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Resolve every import record of a file and report the outcome.

        Each record is expected to carry a 'moduleSpecifier' key; records
        without one are ignored (they still count toward total_imports).
        """
        resolved_imports = []
        failed_imports = []

        for import_info in imports:
            specifier = import_info.get('moduleSpecifier', '')
            if not specifier:
                continue

            resolution = self.resolve_import(specifier, file_path)
            bucket = resolved_imports if resolution['success'] else failed_imports
            bucket.append({**import_info, 'resolution': resolution})

        return {
            'file_path': file_path,
            'total_imports': len(imports),
            'resolved_count': len(resolved_imports),
            'failed_count': len(failed_imports),
            'resolved_imports': resolved_imports,
            'failed_imports': failed_imports
        }
    
    def build_dependency_graph(self, file_paths: List[str]) -> Dict[str, Any]:
        """Build an import dependency graph over *file_paths*.

        NOTE: import extraction is still a placeholder (empty list) until
        parsed file data is wired in, so edges only appear once imports
        are supplied per file.
        """
        graph = {
            'nodes': {},  # file_path -> node info
            'edges': [],  # [from_file, to_file, import_info]
            'external_dependencies': set(),
            'unresolved_imports': []
        }

        # One node per analyzed file.
        for file_path in file_paths:
            graph['nodes'][file_path] = {
                'path': file_path,
                'imports': [],
                'imported_by': [],
                'external_deps': []
            }

        # Resolve each file's imports and record edges.
        for file_path in file_paths:
            try:
                # Placeholder: would come from the parsed file data.
                file_imports = []

                report = self.resolve_all_imports(file_path, file_imports)

                for resolved in report['resolved_imports']:
                    target = resolved['resolution']['resolved_path']

                    if target in graph['nodes']:
                        # Edge between two files of the analyzed set.
                        graph['edges'].append([file_path, target, resolved])
                        graph['nodes'][file_path]['imports'].append(target)
                        graph['nodes'][target]['imported_by'].append(file_path)
                    else:
                        # Resolved outside the analyzed set -> external.
                        specifier = resolved.get('moduleSpecifier', '')
                        graph['external_dependencies'].add(specifier)
                        graph['nodes'][file_path]['external_deps'].append(specifier)

                for failed in report['failed_imports']:
                    graph['unresolved_imports'].append({
                        'file': file_path,
                        'import': failed
                    })

            except Exception as e:
                log_debug(f"Error processing imports for {file_path}: {e}")

        # Sets don't serialize to JSON; expose a list instead.
        graph['external_dependencies'] = list(graph['external_dependencies'])

        return graph
    
    def get_package_info(self, package_name: str) -> Optional[Dict[str, Any]]:
        """Get information about an installed package."""
        package_name, _ = self._parse_package_import(package_name)
        
        for node_modules in self.node_modules_paths:
            package_dir = node_modules / package_name
            package_json_path = package_dir / 'package.json'
            
            if package_json_path.exists():
                try:
                    with open(package_json_path, 'r') as f:
                        package_json = json.load(f)
                    
                    return {
                        'name': package_json.get('name', package_name),
                        'version': package_json.get('version', 'unknown'),
                        'description': package_json.get('description', ''),
                        'main': package_json.get('main', ''),
                        'types': package_json.get('types') or package_json.get('typings'),
                        'dependencies': package_json.get('dependencies', {}),
                        'peerDependencies': package_json.get('peerDependencies', {}),
                        'package_dir': str(package_dir),
                        'package_json_path': str(package_json_path)
                    }
                except (json.JSONDecodeError, IOError):
                    continue
        
        return None
    
    def find_circular_dependencies(self, dependency_graph: Dict[str, Any]) -> List[List[str]]:
        """Detect import cycles in *dependency_graph* via depth-first search.

        Each reported cycle is a path of file names starting and ending on
        the same node, e.g. ['a', 'b', 'a'].
        """
        nodes = dependency_graph['nodes']

        def visit(current: str, trail: List[str], seen: Set[str],
                  on_stack: Set[str]) -> List[List[str]]:
            seen.add(current)
            on_stack.add(current)
            trail.append(current)

            found = []
            for target in nodes[current]['imports']:
                if target not in seen:
                    # Recurse with a copy of the trail so sibling branches
                    # don't see each other's suffixes.
                    found.extend(visit(target, list(trail), seen, on_stack))
                elif target in on_stack:
                    # Back-edge: the trail from `target` onward closes a cycle.
                    start = trail.index(target)
                    found.append(trail[start:] + [target])

            on_stack.remove(current)
            return found

        all_cycles: List[List[str]] = []
        seen: Set[str] = set()

        for node in nodes:
            if node not in seen:
                all_cycles.extend(visit(node, [], seen, set()))

        return all_cycles