"""
TypeScript dependency resolver.

Resolves TypeScript imports, handles module resolution, and manages dependency
graphs for complex TypeScript projects with various module systems.
"""

from typing import Dict, List, Any, Optional, Set, Tuple
from collections import defaultdict, deque
from pathlib import Path
import json
import re

from ...core.error_handler import AnalysisError, log_info


class TypeScriptDependencyResolver:
    """Resolves TypeScript dependencies and module paths.

    Resolution follows the same precedence TypeScript/Node use: relative
    paths, absolute paths, tsconfig ``paths`` mappings, then bare-specifier
    lookup through ``node_modules`` (with Node built-ins as a fallback).
    Results are memoized per (importing file, specifier) pair.
    """
    
    def __init__(self, project_root: str = '.'):
        """Initialize the dependency resolver.

        Args:
            project_root: Directory expected to contain tsconfig.json and
                package.json; defaults to the current working directory.
        """
        self.project_root = Path(project_root)
        self.tsconfig: Dict[str, Any] = {}
        self.package_json: Dict[str, Any] = {}
        self.node_modules_cache: Dict[str, Any] = {}  # reserved; not populated yet
        self.resolution_cache: Dict[str, Dict[str, Any]] = {}
        self.path_mappings: Dict[str, Any] = {}
        self.load_project_config()
    
    def resolve_dependencies(self, file_path: str, imports: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Resolve all dependencies for a given file.

        Args:
            file_path: Path of the importing file.
            imports: Import records; each must carry a 'module' key with the
                raw import specifier.

        Returns:
            Dict with 'resolved_imports', 'unresolved_imports', the list of
            'external_dependencies' (package names), and a count summary.

        Raises:
            AnalysisError: If resolution fails unexpectedly.
        """
        try:
            resolved_imports = []
            unresolved_imports = []
            external_dependencies = set()
            
            for import_info in imports:
                module_path = import_info.get('module', '')
                if not module_path:
                    # Skip malformed records that carry no specifier.
                    continue
                
                resolution = self.resolve_module(module_path, file_path)
                
                if resolution['resolved']:
                    resolved_imports.append({
                        **import_info,
                        'resolved_path': resolution['resolved_path'],
                        'resolution_type': resolution['type'],
                        'exists': resolution.get('exists', True)
                    })
                    
                    # Only node_modules packages count as external here;
                    # built-ins are resolved but not listed as dependencies.
                    if resolution['type'] == 'external':
                        external_dependencies.add(resolution['package_name'])
                else:
                    unresolved_imports.append({
                        **import_info,
                        'reason': resolution.get('reason', 'Unknown resolution failure')
                    })
            
            return {
                'file_path': file_path,
                'resolved_imports': resolved_imports,
                'unresolved_imports': unresolved_imports,
                'external_dependencies': list(external_dependencies),
                'resolution_summary': {
                    'total_imports': len(imports),
                    'resolved_count': len(resolved_imports),
                    'unresolved_count': len(unresolved_imports),
                    'external_count': len(external_dependencies)
                }
            }
            
        except Exception as e:
            raise AnalysisError(f"Failed to resolve dependencies for {file_path}: {str(e)}")
    
    def resolve_module(self, module_path: str, from_file: str) -> Dict[str, Any]:
        """Resolve a single module import, memoized per (file, specifier)."""
        cache_key = f"{from_file}::{module_path}"
        if cache_key in self.resolution_cache:
            return self.resolution_cache[cache_key]
        
        resolution = self._perform_module_resolution(module_path, from_file)
        self.resolution_cache[cache_key] = resolution
        return resolution
    
    def _perform_module_resolution(self, module_path: str, from_file: str) -> Dict[str, Any]:
        """Dispatch a specifier to the appropriate resolution strategy."""
        from_dir = Path(from_file).parent
        
        # Relative path resolution ('./x', '../x').
        if module_path.startswith('./') or module_path.startswith('../'):
            return self._resolve_relative_module(module_path, from_dir)
        
        # Absolute path resolution.
        if module_path.startswith('/'):
            return self._resolve_absolute_module(module_path)
        
        # Path mapping resolution (tsconfig "paths").
        if self.path_mappings:
            path_mapped = self._resolve_path_mapping(module_path, from_dir)
            if path_mapped['resolved']:
                return path_mapped
        
        # Bare specifier: node_modules / built-in resolution.
        return self._resolve_node_module(module_path, from_dir)
    
    def _probe_extensions(self, base: Path, extensions: List[str]) -> Optional[Tuple[Path, str]]:
        """Find an existing file for ``base`` by trying candidate extensions.

        Extensions are first *appended* to the full name, so a specifier
        with a dotted segment such as './config.prod' correctly probes
        'config.prod.ts' (the old ``with_suffix`` approach replaced the
        '.prod' part and probed 'config.ts'). If the base already carries a
        suffix, suffix *replacement* is then tried as a fallback, since
        TypeScript allows importing './foo.js' backed by 'foo.ts' on disk.

        Returns:
            (existing path, extension tried) or None if nothing matched.
        """
        for ext in extensions:
            candidate = base.parent / (base.name + ext)
            if candidate.exists():
                return candidate, ext
        if base.suffix:
            for ext in extensions:
                candidate = base.with_suffix(ext)
                if candidate.exists():
                    return candidate, ext
        return None
    
    def _probe_index(self, directory: Path, extensions: List[str]) -> Optional[Tuple[Path, str]]:
        """Find an index.<ext> file inside ``directory``; None if absent."""
        if not directory.is_dir():
            return None
        for ext in extensions:
            candidate = directory / f"index{ext}"
            if candidate.exists():
                return candidate, ext
        return None
    
    def _resolve_relative_module(self, module_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve './...' and '../...' imports relative to the importer."""
        target_path = from_dir / module_path
        
        # Probe order mirrors TypeScript's preference for source over output.
        extensions = ['.ts', '.tsx', '.d.ts', '.js', '.jsx', '.json']
        
        # Exact match when the specifier already carries an extension.
        if target_path.suffix and target_path.exists():
            return {
                'resolved': True,
                'resolved_path': str(target_path.resolve()),
                'type': 'relative',
                'exists': True,
                'extension': target_path.suffix
            }
        
        # Try candidate extensions (appended first, then swapped -- see helper).
        found = self._probe_extensions(target_path, extensions)
        if found is not None:
            file_path, ext = found
            return {
                'resolved': True,
                'resolved_path': str(file_path.resolve()),
                'type': 'relative',
                'exists': True,
                'extension': ext
            }
        
        # Directory import: fall back to an index file.
        index_hit = self._probe_index(target_path, extensions)
        if index_hit is not None:
            index_path, ext = index_hit
            return {
                'resolved': True,
                'resolved_path': str(index_path.resolve()),
                'type': 'relative',
                'exists': True,
                'extension': ext,
                'is_index': True
            }
        
        return {
            'resolved': False,
            'type': 'relative',
            'reason': f"File not found: {target_path}"
        }
    
    def _resolve_absolute_module(self, module_path: str) -> Dict[str, Any]:
        """Resolve absolute filesystem paths (no extension probing)."""
        abs_path = Path(module_path)
        
        if abs_path.exists():
            return {
                'resolved': True,
                'resolved_path': str(abs_path.resolve()),
                'type': 'absolute',
                'exists': True
            }
        
        return {
            'resolved': False,
            'type': 'absolute',
            'reason': f"Absolute path not found: {abs_path}"
        }
    
    def _resolve_path_mapping(self, module_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve a specifier using tsconfig ``paths`` relative to baseUrl."""
        if not self.path_mappings:
            return {'resolved': False}
        
        base_url = self.path_mappings.get('baseUrl', '.')
        paths = self.path_mappings.get('paths', {})
        # tsconfig path targets point at sources, so '.json' is not probed here.
        extensions = ['.ts', '.tsx', '.d.ts', '.js', '.jsx']
        
        for pattern, targets in paths.items():
            if not self._matches_pattern(module_path, pattern):
                continue
            # First matching target wins, per the compiler's behaviour.
            for target in targets:
                resolved_target = self._replace_wildcards(target, module_path, pattern)
                target_path = self.project_root / base_url / resolved_target
                
                found = self._probe_extensions(target_path, extensions)
                if found is not None:
                    file_path, ext = found
                    return {
                        'resolved': True,
                        'resolved_path': str(file_path.resolve()),
                        'type': 'path_mapping',
                        'exists': True,
                        'extension': ext,
                        'pattern_matched': pattern
                    }
                
                index_hit = self._probe_index(target_path, extensions)
                if index_hit is not None:
                    index_path, ext = index_hit
                    return {
                        'resolved': True,
                        'resolved_path': str(index_path.resolve()),
                        'type': 'path_mapping',
                        'exists': True,
                        'extension': ext,
                        'is_index': True,
                        'pattern_matched': pattern
                    }
        
        return {'resolved': False}
    
    def _resolve_node_module(self, module_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve a bare specifier via node_modules or Node built-ins."""
        # Split package name and optional subpath ('pkg/sub' -> 'pkg', 'sub').
        package_name, subpath = self._parse_package_path(module_path)
        
        # Walk ancestor directories looking for node_modules/<package>,
        # mirroring Node's lookup algorithm.
        current_dir = from_dir
        while current_dir != current_dir.parent:
            package_dir = current_dir / 'node_modules' / package_name
            if package_dir.exists():
                return self._resolve_package(package_dir, subpath, package_name)
            current_dir = current_dir.parent
        
        # Fall back to built-in Node.js modules (handles the 'node:' prefix).
        if self._is_builtin_module(package_name):
            return {
                'resolved': True,
                'resolved_path': module_path,
                'type': 'builtin',
                'package_name': package_name,
                'builtin': True
            }
        
        return {
            'resolved': False,
            'type': 'external',
            'reason': f"Package not found in node_modules: {package_name}",
            'package_name': package_name
        }
    
    def _read_package_json(self, package_dir: Path) -> Dict[str, Any]:
        """Best-effort read of a package manifest; empty dict on any failure."""
        manifest_path = package_dir / 'package.json'
        if manifest_path.exists():
            try:
                with open(manifest_path, 'r') as f:
                    return json.load(f)
            except Exception:
                # Malformed manifest: fall back to filename-based resolution.
                pass
        return {}
    
    def _resolve_package(self, package_dir: Path, subpath: Optional[str], package_name: str) -> Dict[str, Any]:
        """Resolve an entry inside an installed node_modules package."""
        package_json = self._read_package_json(package_dir)
        
        # No subpath: resolve the package's main entry point.
        if not subpath:
            return self._resolve_package_main(package_dir, package_json, package_name)
        
        target_path = package_dir / subpath
        extensions = ['.ts', '.tsx', '.d.ts', '.js', '.jsx', '.json']
        
        # Exact file match first (e.g. 'pkg/package.json', 'pkg/dist/bundle.js').
        if target_path.is_file():
            return {
                'resolved': True,
                'resolved_path': str(target_path.resolve()),
                'type': 'external',
                'exists': True,
                'extension': target_path.suffix,
                'package_name': package_name,
                'subpath': subpath
            }
        
        found = self._probe_extensions(target_path, extensions)
        if found is not None:
            file_path, ext = found
            return {
                'resolved': True,
                'resolved_path': str(file_path.resolve()),
                'type': 'external',
                'exists': True,
                'extension': ext,
                'package_name': package_name,
                'subpath': subpath
            }
        
        # Subpath names a directory: try its index file.
        index_hit = self._probe_index(target_path, extensions)
        if index_hit is not None:
            index_path, ext = index_hit
            return {
                'resolved': True,
                'resolved_path': str(index_path.resolve()),
                'type': 'external',
                'exists': True,
                'extension': ext,
                'package_name': package_name,
                'subpath': subpath,
                'is_index': True
            }
        
        return {
            'resolved': False,
            'type': 'external',
            'reason': f"Subpath not found in package {package_name}: {subpath}",
            'package_name': package_name
        }
    
    def _resolve_package_main(self, package_dir: Path, package_json: Dict[str, Any], package_name: str) -> Dict[str, Any]:
        """Resolve the main entry point of a package.

        Prefers TypeScript-oriented manifest fields, then falls back to
        conventional index files in the package root.
        """
        # Field order favours type information over runtime entry points.
        main_fields = ['types', 'typings', 'module', 'main']
        
        for field in main_fields:
            if field in package_json:
                main_path = package_dir / package_json[field]
                if main_path.exists():
                    return {
                        'resolved': True,
                        'resolved_path': str(main_path.resolve()),
                        'type': 'external',
                        'exists': True,
                        'package_name': package_name,
                        'entry_field': field
                    }
        
        # Try common entry points when the manifest gives no usable field.
        common_entries = ['index.ts', 'index.tsx', 'index.d.ts', 'index.js', 'index.json']
        
        for entry in common_entries:
            entry_path = package_dir / entry
            if entry_path.exists():
                return {
                    'resolved': True,
                    'resolved_path': str(entry_path.resolve()),
                    'type': 'external',
                    'exists': True,
                    'package_name': package_name,
                    'is_index': True,
                    'extension': Path(entry).suffix
                }
        
        # Package directory exists but exposes no recognizable entry point.
        return {
            'resolved': False,
            'type': 'external',
            'reason': f"No valid entry point found for package: {package_name}",
            'package_name': package_name
        }
    
    def _pattern_regex(self, pattern: str) -> 're.Pattern':
        """Compile a tsconfig path pattern into an anchored regex.

        Regex metacharacters in the pattern are escaped, and '*' matches any
        substring *including* '/', matching the TypeScript compiler's
        wildcard semantics (the previous '[^/]*' translation could never
        match nested specifiers such as '@app/components/Button').
        """
        escaped = re.escape(pattern).replace(r'\*', '(.*)')
        return re.compile(f"^{escaped}$")
    
    def _matches_pattern(self, module_path: str, pattern: str) -> bool:
        """Check if a module specifier matches a path-mapping pattern."""
        return self._pattern_regex(pattern).match(module_path) is not None
    
    def _replace_wildcards(self, target: str, module_path: str, pattern: str) -> str:
        """Substitute the pattern's wildcard captures into the target path."""
        if '*' in pattern and '*' in target:
            match = self._pattern_regex(pattern).match(module_path)
            if match:
                result = target
                for group in match.groups():
                    # Each '*' in the target consumes one captured group.
                    result = result.replace('*', group, 1)
                return result
        
        return target
    
    def _parse_package_path(self, module_path: str) -> Tuple[str, Optional[str]]:
        """Parse package name and subpath from module path.

        Scoped packages keep both segments ('@scope/pkg/x' -> '@scope/pkg', 'x').
        """
        parts = module_path.split('/')
        
        if module_path.startswith('@'):
            # Scoped package: name spans the first two segments.
            if len(parts) < 2:
                return module_path, None
            package_name = '/'.join(parts[:2])
            subpath = '/'.join(parts[2:]) if len(parts) > 2 else None
        else:
            # Regular package: name is the first segment only.
            package_name = parts[0]
            subpath = '/'.join(parts[1:]) if len(parts) > 1 else None
        
        return package_name, subpath
    
    def _is_builtin_module(self, module_name: str) -> bool:
        """Check if module is a built-in Node.js module.

        Accepts both bare names ('fs') and the explicit scheme form
        ('node:fs') introduced in newer Node versions.
        """
        builtin_modules = {
            'assert', 'buffer', 'child_process', 'cluster', 'crypto',
            'dgram', 'dns', 'events', 'fs', 'http', 'https', 'net',
            'os', 'path', 'querystring', 'readline', 'stream',
            'string_decoder', 'tls', 'tty', 'url', 'util', 'v8',
            'vm', 'zlib', 'process'
        }
        
        if module_name.startswith('node:'):
            module_name = module_name[len('node:'):]
        return module_name in builtin_modules
    
    def load_project_config(self):
        """Load project configuration from tsconfig.json and package.json.

        Failures are logged and ignored so resolution degrades gracefully.
        NOTE: tsconfig.json files containing comments (JSONC) will fail
        ``json.load`` and be skipped -- TODO confirm whether that matters
        for the target projects.
        """
        # Load tsconfig.json and extract compilerOptions path mappings.
        tsconfig_path = self.project_root / 'tsconfig.json'
        if tsconfig_path.exists():
            try:
                with open(tsconfig_path, 'r') as f:
                    self.tsconfig = json.load(f)
                
                compiler_options = self.tsconfig.get('compilerOptions', {})
                if 'paths' in compiler_options:
                    self.path_mappings = {
                        'baseUrl': compiler_options.get('baseUrl', '.'),
                        'paths': compiler_options['paths']
                    }
            except Exception as e:
                log_info(f"Could not load tsconfig.json: {e}")
        
        # Load package.json (kept for callers; not used during resolution).
        package_json_path = self.project_root / 'package.json'
        if package_json_path.exists():
            try:
                with open(package_json_path, 'r') as f:
                    self.package_json = json.load(f)
            except Exception as e:
                log_info(f"Could not load package.json: {e}")
    
    def get_dependency_graph(self, entry_files: List[str]) -> Dict[str, Any]:
        """Build a complete dependency graph starting from entry files.

        BFS over resolved imports; per-file failures are logged and skipped
        so one bad file does not abort the whole traversal.
        """
        graph = defaultdict(set)
        visited = set()
        to_process = deque(entry_files)
        file_info = {}
        
        while to_process:
            current_file = to_process.popleft()
            if current_file in visited:
                continue
            
            visited.add(current_file)
            
            try:
                # Placeholder: imports would come from the parsed file's AST.
                imports = []
                
                resolution = self.resolve_dependencies(current_file, imports)
                file_info[current_file] = resolution
                
                for resolved_import in resolution['resolved_imports']:
                    dep_path = resolved_import['resolved_path']
                    graph[current_file].add(dep_path)
                    
                    # Only recurse into TypeScript/JavaScript sources.
                    if self._is_processable_file(dep_path):
                        to_process.append(dep_path)
                        
            except Exception as e:
                log_info(f"Error processing {current_file}: {e}")
        
        return {
            'graph': dict(graph),
            'file_info': file_info,
            'entry_files': entry_files,
            'total_files': len(visited),
            'cycles': self._detect_cycles(graph),
            'external_dependencies': self._collect_external_dependencies(file_info)
        }
    
    def _is_processable_file(self, file_path: str) -> bool:
        """Check if file should be traversed for further dependencies."""
        processable_extensions = {'.ts', '.tsx', '.js', '.jsx'}
        return Path(file_path).suffix in processable_extensions
    
    def _detect_cycles(self, graph: Dict[str, Set[str]]) -> List[List[str]]:
        """Detect circular dependencies in the dependency graph.

        DFS with a recursion stack; each cycle is reported as the node path
        from the first repeated node back to itself. The same cycle may be
        reported more than once if reached from different roots.
        """
        cycles = []
        visited = set()
        recursion_stack = set()
        
        def dfs(node: str, path: List[str]):
            if node in recursion_stack:
                # Found a cycle: slice the path from the repeated node.
                cycle_start = path.index(node)
                cycles.append(path[cycle_start:] + [node])
                return
            
            if node in visited:
                return
            
            visited.add(node)
            recursion_stack.add(node)
            path.append(node)
            
            for neighbor in graph.get(node, set()):
                if neighbor in graph:  # Only follow edges to internal files.
                    dfs(neighbor, path.copy())
            
            recursion_stack.remove(node)
        
        for node in graph:
            if node not in visited:
                dfs(node, [])
        
        return cycles
    
    def _collect_external_dependencies(self, file_info: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
        """Aggregate external package usage across all analyzed files."""
        dependencies = defaultdict(set)
        
        for file_data in file_info.values():
            for dep in file_data.get('external_dependencies', []):
                dependencies[dep].add(file_data['file_path'])
        
        return {
            'packages': list(dependencies.keys()),
            'usage': {pkg: list(files) for pkg, files in dependencies.items()},
            'total_external': len(dependencies)
        }


class DependencyAnalyzer:
    """Analyzes dependency patterns and provides insights."""
    
    def __init__(self):
        """Initialize the dependency analyzer."""
        # Reserved for memoizing repeated analyses of the same graph.
        self.analysis_cache = {}
    
    def analyze_dependency_patterns(self, dependency_graph: Dict[str, Any]) -> Dict[str, Any]:
        """Run every metric pass over a dependency graph and bundle the results.

        Args:
            dependency_graph: Graph bundle containing at least 'graph'
                (file -> set of dependencies), 'file_info', 'entry_files'
                and 'cycles'.

        Returns:
            Dict of complexity metrics, hotspots, unused files, depth and
            coupling metrics, plus human-readable recommendations.
        """
        adjacency = dependency_graph['graph']
        per_file = dependency_graph['file_info']
        roots = dependency_graph['entry_files']
        
        report = {
            'complexity_metrics': self._calculate_complexity_metrics(adjacency),
            'hotspots': self._identify_hotspots(adjacency, per_file),
            'unused_files': self._find_unused_files(adjacency, roots),
            'dependency_depth': self._calculate_dependency_depth(adjacency, roots),
            'coupling_metrics': self._calculate_coupling_metrics(adjacency),
            'recommendations': []
        }
        
        # Recommendations are derived from the metrics computed above.
        report['recommendations'] = self._generate_recommendations(report, dependency_graph)
        
        return report
    
    def _calculate_complexity_metrics(self, graph: Dict[str, Set[str]]) -> Dict[str, Any]:
        """Compute size and fan-in/fan-out statistics for the whole graph."""
        file_count = len(graph)
        fan_outs = [len(targets) for targets in graph.values()]
        edge_total = sum(fan_outs)
        
        # Fan-in: how many files point at each target.
        fan_in = defaultdict(int)
        for targets in graph.values():
            for target in targets:
                fan_in[target] += 1
        
        mean_fan_out = edge_total / file_count if file_count > 0 else 0
        
        return {
            'total_files': file_count,
            'total_dependencies': edge_total,
            'average_dependencies_per_file': round(mean_fan_out, 2),
            'max_dependencies_per_file': max(fan_outs) if fan_outs else 0,
            'leaf_files': fan_outs.count(0),
            'most_depended_upon_count': max(fan_in.values()) if fan_in else 0,
            'dependency_density': edge_total / (file_count * file_count) if file_count > 0 else 0
        }
    
    def _identify_hotspots(self, graph: Dict[str, Set[str]], file_info: Dict[str, Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Identify problematic files (hotspots) in the dependency graph."""
        # Count incoming edges per file.
        fan_in = defaultdict(int)
        for targets in graph.values():
            for target in targets:
                fan_in[target] += 1
        
        # Files depended upon by many others (threshold: > 5 dependents).
        coupling_issues = [
            {
                'file': path,
                'dependent_count': count,
                'type': 'high_coupling',
                'description': f'File is used by {count} other files'
            }
            for path, count in fan_in.items()
            if count > 5
        ]
        
        # Files pulling in many others (threshold: > 10 dependencies).
        complexity_issues = [
            {
                'file': path,
                'dependency_count': len(targets),
                'type': 'high_complexity',
                'description': f'File depends on {len(targets)} other files'
            }
            for path, targets in graph.items()
            if len(targets) > 10
        ]
        
        combined = coupling_issues + complexity_issues
        combined.sort(
            key=lambda entry: entry.get('dependent_count', 0) + entry.get('dependency_count', 0),
            reverse=True
        )
        return combined
    
    def _find_unused_files(self, graph: Dict[str, Set[str]], entry_files: List[str]) -> List[str]:
        """Find files that are not reachable from entry files."""
        seen = set()
        pending = deque(entry_files)
        
        while pending:
            node = pending.popleft()
            if node in seen:
                continue
            seen.add(node)
            pending.extend(graph.get(node, set()))
        
        # Anything in the graph that BFS never touched is unreachable.
        return list(set(graph) - seen)
    
    def _calculate_dependency_depth(self, graph: Dict[str, Set[str]], entry_files: List[str]) -> Dict[str, Any]:
        """Calculate the minimum depth of each file from the entry files."""
        depth_by_file = {}
        
        for root in entry_files:
            seen = set()
            frontier = deque([(root, 0)])
            
            while frontier:
                node, level = frontier.popleft()
                if node in seen:
                    continue
                seen.add(node)
                
                # Keep the shallowest depth observed across all entry points.
                if node not in depth_by_file or level < depth_by_file[node]:
                    depth_by_file[node] = level
                
                frontier.extend((child, level + 1) for child in graph.get(node, set()))
        
        return {
            'file_depths': depth_by_file,
            'max_depth': max(depth_by_file.values()) if depth_by_file else 0,
            'average_depth': sum(depth_by_file.values()) / len(depth_by_file) if depth_by_file else 0
        }
    
    def _calculate_coupling_metrics(self, graph: Dict[str, Set[str]]) -> Dict[str, Any]:
        """Calculate efferent/afferent coupling and instability per file."""
        # Efferent coupling (Ce): outgoing edge count per file.
        outgoing = {path: len(targets) for path, targets in graph.items()}
        
        # Afferent coupling (Ca): incoming edge count per file.
        incoming = defaultdict(int)
        for targets in graph.values():
            for target in targets:
                incoming[target] += 1
        
        # Instability: I = Ce / (Ce + Ca); 0 when the file has no edges.
        instability = {}
        for path in graph:
            ce = outgoing.get(path, 0)
            ca = incoming.get(path, 0)
            total = ce + ca
            instability[path] = ce / total if total > 0 else 0
        
        return {
            'efferent_coupling': dict(outgoing),
            'afferent_coupling': dict(incoming),
            'instability': instability,
            'most_stable_files': sorted(instability.items(), key=lambda item: item[1])[:5],
            'most_unstable_files': sorted(instability.items(), key=lambda item: item[1], reverse=True)[:5]
        }
    
    def _generate_recommendations(self, analysis: Dict[str, Any], dependency_graph: Dict[str, Any]) -> List[str]:
        """Generate recommendations based on dependency analysis."""
        tips = []
        
        # Flag clusters of heavily-depended-upon files.
        heavily_coupled = [h for h in analysis.get('hotspots', []) if h['type'] == 'high_coupling']
        if heavily_coupled:
            tips.append(
                f"Consider refactoring {len(heavily_coupled)} files with high coupling to reduce dependencies"
            )
        
        # Circular dependencies block clean layering.
        cycle_list = dependency_graph.get('cycles', [])
        if cycle_list:
            tips.append(
                f"Resolve {len(cycle_list)} circular dependencies to improve maintainability"
            )
        
        # Dead files inflate the codebase for no benefit.
        orphaned = analysis.get('unused_files', [])
        if orphaned:
            tips.append(
                f"Consider removing {len(orphaned)} unused files to reduce codebase size"
            )
        
        # Deep chains slow incremental builds.
        if analysis.get('dependency_depth', {}).get('max_depth', 0) > 6:
            tips.append(
                "Consider flattening deep dependency chains to improve build times"
            )
        
        return tips