"""
TypeScript module system analyzer.

Analyzes TypeScript modules, imports, exports, and module relationships
including ES6 modules, CommonJS, and ambient modules.
"""

from typing import Dict, List, Any, Optional, Set, Tuple
from collections import defaultdict, deque
import re
from pathlib import Path

from ...core.error_handler import AnalysisError, log_info


class TypeScriptModuleAnalyzer:
    """Analyzes TypeScript module system and relationships.

    Extraction is line-oriented regex matching: each import/export/module/
    namespace statement must fit on a single line. Multi-line statements and
    type-only imports (``import type { X } from 'm'``) are not detected.
    """

    def __init__(self):
        """Initialize the module analyzer."""
        # Caches reserved for callers / future extensions; the analysis
        # methods themselves are stateless.
        self.module_cache = {}
        self.resolved_modules = {}
        self.module_types = {}

    def analyze_modules(self, file_content: str, file_path: str) -> Dict[str, Any]:
        """Analyze TypeScript modules in a file.

        Args:
            file_content: Full source text of the TypeScript file.
            file_path: Path of the file (echoed back in the result).

        Returns:
            Dict with keys ``file_path``, ``imports``, ``exports``,
            ``modules``, ``namespaces``, ``module_type``, ``dependencies``
            and ``re_exports``.

        Raises:
            AnalysisError: If any extraction step fails (original cause is
                chained for debugging).
        """
        try:
            imports = self._extract_imports(file_content)
            exports = self._extract_exports(file_content)
            modules = self._extract_module_declarations(file_content)
            namespaces = self._extract_namespaces(file_content)
            
            return {
                'file_path': file_path,
                'imports': imports,
                'exports': exports,
                'modules': modules,
                'namespaces': namespaces,
                'module_type': self._determine_module_type(file_content, imports, exports),
                'dependencies': self._extract_dependencies(imports),
                're_exports': self._extract_re_exports(file_content)
            }
            
        except Exception as e:
            raise AnalysisError(f"Failed to analyze modules in {file_path}: {str(e)}") from e
    
    def _extract_imports(self, content: str) -> List[Dict[str, Any]]:
        """Extract import statements from TypeScript code.

        ES6 ``import`` forms are collected first (in source order), then
        CommonJS ``require`` bindings, preserving the two-pass layout of
        the result list.

        Returns:
            One dict per recognized import with a ``type`` of ``named``,
            ``namespace``, ``default``, ``side_effect``, ``mixed`` or
            ``require``, plus ``module``, ``line`` and ``raw`` keys.
        """
        imports = []
        
        # ES6 import patterns, tried in this order for each import line.
        import_patterns = [
            # import { named } from 'module'
            r"import\s+\{\s*([^}]+)\s*\}\s+from\s+['\"]([^'\"]+)['\"]",
            # import * as name from 'module'
            r"import\s+\*\s+as\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]",
            # import default from 'module'
            r"import\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]",
            # import 'module' (side effect only)
            r"import\s+['\"]([^'\"]+)['\"]",
            # import default, { named } from 'module'
            r"import\s+(\w+)\s*,\s*\{\s*([^}]+)\s*\}\s+from\s+['\"]([^'\"]+)['\"]"
        ]
        
        lines = content.split('\n')
        for line_num, line in enumerate(lines, 1):
            line = line.strip()
            if not line.startswith('import'):
                continue
            
            # Named imports: import { a, b } from 'm'
            match = re.search(import_patterns[0], line)
            if match:
                named_imports = [item.strip() for item in match.group(1).split(',')]
                imports.append({
                    'type': 'named',
                    'module': match.group(2),
                    'items': named_imports,
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Namespace import: import * as ns from 'm'
            match = re.search(import_patterns[1], line)
            if match:
                imports.append({
                    'type': 'namespace',
                    'module': match.group(2),
                    'alias': match.group(1),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Default import: import d from 'm'
            match = re.search(import_patterns[2], line)
            if match:
                imports.append({
                    'type': 'default',
                    'module': match.group(2),
                    'name': match.group(1),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Side-effect import: import 'm'
            match = re.search(import_patterns[3], line)
            if match:
                imports.append({
                    'type': 'side_effect',
                    'module': match.group(1),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Mixed default + named: import d, { a } from 'm'
            # (None of the earlier patterns can match this shape, so it is
            # safe to test last.)
            match = re.search(import_patterns[4], line)
            if match:
                named_imports = [item.strip() for item in match.group(2).split(',')]
                imports.append({
                    'type': 'mixed',
                    'module': match.group(3),
                    'default': match.group(1),
                    'items': named_imports,
                    'line': line_num,
                    'raw': line
                })
        
        # CommonJS require statements, e.g.:
        #   const fs = require('fs')
        #   const { readFile } = require('fs')
        # The bound names are captured so downstream consumers can see what
        # was imported ('items' for destructuring, 'name' for a single id).
        require_pattern = (
            r"(?:const|let|var)\s+(?:\{([^}]+)\}|(\w+))\s*=\s*"
            r"require\s*\(\s*['\"]([^'\"]+)['\"]\s*\)"
        )
        for line_num, line in enumerate(lines, 1):
            match = re.search(require_pattern, line.strip())
            if match:
                entry = {
                    'type': 'require',
                    'module': match.group(3),
                    'line': line_num,
                    'raw': line.strip()
                }
                if match.group(1):
                    entry['items'] = [item.strip() for item in match.group(1).split(',')]
                elif match.group(2):
                    entry['name'] = match.group(2)
                imports.append(entry)
        
        return imports
    
    def _extract_exports(self, content: str) -> List[Dict[str, Any]]:
        """Extract export statements from TypeScript code.

        Returns:
            One dict per recognized export with a ``type`` of
            ``re_export_named``, ``re_export_all``, ``re_export_namespace``,
            ``named``, ``default`` or ``declaration``.
        """
        exports = []
        
        # Patterns are tried in order. The re-export forms come BEFORE the
        # plain named-export form so that "export { x } from 'm'" is not
        # misread as a local named export; the previous substring guard
        # ('from' not in line) wrongly dropped lines whose identifiers or
        # trailing comments contained the word "from".
        re_export_named_re = r"export\s+\{\s*([^}]+)\s*\}\s+from\s+['\"]([^'\"]+)['\"]"
        re_export_all_re = r"export\s+\*\s+from\s+['\"]([^'\"]+)['\"]"
        re_export_ns_re = r"export\s+\*\s+as\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]"
        named_re = r"export\s+\{\s*([^}]+)\s*\}"
        default_re = r"export\s+default\s+(.+)"
        declaration_re = r"export\s+(function|class|const|let|var|interface|type|enum)\s+(\w+)"
        
        lines = content.split('\n')
        for line_num, line in enumerate(lines, 1):
            line = line.strip()
            if not line.startswith('export'):
                continue
            
            # Re-export with named bindings: export { a } from 'm'
            match = re.search(re_export_named_re, line)
            if match:
                named_exports = [item.strip() for item in match.group(1).split(',')]
                exports.append({
                    'type': 're_export_named',
                    'items': named_exports,
                    'module': match.group(2),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Re-export everything: export * from 'm'
            match = re.search(re_export_all_re, line)
            if match:
                exports.append({
                    'type': 're_export_all',
                    'module': match.group(1),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Re-export as namespace: export * as ns from 'm'
            match = re.search(re_export_ns_re, line)
            if match:
                exports.append({
                    'type': 're_export_namespace',
                    'alias': match.group(1),
                    'module': match.group(2),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Local named exports: export { a, b }
            match = re.search(named_re, line)
            if match:
                named_exports = [item.strip() for item in match.group(1).split(',')]
                exports.append({
                    'type': 'named',
                    'items': named_exports,
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Default export: export default <expr>
            match = re.search(default_re, line)
            if match:
                exports.append({
                    'type': 'default',
                    'value': match.group(1).strip(),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Declaration exports: export function/class/const/... name
            match = re.search(declaration_re, line)
            if match:
                exports.append({
                    'type': 'declaration',
                    'declaration_type': match.group(1),
                    'name': match.group(2),
                    'line': line_num,
                    'raw': line
                })
        
        return exports
    
    def _extract_module_declarations(self, content: str) -> List[Dict[str, Any]]:
        """Extract module declarations (declare module, ambient modules).

        Returns:
            One dict per declaration with ``type`` of ``ambient`` (quoted
            ``declare module 'name'``) or ``internal`` (legacy
            ``module Name {``).
        """
        modules = []
        
        # Module declaration patterns: quoted ambient form, then the legacy
        # unquoted internal-module form.
        module_patterns = [
            r"declare\s+module\s+['\"]([^'\"]+)['\"]\s*\{",
            r"module\s+([a-zA-Z_][a-zA-Z0-9_.]*)\s*\{",
        ]
        
        lines = content.split('\n')
        for line_num, line in enumerate(lines, 1):
            line = line.strip()
            
            # Ambient (quoted) module declaration.
            match = re.search(module_patterns[0], line)
            if match:
                modules.append({
                    'type': 'ambient',
                    'name': match.group(1),
                    'line': line_num,
                    'raw': line
                })
                continue
            
            # Internal module. The 'declare' guard prevents double-counting
            # quoted declarations; NOTE: as a consequence an unquoted
            # "declare module Foo {" is not recorded at all.
            match = re.search(module_patterns[1], line)
            if match and not line.startswith('declare'):
                modules.append({
                    'type': 'internal',
                    'name': match.group(1),
                    'line': line_num,
                    'raw': line
                })
        
        return modules
    
    def _extract_namespaces(self, content: str) -> List[Dict[str, Any]]:
        """Extract namespace declarations.

        Returns:
            One dict per namespace with ``type`` ``ambient`` (prefixed with
            ``declare``) or ``regular``.
        """
        namespaces = []
        
        # Plain and ambient namespace forms; dotted names are allowed.
        namespace_patterns = [
            r"namespace\s+([a-zA-Z_][a-zA-Z0-9_.]*)\s*\{",
            r"declare\s+namespace\s+([a-zA-Z_][a-zA-Z0-9_.]*)\s*\{",
        ]
        
        lines = content.split('\n')
        for line_num, line in enumerate(lines, 1):
            line = line.strip()
            
            for pattern in namespace_patterns:
                match = re.search(pattern, line)
                if match:
                    is_declare = 'declare' in line
                    namespaces.append({
                        'name': match.group(1),
                        'type': 'ambient' if is_declare else 'regular',
                        'line': line_num,
                        'raw': line
                    })
                    break
        
        return namespaces
    
    def _determine_module_type(self, content: str, imports: List[Dict[str, Any]], 
                              exports: List[Dict[str, Any]]) -> str:
        """Determine the module type (es6, commonjs, umd, amd, mixed, global).

        All ES6 entry types produced by the extractors are considered,
        including ``mixed`` imports and the ``re_export_*`` export forms
        (previously omitted, which misclassified e.g. a pure barrel file
        as 'global'). CommonJS detection is keyed off the extracted
        ``require`` imports so it stays consistent with the extractor's
        whitespace handling.
        """
        es6_import_types = {'named', 'default', 'namespace', 'side_effect', 'mixed'}
        es6_export_types = {'named', 'default', 'declaration',
                            're_export_named', 're_export_all', 're_export_namespace'}
        
        has_es6_imports = any(imp['type'] in es6_import_types for imp in imports)
        has_es6_exports = any(exp['type'] in es6_export_types for exp in exports)
        has_commonjs = (any(imp['type'] == 'require' for imp in imports)
                        or 'module.exports' in content
                        or 'exports.' in content)
        has_amd = 'define(' in content
        
        if has_es6_imports or has_es6_exports:
            if has_commonjs:
                return 'mixed'
            return 'es6'
        elif has_commonjs:
            if has_amd:
                return 'umd'
            return 'commonjs'
        elif has_amd:
            return 'amd'
        else:
            return 'global'
    
    def _extract_dependencies(self, imports: List[Dict[str, Any]]) -> List[str]:
        """Extract unique dependencies from imports.

        Returns:
            Sorted list of unique module specifiers (sorted for
            deterministic output; the previous ``list(set)`` order varied
            between runs).
        """
        dependencies = {imp['module'] for imp in imports if imp.get('module')}
        return sorted(dependencies)
    
    def _extract_re_exports(self, content: str) -> List[Dict[str, Any]]:
        """Extract re-export statements.

        Returns:
            One dict per re-export with ``type`` ``named``, ``all`` or
            ``namespace`` (no ``raw`` key, unlike :meth:`_extract_exports`).
        """
        re_exports = []
        
        # Re-export patterns: named, star, star-as-namespace.
        patterns = [
            r"export\s+\{\s*([^}]+)\s*\}\s+from\s+['\"]([^'\"]+)['\"]",
            r"export\s+\*\s+from\s+['\"]([^'\"]+)['\"]",
            r"export\s+\*\s+as\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]"
        ]
        
        lines = content.split('\n')
        for line_num, line in enumerate(lines, 1):
            line = line.strip()
            
            # Named re-exports: export { a } from 'm'
            match = re.search(patterns[0], line)
            if match:
                items = [item.strip() for item in match.group(1).split(',')]
                re_exports.append({
                    'type': 'named',
                    'items': items,
                    'module': match.group(2),
                    'line': line_num
                })
                continue
            
            # Star re-exports: export * from 'm'
            match = re.search(patterns[1], line)
            if match:
                re_exports.append({
                    'type': 'all',
                    'module': match.group(1),
                    'line': line_num
                })
                continue
            
            # Namespace re-exports: export * as ns from 'm'
            match = re.search(patterns[2], line)
            if match:
                re_exports.append({
                    'type': 'namespace',
                    'alias': match.group(1),
                    'module': match.group(2),
                    'line': line_num
                })
        
        return re_exports


class ModuleResolver:
    """Resolves TypeScript module paths and dependencies.

    Implements a simplified Node/TypeScript resolution scheme: relative and
    absolute specifiers are checked on disk (exact match, then extension and
    index-file probing); bare specifiers are looked up by walking up the
    directory tree through ``node_modules``.
    """

    # Candidate extensions, tried in priority order, for extensionless
    # import specifiers.
    _EXTENSIONS = ('.ts', '.tsx', '.js', '.jsx', '.d.ts')

    def __init__(self, base_path: str = '.'):
        """Initialize the module resolver.

        Args:
            base_path: Project root used as the default resolution base.
        """
        self.base_path = Path(base_path)
        self.resolution_cache = {}  # "<from_file>::<module_path>" -> resolution dict
        self.tsconfig_paths = {}    # path mappings loaded from tsconfig.json

    def resolve_module(self, module_path: str, from_file: str) -> Dict[str, Any]:
        """Resolve a module specifier to its actual location.

        Results are memoized per (importing file, specifier) pair; repeated
        calls return the same cached dict.
        """
        cache_key = f"{from_file}::{module_path}"
        if cache_key in self.resolution_cache:
            return self.resolution_cache[cache_key]

        resolution = self._resolve_module_path(module_path, from_file)
        self.resolution_cache[cache_key] = resolution
        return resolution

    def _resolve_module_path(self, module_path: str, from_file: str) -> Dict[str, Any]:
        """Dispatch to the resolver matching the specifier's shape."""
        from_dir = Path(from_file).parent

        # Relative specifier: './x' or '../x'
        if module_path.startswith('./') or module_path.startswith('../'):
            return self._resolve_relative_path(module_path, from_dir)

        # Absolute filesystem path.
        if module_path.startswith('/'):
            return self._resolve_absolute_path(module_path)

        # Bare specifier -> node_modules lookup.
        return self._resolve_node_modules(module_path, from_dir)

    def _resolve_relative_path(self, module_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve a relative specifier against the importing file's directory."""
        resolved_path = from_dir / module_path

        # Exact file match first: the specifier may already carry its extension.
        if resolved_path.is_file():
            return {
                'resolved': True,
                'type': 'relative',
                'resolved_path': str(resolved_path),
                'exists': True,
                'extension': resolved_path.suffix
            }

        # Probe by *appending* each candidate extension. Path.with_suffix()
        # must not be used here: it replaces a dotted segment of the module
        # name, so './utils.service' would wrongly probe 'utils.ts'.
        for ext in self._EXTENSIONS:
            file_path = resolved_path.parent / (resolved_path.name + ext)
            if file_path.exists():
                return {
                    'resolved': True,
                    'type': 'relative',
                    'resolved_path': str(file_path),
                    'exists': True,
                    'extension': ext
                }

        # Directory import -> probe for index files.
        for ext in self._EXTENSIONS:
            index_path = resolved_path / f"index{ext}"
            if index_path.exists():
                return {
                    'resolved': True,
                    'type': 'relative',
                    'resolved_path': str(index_path),
                    'exists': True,
                    'extension': ext,
                    'is_index': True
                }

        return {
            'resolved': False,
            'type': 'relative',
            'resolved_path': str(resolved_path),
            'exists': False
        }

    def _resolve_absolute_path(self, module_path: str) -> Dict[str, Any]:
        """Resolve absolute module paths (no extension probing)."""
        path = Path(module_path)

        return {
            'resolved': True,
            'type': 'absolute',
            'resolved_path': str(path),
            'exists': path.exists()
        }

    def _resolve_node_modules(self, module_path: str, from_dir: Path) -> Dict[str, Any]:
        """Resolve a bare specifier by walking up through node_modules.

        All results now carry a 'resolved' key for consistency with the
        relative/absolute resolvers.
        """
        current_dir = from_dir

        # Walk up until the filesystem root (where dir == dir.parent).
        while current_dir != current_dir.parent:
            node_modules = current_dir / 'node_modules' / module_path

            if node_modules.exists():
                # Prefer a package directory with package.json.
                package_json = node_modules / 'package.json'
                if package_json.exists():
                    return {
                        'resolved': True,
                        'type': 'node_modules',
                        'resolved_path': str(node_modules),
                        'exists': True,
                        'has_package_json': True
                    }

                # Otherwise accept a bare index file.
                for ext in ['.ts', '.js', '.d.ts']:
                    index_file = node_modules / f"index{ext}"
                    if index_file.exists():
                        return {
                            'resolved': True,
                            'type': 'node_modules',
                            'resolved_path': str(index_file),
                            'exists': True,
                            'is_index': True
                        }

            current_dir = current_dir.parent

        # Not found locally; treat as an external (unvendored) dependency.
        return {
            'resolved': False,
            'type': 'node_modules',
            'resolved_path': module_path,
            'exists': False,
            'external': True
        }

    def load_tsconfig_paths(self, tsconfig_path: str):
        """Load path mappings from tsconfig.json (best effort).

        Failures are logged and swallowed deliberately: missing or malformed
        tsconfig must not abort analysis.
        """
        try:
            import json
            with open(tsconfig_path, 'r') as f:
                config = json.load(f)

            compiler_options = config.get('compilerOptions', {})
            base_url = compiler_options.get('baseUrl', '.')
            paths = compiler_options.get('paths', {})

            self.tsconfig_paths = {
                'base_url': base_url,
                'paths': paths
            }

        except Exception as e:
            log_info(f"Could not load tsconfig paths: {e}")


class ModuleDependencyGraph:
    """Builds and analyzes module dependency graphs."""

    def __init__(self):
        """Set up empty forward/backward adjacency maps."""
        self.graph = defaultdict(set)          # module -> modules it depends on
        self.reverse_graph = defaultdict(set)  # module -> modules that depend on it
        self.modules = set()

    def add_dependency(self, from_module: str, to_module: str):
        """Record that *from_module* depends on *to_module*."""
        self.modules.update((from_module, to_module))
        self.graph[from_module].add(to_module)
        self.reverse_graph[to_module].add(from_module)

    def find_cycles(self) -> List[List[str]]:
        """Find circular dependencies in the module graph.

        Each reported cycle is a node list that starts and ends with the
        same module, e.g. ``['a', 'b', 'a']``.
        """
        cycles: List[List[str]] = []
        seen = set()      # fully explored (or in-progress) nodes
        on_stack = set()  # nodes on the current DFS path

        def visit(node: str, trail: List[str]):
            # Back-edge: the node is already on the active path -> cycle.
            if node in on_stack:
                start = trail.index(node)
                cycles.append(trail[start:] + [node])
                return
            if node in seen:
                return
            seen.add(node)
            on_stack.add(node)
            extended = trail + [node]
            for neighbor in self.graph[node]:
                visit(neighbor, extended)
            on_stack.discard(node)

        for module in self.modules:
            if module not in seen:
                visit(module, [])

        return cycles

    def topological_sort(self) -> List[str]:
        """Order modules so that each one's dependencies precede it.

        Kahn's algorithm over the reverse graph; returns ``[]`` when the
        graph contains a cycle.
        """
        # A module's in-degree here is its number of outgoing dependencies,
        # so dependency-free modules come out first.
        remaining = {module: len(self.graph[module]) for module in self.modules}
        ready = deque(module for module in self.modules if not remaining[module])
        ordered: List[str] = []

        while ready:
            current = ready.popleft()
            ordered.append(current)
            # Releasing `current` may unblock the modules that depend on it.
            for dependent in self.reverse_graph[current]:
                remaining[dependent] -= 1
                if not remaining[dependent]:
                    ready.append(dependent)

        # A shortfall means some modules were never released: cycle present.
        if len(ordered) != len(self.modules):
            return []
        return ordered

    def get_dependents(self, module: str) -> Set[str]:
        """Return a copy of the set of modules that depend on *module*."""
        return set(self.reverse_graph[module])

    def get_dependencies(self, module: str) -> Set[str]:
        """Return a copy of the set of modules *module* depends on."""
        return set(self.graph[module])