"""
TypeScript workspace management for npm/yarn workspaces.

Provides comprehensive support for mono-repository structures using npm workspaces
or yarn workspaces, including dependency resolution, cross-package analysis,
and workspace-aware build system integration.
"""

import json
import glob
from typing import Dict, List, Any, Optional, Set, Tuple
from pathlib import Path
from collections import defaultdict
import re

from ...core.error_handler import AnalysisError, log_info, log_debug
from ...core.project_scanner import ProjectScanner
from ...core.function_classifier import FunctionClassifier


class WorkspaceDetector:
    """Detects and analyzes npm/yarn workspace configurations."""
    
    def __init__(self, project_root: str = '.'):
        """Initialize workspace detector."""
        self.project_root = Path(project_root)
        self.workspace_config = {}
        self.packages = {}
        self.workspace_type = None
        self.project_scanner = ProjectScanner()
        self.function_classifier = FunctionClassifier()
    
    def detect_workspace(self) -> Dict[str, Any]:
        """Detect workspace configuration and structure."""
        detection_result = {
            'is_workspace': False,
            'workspace_type': None,
            'root_package': None,
            'packages': [],
            'workspace_config': {},
            'errors': []
        }
        
        try:
            # Check for package.json in root
            root_package_path = self.project_root / 'package.json'
            if not root_package_path.exists():
                detection_result['errors'].append("No package.json found in project root")
                return detection_result
            
            # Load root package.json
            with open(root_package_path, 'r') as f:
                root_package = json.load(f)
            
            detection_result['root_package'] = root_package
            
            # Check for npm workspaces
            if 'workspaces' in root_package:
                detection_result['is_workspace'] = True
                detection_result['workspace_type'] = 'npm'
                detection_result['workspace_config'] = root_package['workspaces']
                detection_result['packages'] = self._discover_npm_packages(root_package['workspaces'])
            
            # Check for yarn workspaces (alternative locations)
            elif 'private' in root_package and root_package.get('private'):
                # Check for yarn.lock
                yarn_lock = self.project_root / 'yarn.lock'
                if yarn_lock.exists():
                    # Look for workspace patterns in package.json or external config
                    packages = self._discover_yarn_packages()
                    if packages:
                        detection_result['is_workspace'] = True
                        detection_result['workspace_type'] = 'yarn'
                        detection_result['packages'] = packages
            
            # Check for lerna.json (monorepo tool)
            lerna_config = self.project_root / 'lerna.json'
            if lerna_config.exists():
                with open(lerna_config, 'r') as f:
                    lerna_data = json.load(f)
                detection_result['lerna_config'] = lerna_data
                if 'packages' in lerna_data and not detection_result['is_workspace']:
                    detection_result['is_workspace'] = True
                    detection_result['workspace_type'] = 'lerna'
                    detection_result['packages'] = self._discover_lerna_packages(lerna_data['packages'])
            
            # Check for rush.json (Microsoft Rush)
            rush_config = self.project_root / 'rush.json'
            if rush_config.exists():
                with open(rush_config, 'r') as f:
                    rush_data = json.load(f)
                detection_result['rush_config'] = rush_data
                if 'projects' in rush_data and not detection_result['is_workspace']:
                    detection_result['is_workspace'] = True
                    detection_result['workspace_type'] = 'rush'
                    detection_result['packages'] = self._discover_rush_packages(rush_data['projects'])
            
            log_info(f"Workspace detection: {detection_result['workspace_type']} with {len(detection_result['packages'])} packages")
            
        except Exception as e:
            detection_result['errors'].append(f"Workspace detection failed: {str(e)}")
            log_debug(f"Workspace detection error: {e}")
        
        return detection_result
    
    def _discover_npm_packages(self, workspaces_config) -> List[Dict[str, Any]]:
        """Discover npm workspace packages."""
        packages = []
        
        # Handle different workspace config formats
        if isinstance(workspaces_config, list):
            patterns = workspaces_config
        elif isinstance(workspaces_config, dict):
            patterns = workspaces_config.get('packages', [])
        else:
            patterns = [str(workspaces_config)]
        
        for pattern in patterns:
            package_paths = self._glob_packages(pattern)
            for pkg_path in package_paths:
                package_info = self._analyze_package(pkg_path)
                if package_info:
                    packages.append(package_info)
        
        return packages
    
    def _discover_yarn_packages(self) -> List[Dict[str, Any]]:
        """Discover yarn workspace packages."""
        packages = []
        
        # Common yarn workspace patterns
        common_patterns = [
            'packages/*',
            'apps/*',
            'libs/*',
            'packages/*/*',
            'workspaces/*'
        ]
        
        for pattern in common_patterns:
            package_paths = self._glob_packages(pattern)
            for pkg_path in package_paths:
                package_info = self._analyze_package(pkg_path)
                if package_info:
                    packages.append(package_info)
        
        return packages
    
    def _discover_lerna_packages(self, patterns: List[str]) -> List[Dict[str, Any]]:
        """Discover lerna packages."""
        packages = []
        
        for pattern in patterns:
            package_paths = self._glob_packages(pattern)
            for pkg_path in package_paths:
                package_info = self._analyze_package(pkg_path)
                if package_info:
                    packages.append(package_info)
        
        return packages
    
    def _discover_rush_packages(self, projects: List[Dict[str, str]]) -> List[Dict[str, Any]]:
        """Discover Rush packages."""
        packages = []
        
        for project in projects:
            project_folder = project.get('projectFolder', '')
            if project_folder:
                pkg_path = self.project_root / project_folder
                if pkg_path.exists():
                    package_info = self._analyze_package(pkg_path)
                    if package_info:
                        package_info['rush_project_name'] = project.get('packageName', '')
                        packages.append(package_info)
        
        return packages
    
    def _glob_packages(self, pattern: str) -> List[Path]:
        """Find package directories using glob pattern."""
        full_pattern = str(self.project_root / pattern)
        matched_paths = glob.glob(full_pattern)
        
        package_paths = []
        for path_str in matched_paths:
            path = Path(path_str)
            if path.is_dir() and (path / 'package.json').exists():
                package_paths.append(path)
        
        return package_paths
    
    def _analyze_package(self, package_path: Path) -> Optional[Dict[str, Any]]:
        """Analyze a single package."""
        try:
            package_json_path = package_path / 'package.json'
            if not package_json_path.exists():
                return None
            
            with open(package_json_path, 'r') as f:
                package_data = json.load(f)
            
            # Analyze package structure
            package_info = {
                'name': package_data.get('name', package_path.name),
                'version': package_data.get('version', '0.0.0'),
                'path': str(package_path),
                'relative_path': str(package_path.relative_to(self.project_root)),
                'package_json': package_data,
                'main_entry': package_data.get('main', 'index.js'),
                'module_entry': package_data.get('module'),
                'types_entry': package_data.get('types') or package_data.get('typings'),
                'scripts': package_data.get('scripts', {}),
                'dependencies': package_data.get('dependencies', {}),
                'dev_dependencies': package_data.get('devDependencies', {}),
                'peer_dependencies': package_data.get('peerDependencies', {}),
                'is_private': package_data.get('private', False),
                'source_files': [],
                'entry_points': [],
                'exports': package_data.get('exports', {})
            }
            
            # Discover source files
            package_info['source_files'] = self._discover_source_files(package_path)
            
            # Analyze entry points
            package_info['entry_points'] = self._analyze_entry_points(package_path, package_data)
            
            # Calculate package metrics
            package_info['metrics'] = self._calculate_package_metrics(package_path, package_info)
            
            return package_info
            
        except Exception as e:
            log_debug(f"Error analyzing package {package_path}: {e}")
            return None
    
    def _discover_source_files(self, package_path: Path) -> List[str]:
        """Discover TypeScript/JavaScript source files in package."""
        source_files = []
        
        # Common source directories
        source_dirs = ['src', 'lib', 'dist', '.']
        extensions = ['*.ts', '*.tsx', '*.js', '*.jsx', '*.d.ts']
        
        for source_dir in source_dirs:
            dir_path = package_path / source_dir
            if dir_path.exists():
                for ext in extensions:
                    pattern = str(dir_path / '**' / ext)
                    files = glob.glob(pattern, recursive=True)
                    source_files.extend([str(Path(f).relative_to(package_path)) for f in files])
        
        # Remove duplicates and sort
        return sorted(list(set(source_files)))
    
    def _analyze_entry_points(self, package_path: Path, package_data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Analyze package entry points."""
        entry_points = []
        
        # Main entry point
        main_entry = package_data.get('main')
        if main_entry:
            entry_path = package_path / main_entry
            entry_points.append({
                'type': 'main',
                'path': main_entry,
                'absolute_path': str(entry_path),
                'exists': entry_path.exists()
            })
        
        # Module entry point (ES6 modules)
        module_entry = package_data.get('module')
        if module_entry and module_entry != main_entry:
            entry_path = package_path / module_entry
            entry_points.append({
                'type': 'module',
                'path': module_entry,
                'absolute_path': str(entry_path),
                'exists': entry_path.exists()
            })
        
        # Types entry point
        types_entry = package_data.get('types') or package_data.get('typings')
        if types_entry:
            entry_path = package_path / types_entry
            entry_points.append({
                'type': 'types',
                'path': types_entry,
                'absolute_path': str(entry_path),
                'exists': entry_path.exists()
            })
        
        # Exports field (Node.js 12+)
        exports = package_data.get('exports', {})
        if exports:
            entry_points.extend(self._parse_exports_field(package_path, exports))
        
        # Bin entries (CLI commands)
        bin_entries = package_data.get('bin', {})
        if isinstance(bin_entries, str):
            bin_entries = {package_data.get('name', 'cli'): bin_entries}
        
        for bin_name, bin_path in bin_entries.items():
            entry_path = package_path / bin_path
            entry_points.append({
                'type': 'bin',
                'name': bin_name,
                'path': bin_path,
                'absolute_path': str(entry_path),
                'exists': entry_path.exists()
            })
        
        return entry_points
    
    def _parse_exports_field(self, package_path: Path, exports: Any) -> List[Dict[str, Any]]:
        """Parse the exports field from package.json."""
        entry_points = []
        
        def parse_export_value(key: str, value: Any):
            if isinstance(value, str):
                entry_path = package_path / value
                entry_points.append({
                    'type': 'export',
                    'export_key': key,
                    'path': value,
                    'absolute_path': str(entry_path),
                    'exists': entry_path.exists()
                })
            elif isinstance(value, dict):
                for condition, path in value.items():
                    if isinstance(path, str):
                        entry_path = package_path / path
                        entry_points.append({
                            'type': 'export',
                            'export_key': f"{key}.{condition}",
                            'condition': condition,
                            'path': path,
                            'absolute_path': str(entry_path),
                            'exists': entry_path.exists()
                        })
        
        if isinstance(exports, str):
            parse_export_value('.', exports)
        elif isinstance(exports, dict):
            for key, value in exports.items():
                parse_export_value(key, value)
        
        return entry_points
    
    def _calculate_package_metrics(self, package_path: Path, package_info: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate metrics for a package."""
        metrics = {
            'source_file_count': len(package_info['source_files']),
            'dependency_count': len(package_info['dependencies']),
            'dev_dependency_count': len(package_info['dev_dependencies']),
            'peer_dependency_count': len(package_info['peer_dependencies']),
            'script_count': len(package_info['scripts']),
            'entry_point_count': len(package_info['entry_points']),
            'has_types': bool(package_info['types_entry']),
            'has_tests': self._has_test_files(package_path, package_info),
            'has_build_script': 'build' in package_info['scripts'],
            'has_test_script': 'test' in package_info['scripts']
        }
        
        # Calculate total lines of code (approximate)
        try:
            total_lines = 0
            for source_file in package_info['source_files'][:10]:  # Limit to first 10 files for performance
                file_path = package_path / source_file
                if file_path.exists() and file_path.suffix in ['.ts', '.tsx', '.js', '.jsx']:
                    with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                        total_lines += len(f.readlines())
            
            metrics['estimated_loc'] = total_lines
        except Exception:
            metrics['estimated_loc'] = 0
        
        return metrics
    
    def _has_test_files(self, package_path: Path, package_info: Dict[str, Any]) -> bool:
        """Check if package has test files."""
        test_patterns = [
            '**/*.test.ts', '**/*.test.tsx', '**/*.test.js', '**/*.test.jsx',
            '**/*.spec.ts', '**/*.spec.tsx', '**/*.spec.js', '**/*.spec.jsx',
            '**/test/**/*', '**/tests/**/*', '**/__tests__/**/*'
        ]
        
        for pattern in test_patterns:
            test_files = glob.glob(str(package_path / pattern), recursive=True)
            if test_files:
                return True
        
        return False


class WorkspaceDependencyResolver:
    """Resolves dependencies within workspace context.

    Builds an internal dependency graph over the packages reported by
    :class:`WorkspaceDetector` and resolves import specifiers (package
    names, package subpaths, and cross-package relative paths) against it.
    """

    def __init__(self, workspace_info: Dict[str, Any]):
        """Initialize with workspace information.

        Args:
            workspace_info: Detection result containing a 'packages' list.
        """
        self.workspace_info = workspace_info
        self.packages = {pkg['name']: pkg for pkg in workspace_info.get('packages', [])}
        self.package_by_path = {pkg['path']: pkg for pkg in workspace_info.get('packages', [])}
        self.dependency_graph = defaultdict(set)
        self.reverse_dependency_graph = defaultdict(set)
        self._build_workspace_graph()

    def _build_workspace_graph(self):
        """Build forward and reverse dependency graphs for workspace packages.

        Only dependencies that name another workspace package become edges;
        external (registry) dependencies are ignored here.
        """
        for package in self.workspace_info.get('packages', []):
            package_name = package['name']
            dependencies = package.get('dependencies', {})

            for dep_name in dependencies.keys():
                if dep_name in self.packages:
                    self.dependency_graph[package_name].add(dep_name)
                    self.reverse_dependency_graph[dep_name].add(package_name)

    def resolve_workspace_import(self, import_path: str, from_package: str) -> Dict[str, Any]:
        """Resolve an import specifier within the workspace context.

        Args:
            import_path: The raw import specifier (package name, subpath
                such as 'pkg/utils' or '@scope/pkg/utils', or './relative').
            from_package: Name of the importing workspace package.

        Returns:
            Resolution dict; 'resolved' is False when the import is not a
            workspace-internal target. Errors are reported under 'error'.
        """
        resolution = {
            'resolved': False,
            'type': 'unknown',
            'target_package': None,
            'resolved_path': None,
            'is_workspace_internal': False
        }

        try:
            # Exact match: the import is a workspace package name.
            if import_path in self.packages:
                target_package = self.packages[import_path]
                resolution.update({
                    'resolved': True,
                    'type': 'workspace_package',
                    'target_package': target_package['name'],
                    'resolved_path': target_package['path'],
                    'is_workspace_internal': True,
                    'package_info': target_package
                })
                return resolution

            # Subpath import into a workspace package, e.g. '@scope/pkg/utils'
            # or 'pkg/utils'. (Scoped and unscoped names are both valid npm
            # subpath imports.)
            if '/' in import_path:
                for pkg_name, pkg_info in self.packages.items():
                    if import_path.startswith(f"{pkg_name}/"):
                        subpath = import_path[len(pkg_name):].lstrip('/')
                        resolved_path = self._resolve_package_subpath(pkg_info, subpath)

                        resolution.update({
                            'resolved': True,
                            'type': 'workspace_package_subpath',
                            'target_package': pkg_name,
                            'resolved_path': resolved_path,
                            'is_workspace_internal': True,
                            'subpath': subpath,
                            'package_info': pkg_info
                        })
                        return resolution

            # Relative imports may escape the importing package's directory
            # and land in a sibling workspace package.
            if import_path.startswith('./') or import_path.startswith('../'):
                from_pkg_info = self._get_package_for_name(from_package)
                if from_pkg_info:
                    cross_package_resolution = self._resolve_cross_package_relative(import_path, from_pkg_info)
                    if cross_package_resolution['resolved']:
                        resolution.update(cross_package_resolution)
                        return resolution

        except Exception as e:
            resolution['error'] = str(e)

        return resolution

    def _resolve_package_subpath(self, package_info: Dict[str, Any], subpath: str) -> str:
        """Resolve a subpath within a package to a concrete file path.

        Resolution order: exact path, path + extension, directory index
        file. Falls back to the unresolved path string.
        """
        package_path = Path(package_info['path'])

        if not subpath:
            # Bare package import: resolve to the declared main entry point.
            main_entry = package_info.get('main_entry', 'index.js')
            return str(package_path / main_entry)

        # Try to resolve subpath directly
        target_path = package_path / subpath
        if target_path.exists():
            return str(target_path)

        # Try appending extensions. NOTE: Path.with_suffix would *replace* an
        # existing dotted segment ('styles.module' -> 'styles.ts'), which is
        # wrong for module resolution — append instead.
        extensions = ['.ts', '.tsx', '.js', '.jsx', '.d.ts']
        for ext in extensions:
            candidate = Path(str(target_path) + ext)
            if candidate.exists():
                return str(candidate)

        # Try as directory with index file (index.ts, index.js, ...)
        if target_path.is_dir():
            for ext in extensions:
                index_path = target_path / f"index{ext}"
                if index_path.exists():
                    return str(index_path)

        # Fallback to original path
        return str(target_path)

    def _resolve_cross_package_relative(self, import_path: str, from_package: Dict[str, Any]) -> Dict[str, Any]:
        """Resolve relative imports that might cross package boundaries.

        Returns:
            A resolution dict when the target lands inside some workspace
            package, else {'resolved': False}.
        """
        from_path = Path(from_package['path'])
        target_path = from_path / import_path
        target_path = target_path.resolve()

        # Check if target is in a different workspace package.
        for pkg_info in self.workspace_info.get('packages', []):
            pkg_path = Path(pkg_info['path']).resolve()
            try:
                # relative_to raises ValueError when target is outside
                # pkg_path (works on all supported Python versions, unlike
                # Path.is_relative_to which is 3.9+).
                relative_to_pkg = target_path.relative_to(pkg_path)
            except ValueError:
                continue
            return {
                'resolved': True,
                'type': 'cross_package_relative',
                'target_package': pkg_info['name'],
                'resolved_path': str(target_path),
                'is_workspace_internal': True,
                'relative_path_in_target': str(relative_to_pkg),
                'package_info': pkg_info
            }

        return {'resolved': False}

    def _get_package_for_name(self, package_name: str) -> Optional[Dict[str, Any]]:
        """Get package info by name, or None when unknown."""
        return self.packages.get(package_name)

    def get_workspace_dependency_graph(self) -> Dict[str, Any]:
        """Get the complete workspace dependency graph and derived insights."""
        return {
            'dependencies': dict(self.dependency_graph),
            'reverse_dependencies': dict(self.reverse_dependency_graph),
            'packages': list(self.packages.keys()),
            'external_dependencies': self._get_external_dependencies(),
            'cycles': self._detect_cycles(),
            'orphaned_packages': self._find_orphaned_packages()
        }

    def _get_external_dependencies(self) -> Dict[str, List[str]]:
        """Get external (non-workspace) dependencies for each package.

        Returns:
            Mapping of package name to a list of registry dependency names
            (workspace-internal dependencies removed).
        """
        external_deps = {}

        for package in self.workspace_info.get('packages', []):
            package_name = package['name']
            all_deps = set(package.get('dependencies', {}).keys())
            workspace_deps = self.dependency_graph.get(package_name, set())
            external_deps[package_name] = all_deps - workspace_deps

        return {k: list(v) for k, v in external_deps.items()}

    def _detect_cycles(self) -> List[List[str]]:
        """Detect circular dependencies among workspace packages.

        Returns:
            Cycles as node lists with the starting node repeated at the end
            (e.g. ['a', 'b', 'a']). The same cycle may be reported once per
            discovery path.
        """
        visited = set()
        rec_stack = set()
        cycles = []

        def dfs(node: str, path: List[str]):
            if node in rec_stack:
                # Node already on the current recursion path: found a cycle.
                cycle_start = path.index(node)
                cycles.append(path[cycle_start:] + [node])
                return

            if node in visited:
                return

            visited.add(node)
            rec_stack.add(node)
            path.append(node)

            for neighbor in self.dependency_graph.get(node, set()):
                # Copy so sibling branches don't see each other's suffixes.
                dfs(neighbor, path.copy())

            rec_stack.remove(node)

        for package in self.packages.keys():
            if package not in visited:
                dfs(package, [])

        return cycles

    def _find_orphaned_packages(self) -> List[str]:
        """Find packages with no workspace-internal dependency edges.

        A package is orphaned when it neither depends on nor is depended
        upon by any other workspace package.
        """
        orphaned = []

        for package_name in self.packages.keys():
            if (not self.dependency_graph.get(package_name) and
                not self.reverse_dependency_graph.get(package_name)):
                orphaned.append(package_name)

        return orphaned


class WorkspaceAnalyzer:
    """Comprehensive workspace analysis and insights."""
    
    def __init__(self, workspace_info: Dict[str, Any]):
        """Create an analyzer for a detected workspace.

        Args:
            workspace_info: Detection result produced by a workspace detector,
                containing the 'packages' list the analysis operates on.
        """
        self.workspace_info = workspace_info
        self.function_classifier = FunctionClassifier()
        # The resolver supplies the package dependency graph used by the
        # dependency-analysis steps.
        self.dependency_resolver = WorkspaceDependencyResolver(workspace_info)
    
    def analyze_workspace(self) -> Dict[str, Any]:
        """Run the full workspace analysis pipeline.

        Returns:
            Dict with overview, dependency, health, and build sections plus
            a 'recommendations' list derived from those sections.
        """
        report: Dict[str, Any] = {
            'workspace_overview': self._analyze_workspace_overview(),
            'dependency_analysis': self._analyze_dependencies(),
            'package_health': self._analyze_package_health(),
            'build_analysis': self._analyze_build_system(),
            'recommendations': [],
        }
        # Recommendations are computed last: they inspect the completed report.
        report['recommendations'] = self._generate_recommendations(report)
        return report
    
    def _analyze_workspace_overview(self) -> Dict[str, Any]:
        """Summarize high-level workspace characteristics.

        Returns:
            Package/file/dependency counts plus, when any packages exist,
            a 'package_size_stats' distribution over source-file counts.
        """
        pkgs = self.workspace_info.get('packages', [])

        def tally(predicate) -> int:
            # Count packages satisfying the predicate.
            return sum(1 for p in pkgs if predicate(p))

        overview = {
            'total_packages': len(pkgs),
            'workspace_type': self.workspace_info.get('workspace_type', 'unknown'),
            'private_packages': tally(lambda p: p.get('is_private', False)),
            'public_packages': tally(lambda p: not p.get('is_private', False)),
            'packages_with_types': tally(lambda p: p.get('metrics', {}).get('has_types', False)),
            'packages_with_tests': tally(lambda p: p.get('metrics', {}).get('has_tests', False)),
            'total_dependencies': sum(len(p.get('dependencies', {})) for p in pkgs),
            'total_source_files': sum(p.get('metrics', {}).get('source_file_count', 0) for p in pkgs),
            'estimated_total_loc': sum(p.get('metrics', {}).get('estimated_loc', 0) for p in pkgs),
        }

        # Distribution of package sizes, measured in source files.
        sizes = [p.get('metrics', {}).get('source_file_count', 0) for p in pkgs]
        if sizes:
            ordered = sorted(sizes)
            overview['package_size_stats'] = {
                'min': ordered[0],
                'max': ordered[-1],
                'average': sum(ordered) / len(ordered),
                # Upper median for even-length lists.
                'median': ordered[len(ordered) // 2],
            }

        return overview
    
    def _analyze_dependencies(self) -> Dict[str, Any]:
        """Summarize internal/external dependency structure of the workspace.

        Returns:
            Dict exposing the resolver's graph plus computed metrics.
        """
        graph = self.dependency_resolver.get_workspace_dependency_graph()
        return {
            'internal_dependencies': graph['dependencies'],
            'external_dependencies': graph['external_dependencies'],
            'dependency_cycles': graph['cycles'],
            'orphaned_packages': graph['orphaned_packages'],
            'dependency_metrics': self._calculate_dependency_metrics(graph),
        }
    
    def _calculate_dependency_metrics(self, dep_graph: Dict[str, Any]) -> Dict[str, Any]:
        """Compute fan-in/fan-out statistics for the workspace graph.

        Args:
            dep_graph: Graph dict with 'packages', 'dependencies' and
                'reverse_dependencies' entries.

        Returns:
            Edge totals, per-package fan-in/fan-out maps, and the package
            with the most dependents.
        """
        pkgs = dep_graph['packages']
        deps = dep_graph['dependencies']
        rdeps = dep_graph['reverse_dependencies']

        # Fan-out: how many packages each package depends on.
        # Fan-in: how many packages depend on each package.
        fan_out = {p: len(deps.get(p, set())) for p in pkgs}
        fan_in = {p: len(rdeps.get(p, set())) for p in pkgs}

        metrics: Dict[str, Any] = {
            'total_internal_edges': sum(len(d) for d in deps.values()),
            'average_dependencies_per_package': 0,
            'max_dependencies': 0,
            'most_depended_upon': None,
            'max_dependents': 0,
            'dependency_fan_out': fan_out,
            'dependency_fan_in': fan_in,
        }

        if pkgs:
            metrics['max_dependencies'] = max(fan_out.values())
            metrics['average_dependencies_per_package'] = sum(fan_out.values()) / len(pkgs)

            # Strict '>' keeps the first package (in listing order) on ties,
            # and leaves most_depended_upon as None when all fan-ins are 0.
            for pkg in pkgs:
                if fan_in[pkg] > metrics['max_dependents']:
                    metrics['max_dependents'] = fan_in[pkg]
                    metrics['most_depended_upon'] = pkg

        return metrics
    
    def _analyze_package_health(self) -> Dict[str, Any]:
        """Score every package and flag those below the health threshold.

        Returns:
            Per-package scorecards, the list of unhealthy packages
            (overall score below 60), and the workspace-wide average score.
        """
        pkgs = self.workspace_info.get('packages', [])
        report = {
            'package_scores': {},
            'unhealthy_packages': [],
            'best_practices_compliance': {},
            'overall_health_score': 0,
        }

        running_total = 0
        for pkg in pkgs:
            scorecard = self._calculate_package_health_score(pkg)
            report['package_scores'][pkg['name']] = scorecard
            running_total += scorecard['overall_score']

            # Anything under 60/100 is surfaced for attention.
            if scorecard['overall_score'] < 60:
                report['unhealthy_packages'].append({
                    'name': pkg['name'],
                    'score': scorecard['overall_score'],
                    'issues': scorecard['issues'],
                })

        if pkgs:
            report['overall_health_score'] = running_total / len(pkgs)

        return report
    
    def _calculate_package_health_score(self, package: Dict[str, Any]) -> Dict[str, Any]:
        """Calculate health score for individual package."""
        score = {
            'overall_score': 0,
            'category_scores': {},
            'issues': []
        }
        
        metrics = package.get('metrics', {})
        
        # TypeScript adoption (20 points)
        ts_score = 0
        if metrics.get('has_types', False):
            ts_score = 20
        elif any(f.endswith('.ts') or f.endswith('.tsx') for f in package.get('source_files', [])):
            ts_score = 15
        score['category_scores']['typescript'] = ts_score
        
        # Testing (20 points)
        test_score = 0
        if metrics.get('has_tests', False) and metrics.get('has_test_script', False):
            test_score = 20
        elif metrics.get('has_tests', False):
            test_score = 10
        elif metrics.get('has_test_script', False):
            test_score = 5
        score['category_scores']['testing'] = test_score
        
        # Build system (15 points)
        build_score = 0
        if metrics.get('has_build_script', False):
            build_score = 15
        score['category_scores']['build'] = build_score
        
        # Package.json completeness (15 points)
        pkg_json = package.get('package_json', {})
        pkg_score = 0
        if pkg_json.get('description'):
            pkg_score += 3
        if pkg_json.get('keywords'):
            pkg_score += 3
        if pkg_json.get('author') or pkg_json.get('contributors'):
            pkg_score += 3
        if pkg_json.get('license'):
            pkg_score += 3
        if pkg_json.get('repository'):
            pkg_score += 3
        score['category_scores']['package_metadata'] = pkg_score
        
        # Entry points (15 points)
        entry_score = 0
        entry_points = package.get('entry_points', [])
        if any(ep['type'] == 'main' for ep in entry_points):
            entry_score += 7
        if any(ep['type'] == 'types' for ep in entry_points):
            entry_score += 8
        score['category_scores']['entry_points'] = entry_score
        
        # Dependency management (15 points)
        dep_score = 15  # Start with full score
        dep_count = metrics.get('dependency_count', 0)
        if dep_count > 20:  # Too many dependencies
            dep_score -= 5
            score['issues'].append(f"High dependency count: {dep_count}")
        if dep_count == 0 and metrics.get('source_file_count', 0) > 5:
            dep_score -= 3  # Suspicious for larger packages
        score['category_scores']['dependencies'] = dep_score
        
        # Calculate overall score
        score['overall_score'] = sum(score['category_scores'].values())
        
        # Add issues based on scores
        if score['category_scores']['typescript'] < 15:
            score['issues'].append("Missing TypeScript support")
        if score['category_scores']['testing'] < 10:
            score['issues'].append("Insufficient testing setup")
        if score['category_scores']['build'] == 0:
            score['issues'].append("No build script defined")
        
        return score
    
    def _analyze_build_system(self) -> Dict[str, Any]:
        """Analyze build system configuration across workspace."""
        packages = self.workspace_info.get('packages', [])
        
        build_analysis = {
            'build_tools': defaultdict(int),
            'script_patterns': defaultdict(int),
            'typescript_configs': 0,
            'bundler_usage': defaultdict(int),
            'inconsistencies': []
        }
        
        for package in packages:
            scripts = package.get('scripts', {})
            
            # Analyze script patterns
            for script_name, script_command in scripts.items():
                build_analysis['script_patterns'][script_name] += 1
                
                # Detect build tools
                if 'webpack' in script_command:
                    build_analysis['bundler_usage']['webpack'] += 1
                elif 'rollup' in script_command:
                    build_analysis['bundler_usage']['rollup'] += 1
                elif 'vite' in script_command:
                    build_analysis['bundler_usage']['vite'] += 1
                elif 'parcel' in script_command:
                    build_analysis['bundler_usage']['parcel'] += 1
                
                if 'tsc' in script_command:
                    build_analysis['build_tools']['typescript'] += 1
                elif 'babel' in script_command:
                    build_analysis['build_tools']['babel'] += 1
                elif 'esbuild' in script_command:
                    build_analysis['build_tools']['esbuild'] += 1
            
            # Check for TypeScript config
            package_path = Path(package['path'])
            if (package_path / 'tsconfig.json').exists():
                build_analysis['typescript_configs'] += 1
        
        # Detect inconsistencies
        if len(build_analysis['bundler_usage']) > 1:
            build_analysis['inconsistencies'].append(
                f"Multiple bundlers used: {list(build_analysis['bundler_usage'].keys())}"
            )
        
        if len(build_analysis['build_tools']) > 2:
            build_analysis['inconsistencies'].append(
                f"Multiple build tools used: {list(build_analysis['build_tools'].keys())}"
            )
        
        return build_analysis
    
    def _generate_recommendations(self, analysis: Dict[str, Any]) -> List[str]:
        """Generate actionable recommendations for workspace improvement."""
        recommendations = []
        
        overview = analysis['workspace_overview']
        dep_analysis = analysis['dependency_analysis']
        health = analysis['package_health']
        build = analysis['build_analysis']
        
        # TypeScript recommendations
        if overview['packages_with_types'] < overview['total_packages'] * 0.8:
            recommendations.append(
                f"Consider adding TypeScript support to {overview['total_packages'] - overview['packages_with_types']} packages"
            )
        
        # Testing recommendations
        if overview['packages_with_tests'] < overview['total_packages'] * 0.7:
            recommendations.append(
                f"Add tests to {overview['total_packages'] - overview['packages_with_tests']} packages"
            )
        
        # Dependency recommendations
        if dep_analysis['dependency_cycles']:
            recommendations.append(
                f"Resolve {len(dep_analysis['dependency_cycles'])} circular dependencies"
            )
        
        if dep_analysis['orphaned_packages']:
            recommendations.append(
                f"Review {len(dep_analysis['orphaned_packages'])} isolated packages for potential consolidation"
            )
        
        # Health recommendations
        if health['unhealthy_packages']:
            recommendations.append(
                f"Improve health of {len(health['unhealthy_packages'])} low-scoring packages"
            )
        
        # Build system recommendations
        if build['inconsistencies']:
            recommendations.append(
                f"Standardize build tools to reduce {len(build['inconsistencies'])} inconsistencies"
            )
        
        if build['typescript_configs'] < overview['total_packages'] * 0.8:
            recommendations.append(
                "Add TypeScript configuration to remaining packages"
            )
        
        return recommendations