#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
编译器批量处理器
优化编译器调用，减少进程启动开销，提升AST提取效率
"""

import hashlib
import json
import os
import re
import shutil
import subprocess
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass, field
from pathlib import Path
from threading import Lock
from typing import Dict, List, Any, Optional, Tuple

from ..utils_module.logger import ObfuscationLogger
from ..utils_module.language_utils import detect_language_for_file


@dataclass
class CompilerTask:
    """A single compilation request: one compiler run over a set of source files."""
    task_id: str  # unique id used to correlate the task with its CompilerResult
    compiler: str  # compiler executable: swiftc, javac, kotlinc, clang
    files: List[str]  # source files compiled together by this task
    options: List[str] = field(default_factory=list)  # extra command-line options
    output_dir: Optional[str] = None  # build-output directory (None = compiler default)
    metadata: Dict[str, Any] = field(default_factory=dict)  # caller-supplied extra context


@dataclass
class CompilerResult:
    """Outcome of one compilation task."""
    task_id: str  # id of the CompilerTask this result belongs to
    success: bool  # True when the compiler exited with status 0
    output: str  # captured stdout of the compiler invocation
    error: str  # captured stderr, or an error description
    ast_data: Optional[Dict[str, Any]] = None  # per-file extracted symbols, if any
    execution_time: float = 0.0  # seconds attributed to this task
    metadata: Dict[str, Any] = field(default_factory=dict)  # extra context


class CompilerBatchProcessor:
    """Batches compiler invocations to reduce process-startup overhead.

    Tasks are grouped by compiler type, packed into size-limited batches,
    executed (optionally on a thread pool), and their outputs are cached on
    disk keyed by compiler type plus the file paths/mtimes involved.
    """

    # First double-quoted token on a compiler-dump line; compiled once and
    # shared by the Swift AST parser instead of re-importing/re-matching
    # `re` inside the hot per-line loop.
    _QUOTED_IDENT_RE = re.compile(r'"([^"]+)"')

    def __init__(self, cache_dir: Optional[str] = None,
                 logger: Optional["ObfuscationLogger"] = None):
        """
        Initialize the compiler batch processor.

        Args:
            cache_dir: cache directory (defaults to ``.compiler_cache`` under CWD)
            logger: optional operation/error logger
        """
        self.logger = logger
        self.cache_dir = cache_dir or os.path.join(os.getcwd(), '.compiler_cache')
        os.makedirs(self.cache_dir, exist_ok=True)

        # Per-compiler settings: executable, flags that make it dump
        # AST/declaration info, max files per batch, and the timeout
        # (seconds) for one batch invocation.
        self.compiler_configs = {
            'swift': {
                'command': 'swiftc',
                'ast_flags': ['-dump-ast', '-suppress-warnings'],
                'batch_size': 50,
                'timeout': 30
            },
            'java': {
                'command': 'javac',
                'ast_flags': ['-XprintRounds', '-XprintProcessorInfo'],
                'batch_size': 100,
                'timeout': 20
            },
            'kotlin': {
                'command': 'kotlinc',
                'ast_flags': ['-Xdump-declarations'],
                'batch_size': 50,
                'timeout': 25
            },
            'objc': {
                'command': 'clang',
                'ast_flags': ['-Xclang', '-ast-dump', '-fsyntax-only'],
                'batch_size': 30,
                'timeout': 20
            }
        }

        # Pending-task queue (reserved for future streaming submission).
        self.task_queue: List["CompilerTask"] = []
        self.queue_lock = Lock()

        # Long-lived compiler process pool (reserved; not populated yet).
        self.compiler_pools: Dict[str, subprocess.Popen] = {}
        self.pool_lock = Lock()

        # Aggregate counters. Guarded by _stats_lock because batches run on
        # multiple worker threads when process_batch(parallel=True); bare
        # `dict[key] += n` is not atomic across threads.
        self._stats_lock = Lock()
        self.stats = {
            'total_tasks': 0,
            'successful_tasks': 0,
            'failed_tasks': 0,
            'total_files': 0,
            'batch_executions': 0,
            'cache_hits': 0,
            'total_time': 0.0,
            'compiler_starts': 0,
            'compiler_reuses': 0
        }

    def _bump(self, key: str, amount: float = 1) -> None:
        """Thread-safely increment a statistics counter."""
        with self._stats_lock:
            self.stats[key] += amount

    def process_batch(self, tasks: List["CompilerTask"],
                     parallel: bool = True,
                     max_workers: int = 4) -> List["CompilerResult"]:
        """
        Process a list of compilation tasks in batches.

        Args:
            tasks: compilation tasks to run
            parallel: run batches on a thread pool when True
            max_workers: maximum worker threads in parallel mode

        Returns:
            One CompilerResult per task (order is not guaranteed in
            parallel mode).
        """
        if not tasks:
            return []

        start_time = time.time()
        self._bump('total_tasks', len(tasks))

        # Group by compiler type so each batch shares one command line.
        grouped_tasks = self._group_tasks_by_compiler(tasks)

        results: List["CompilerResult"] = []

        if parallel:
            with ThreadPoolExecutor(max_workers=max_workers) as executor:
                futures = [
                    executor.submit(self._execute_batch, compiler_type, batch)
                    for compiler_type, compiler_tasks in grouped_tasks.items()
                    for batch in self._create_batches(compiler_type, compiler_tasks)
                ]

                for future in as_completed(futures):
                    try:
                        results.extend(future.result())
                    except Exception as e:
                        # A crashed batch is logged; its tasks get no result.
                        if self.logger:
                            self.logger.log_error(f"批处理失败: {e}")
        else:
            # Serial execution: run batches one after another.
            for compiler_type, compiler_tasks in grouped_tasks.items():
                for batch in self._create_batches(compiler_type, compiler_tasks):
                    results.extend(self._execute_batch(compiler_type, batch))

        self._bump('total_time', time.time() - start_time)

        if self.logger:
            # NOTE: success/failure counts are cumulative across calls.
            self.logger.log_operation(
                "批量编译完成",
                f"处理 {len(tasks)} 个任务，成功 {self.stats['successful_tasks']}，"
                f"失败 {self.stats['failed_tasks']}"
            )

        return results

    def _group_tasks_by_compiler(self, tasks: List["CompilerTask"]) -> Dict[str, List["CompilerTask"]]:
        """
        Group tasks by the compiler type inferred from their first file.

        Args:
            tasks: task list

        Returns:
            Mapping of compiler type -> tasks of that type.
        """
        grouped: Dict[str, List["CompilerTask"]] = {}
        for task in tasks:
            compiler_type = self._detect_compiler_type(task.files[0] if task.files else '')
            grouped.setdefault(compiler_type, []).append(task)
        return grouped

    def _detect_compiler_type(self, file_path: str) -> str:
        """
        Detect the compiler type for a source file.

        Args:
            file_path: path of the source file

        Returns:
            'swift' / 'java' / 'kotlin' / 'objc', or 'unknown'.
        """
        language = detect_language_for_file(file_path)
        if language in {'swift', 'java', 'kotlin', 'objc'}:
            return language
        return 'unknown'

    def _create_batches(self, compiler_type: str,
                       tasks: List["CompilerTask"]) -> List[List["CompilerTask"]]:
        """
        Pack tasks into batches bounded by the compiler's batch_size.

        A task is never split across batches, so a single task with more
        files than batch_size still becomes its own (oversized) batch.

        Args:
            compiler_type: compiler type (selects batch_size)
            tasks: tasks to pack

        Returns:
            List of task batches.
        """
        config = self.compiler_configs.get(compiler_type, {})
        batch_size = config.get('batch_size', 50)

        batches: List[List["CompilerTask"]] = []
        current_batch: List["CompilerTask"] = []
        current_file_count = 0

        for task in tasks:
            file_count = len(task.files)

            # Close the current batch before it would overflow the limit.
            if current_batch and current_file_count + file_count > batch_size:
                batches.append(current_batch)
                current_batch = []
                current_file_count = 0

            current_batch.append(task)
            current_file_count += file_count
            self._bump('total_files', file_count)

        if current_batch:
            batches.append(current_batch)

        return batches

    def _execute_batch(self, compiler_type: str,
                      batch: List["CompilerTask"]) -> List["CompilerResult"]:
        """
        Execute one batch with a single compiler invocation.

        Checks the on-disk cache first; on a miss, dispatches to the
        compiler-specific executor and caches the merged result when every
        task succeeded.

        Args:
            compiler_type: compiler type key
            batch: tasks to run together

        Returns:
            One CompilerResult per task in the batch.
        """
        self._bump('batch_executions')

        config = self.compiler_configs.get(compiler_type, {})
        results: List["CompilerResult"] = []

        # Every file in the batch (drives the cache key).
        all_files: List[str] = []
        for task in batch:
            all_files.extend(task.files)

        cache_key = self._generate_cache_key(compiler_type, all_files)
        cached_result = self._get_cached_result(cache_key)

        if cached_result:
            self._bump('cache_hits')
            # Fan the cached batch result out to every task.
            for task in batch:
                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=True,
                    output=cached_result.get('output', ''),
                    error='',
                    ast_data=cached_result.get('ast_data', {}),
                    execution_time=0.0
                ))
            return results

        start_time = time.time()

        try:
            # Dispatch table; unknown types fall back to per-task execution.
            executors = {
                'swift': self._execute_swift_batch,
                'java': self._execute_java_batch,
                'kotlin': self._execute_kotlin_batch,
                'objc': self._execute_objc_batch,
            }
            batch_results = executors.get(
                compiler_type, self._execute_generic_batch)(batch, config)

            execution_time = time.time() - start_time

            # Pair results with tasks positionally; per-task time is the
            # batch time split evenly (individual timings are unavailable).
            for i, task in enumerate(batch):
                if i < len(batch_results):
                    result = batch_results[i]
                    result.task_id = task.task_id
                    result.execution_time = execution_time / len(batch)
                    results.append(result)
                    self._bump('successful_tasks' if result.success else 'failed_tasks')
                else:
                    # Executor returned fewer results than tasks.
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=False,
                        output='',
                        error='批处理未返回结果',
                        execution_time=0.0
                    ))
                    self._bump('failed_tasks')

            # Cache only fully-successful batches. NOTE: only the first
            # task's raw output is stored; AST data is merged across tasks.
            if all(r.success for r in results):
                self._cache_result(cache_key, {
                    'output': results[0].output if results else '',
                    'ast_data': self._merge_ast_data([r.ast_data for r in results])
                })

        except Exception as e:
            if self.logger:
                self.logger.log_error(f"批处理执行失败: {e}")

            # Mark every task in the batch as failed.
            for task in batch:
                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=False,
                    output='',
                    error=str(e),
                    execution_time=0.0
                ))
                self._bump('failed_tasks')

        return results

    def _execute_swift_batch(self, batch: List["CompilerTask"],
                           config: Dict[str, Any]) -> List["CompilerResult"]:
        """
        Run one swiftc invocation over every file in the batch.

        Passes the file set via -filelist and parses the -dump-ast output
        into per-file symbol tables.

        Args:
            batch: task batch
            config: compiler configuration

        Returns:
            One CompilerResult per task.
        """
        results: List["CompilerResult"] = []

        with tempfile.TemporaryDirectory(prefix='swift_batch_') as temp_dir:
            all_files: List[str] = []
            for task in batch:
                all_files.extend(task.files)

            # swiftc reads the source list from a file (-filelist).
            file_list = os.path.join(temp_dir, 'files.txt')
            with open(file_list, 'w', encoding='utf-8') as f:
                f.write('\n'.join(all_files))

            cmd = [config['command']]
            cmd.extend(config['ast_flags'])
            cmd.extend(['-filelist', file_list])

            try:
                process = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=config.get('timeout', 30)
                )

                ast_data = self._parse_swift_ast(process.stdout)

                # Slice the batch-wide AST data back out per task.
                for task in batch:
                    task_ast = {file: ast_data[file]
                                for file in task.files if file in ast_data}
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=process.returncode == 0,
                        output=process.stdout,
                        error=process.stderr,
                        ast_data=task_ast
                    ))

            except subprocess.TimeoutExpired:
                # The whole invocation timed out; fail every task.
                for task in batch:
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=False,
                        output='',
                        error='编译超时'
                    ))

            except Exception as e:
                for task in batch:
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=False,
                        output='',
                        error=str(e)
                    ))

        return results

    def _execute_java_batch(self, batch: List["CompilerTask"],
                          config: Dict[str, Any]) -> List["CompilerResult"]:
        """Run one javac invocation over the batch (javac compiles many files at once)."""
        results: List["CompilerResult"] = []

        with tempfile.TemporaryDirectory(prefix='java_batch_') as temp_dir:
            all_files: List[str] = []
            for task in batch:
                all_files.extend(task.files)

            cmd = [config['command']]
            cmd.extend(['-d', temp_dir])  # class-file output directory
            cmd.extend(['-Xlint:none'])   # suppress lint warnings
            cmd.extend(all_files)

            try:
                process = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=config.get('timeout', 20)
                )

                success = process.returncode == 0

                # javac gives one combined result; fan it out per task.
                for task in batch:
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=success,
                        output=process.stdout,
                        error=process.stderr,
                        ast_data={}
                    ))

            except Exception as e:
                for task in batch:
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=False,
                        output='',
                        error=str(e)
                    ))

        return results

    def _execute_kotlin_batch(self, batch: List["CompilerTask"],
                            config: Dict[str, Any]) -> List["CompilerResult"]:
        """Run one kotlinc invocation over the batch and parse its declaration dump."""
        results: List["CompilerResult"] = []

        with tempfile.TemporaryDirectory(prefix='kotlin_batch_') as temp_dir:
            all_files: List[str] = []
            for task in batch:
                all_files.extend(task.files)

            cmd = [config['command']]
            cmd.extend(['-d', temp_dir])
            cmd.extend(config.get('ast_flags', []))
            cmd.extend(all_files)

            try:
                process = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=config.get('timeout', 25)
                )

                ast_data = self._parse_kotlin_declarations(process.stdout)

                for task in batch:
                    task_ast = {file: ast_data[file]
                                for file in task.files if file in ast_data}
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=process.returncode == 0,
                        output=process.stdout,
                        error=process.stderr,
                        ast_data=task_ast
                    ))

            except Exception as e:
                for task in batch:
                    results.append(CompilerResult(
                        task_id=task.task_id,
                        success=False,
                        output='',
                        error=str(e)
                    ))

        return results

    def _execute_objc_batch(self, batch: List["CompilerTask"],
                          config: Dict[str, Any]) -> List["CompilerResult"]:
        """Run one clang -ast-dump invocation over the batch (syntax-only, no output files)."""
        results: List["CompilerResult"] = []

        all_files: List[str] = []
        for task in batch:
            all_files.extend(task.files)

        cmd = [config['command']]
        cmd.extend(config['ast_flags'])
        cmd.extend(all_files)

        try:
            process = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=config.get('timeout', 20)
            )

            ast_data = self._parse_clang_ast(process.stdout)

            for task in batch:
                task_ast = {file: ast_data[file]
                            for file in task.files if file in ast_data}
                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=process.returncode == 0,
                    output=process.stdout,
                    error=process.stderr,
                    ast_data=task_ast
                ))

        except Exception as e:
            for task in batch:
                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=False,
                    output='',
                    error=str(e)
                ))

        return results

    def _execute_generic_batch(self, batch: List["CompilerTask"],
                             config: Dict[str, Any]) -> List["CompilerResult"]:
        """Fallback for unknown compiler types: run each task individually."""
        results: List["CompilerResult"] = []

        for task in batch:
            try:
                # Best-effort invocation of whatever the task specifies.
                cmd = [task.compiler or 'echo']
                cmd.extend(task.options)
                cmd.extend(task.files)

                process = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=10
                )

                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=process.returncode == 0,
                    output=process.stdout,
                    error=process.stderr
                ))

            except Exception as e:
                results.append(CompilerResult(
                    task_id=task.task_id,
                    success=False,
                    output='',
                    error=str(e)
                ))

        return results

    def _parse_swift_ast(self, output: str) -> Dict[str, Any]:
        """Parse ``swiftc -dump-ast`` text into {file: {classes, functions, properties}}."""
        ast_data: Dict[str, Any] = {}
        current_file: Optional[str] = None

        for line in output.split('\n'):
            if '(source_file' in line:
                # New file section; the file name is the first quoted token.
                parts = line.split('"')
                if len(parts) > 1:
                    current_file = parts[1]
                    ast_data[current_file] = {
                        'classes': [],
                        'functions': [],
                        'properties': []
                    }

            elif current_file:
                # Attribute declarations to the most recent source_file.
                if 'class_decl' in line:
                    name = self._extract_swift_identifier(line)
                    if name:
                        ast_data[current_file]['classes'].append(name)

                elif 'func_decl' in line:
                    name = self._extract_swift_identifier(line)
                    if name:
                        ast_data[current_file]['functions'].append(name)

                elif 'var_decl' in line or 'let_decl' in line:
                    name = self._extract_swift_identifier(line)
                    if name:
                        ast_data[current_file]['properties'].append(name)

        return ast_data

    def _extract_swift_identifier(self, line: str) -> Optional[str]:
        """Return the first double-quoted identifier in *line*, or None."""
        match = self._QUOTED_IDENT_RE.search(line)
        return match.group(1) if match else None

    def _parse_kotlin_declarations(self, output: str) -> Dict[str, Any]:
        """Parse kotlinc declaration output (stub: not yet implemented)."""
        return {}

    def _parse_clang_ast(self, output: str) -> Dict[str, Any]:
        """Parse clang AST dump output (stub: not yet implemented)."""
        return {}

    def _generate_cache_key(self, compiler_type: str, files: List[str]) -> str:
        """
        Build a cache key from the compiler type and the sorted file paths.

        File mtimes are mixed in when available so cached results are
        invalidated after a source file changes (a path-only key would
        serve stale results).
        """
        parts = [compiler_type]
        for path in sorted(files):  # sort for order-independence
            try:
                mtime = str(os.path.getmtime(path))
            except OSError:
                mtime = ''  # missing/unreadable file: fall back to path only
            parts.append(f"{path}@{mtime}")
        return hashlib.md5(':'.join(parts).encode()).hexdigest()

    def _get_cached_result(self, cache_key: str) -> Optional[Dict[str, Any]]:
        """Load a cached batch result, or None on miss or unreadable entry."""
        cache_file = os.path.join(self.cache_dir, f"{cache_key}.json")

        if os.path.exists(cache_file):
            try:
                with open(cache_file, 'r') as f:
                    return json.load(f)
            except (OSError, ValueError):
                # Unreadable or corrupt entry: treat as a cache miss.
                pass

        return None

    def _cache_result(self, cache_key: str, result: Dict[str, Any]):
        """Persist a batch result to the on-disk cache (best-effort)."""
        cache_file = os.path.join(self.cache_dir, f"{cache_key}.json")

        try:
            with open(cache_file, 'w') as f:
                json.dump(result, f)
        except (OSError, TypeError):
            # Caching is an optimization; ignore write/serialization errors.
            pass

    def _merge_ast_data(self, ast_data_list: List[Optional[Dict[str, Any]]]) -> Dict[str, Any]:
        """Merge per-task AST dicts into one (later entries win on key clashes)."""
        merged: Dict[str, Any] = {}
        for ast_data in ast_data_list:
            if ast_data:
                merged.update(ast_data)
        return merged

    def get_statistics(self) -> Dict[str, Any]:
        """
        Return a snapshot of the counters plus derived rates/averages.

        Returns:
            Statistics dictionary (copy; safe to mutate).
        """
        with self._stats_lock:
            stats = dict(self.stats)

        if stats['total_tasks'] > 0:
            batches = stats['batch_executions']
            stats['success_rate'] = stats['successful_tasks'] / stats['total_tasks']
            stats['cache_hit_rate'] = stats['cache_hits'] / batches if batches > 0 else 0
            stats['avg_batch_size'] = stats['total_files'] / batches if batches > 0 else 0
            stats['avg_task_time'] = stats['total_time'] / stats['total_tasks']

        return stats

    def clear_cache(self):
        """Delete and recreate the on-disk result cache."""
        if os.path.exists(self.cache_dir):
            shutil.rmtree(self.cache_dir)
            os.makedirs(self.cache_dir, exist_ok=True)

        if self.logger:
            self.logger.log_operation("缓存清理", "编译器缓存已清理")
