"""
备份命令实现

实现backup命令的具体功能。
"""

import click
from pathlib import Path
from typing import Dict, Any, Optional
from datetime import datetime

from .base import BaseCommand
from ...core.backup import BackupManager, BackupStrategy
from ...utils.exceptions import ESArchiveError


class BackupCommand(BaseCommand):
    """Implementation of the ``backup`` CLI command.

    Validates user-supplied parameters, drives ``BackupManager`` with a CLI
    progress bar, and optionally deletes the backed-up documents afterwards.
    """

    def __init__(self, config, debug_mode: bool = False):
        """Initialize the backup command.

        Args:
            config: Application configuration object; ``config.config.backup``
                is expected to be a pydantic model (``model_dump()`` is used).
            debug_mode: Whether to enable verbose debug output.
        """
        super().__init__(config, debug_mode=debug_mode)
        # BackupManager receives the backup section of the config as a plain dict.
        self.backup_manager = BackupManager(self.es_client, config.config.backup.model_dump())

    def execute(self, index: str, output_dir: str, params: Dict[str, Any]):
        """Run a backup of *index* into *output_dir*.

        Args:
            index: Name of the Elasticsearch index to back up.
            output_dir: Directory for backup files (created if missing).
            params: Raw backup parameters; normalized by
                :meth:`validate_parameters`.

        Returns:
            The backup metadata object produced by ``BackupManager``.

        Raises:
            ESArchiveError: If validation fails, the index does not exist,
                or the backup itself fails.
        """
        try:
            validated_params = self.validate_parameters(params)

            if not self.validate_index_exists(index):
                raise ESArchiveError(f"索引不存在: {index}")

            # Ensure the output directory exists before any file is written.
            output_path = Path(output_dir)
            output_path.mkdir(parents=True, exist_ok=True)

            self._show_backup_info(index, output_dir, validated_params)

            # Build the strategy object from the validated parameters.
            strategy_params = {
                'strategy_type': validated_params.get('strategy_type', 'full'),
                'time_field': validated_params.get('time_field'),
                'time_range_start': validated_params.get('time_range_start'),
                'time_range_end': validated_params.get('time_range_end'),
                'compression': validated_params.get('compression', 'gzip'),
                'batch_size': validated_params.get('batch_size', 1000),
                'parallel_workers': validated_params.get('parallel_workers', 4),
                'validate_data': validated_params.get('validate_data', True)
            }
            strategy = BackupStrategy(**strategy_params)

            self.echo_info(f"开始备份索引: {index}")

            with click.progressbar(
                length=100,
                label="备份进度",
                show_percent=True,
                show_eta=True
            ) as bar:
                def progress_callback(progress_info):
                    # progress_info['progress'] is a fraction in [0, 1];
                    # convert to a 0-100 position and advance by the delta.
                    if 'progress' in progress_info:
                        new_pos = int(progress_info['progress'] * 100)
                        bar.update(new_pos - bar.pos)

                        # Show document counts in the bar label when available.
                        if 'documents_processed' in progress_info and 'total_documents' in progress_info:
                            bar.label = f"备份进度 ({progress_info['documents_processed']:,}/{progress_info['total_documents']:,})"

                metadata = self.backup_manager.backup_index(
                    index=index,
                    output_dir=output_dir,
                    strategy=strategy,
                    batch_size=validated_params.get('batch_size', 1000),
                    backup_name=validated_params.get('backup_name'),
                    progress_callback=progress_callback
                )

                # Force the bar to 100% in case the last callback fell short.
                bar.update(100 - bar.pos)

            self.echo_success("备份完成")
            self._show_backup_result(metadata)

            if validated_params.get('delete_after_backup', False):
                # Use the strategy stored in the metadata: it contains the
                # auto-detected time field, so deletion matches the backup query.
                self._delete_backed_up_data(index, metadata.strategy, metadata)

            return metadata

        except KeyboardInterrupt:
            self.handle_keyboard_interrupt()
        except ESArchiveError:
            # Already a domain error (e.g. missing index or bad parameters):
            # re-raise unchanged instead of wrapping it in a generic message.
            raise
        except Exception as e:
            self.logger.exception("备份执行失败")
            # Chain the cause so the original traceback is preserved.
            raise ESArchiveError(f"备份失败: {e}") from e

    @staticmethod
    def _parse_beijing_time(value, label: str):
        """Normalize a time-range bound to a Beijing-timezone-aware datetime.

        Strings are parsed with ``datetime.fromisoformat``; naive values
        (string or ``datetime``) are assumed to be Beijing time (UTC+8),
        aware values are returned unchanged.

        Args:
            value: ISO time string, ``datetime``, or ``None``.
            label: Human-readable bound name used in error messages
                (e.g. ``"开始"`` / ``"结束"``).

        Returns:
            An aware ``datetime``, or ``None`` if *value* is ``None``.

        Raises:
            ESArchiveError: If *value* is a string that cannot be parsed.
        """
        from datetime import timezone, timedelta
        beijing_tz = timezone(timedelta(hours=8))

        if value is None:
            return None
        if isinstance(value, str):
            try:
                value = datetime.fromisoformat(value)
            except ValueError:
                raise ESArchiveError(f"无效的{label}时间格式: {value}")
        if value.tzinfo is None:
            # No timezone info: interpret as Beijing time.
            value = value.replace(tzinfo=beijing_tz)
        return value

    def validate_parameters(self, params: Dict[str, Any]) -> Dict[str, Any]:
        """Validate and normalize backup parameters.

        Args:
            params: Raw parameter dict (left unmodified; a copy is returned).

        Returns:
            Validated parameter dict with time-range bounds converted to
            aware ``datetime`` objects (Beijing time, UTC+8).

        Raises:
            ESArchiveError: On an invalid strategy, batch size, worker count,
                compression algorithm, time format, or inverted time range.
        """
        validated = params.copy()

        strategy_type = validated.get('strategy_type', 'full')
        if strategy_type not in ['full', 'incremental', 'time_range']:
            raise ESArchiveError(f"无效的备份策略: {strategy_type}")

        # Reject bool explicitly: bool is a subclass of int in Python.
        batch_size = validated.get('batch_size', 1000)
        if isinstance(batch_size, bool) or not isinstance(batch_size, int) or batch_size <= 0:
            raise ESArchiveError("批量大小必须是正整数")
        if batch_size > 10000:
            self.echo_warning("批量大小过大，可能影响性能")

        parallel_workers = validated.get('parallel_workers', 4)
        if isinstance(parallel_workers, bool) or not isinstance(parallel_workers, int) or parallel_workers <= 0:
            raise ESArchiveError("并行工作线程数必须是正整数")
        if parallel_workers > 16:
            self.echo_warning("并行工作线程数过多，可能影响ES集群性能")

        compression = validated.get('compression', 'gzip')
        if compression not in ['gzip', 'lz4', 'none']:
            raise ESArchiveError(f"不支持的压缩算法: {compression}")

        # Time-range bounds are interpreted in Beijing time (UTC+8).  Both
        # bounds are normalized to aware datetimes so the comparison below
        # never mixes naive and aware values.
        if strategy_type in ['incremental', 'time_range']:
            start_time = validated.get('time_range_start')
            end_time = validated.get('time_range_end')

            if start_time and end_time:
                validated['time_range_start'] = self._parse_beijing_time(start_time, "开始")
                validated['time_range_end'] = self._parse_beijing_time(end_time, "结束")

                if validated['time_range_start'] > validated['time_range_end']:
                    raise ESArchiveError("开始时间不能晚于结束时间")

        return validated

    def _show_backup_info(self, index: str, output_dir: str, params: Dict[str, Any]):
        """Print the effective backup configuration to the console.

        Args:
            index: Index name.
            output_dir: Output directory.
            params: Validated backup parameters.
        """
        click.echo("\n备份配置:")
        click.echo(f"  索引名称: {index}")
        click.echo(f"  输出目录: {output_dir}")
        click.echo(f"  备份策略: {params.get('strategy_type', 'full')}")
        click.echo(f"  批量大小: {params.get('batch_size', 1000)}")
        click.echo(f"  压缩算法: {params.get('compression', 'gzip')}")
        click.echo(f"  并行线程: {params.get('parallel_workers', 4)}")
        click.echo(f"  数据验证: {'是' if params.get('validate_data', True) else '否'}")

        # Optional settings are only shown when provided.
        if params.get('time_field'):
            click.echo(f"  时间字段: {params['time_field']}")

        if params.get('time_range_start'):
            click.echo(f"  开始时间: {params['time_range_start']}")

        if params.get('time_range_end'):
            click.echo(f"  结束时间: {params['time_range_end']}")

        click.echo()

    def _show_backup_result(self, metadata):
        """Print a summary of the finished backup.

        Args:
            metadata: Backup metadata object (times, sizes, chunks, status).
        """
        from datetime import timezone, timedelta

        # Display all timestamps in Beijing time (UTC+8).
        beijing_tz = timezone(timedelta(hours=8))

        start_time_beijing = metadata.start_time.astimezone(beijing_tz) if metadata.start_time else None
        end_time_beijing = metadata.end_time.astimezone(beijing_tz) if metadata.end_time else None

        click.echo("\n备份结果:")
        click.echo(f"  备份ID: {metadata.backup_id}")
        click.echo(f"  备份路径: {metadata.backup_path}")

        if start_time_beijing:
            click.echo(f"  开始时间: {start_time_beijing.strftime('%Y-%m-%d %H:%M:%S')} (北京时间)")

        if end_time_beijing:
            click.echo(f"  结束时间: {end_time_beijing.strftime('%Y-%m-%d %H:%M:%S')} (北京时间)")

        if metadata.end_time and metadata.start_time:
            duration = (metadata.end_time - metadata.start_time).total_seconds()
            click.echo(f"  持续时间: {self.format_duration(duration)}")

        click.echo(f"  文档数量: {metadata.backed_up_documents:,}")

        if metadata.total_size_bytes > 0:
            click.echo(f"  原始大小: {self.format_size(metadata.total_size_bytes)}")

        if metadata.compressed_size_bytes > 0:
            click.echo(f"  压缩大小: {self.format_size(metadata.compressed_size_bytes)}")

            if metadata.total_size_bytes > 0:
                # Compression ratio: compressed / original.
                ratio = metadata.compressed_size_bytes / metadata.total_size_bytes
                click.echo(f"  压缩率: {ratio:.1%}")

        click.echo(f"  分片数量: {len(metadata.chunks)}")
        click.echo(f"  状态: {metadata.status}")

        if metadata.error_message:
            self.echo_error(f"错误信息: {metadata.error_message}")

        click.echo()

    def estimate_backup_size(self, index: str, params: Dict[str, Any]) -> Dict[str, Any]:
        """Estimate the size of a backup without running it.

        Args:
            index: Index name.
            params: Backup parameters forwarded to ``BackupStrategy``.

        Returns:
            Size-estimation dict from the strategy manager, or an empty
            dict on failure (best-effort; errors are only logged).
        """
        try:
            strategy = BackupStrategy(**params)

            # Placeholder metadata; the document count would need a real query.
            backup_metadata = {
                "source_index": index,
                "backed_up_documents": 0  # 这里需要实际查询
            }

            return self.backup_manager.strategy_manager.estimate_backup_size(
                strategy, self.es_client, backup_metadata
            )

        except Exception as e:
            self.logger.warning(f"估算备份大小失败: {e}")
            return {}

    def preview_backup(self, index: str, params: Dict[str, Any]) -> Dict[str, Any]:
        """Build a backup plan for preview without executing it.

        Args:
            index: Index name.
            params: Backup parameters forwarded to ``BackupStrategy``.

        Returns:
            Backup-plan dict from the strategy manager, or an empty dict on
            failure (best-effort; errors are only logged).
        """
        try:
            strategy = BackupStrategy(**params)

            # Placeholder metadata for plan generation.
            backup_metadata = {
                "source_index": index,
                "backed_up_documents": 0
            }

            return self.backup_manager.strategy_manager.create_backup_plan(
                strategy, self.es_client, backup_metadata
            )

        except Exception as e:
            self.logger.warning(f"预览备份计划失败: {e}")
            return {}

    def show_backup_preview(self, index: str, params: Dict[str, Any]):
        """Print the backup plan (validation, size and time estimates).

        Args:
            index: Index name.
            params: Backup parameters.
        """
        try:
            plan = self.preview_backup(index, params)

            if not plan:
                self.echo_warning("无法生成备份预览")
                return

            click.echo("\n备份预览:")

            # Validation warnings/errors from the plan, if any.
            validation = plan.get('validation', {})
            if validation.get('warnings'):
                self.echo_warning("警告:")
                for warning in validation['warnings']:
                    click.echo(f"  - {warning}")

            if validation.get('errors'):
                self.echo_error("错误:")
                for error in validation['errors']:
                    click.echo(f"  - {error}")

            estimation = plan.get('size_estimation', {})
            if estimation:
                click.echo("\n大小估算:")
                click.echo(f"  预计文档数: {estimation.get('estimated_documents', 0):,}")
                click.echo(f"  预计大小: {self.format_size(estimation.get('estimated_size_bytes', 0))}")
                click.echo(f"  压缩后大小: {self.format_size(estimation.get('estimated_compressed_size_bytes', 0))}")

            time_estimation = plan.get('time_estimation', {})
            if time_estimation:
                click.echo("\n时间估算:")
                click.echo(f"  预计耗时: {self.format_duration(time_estimation.get('estimated_seconds', 0))}")

            click.echo()

        except Exception as e:
            self.echo_error(f"显示备份预览失败: {e}")

    def _delete_backed_up_data(self, index: str, strategy, metadata) -> None:
        """Delete the documents that were just backed up (after confirmation).

        Args:
            index: Index name.
            strategy: Backup strategy (used to rebuild the matching query).
            metadata: Backup metadata of the completed backup.

        Raises:
            Exception: Re-raises any failure from the delete operation after
                printing an error message.
        """
        try:
            # Irreversible operation: require an explicit confirmation,
            # defaulting to "no".
            if not click.confirm(
                f"\n⚠️  警告: 即将删除索引 '{index}' 中已备份的 {metadata.backed_up_documents:,} 个文档。\n"
                f"   备份ID: {metadata.backup_id}\n"
                f"   此操作不可逆！是否继续？",
                default=False
            ):
                self.echo_info("取消删除操作")
                return

            self.echo_info("开始删除已备份的数据...")

            # The delete query must match the backup query exactly.
            delete_query = self._build_delete_query(strategy)

            es_client = self.backup_manager.es_client

            with click.progressbar(
                length=100,
                label="删除进度",
                show_percent=True,
                show_eta=True
            ) as bar:
                # delete_by_query is a single blocking call (wait_for_completion),
                # so the bar jumps to 100% when it returns.
                response = es_client.delete_by_query(
                    index=index,
                    body={"query": delete_query},
                    wait_for_completion=True,
                    refresh=True
                )
                bar.update(100)

            deleted_count = response.get('deleted', 0)

            self.echo_success(f"删除完成！共删除 {deleted_count:,} 个文档")

            # Record the deletion in the backup's metadata file.
            self._update_backup_metadata_with_deletion(metadata, deleted_count)

        except Exception as e:
            self.echo_error(f"删除操作失败: {e}")
            raise

    def _build_delete_query(self, strategy) -> dict:
        """Build the delete query for already-backed-up documents.

        The query must be identical to the backup query so that only the
        documents actually contained in the backup are deleted.

        Args:
            strategy: Backup strategy.

        Returns:
            An Elasticsearch query dict.
        """
        # Reuse the same query-building logic as the backup itself.
        from ...utils.query_builder import QueryBuilder

        if strategy.strategy_type == "full":
            return {"match_all": {}}

        if strategy.time_field and (strategy.time_range_start or strategy.time_range_end):
            query_builder = QueryBuilder()
            # Return the full query as built for the backup, not just the
            # extracted "range" clause.
            query = query_builder.build_time_range_query(
                strategy.time_field,
                strategy.time_range_start,
                strategy.time_range_end
            )
            return query

        # No usable time constraints: fall back to matching everything.
        return {"match_all": {}}

    def _update_backup_metadata_with_deletion(self, metadata, deleted_count: int) -> None:
        """Record the deletion in the backup's ``metadata.json`` file.

        Best-effort: failures are reported as warnings, never raised.

        Args:
            metadata: Backup metadata (provides ``backup_path``).
            deleted_count: Number of deleted documents.
        """
        try:
            import json
            from pathlib import Path

            metadata_path = Path(metadata.backup_path) / "metadata.json"

            if metadata_path.exists():
                with open(metadata_path, 'r', encoding='utf-8') as f:
                    metadata_dict = json.load(f)

                # Append the deletion record to the existing metadata.
                metadata_dict['deletion_info'] = {
                    'deleted_documents': deleted_count,
                    'deletion_time': datetime.now().isoformat(),
                    'deletion_confirmed': True
                }

                with open(metadata_path, 'w', encoding='utf-8') as f:
                    json.dump(metadata_dict, f, indent=2, ensure_ascii=False, default=str)

                self.echo_info(f"删除信息已记录到备份元数据: {metadata_path}")

        except Exception as e:
            self.echo_warning(f"更新备份元数据失败: {e}")
