import json
import logging
from datetime import datetime
from typing import Dict, List, Any, Optional
from urllib.parse import quote_plus

import pandas as pd
from sqlalchemy.orm import Session

from app.database import get_db
from app.models.datasource import DataSource, DataSyncTask, DataSyncExecution, TaskStatus, SyncType
from app.utils.doris_client import DorisClient, get_doris_client

logger = logging.getLogger(__name__)


class ETLService:
    """ETL data synchronization service.

    Reads rows from a relational source (MySQL/PostgreSQL), applies an
    optional field mapping, and bulk-loads the result into a Doris
    warehouse table, recording each run as a ``DataSyncExecution`` row.
    """

    def __init__(self, db: Session):
        self.db = db
        self.doris_client = get_doris_client()

    def test_doris_connection(self) -> Dict[str, Any]:
        """Test connectivity to the configured Doris cluster.

        Returns:
            A dict with ``connected`` plus host/port/database details, or
            ``connected=False`` and an error message when the check fails.
        """
        try:
            is_connected = self.doris_client.test_connection()
            return {
                "connected": is_connected,
                "host": self.doris_client.host,
                "port": self.doris_client.port,
                "database": self.doris_client.database,
                "message": "连接成功" if is_connected else "连接失败"
            }
        except Exception as e:
            logger.error(f"测试 Doris 连接失败: {str(e)}")
            return {
                "connected": False,
                "message": f"连接失败: {str(e)}"
            }

    def create_sync_task(self, task_data: Dict[str, Any]) -> DataSyncTask:
        """Create and persist a new data sync task.

        Args:
            task_data: Keyword fields accepted by the ``DataSyncTask`` model.

        Returns:
            The freshly committed and refreshed task instance.
        """
        task = DataSyncTask(**task_data)
        self.db.add(task)
        self.db.commit()
        self.db.refresh(task)
        return task

    def execute_sync_task(self, task_id: int) -> DataSyncExecution:
        """Run the sync task *task_id* and record an execution row.

        An execution record is created in RUNNING state before the work
        starts, then updated with counters, timing and status (SUCCESS /
        FAILED) when the run finishes or raises.

        Raises:
            ValueError: If no task with *task_id* exists.
        """
        task = self.db.query(DataSyncTask).filter(DataSyncTask.id == task_id).first()
        if not task:
            raise ValueError(f"同步任务 {task_id} 不存在")

        # Persist the RUNNING record first so the run is visible even if
        # the process dies mid-sync.
        execution = DataSyncExecution(
            task_id=task_id,
            status=TaskStatus.RUNNING,
            start_time=datetime.now()
        )
        self.db.add(execution)
        self.db.commit()
        self.db.refresh(execution)

        try:
            result = self._execute_sync_logic(task, execution)

            execution.status = TaskStatus.SUCCESS if result["success"] else TaskStatus.FAILED
            execution.end_time = datetime.now()
            execution.duration = int((execution.end_time - execution.start_time).total_seconds())
            execution.total_records = result.get("total_records", 0)
            execution.success_records = result.get("success_records", 0)
            execution.failed_records = result.get("failed_records", 0)
            execution.execution_log = result.get("log", "")

            if not result["success"]:
                execution.error_message = result.get("error", "")

        except Exception as e:
            # Any unexpected error marks the execution FAILED; the
            # exception is swallowed so the record is still committed.
            execution.status = TaskStatus.FAILED
            execution.end_time = datetime.now()
            execution.duration = int((execution.end_time - execution.start_time).total_seconds())
            execution.error_message = str(e)
            execution.execution_log = f"执行失败: {str(e)}"
            logger.error(f"同步任务执行失败: {str(e)}")

        self.db.commit()
        return execution

    def _execute_sync_logic(self, task: DataSyncTask, execution: DataSyncExecution) -> Dict[str, Any]:
        """Perform the actual extract/transform/load for *task*.

        Returns:
            A result dict with ``success`` and, on the happy path, record
            counters and a human-readable ``log``; never raises.
        """
        try:
            # Resolve both endpoints of the sync.
            source_ds = self.db.query(DataSource).filter(DataSource.id == task.source_datasource_id).first()
            if not source_ds:
                return {"success": False, "error": "源数据源不存在"}

            # Target is expected to be a Doris datasource.
            target_ds = self.db.query(DataSource).filter(DataSource.id == task.target_datasource_id).first()
            if not target_ds:
                return {"success": False, "error": "目标数据源不存在"}

            source_query = self._build_source_query(task, source_ds)
            source_data = self._read_source_data(source_ds, source_query)

            if not source_data:
                return {
                    "success": True,
                    "total_records": 0,
                    "success_records": 0,
                    "failed_records": 0,
                    "log": "没有数据需要同步"
                }

            transformed_data = self._transform_data(source_data, task)
            write_result = self._write_to_doris(transformed_data, task)

            return {
                "success": write_result["success"],
                "total_records": len(source_data),
                "success_records": write_result.get("success_records", 0),
                "failed_records": write_result.get("failed_records", 0),
                "log": write_result.get("log", ""),
                "error": write_result.get("error", "")
            }

        except Exception as e:
            logger.error(f"同步逻辑执行失败: {str(e)}")
            return {"success": False, "error": str(e)}

    def _build_source_query(self, task: DataSyncTask, source_ds: DataSource) -> str:
        """Build the SELECT statement used to read source rows.

        WARNING(security): ``task.source_table`` and ``task.filter_condition``
        are interpolated directly into SQL. They must come from trusted,
        admin-configured task definitions only — do not expose them to
        end-user input without validation/allow-listing.
        """
        base_query = f"SELECT * FROM {task.source_table}"

        if task.filter_condition:
            base_query += f" WHERE {task.filter_condition}"

        # Incremental sync: extra predicates (e.g. a timestamp watermark)
        # would be appended here; not implemented yet.
        if task.sync_type == SyncType.INCREMENTAL:
            pass

        return base_query

    def _read_source_data(self, source_ds: DataSource, query: str) -> List[Dict[str, Any]]:
        """Read rows from the source database and return them as dicts.

        Credentials are URL-encoded so passwords containing characters such
        as ``@``, ``/`` or ``%`` do not corrupt the connection URL.

        Raises:
            ValueError: For unsupported source types.
        """
        try:
            # quote_plus guards against special characters in credentials;
            # ``or ""`` tolerates NULL username/password columns.
            user = quote_plus(source_ds.username or "")
            password = quote_plus(source_ds.password or "")

            if source_ds.source_type.value == "mysql":
                connection_url = f"mysql+pymysql://{user}:{password}@{source_ds.host}:{source_ds.port}/{source_ds.database}"
            elif source_ds.source_type.value == "postgresql":
                connection_url = f"postgresql://{user}:{password}@{source_ds.host}:{source_ds.port}/{source_ds.database}"
            else:
                raise ValueError(f"不支持的数据源类型: {source_ds.source_type}")

            df = pd.read_sql(query, connection_url)
            return df.to_dict('records')

        except Exception as e:
            logger.error(f"读取源数据失败: {str(e)}")
            raise

    def _transform_data(self, data: List[Dict[str, Any]], task: DataSyncTask) -> List[Dict[str, Any]]:
        """Apply the task's field mapping (source name -> target name).

        Fields absent from the mapping are dropped. On any mapping error
        the ORIGINAL data is returned unchanged (best-effort by design).
        """
        if not task.field_mapping:
            return data

        try:
            # field_mapping may be stored either as a JSON string or a dict.
            field_mapping = json.loads(task.field_mapping) if isinstance(task.field_mapping, str) else task.field_mapping

            transformed_data = []
            for record in data:
                transformed_record = {}
                for source_field, target_field in field_mapping.items():
                    if source_field in record:
                        transformed_record[target_field] = record[source_field]
                transformed_data.append(transformed_record)

            return transformed_data

        except Exception as e:
            logger.error(f"数据转换失败: {str(e)}")
            return data  # fall back to the untransformed rows

    def _write_to_doris(self, data: List[Dict[str, Any]], task: DataSyncTask) -> Dict[str, Any]:
        """Load *data* into the Doris target table; never raises.

        FULL sync truncates the target first; both modes then bulk-insert.
        WARNING(security): ``task.target_table`` is interpolated into SQL —
        it must be a trusted, admin-configured identifier.
        """
        try:
            if task.sync_type == SyncType.FULL:
                # Full sync: wipe the target before reloading everything.
                truncate_sql = f"TRUNCATE TABLE {task.target_table}"
                self.doris_client.execute_sql(truncate_sql)

            success = self.doris_client.bulk_insert(task.target_table, data)

            if success:
                return {
                    "success": True,
                    "success_records": len(data),
                    "failed_records": 0,
                    "log": f"成功同步 {len(data)} 条数据到表 {task.target_table}"
                }
            else:
                return {
                    "success": False,
                    "success_records": 0,
                    "failed_records": len(data),
                    "error": "批量插入失败"
                }

        except Exception as e:
            logger.error(f"写入 Doris 失败: {str(e)}")
            return {
                "success": False,
                "success_records": 0,
                "failed_records": len(data),
                "error": str(e)
            }

    def get_doris_tables(self) -> List[Dict[str, Any]]:
        """List tables in the Doris warehouse."""
        return self.doris_client.get_tables()

    def get_doris_table_schema(self, table_name: str) -> List[Dict[str, Any]]:
        """Return column metadata for a Doris table."""
        return self.doris_client.get_table_schema(table_name)

    def create_doris_table_from_source(self, source_table_schema: List[Dict[str, Any]],
                                      target_table_name: str) -> bool:
        """Create a Doris table mirroring the given source-table schema.

        Args:
            source_table_schema: Column dicts with ``column_name``,
                ``data_type``, ``is_nullable`` and optional ``column_comment``.
            target_table_name: Name of the table to create in Doris.

        Returns:
            True on success, False on failure (errors are logged).
        """
        # Guard: an empty schema would make the DISTRIBUTED BY clause
        # index into a missing first column.
        if not source_table_schema:
            logger.error("Cannot create Doris table %s: source schema is empty", target_table_name)
            return False

        try:
            columns = []
            for col in source_table_schema:
                col_name = col['column_name']
                data_type = self._map_data_type_to_doris(col['data_type'])
                nullable = "NULL" if col['is_nullable'] == 'YES' else "NOT NULL"
                comment = f"COMMENT '{col.get('column_comment', '')}'" if col.get('column_comment') else ""

                columns.append(f"`{col_name}` {data_type} {nullable} {comment}")

            # Hash-distribute on the first source column; single bucket and
            # replica keep this suitable for small/dev deployments.
            create_sql = f"""
            CREATE TABLE IF NOT EXISTS {target_table_name} (
                {','.join(columns)}
            ) ENGINE=OLAP
            DISTRIBUTED BY HASH(`{source_table_schema[0]['column_name']}`) BUCKETS 1
            PROPERTIES (
                "replication_num" = "1"
            )
            """

            self.doris_client.execute_sql(create_sql)
            logger.info(f"成功创建 Doris 表: {target_table_name}")
            return True

        except Exception as e:
            logger.error(f"创建 Doris 表失败: {str(e)}")
            return False

    def _map_data_type_to_doris(self, source_type: str) -> str:
        """Map a source database column type to a Doris column type.

        Length/precision arguments in the source type (e.g. ``varchar(100)``)
        are ignored; unknown types fall back to STRING.
        """
        type_mapping = {
            'int': 'INT',
            'integer': 'INT',
            'bigint': 'BIGINT',
            'varchar': 'VARCHAR(255)',
            'char': 'CHAR(255)',
            'text': 'STRING',
            'datetime': 'DATETIME',
            'timestamp': 'DATETIME',
            'date': 'DATE',
            'decimal': 'DECIMAL(10,2)',
            'float': 'FLOAT',
            'double': 'DOUBLE',
            'boolean': 'BOOLEAN',
            'bool': 'BOOLEAN'
        }

        # Strip any "(...)" suffix before the lookup.
        base_type = source_type.split('(')[0].lower()
        return type_mapping.get(base_type, 'STRING')


def get_etl_service(db: Optional[Session] = None) -> ETLService:
    """Return an :class:`ETLService` bound to *db*.

    Args:
        db: An existing SQLAlchemy session; when omitted, one is pulled
            from the ``get_db()`` dependency generator.

    NOTE(review): ``next(get_db())`` advances the generator once but never
    closes it, so any cleanup in ``get_db()``'s ``finally`` block will not
    run until garbage collection — confirm callers manage session lifetime.
    """
    if db is None:
        db = next(get_db())
    return ETLService(db)