import os
import uuid
from llama_index.core import SQLDatabase, Settings
from llama_index.core.query_engine import NLSQLTableQueryEngine
from llama_index.core.llms import LLM
from llama_index.core.llms.callbacks import llm_completion_callback, llm_chat_callback
from llama_index.core.base.llms.types import CompletionResponse, LLMMetadata, ChatMessage, ChatResponse
from llama_index.core.base.llms.generic_utils import completion_response_to_chat_response, stream_completion_response_to_chat_response
from sqlalchemy import create_engine, text
import pymysql
from langchain_openai import ChatOpenAI
from config.mysql_config import MYSQL_CONFIG, OPENAI_CONFIG
from typing import Any, Optional

# Import the logging system
from utils.log_initializer import init_logging_system
from utils.log_manager import get_database_logger, get_system_logger, get_query_logger

# Import session-context management
from .session_context import SessionContext

# Custom LLM wrapper: adapts LangChain's ChatOpenAI to the LlamaIndex-compatible LLM interface
class LangChainLLMWrapper(LLM):
    """Wrap a LangChain ``ChatOpenAI`` behind the LlamaIndex ``LLM`` interface.

    When a :class:`SessionContext` is supplied, prompts are enriched with
    conversation history and query-intent hints before being forwarded to the
    underlying LangChain model. Streaming variants are intentionally
    unimplemented.
    """

    def __init__(self, langchain_llm: ChatOpenAI, session_context: Optional[SessionContext] = None, **kwargs):
        super().__init__(**kwargs)
        # Store on underscore-prefixed private attributes to sidestep
        # pydantic field validation on the LlamaIndex LLM base model.
        self._langchain_llm = langchain_llm
        self._session_context = session_context

    @property
    def metadata(self) -> LLMMetadata:
        """Static metadata describing the wrapped model.

        NOTE(review): context_window/num_output are hard-coded assumptions —
        confirm they match the configured model's actual limits.
        """
        return LLMMetadata(
            context_window=4096,
            num_output=1024,
            model_name=self._langchain_llm.model_name
        )

    @llm_completion_callback()
    def complete(self, prompt: str, **kwargs: Any) -> CompletionResponse:
        """Generate a completion for ``prompt``.

        The prompt is first enriched with session context (if any), then sent
        to the wrapped LangChain model. Failures are logged and re-raised.
        """
        try:
            # Add conversational context to the prompt when a session exists
            enhanced_prompt = self._enhance_prompt_with_context(prompt)

            # Delegate text generation to LangChain's ChatOpenAI
            response = self._langchain_llm.invoke(enhanced_prompt)
            return CompletionResponse(text=response.content)
        except Exception as e:
            logger = get_system_logger('llm_wrapper')
            logger.error("LLM调用失败", error=str(e))
            raise  # bare raise preserves the original traceback

    def _enhance_prompt_with_context(self, prompt: str) -> str:
        """Return ``prompt`` augmented with session-context information.

        Returns the prompt unchanged when no session context is attached.
        """
        if not self._session_context:
            return prompt

        # Summary of the conversation so far
        context_summary = self._session_context.get_context_summary()

        # Heuristic intent analysis of the current query
        intent = self._session_context.analyze_query_intent(prompt)

        # Assemble the enhanced prompt section by section
        enhanced_parts = []

        # Context section — skipped for brand-new sessions
        if context_summary and context_summary != "这是一个新的会话，没有历史查询记录。":
            enhanced_parts.append("=== 会话上下文信息 ===")
            enhanced_parts.append(context_summary)
            enhanced_parts.append("")

        # Intent-specific guidance for follow-up / referential queries
        if intent['is_follow_up'] or intent['references_previous'] or intent['contains_pronouns']:
            enhanced_parts.append("=== 重要提示 ===")
            enhanced_parts.append("用户的查询可能引用了之前的对话内容，请结合上下文信息理解用户意图。")
            if intent['contains_pronouns']:
                enhanced_parts.append("查询中包含代词引用，请根据上下文确定具体指代对象。")
            enhanced_parts.append("")

        # Finally, the original user query
        enhanced_parts.append("=== 用户查询 ===")
        enhanced_parts.append(prompt)

        return "\n".join(enhanced_parts)

    @llm_completion_callback()
    def stream_complete(self, prompt: str, **kwargs: Any):
        """Streaming completion — not implemented."""
        raise NotImplementedError("Stream completion not implemented")

    @llm_chat_callback()
    def chat(self, messages, **kwargs: Any) -> ChatResponse:
        """Chat interface: flattens messages into one prompt and delegates to complete()."""
        try:
            # Join message contents; skip messages whose content is missing or
            # None so the join cannot raise TypeError.
            if isinstance(messages, list):
                prompt = "\n".join(
                    msg.content for msg in messages
                    if hasattr(msg, 'content') and msg.content is not None
                )
            else:
                prompt = str(messages)

            # Reuse the completion path and convert to a chat response
            completion_response = self.complete(prompt, **kwargs)
            return completion_response_to_chat_response(completion_response)
        except Exception as e:
            logger = get_system_logger('llm_wrapper')
            logger.error("Chat调用失败", error=str(e))
            raise  # bare raise preserves the original traceback

    @llm_chat_callback()
    def stream_chat(self, messages, **kwargs: Any):
        """Streaming chat — not implemented."""
        raise NotImplementedError("Stream chat not implemented")

    async def acomplete(self, prompt: str, **kwargs: Any) -> CompletionResponse:
        """Async completion — delegates to the synchronous complete()."""
        return self.complete(prompt, **kwargs)

    async def astream_complete(self, prompt: str, **kwargs: Any):
        """Async streaming completion — not implemented."""
        raise NotImplementedError("Async stream completion not implemented")

    async def achat(self, messages, **kwargs: Any) -> ChatResponse:
        """Async chat — delegates to the synchronous chat()."""
        return self.chat(messages, **kwargs)

    async def astream_chat(self, messages, **kwargs: Any):
        """Async streaming chat — not implemented."""
        raise NotImplementedError("Async stream chat not implemented")

# Initialize the logging system at import time (module-level side effect)
init_logging_system()
system_logger = get_system_logger('xiyan_mysql')
system_logger.info("使用LangChain ChatOpenAI包装器", model=OPENAI_CONFIG['model'])
system_logger.info("API基础URL配置", base_url=OPENAI_CONFIG['base_url'])

def create_mysql_connection():
    """Create and verify a SQLAlchemy engine for the configured MySQL database.

    Builds a ``mysql+pymysql`` connection URL from ``MYSQL_CONFIG`` and runs a
    ``SELECT 1`` probe so connection failures surface here instead of at the
    first real query.

    Returns:
        A SQLAlchemy ``Engine`` on success, or ``None`` if the connection
        could not be established (the error is logged and printed).
    """
    db_logger = get_database_logger('mysql_connection')

    mysql_config = MYSQL_CONFIG

    # Build the MySQL connection URL (pymysql driver)
    connection_string = f"mysql+pymysql://{mysql_config['user']}:{mysql_config['password']}@{mysql_config['host']}:{mysql_config['port']}/{mysql_config['database']}?charset={mysql_config['charset']}"

    try:
        # Create the database engine
        engine = create_engine(connection_string)

        # Probe the connection to confirm the database is reachable
        with engine.connect() as connection:
            connection.execute(text("SELECT 1"))
            db_logger.info("MySQL数据库连接成功")

        return engine
    except Exception as e:
        db_logger.error("MySQL数据库连接失败", error=str(e))
        # Mask the password before echoing diagnostics — never print credentials
        safe_connection_string = connection_string.replace(
            str(mysql_config['password']), '***'
        )
        print(f"❌ MySQL数据库连接失败: {e}")
        print(f"🔧 连接字符串: {safe_connection_string}")
        print(f"🔧 错误类型: {type(e).__name__}")
        return None

def display_database_schema(engine):
    """Log the structure of every table in the connected database.

    For each table, logs its column definitions, row count, and up to three
    sample rows. Errors are logged rather than raised.

    Args:
        engine: A connected SQLAlchemy ``Engine`` for a MySQL database.
    """
    # Create the logger once up front so the except branch can reuse it
    db_logger = get_database_logger('schema_display')
    try:
        with engine.connect() as connection:
            # Enumerate all tables in the current schema
            tables_result = connection.execute(text("SHOW TABLES"))
            tables = [row[0] for row in tables_result]

            db_logger.info("开始显示数据库结构信息")

            for table_name in tables:
                db_logger.info("显示表信息", table_name=table_name)

                # Backtick-quote the identifier so reserved-word table names
                # don't break the statement
                desc_result = connection.execute(text(f"DESCRIBE `{table_name}`"))
                columns = desc_result.fetchall()

                db_logger.info("字段信息", table_name=table_name, columns_count=len(columns))
                for column in columns:
                    # MySQL DESCRIBE yields exactly six columns in this order
                    field, type_, null, key, default, extra = column
                    key_info = f" ({key})" if key else ""
                    null_info = "NULL" if null == "YES" else "NOT NULL"
                    db_logger.debug("字段详情", field=field, type=type_, null=null_info, key=key_info)

                # Row count for the table
                count_result = connection.execute(text(f"SELECT COUNT(*) FROM `{table_name}`"))
                count = count_result.fetchone()[0]
                db_logger.info("表记录统计", table_name=table_name, record_count=count)

                # Log up to three sample rows for non-empty tables
                if count > 0:
                    sample_result = connection.execute(text(f"SELECT * FROM `{table_name}` LIMIT 3"))
                    samples = sample_result.fetchall()
                    if samples:
                        db_logger.info("显示示例数据", table_name=table_name, sample_count=len(samples))
                        column_names = list(sample_result.keys())
                        for i, sample in enumerate(samples, 1):
                            sample_data = dict(zip(column_names, sample))
                            db_logger.debug("示例数据详情", table_name=table_name, sample_index=i, data=sample_data)

    except Exception as e:
        db_logger.error("获取数据库结构失败", error=str(e))

def main():
    """Interactive entry point: wire up the database, LLM, and NL→SQL loop.

    Connects to MySQL, builds a LlamaIndex ``NLSQLTableQueryEngine`` backed by
    a LangChain ChatOpenAI wrapper, then runs a REPL that supports natural
    language queries plus session-management commands (history / clear /
    stats / quit).
    """
    system_logger = get_system_logger('main')
    system_logger.info("启动MySQL数据库连接工具")

    # Create the session context used for multi-turn conversation tracking
    session_id = str(uuid.uuid4())
    session_context = SessionContext(session_id=session_id, max_history=20)
    system_logger.info("会话上下文创建成功", session_id=session_id)

    # Connect to MySQL; abort early if the database is unreachable
    engine = create_mysql_connection()
    if not engine:
        return

    # Log the database structure for diagnostics
    display_database_schema(engine)

    # Build the LlamaIndex SQL database object
    try:
        sql_database = SQLDatabase(engine)
        system_logger.info("LlamaIndex SQL数据库对象创建成功")

        # Initialize the LLM via LangChain ChatOpenAI
        # (avoids LlamaIndex model-name validation issues)
        system_logger.info("正在初始化阿里云百练API LLM")
        try:
            langchain_llm = ChatOpenAI(
                api_key=OPENAI_CONFIG['api_key'],
                base_url=OPENAI_CONFIG['base_url'],
                model=OPENAI_CONFIG['model'],
                temperature=OPENAI_CONFIG['temperature']
            )
            system_logger.info("LangChain ChatOpenAI初始化成功", model=OPENAI_CONFIG['model'])

            # Wrap into a LlamaIndex-compatible LLM, carrying the session context
            llm = LangChainLLMWrapper(langchain_llm, session_context=session_context)
            system_logger.info("LlamaIndex LLM包装器创建成功")

        except Exception as llm_error:
            system_logger.error("LLM初始化失败",
                               error=str(llm_error),
                               api_key_prefix=OPENAI_CONFIG['api_key'][:20],
                               base_url=OPENAI_CONFIG['base_url'],
                               model=OPENAI_CONFIG['model'],
                               temperature=OPENAI_CONFIG['temperature'])
            raise  # bare raise preserves the original traceback

        # Point LlamaIndex's embedding layer at the same API via env vars
        system_logger.info("正在配置embedding模型")
        try:
            os.environ['OPENAI_API_KEY'] = OPENAI_CONFIG['api_key']
            os.environ['OPENAI_API_BASE'] = OPENAI_CONFIG['base_url']
            system_logger.info("Embedding模型配置成功")
        except Exception as embed_error:
            system_logger.warning("Embedding模型配置失败", error=str(embed_error))

        # Build the natural-language→SQL query engine
        system_logger.info("正在创建自然语言SQL查询引擎")
        try:
            query_engine = NLSQLTableQueryEngine(
                sql_database=sql_database,
                llm=llm
            )
            system_logger.info("查询引擎创建成功")

            # Smoke-test the LLM connection before entering the loop
            system_logger.info("正在测试LLM连接")
            test_response = llm.complete("测试连接")
            system_logger.info("LLM连接测试成功", response_preview=test_response.text[:50])

        except Exception as engine_error:
            system_logger.error("查询引擎创建失败",
                               error=str(engine_error),
                               error_type=type(engine_error).__name__,
                               root_cause=str(engine_error.__cause__) if hasattr(engine_error, '__cause__') and engine_error.__cause__ else None)
            raise  # bare raise preserves the original traceback

        system_logger.info("自然语言SQL查询引擎创建成功")
        system_logger.info("系统已就绪，可以开始自然语言查询")

        # Interactive query loop
        print("\n🎯 Text2SQL智能查询系统已启动!")
        print("💡 支持的命令:")
        print("   - 输入自然语言问题进行查询")
        print("   - 'history' 或 '历史' - 查看查询历史")
        print("   - 'clear' 或 'reset' 或 '清空' 或 '重置' - 清空会话历史")
        print("   - 'stats' 或 '统计' - 查看会话统计")
        print("   - 'quit' 或 '退出' - 退出系统")

        # Create the query logger once, before the loop, so every branch —
        # including the empty-input path — can use it (previously the
        # empty-input branch hit a NameError on the very first blank input).
        query_logger = get_query_logger('user_query')

        while True:
            print("\n请输入您的查询问题:")
            user_query = input("❓ ")

            if user_query.lower() in ['quit', 'exit', '退出']:
                # Show final session statistics on exit
                stats = session_context.get_statistics()
                print(f"\n📊 会话统计:")
                print(f"   总查询数: {stats['total_queries']}")
                print(f"   成功查询: {stats['successful_queries']}")
                print(f"   失败查询: {stats['failed_queries']}")
                print(f"   成功率: {stats['success_rate']:.1%}")
                print(f"   会话时长: {stats['duration']:.1f}秒")

                system_logger.info("用户退出系统", session_stats=stats)
                print("👋 再见!")
                break

            # Session-management command: show recent history
            if user_query.lower() in ['history', '历史']:
                recent_queries = session_context.get_recent_queries(5)
                if recent_queries:
                    print("\n📋 最近的查询历史:")
                    for i, record in enumerate(recent_queries, 1):
                        status = "✅" if record.success else "❌"
                        print(f"{i}. {status} {record.user_query}")
                        print(f"   SQL: {record.generated_sql}")
                        print(f"   时间: {record.timestamp.strftime('%H:%M:%S')}")
                else:
                    print("📋 暂无查询历史")
                continue

            # Session-management command: reset the conversation
            if user_query.lower() in ['clear', 'reset', '清空', '重置']:
                session_context.clear_history()
                print("🧹 会话历史已清空，开始新的会话")
                continue

            # Session-management command: show statistics
            if user_query.lower() in ['stats', '统计']:
                stats = session_context.get_statistics()
                print(f"\n📊 会话统计:")
                print(f"   会话ID: {stats['session_id'][:8]}...")
                print(f"   总查询数: {stats['total_queries']}")
                print(f"   成功查询: {stats['successful_queries']}")
                print(f"   失败查询: {stats['failed_queries']}")
                print(f"   成功率: {stats['success_rate']:.1%}")
                print(f"   当前关注表: {stats['current_table_focus'] or '无'}")
                print(f"   历史记录数: {stats['history_length']}")
                continue

            if user_query.strip():
                try:
                    query_logger.info("开始处理用户查询", user_input=user_query)

                    # Analyze intent before running the query
                    intent = session_context.analyze_query_intent(user_query)
                    query_logger.debug("查询意图分析", intent=intent)

                    response = query_engine.query(user_query)
                    # Try to recover the generated SQL from the response
                    sql_query = "未知"
                    if hasattr(response, 'metadata') and response.metadata:
                        sql_query = response.metadata.get('sql_query', '未知')
                    elif hasattr(response, 'source_nodes') and response.source_nodes:
                        # Fall back to scanning source nodes for the SQL
                        for node in response.source_nodes:
                            if hasattr(node, 'metadata') and 'sql_query' in node.metadata:
                                sql_query = node.metadata['sql_query']
                                break

                    # Extract the table name from the SQL and update table focus
                    if sql_query != "未知":
                        table_name = session_context.extract_table_from_sql(sql_query)
                        if table_name:
                            session_context.update_table_focus(table_name)

                    # Record the successful query in the session context
                    session_context.add_query_record(
                        user_query=user_query,
                        generated_sql=sql_query,
                        result=str(response),
                        success=True,
                        metadata={'intent': intent}
                    )

                    query_logger.info("查询执行成功",
                                     generated_sql=sql_query,
                                     result=str(response),
                                     intent=intent)

                    print(f"\n📋 查询结果:")
                    print(f"🔤 生成的SQL: {sql_query}")
                    print(f"📊 结果: {response}")

                    # Surface a hint when context was used to resolve the query
                    if intent['is_follow_up'] or intent['contains_pronouns']:
                        print(f"💡 检测到上下文查询，已结合历史信息进行理解")

                except Exception as e:
                    # Record the failed query in the session context
                    session_context.add_query_record(
                        user_query=user_query,
                        generated_sql="查询失败",
                        result="",
                        success=False,
                        error_message=str(e)
                    )

                    query_logger.error("查询执行失败",
                                      user_input=user_query,
                                      error=str(e),
                                      error_type=type(e).__name__,
                                      root_cause=str(e.__cause__) if hasattr(e, '__cause__') and e.__cause__ else None)

                    print(f"❌ 查询失败: {e}")
                    print(f"🔧 错误类型: {type(e).__name__}")
                    print(f"🔧 错误详情: {str(e)}")
                    if hasattr(e, '__cause__') and e.__cause__:
                        print(f"🔧 根本原因: {e.__cause__}")

                    # Diagnostic: probe the LLM directly to isolate the failure
                    try:
                        print("\n🔧 尝试直接测试LLM...")
                        test_prompt = f"请为以下查询生成SQL: {user_query}"
                        test_response = llm.complete(test_prompt)
                        query_logger.debug("LLM直接测试成功", test_response=test_response.text[:200])
                        print(f"🔧 LLM直接响应: {test_response.text[:200]}...")
                    except Exception as llm_test_error:
                        query_logger.error("LLM直接测试失败", error=str(llm_test_error))
                        print(f"🔧 LLM直接测试也失败: {llm_test_error}")
            else:
                query_logger.warning("用户输入无效查询")
                print("⚠️ 请输入有效的查询问题")

    except Exception as e:
        system_logger.error("创建查询引擎失败", error=str(e))
        print(f"❌ 创建查询引擎失败: {e}")

# Script entry point
if __name__ == "__main__":
    main()