#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
RAG系统主入口文件
整合离线处理和实时处理功能
"""

import os
import sys
import argparse
import logging
from typing import Dict, Any

# Add this file's directory to sys.path so sibling modules
# (offline_processor, hybrid_retrieval, ...) import when run as a script.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from offline_processor import OfflineProcessor
from hybrid_retrieval import HybridRetrieval
from realtime_processor import RealtimeProcessor, MockLLMProcessor
from api_endpoints import app, init_processors

# Configure root logging: timestamped INFO-level records for every module.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-level logger, named after this module per stdlib convention.
logger = logging.getLogger(__name__)

class RAGSystem:
    """Top-level orchestrator for the RAG pipeline.

    Wires together three lazily-created components: offline ingestion
    (PDF -> chunks -> indexes), hybrid retrieval (vector + keyword
    search), and realtime question answering (mock or real LLM).
    """

    def __init__(self,
                 pdf_path: str = "论人与自然的相处之道.pdf",
                 collection_name: str = "nature_human_relationship",
                 es_index_name: str = "nature_documents",
                 use_mock_llm: bool = True):
        """Initialize the RAG system.

        Args:
            pdf_path: Path to the source PDF document.
            collection_name: ChromaDB collection name.
            es_index_name: Elasticsearch index name.
            use_mock_llm: When True, answer with the mock LLM processor
                instead of the real one.
        """
        self.pdf_path = pdf_path
        self.collection_name = collection_name
        self.es_index_name = es_index_name
        self.use_mock_llm = use_mock_llm

        # Components are created on first use (lazy initialization).
        self.offline_processor = None
        self.hybrid_retrieval = None
        self.realtime_processor = None

        logger.info("RAG系统初始化完成")

    def setup_offline_processing(self, chunk_size: int = 500, chunk_overlap: int = 50):
        """Create the offline processor that chunks and indexes the PDF."""
        self.offline_processor = OfflineProcessor(
            pdf_path=self.pdf_path,
            collection_name=self.collection_name,
            chunk_size=chunk_size,
            chunk_overlap=chunk_overlap,
        )
        logger.info("离线处理器设置完成")

    def setup_hybrid_retrieval(self):
        """Create the hybrid (vector + keyword) retriever."""
        self.hybrid_retrieval = HybridRetrieval(
            collection_name=self.collection_name,
            es_index_name=self.es_index_name,
        )
        logger.info("混合检索器设置完成")

    def setup_realtime_processing(self):
        """Create the realtime QA processor, mock or real per configuration."""
        processor_cls = MockLLMProcessor if self.use_mock_llm else RealtimeProcessor
        self.realtime_processor = processor_cls(
            collection_name=self.collection_name,
            es_index_name=self.es_index_name,
        )
        if self.use_mock_llm:
            logger.info("实时处理器设置完成（使用模拟LLM）")
        else:
            logger.info("实时处理器设置完成（使用真实LLM）")

    def run_offline_processing(self) -> Dict[str, Any]:
        """Run the offline ingestion stage and return its summary dict."""
        if not self.offline_processor:
            self.setup_offline_processing()

        logger.info("开始离线处理...")
        summary = self.offline_processor.process_offline()
        logger.info("离线处理完成")
        return summary

    def run_hybrid_retrieval_test(self, query: str, top_k: int = 5) -> Dict[str, Any]:
        """Run one hybrid search and package the hits with their count."""
        if not self.hybrid_retrieval:
            self.setup_hybrid_retrieval()

        logger.info(f"开始混合检索测试，查询: {query}")
        hits = self.hybrid_retrieval.hybrid_search(query, top_k)
        logger.info(f"混合检索完成，返回 {len(hits)} 个结果")
        return {
            "query": query,
            "results": hits,
            "count": len(hits),
        }

    def run_realtime_qa(self, question: str, top_k: int = 5) -> Dict[str, Any]:
        """Answer a single question through the realtime processor."""
        if not self.realtime_processor:
            self.setup_realtime_processing()

        logger.info(f"开始实时问答，问题: {question}")
        answer = self.realtime_processor.process_question(question, top_k)
        logger.info("实时问答完成")
        return answer

    def interactive_qa(self):
        """Run a console read-eval-print loop for question answering."""
        if not self.realtime_processor:
            self.setup_realtime_processing()

        banner = "=" * 60
        print(banner)
        print("🤖 RAG系统交互式问答")
        print(banner)
        print("输入 'quit' 或 'exit' 退出")
        print("输入 'help' 查看帮助")
        print("-" * 60)

        while True:
            try:
                question = input("\n💬 请输入您的问题: ").strip()
                lowered = question.lower()

                # Exit commands.
                if lowered in ('quit', 'exit', '退出'):
                    print("👋 再见！")
                    break

                # Help command.
                if lowered in ('help', '帮助'):
                    print("\n📖 帮助信息:")
                    print("  - 可以询问关于人与自然关系的问题")
                    print("  - 例如: '什么是生态危机？'、'如何实现可持续发展？'")
                    print("  - 输入 'quit' 或 'exit' 退出")
                    continue

                # Reject empty input.
                if not question:
                    print("❌ 问题不能为空，请重新输入")
                    continue

                print(f"\n🔍 正在处理问题: {question}")
                print("-" * 40)

                outcome = self.realtime_processor.process_question(question)
                if outcome['status'] == 'success':
                    print("✅ 回答:")
                    print(outcome['answer'])
                    print(f"\n📚 参考了 {outcome['retrieval_count']} 个相关文档")
                else:
                    print(f"❌ 处理失败: {outcome['message']}")

            except KeyboardInterrupt:
                print("\n👋 再见！")
                break
            except Exception as e:  # keep the REPL alive on unexpected errors
                print(f"❌ 发生错误: {e}")

    def run_complete_pipeline(self):
        """Run all three stages end to end, then optionally open the REPL."""
        print("🚀 开始RAG系统完整流程")
        print("=" * 60)

        # Stage 1: offline ingestion.
        print("📄 步骤1: 离线处理")
        offline_result = self.run_offline_processing()
        print(f"✅ 离线处理完成，生成了 {offline_result['total_chunks']} 个文档块")

        # Stage 2: hybrid retrieval smoke tests.
        print("\n🔍 步骤2: 混合检索测试")
        for query in ("人与自然的关系", "生态危机", "可持续发展"):
            retrieved = self.run_hybrid_retrieval_test(query, top_k=3)
            print(f"✅ 查询 '{query}' 检索到 {retrieved['count']} 个结果")

        # Stage 3: realtime QA smoke tests.
        print("\n💬 步骤3: 实时问答测试")
        sample_questions = (
            "人与自然的关系是什么？",
            "什么是生态危机？",
            "如何实现可持续发展？",
        )
        for question in sample_questions:
            qa_result = self.run_realtime_qa(question)
            if qa_result['status'] == 'success':
                print(f"✅ 问题: {question}")
                print(f"   回答: {qa_result['answer'][:100]}...")
            else:
                print(f"❌ 问题处理失败: {qa_result['message']}")

        print("\n🎉 RAG系统完整流程运行完成！")

        # Optionally hand control to the interactive loop.
        if input("\n是否启动交互式问答？(y/n): ").lower().strip() == 'y':
            self.interactive_qa()

def main():
    """Parse CLI arguments and dispatch to the selected run mode.

    Modes:
        offline     - run PDF ingestion/indexing only
        retrieval   - run one hybrid search (requires --query)
        qa          - answer one question (requires --question)
        interactive - console QA loop
        complete    - full pipeline demo (default)
        api         - start the Flask API server
    """
    parser = argparse.ArgumentParser(description="RAG系统主程序")
    parser.add_argument("--mode", choices=["offline", "retrieval", "qa", "interactive", "complete", "api"],
                       default="complete", help="运行模式")
    parser.add_argument("--pdf", default="论人与自然的相处之道.pdf", help="PDF文件路径")
    parser.add_argument("--query", help="检索查询（retrieval模式）")
    parser.add_argument("--question", help="问答问题（qa模式）")
    parser.add_argument("--top-k", type=int, default=5, help="检索结果数量")
    parser.add_argument("--use-real-llm", action="store_true", help="使用真实LLM（默认使用模拟LLM）")
    parser.add_argument("--port", type=int, default=5000, help="API服务器端口（api模式）")

    args = parser.parse_args()

    # Build the system; mock LLM unless --use-real-llm was given.
    rag_system = RAGSystem(
        pdf_path=args.pdf,
        use_mock_llm=not args.use_real_llm
    )

    try:
        if args.mode == "offline":
            # Offline ingestion only.
            result = rag_system.run_offline_processing()
            print(f"离线处理结果: {result}")

        elif args.mode == "retrieval":
            # Single hybrid-search query.
            if not args.query:
                print("❌ 检索模式需要指定 --query 参数")
                return

            # Fix: the explicit setup_hybrid_retrieval() call here was
            # redundant — run_hybrid_retrieval_test() lazily creates the
            # retriever itself.
            result = rag_system.run_hybrid_retrieval_test(args.query, args.top_k)
            print(f"检索结果: {result}")

        elif args.mode == "qa":
            # Single question/answer round-trip.
            if not args.question:
                print("❌ 问答模式需要指定 --question 参数")
                return

            result = rag_system.run_realtime_qa(args.question, args.top_k)
            print(f"问答结果: {result}")

        elif args.mode == "interactive":
            # Console QA loop.
            rag_system.interactive_qa()

        elif args.mode == "complete":
            # Full end-to-end demo pipeline.
            rag_system.run_complete_pipeline()

        elif args.mode == "api":
            # Flask API server (blocking).
            print(f"🚀 启动RAG API服务器，端口: {args.port}")
            print("API接口地址:")
            print(f"  - 健康检查: http://localhost:{args.port}/health")
            print(f"  - RAG查询: http://localhost:{args.port}/api/rag/query")
            print(f"  - 文档检索: http://localhost:{args.port}/api/rag/retrieve")
            print("按 Ctrl+C 停止服务器")

            app.run(host='0.0.0.0', port=args.port, debug=False)

    except KeyboardInterrupt:
        print("\n👋 程序已停止")
    except Exception as e:
        # Lazy %-formatting per logging convention; re-raise so the exit
        # status reflects the failure.
        logger.error("程序运行失败: %s", e)
        raise

if __name__ == "__main__":
    main()
