import os
import yaml
import argparse
from model_loader import ModelLoader
from document_processor import DocumentProcessor
from vector_store import VectorStore
from qa_system import QASystem

def load_config(config_path="config/config.yaml"):
    """Load and parse the YAML configuration file.

    Args:
        config_path: Path to the YAML configuration file.

    Returns:
        The parsed configuration (typically a dict, per the YAML root).
    """
    # Explicit UTF-8: the config may contain non-ASCII text and the
    # platform default encoding (e.g. GBK on Chinese-locale Windows)
    # would otherwise raise UnicodeDecodeError.
    with open(config_path, 'r', encoding='utf-8') as f:
        return yaml.safe_load(f)

def initialize_system(config_path="config/config.yaml"):
    """Build and wire together all system components.

    Args:
        config_path: Path to the YAML config file shared by every component.

    Returns:
        dict with 'document_processor', 'vector_store' and 'qa_system' keys.
    """
    # The embedding model is loaded first because the vector store needs it.
    loader = ModelLoader(config_path)
    embeddings = loader.load_embedding_model()

    store = VectorStore(embeddings, config_path)
    # Try to restore a previously persisted index from disk.
    if not store.load():
        print("No existing vector store found. Creating a new one...")

    qa = QASystem(store, config_path)

    return {
        'document_processor': DocumentProcessor(config_path),
        'vector_store': store,
        'qa_system': qa,
    }

def check_ollama_service(timeout=5):
    """Check whether the Ollama HTTP service is reachable and healthy.

    Args:
        timeout: Seconds to wait for the version endpoint before giving up.
            (New, defaulted, keyword-compatible: the original call had no
            timeout, so an unreachable host could block indefinitely.)

    Returns:
        bool: True if GET {base_url}/api/version returns a success status,
        False on any connection error, timeout, or HTTP error status.
    """
    import requests
    try:
        config = load_config()
        base_url = config.get('ollama', {}).get('base_url', 'http://localhost:11434')
        response = requests.get(f"{base_url}/api/version", timeout=timeout)
        # Previously any HTTP response (even 500) counted as "running";
        # require a 2xx/3xx status instead.
        return response.ok
    except requests.exceptions.RequestException:
        return False

def index_documents(document_processor, vector_store, docs_dir):
    """Process every document under docs_dir and persist the vector index.

    Args:
        document_processor: Splits the files in a directory into text chunks.
        vector_store: Receives the chunks and is saved to disk afterwards.
        docs_dir: Directory containing the documents to index.
    """
    document_chunks = document_processor.process_directory(docs_dir)
    vector_store.add_documents(document_chunks)
    vector_store.save()
    chunk_count = len(document_chunks)
    print(f"Indexed {chunk_count} document chunks.")

def interactive_qa(qa_system):
    """Run the standard question-answering REPL.

    Prompts until the user types 'exit' or 'quit'; typing 'chat' switches
    into the contextual chat loop and returns here when it finishes.
    """
    print("\n===== 公共卫生知识库智能助理 (Ollama版) =====")
    print("输入'exit'或'quit'退出系统")
    print("输入'chat'进入聊天模式")

    while True:
        query = input("\n请输入您的问题: ")
        command = query.lower()

        if command in ('exit', 'quit'):
            break

        if command == 'chat':
            chat_mode(qa_system)
        else:
            answer = qa_system.answer(query)
            print(f"\n回答: {answer}")

def chat_mode(qa_system):
    """Run the multi-turn chat REPL, keeping conversation context.

    Accumulates alternating user/assistant messages so the model sees
    prior turns; 'exit' or 'quit' returns to the standard QA loop.
    """
    print("\n===== 进入聊天模式 =====")
    print("输入'exit'或'quit'返回标准问答模式")

    history = []

    while True:
        user_text = input("\n您: ")
        if user_text.lower() in ('exit', 'quit'):
            break

        history.append({"role": "user", "content": user_text})

        reply = qa_system.chat(history)
        print(f"\nDeepSeek: {reply}")

        # Record the model's turn so the next request carries full context.
        history.append({"role": "assistant", "content": reply})

    print("\n===== 返回标准问答模式 =====")

def main():
    """CLI entry point: parse arguments, verify Ollama, dispatch to a mode."""
    parser = argparse.ArgumentParser(description="DeepSeek知识库系统 (Ollama版)")
    parser.add_argument("--index", help="索引文档目录", type=str)
    parser.add_argument("--query", help="单次查询模式", type=str)
    parser.add_argument("--interactive", help="交互式问答模式", action="store_true")
    args = parser.parse_args()

    # Bail out early when the Ollama backend is not reachable.
    if not check_ollama_service():
        print("错误: Ollama服务未运行。请确保Ollama服务已启动。")
        print("可以通过运行以下命令启动Ollama: ollama serve")
        return

    system = initialize_system()

    # Optional one-shot indexing of a document directory.
    if args.index:
        index_documents(
            system['document_processor'],
            system['vector_store'],
            args.index,
        )

    # Optional single-question mode.
    if args.query:
        answer = system['qa_system'].answer(args.query)
        print(f"问题: {args.query}")
        print(f"回答: {answer}")

    # Interactive mode runs when requested, or by default when no other
    # action was given (De Morgan of: not index and not query).
    if args.interactive or not (args.index or args.query):
        interactive_qa(system['qa_system'])

if __name__ == "__main__":
    main()