import os
import sys

from torchvision import message

from base.config import get_config
from base.logger import get_logger
from core.vector_store import VectorStore
from core.rag_system import RAGSystem
from openai import OpenAI

from rag_qa.core.document_processor import process_documents
from rag_qa.core.strategy_selector import StrategySelector

# Module-level configuration and logger, shared by every function below.
conf = get_config()
logger = get_logger()

def main(query_mode=True, directory_path="data"):
    """Initialize the DashScope-compatible OpenAI client for the RAG pipeline.

    Args:
        query_mode: When True, a client-initialization failure aborts the run
            (interactive querying is impossible without an LLM client).
        directory_path: Directory of documents to ingest.
            NOTE(review): not referenced in the visible portion of main —
            presumably used further down the pipeline; confirm.
    """
    # Bind the *module-level* `client` so call_dashscope() can see it.
    # Previously `client` was a local variable of main(), and call_dashscope()
    # raised NameError on its `if not client` check.
    global client
    try:
        client = OpenAI(api_key=conf.DASHSCOPE_API_KEY,
                        base_url=conf.DASHSCOPE_API_URL)
    except Exception as e:
        # (fixed doubled "::" typo in the log message)
        logger.error(f"初始化 OpenAI 客户端失败 (请检查 API Key 和 Base URL): {e}")
        if query_mode:
            print("错误：无法初始化语言模型客户端，无法进入查询模式。")
            return
        # Non-interactive mode: record the failure and continue best-effort.
        client = None

def call_dashscope(prompt):
    """Send a single-turn chat prompt to the configured LLM.

    Args:
        prompt: User prompt text.

    Returns:
        The model's reply text on success, otherwise an error string
        (prefixed with "错误") when the client is unavailable, the response
        is empty, or the API call raises.
    """
    if not client:
        logger.error("LLM客户端未初始化，无法调用call_dashscope")
        # (removed pointless f-prefix — the string has no placeholders)
        return "错误:LLM客户端不可用"
    try:
        completion = client.chat.completions.create(
            model=conf.LLM_MODEL,
            messages=[
                {"role": "system", "content": "你是一个有用的助手."},
                # Bug fix: key was misspelled "roel", so the user message
                # carried no valid role and the request was malformed.
                {"role": "user", "content": prompt},
            ],
        )
        if completion.choices and completion.choices[0].message:
            return completion.choices[0].message.content
        logger.error("LLM API调用返回无效响应或空消息")
        return "错误: LLM 返回无效响应"
    except Exception as e:
        logger.error(f"LLM API调用失败: {e}")
        return "错误: LLM API 调用失败"

# Module-level vector-store handle. Pre-bound to None so that a Milvus
# connection failure leaves an explicit sentinel instead of an unbound name —
# previously a failed init meant any later use of `vector_store` raised
# NameError rather than allowing an `if vector_store is None` check
# (mirrors the `client = None` fallback used for the LLM client).
vector_store = None
try:
    vector_store = VectorStore(
        collection_name=conf.MILVUS_COLLECTION_NAME,
        host=conf.MILVUS_HOST,
        port=conf.MILVUS_PORT,
        database=conf.MILVUS_DATABASE,
    )
except Exception as e:
    logger.error(f"初始化向量数据库失败: {e}")






