import os

from llama_index.core import PromptTemplate

# ChatML-format QA prompt (Qwen chat style). The retriever fills
# {context_str} with retrieved statute text and {query_str} with the
# user's question; the trailing assistant tag leaves room for generation.
QA_TEMPLATE = (
    "<|im_start|>system\n"
    "你是一个专业的法律助手，请严格根据以下法律条文回答问题：\n"
    "相关法律条文：\n"
    "{context_str}\n"
    "<|im_end|>\n"
    "<|im_start|>user\n"
    "{query_str}<|im_end|>\n"
    "<|im_start|>assistant\n"
)

# Wrap the raw ChatML string in a llama_index PromptTemplate so the query
# engine can format it with context_str / query_str at retrieval time.
response_template = PromptTemplate(QA_TEMPLATE)


class Config:
    """Central configuration for the legal-RAG pipeline.

    All values are class attributes read directly (``Config.TOP_K`` etc.);
    no instance is required.
    """

    # NOTE(review): EMBED_MODEL_PATH is a Windows path while LLM_MODEL_PATH
    # is a Linux path — confirm which deployment target is intended.
    EMBED_MODEL_PATH = r"E:/AIProject/rag-law/model/embedding/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
    LLM_MODEL_PATH = r"/home/jukeai/ai_projects/llm/Qwen/Qwen1.5-7B-Chat"

    DATA_DIR = "./data"           # source documents to index
    VECTOR_DB_DIR = "./chroma_db" # Chroma vector-store location
    PERSIST_DIR = "./storage"     # llama_index persisted storage

    COLLECTION_NAME = "chinese_labor_laws"
    TOP_K = 3  # number of retrieved chunks passed into the prompt

    # When True, use a remote LLM API instead of the local model path.
    REMOTE_LLM_API = True

    # DeepSeek API key. Prefer the DEEP_SEEK_API_KEY environment variable;
    # the placeholder default keeps prior behavior but should never hold a
    # real secret in source control.
    DEEP_SEEK_API_KEY = os.environ.get("DEEP_SEEK_API_KEY", "xxxxxxxxxxxxxxxx")

    # Prompt template used by the query engine (defined at module level).
    TEMPLATE = response_template

