# config/settings.py
"""Central configuration for the RAG pipeline.

Connection parameters and credentials are read from environment variables
when present, falling back to the original local-development defaults so
existing setups keep working unchanged.
"""

import os

# PostgreSQL (PGVector) settings
PG_DATABASE = os.environ.get("PG_DATABASE", "vector_db")
PG_HOST = os.environ.get("PG_HOST", "localhost")
# SECURITY(review): the fallback password is for local development only —
# set PG_PASSWORD in the environment for any shared or production deployment.
PG_PASSWORD = os.environ.get("PG_PASSWORD", "password")
PG_PORT = int(os.environ.get("PG_PORT", "5433"))  # non-standard port (Postgres default is 5432)
PG_USER = os.environ.get("PG_USER", "postgres")
PG_TABLE_NAME = "data_llamaindex"
PG_EMBED_DIM = 384  # must match the embedding model's output dimension (bge-small-en-v1.5 -> 384)

# MongoDB settings
MONGO_URI = os.environ.get("MONGO_URI", "mongodb://localhost:27017")
MONGO_DB_NAME = "llamaindex"

# Retriever settings
VECTOR_SIMILARITY_TOP_K = 5   # top-k for the dense (vector) retriever
BM25_SIMILARITY_TOP_K = 5     # top-k for the sparse (BM25) retriever
FUSION_SIMILARITY_TOP_K = 10  # results kept after fusing both retrievers
FUSION_NUM_QUERIES = 3        # number of query variants generated for fusion
FUSION_MODE = "relative_score"

# Postprocessor settings
RERANKER_MODEL = "cross-encoder/ms-marco-MiniLM-L-2-v2"
RERANKER_TOP_N = 5
METADATA_REPLACEMENT_KEY = "window"

# LLM and Embedding Model settings
#
# SECURITY(review): these API keys were previously committed to source in
# plain text and must be treated as compromised — rotate both credentials.
# They remain only as backward-compatible fallbacks; supply replacements via
# the DEEPSEEK_API_KEY / HUGGINGFACE_TOKEN environment variables instead.
import os

DEEPSEEK_API_KEY = os.environ.get("DEEPSEEK_API_KEY", "sk-dc55e0ed77c844fdb5c32acbe1f105e6")
HUGGINGFACE_TOKEN = os.environ.get("HUGGINGFACE_TOKEN", "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")

LLM_MODEL_NAME = "deepseek-chat"
# Local filesystem path to the BAAI/bge-small-en-v1.5 embedding model;
# the "local:" prefix tells LlamaIndex to load it from disk rather than download.
EMBED_MODEL_PATH = "local:D:/pythonProject17/transformers/model_em/BAAI/bge-small-en-v1.5"