from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.llama_cpp import LlamaCPP

def build_llm() -> LlamaCPP:
    """Configure the local GGUF model served through llama.cpp."""
    # NOTE(review): "mode/" looks like a typo for "models/" — confirm the
    # actual directory name on disk before changing it.
    return LlamaCPP(
        model_path="mode/deepseek-coder-1.3b-instruct.Q4_K_M.gguf",
        temperature=0.1,  # low temperature: near-deterministic answers
        max_new_tokens=2048,
        context_window=4096,
    )


def build_index(embed_model: HuggingFaceEmbedding) -> VectorStoreIndex:
    """Load every document under ./data and build a vector index over it."""
    documents = SimpleDirectoryReader("data").load_data()
    return VectorStoreIndex.from_documents(documents, embed_model=embed_model)


def main() -> None:
    """Wire up the local RAG pipeline and run a sample query."""
    # Local GGUF model (no remote API calls).
    llm = build_llm()

    # Local embedding model — BGE small, Chinese-optimized.
    embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-zh-v1.5")

    # Index the ./data documents.
    index = build_index(embed_model)

    # Query engine: retrieves from the index, then answers with the local LLM.
    query_engine = index.as_query_engine(llm=llm)

    # Run a sample query and print the response.
    response = query_engine.query("你是谁？")
    print(response)


# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    main()

