import os

import psycopg2
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate
from sentence_transformers import SentenceTransformer

# Embedding model, option 1: pull from the Hugging Face Hub (requires network access).
# embeddings = HuggingFaceEmbeddings(model_name='BAAI/bge-base-zh')

# Embedding model, option 2: load a locally cached snapshot.
# NOTE(review): the path below is machine-specific — adjust for your environment.
embeddings = SentenceTransformer(
    r'D:\Models_Home\Huggingface\models--BAAI--bge-base-zh\snapshots\0e5f83d4895db7955e4cb9ed37ab73f7ded339b6'
)


# Local LLM served by Ollama.
llm = Ollama(model="qwen2.5")

# Prompt template: instruct the model to answer strictly from the retrieved
# context (the `{context}` slot) rather than from its own knowledge.
_TEMPLATE = """仅根据提供的上下文回答以下问题:

<context>
{context}
</context>

Question: {input}"""
prompt = ChatPromptTemplate.from_template(_TEMPLATE)

# The user question to be answered by the RAG pipeline.
query = "微笑明天慈善基金志愿者算社会实践活动省级奖励吗?"

# Connection to the pgvector-enabled PostgreSQL database holding the `rag` table.
# Credentials were hard-coded; allow overriding via standard PG* environment
# variables while keeping the original values as defaults (backward compatible).
conn = psycopg2.connect(
    host=os.environ.get("PGHOST", "localhost"),
    port=int(os.environ.get("PGPORT", "5432")),
    database=os.environ.get("PGDATABASE", "postgres"),
    user=os.environ.get("PGUSER", "postgres"),
    password=os.environ.get("PGPASSWORD", "123"),
)

# Option 1 (HuggingFaceEmbeddings): encode the query over the network.
# embed = embeddings.embed_query(query)

# Option 2 (local SentenceTransformer): encode() returns a numpy array;
# convert each element to a plain Python float so it serializes cleanly.
# The previous `float(f"{x:.18f}")` format-and-reparse round trip was a slow,
# precision-fragile no-op and has been replaced with a direct conversion.
embed = [float(x) for x in embeddings.encode(query)]
print(embed)

# Create a cursor on the shared connection.
cur = conn.cursor()

# Fetch the 3 rows nearest to the query embedding (`<=>` is pgvector's
# cosine-distance operator). The original built the SQL by string
# concatenation, which was both SQL-injection-prone and malformed (the
# closing quote was glued to LIMIT: `...] 'LIMIT 3;`). Use a parameterized
# query instead, casting the vector's text literal explicitly.
cur.execute(
    "SELECT * FROM rag ORDER BY embedding <=> %s::vector LIMIT 3;",
    (str(embed),),
)

# Print the document text of each hit (second column of the row).
results = cur.fetchall()
for row in results:
    print(row[1])


# Feed the retrieved documents to the LLM as context and print its answer.
# The original indexed results[0] and results[1] unconditionally, raising
# IndexError when the search returned fewer than two rows; concatenate
# whatever was actually returned (same output as before when >= 2 rows).
document_chain = prompt | llm
context = "".join(row[1] for row in results[:2])
print(document_chain.invoke({
    "input": query,
    "context": context,
}))

# Release database resources.
cur.close()
conn.close()


