import os
from langchain_community.vectorstores import Neo4jVector
from langchain_openai import ChatOpenAI
from langchain_community.embeddings import DashScopeEmbeddings
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_neo4j import Neo4jGraph

# --- Connection configuration --------------------------------------------
# SECURITY: these credentials are hard-coded and committed to source control.
# Rotate the keys and load them from the real environment / a secrets manager.
# ``setdefault`` (instead of plain assignment) at least lets a deployed
# environment override the values without editing this file.
os.environ.setdefault("OPENAI_API_KEY", "sk-38b1a77d899b4e708287a296ceeb02e3")
os.environ.setdefault("NEO4J_URI", "neo4j+s://6a4e9a3a.databases.neo4j.io")
os.environ.setdefault("NEO4J_USERNAME", "neo4j")
os.environ.setdefault("NEO4J_PASSWORD", "pxv2vsJaXU0BBtht710jXeHy7LLx7zAf2BZ7pgOuSG4")

# Graph handle; schema refresh disabled to skip the introspection query at startup.
graph = Neo4jGraph(refresh_schema=False)

# Vector index over nodes labelled ``Node``: the ``name`` and ``description``
# properties are embedded with DashScope's text-embedding-v3 model and the
# resulting vector is stored on each node in the ``embedding`` property.
neo4j_vector = Neo4jVector.from_existing_graph(
    DashScopeEmbeddings(model="text-embedding-v3"),
    url=os.getenv("NEO4J_URI"),
    username=os.getenv("NEO4J_USERNAME"),
    password=os.getenv("NEO4J_PASSWORD"),
    index_name="nodes",
    node_label="Node",
    text_node_properties=["name", "description"],
    embedding_node_property="embedding",
)

def graphQueryTool(query: str) -> str:
    """Answer *query* using documents retrieved from the Neo4j vector index.

    The question is used to run a similarity search against ``neo4j_vector``;
    the retrieved document texts are supplied as ``context`` to the chat
    model, whose answer is returned as a plain string.

    Args:
        query: Natural-language question to answer.

    Returns:
        The model's answer text.
    """
    prompt = ChatPromptTemplate.from_template(
        """Answer the question based on the context provided.

        Context: {context}

        Question: {question}"""
    )

    # BUG FIX: the original lambda returned x["question"] unchanged, so the
    # prompt's "context" was just the question repeated and the vector store
    # was never queried (the extra ``target=lambda x: neo4j_vector`` kwarg
    # only stashed the store object in the dict — dead weight, removed).
    # Now we actually retrieve similar documents and join their contents.
    def _retrieve_context(inputs: dict) -> str:
        docs = neo4j_vector.similarity_search(inputs["question"])
        return "\n\n".join(doc.page_content for doc in docs)

    # Reuse the key configured at module level rather than duplicating the
    # secret inline (same value, single point of maintenance).
    llm = ChatOpenAI(
        api_key=os.environ["OPENAI_API_KEY"],
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        model="qwen-plus",
    )

    final_chain = (
        RunnablePassthrough.assign(context=_retrieve_context)
        | prompt
        | llm
        | StrOutputParser()
    )

    return final_chain.invoke({"question": query})


