
import os

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import HumanMessagePromptTemplate, ChatPromptTemplate, ChatMessagePromptTemplate, \
    SystemMessagePromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
from langsmith import traceable
import google.generativeai as genai

import wikipedia as wp

from langsmith import traceable




# --- Environment configuration -------------------------------------------
# Enable LangSmith tracing and direct all runs to the "playground" project.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_PROJECT"] = "playground"
# SECURITY: hard-coded API credentials committed to source. These keys are
# now exposed and should be revoked; load them from the environment or a
# secrets manager instead of embedding them in the file.
os.environ["LANGCHAIN_API_KEY"] = "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"
os.environ["GOOGLE_API_KEY"] = "AIzaSyBJoz7BvdFgWTBwzcu-0xWpJKfEJOR6vPM"

# Shared Gemini chat model used by every step of the pipeline below.
llm = ChatGoogleGenerativeAI(model="models/gemini-1.5-pro-latest", temperature=0.7)

@traceable
def generate_wiki_search(question):
    """Ask the LLM to turn a user question into a Wikipedia search query.

    Parameters
    ----------
    question : str
        The user's natural-language question.

    Returns
    -------
    str
        The raw search-query string produced by the model.
    """
    system_message_prompt = SystemMessagePromptTemplate.from_template(
        template="Generate a search query to pass into wikipedia to answer the user's question. Return only the search query and nothing more. This will passed in directly to the wikipedia search engine."
    )
    # Bug fix: the original passed the raw question as the *template*, so any
    # literal braces in the question were treated as format placeholders and
    # crashed at invoke(); the "{question}" mapping was never substituted.
    user_message_prompt = HumanMessagePromptTemplate.from_template(template="{question}")
    chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, user_message_prompt])
    chain = chat_prompt | llm | StrOutputParser()
    return chain.invoke({"question": question})

#@traceable(run_type="retriever")
def retrieve(query):
    """Fetch up to two Wikipedia page summaries matching *query*.

    Parameters
    ----------
    query : str
        Search string passed directly to the Wikipedia search engine.

    Returns
    -------
    list[dict]
        Document-like dicts with "page_content" (page summary), "type", and
        "metadata" (page URL). May hold fewer than two entries — or be
        empty — when few search hits resolve to usable pages.
    """
    results = []
    for term in wp.search(query, results=10):
        try:
            page = wp.page(term, auto_suggest=False)
        except (wp.DisambiguationError, wp.PageError):
            # Skip ambiguous titles and search hits that no longer resolve
            # to a page, instead of aborting the whole retrieval.
            continue
        results.append({
            "page_content": page.summary,
            "type": "Document",
            "metadata": {"url": page.url},
        })
        if len(results) >= 2:
            break
    # Bug fix: the original implicitly returned None when fewer than two
    # pages were collected, which crashed the caller's list comprehension.
    return results

@traceable
def generate_answer(question, context):
    """Answer *question* grounded strictly in the supplied *context* text.

    Returns the chat model's response message from ``llm.invoke``.
    """
    system_content = f"Answer the user's question based ONLY on the content below:\n\n{context}"
    conversation = [
        {"role": "system", "content": system_content},
        {"role": "user", "content": question},
    ]
    return llm.invoke(conversation)

#@traceable
def rag_pipeline(question):
    """End-to-end RAG: derive a search query, retrieve context, answer.

    Parameters
    ----------
    question : str
        The user's question.

    Returns
    -------
    The LLM response message for *question*, grounded in the retrieved
    Wikipedia summaries.
    """
    query = generate_wiki_search(question)
    # Robustness: retrieve() may yield no documents (or None when nothing
    # useful is found); guard so the join does not blow up on a missing
    # result set.
    docs = retrieve(query) or []
    context = "\n\n".join(doc["page_content"] for doc in docs)
    return generate_answer(question, context)

if __name__ == "__main__":
    # Demo run: ask the pipeline about a fixed topic and print the reply.
    user_question = "artificial intelligence"
    print(rag_pipeline(user_question))