import os
import bs4
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain.chains.retrieval import create_retrieval_chain
from langchain_chroma import Chroma

from langchain_community.document_loaders import WebBaseLoader
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

from demo3 import vector_stores

# Enable LangSmith tracing for the chains below.
# setdefault (rather than plain assignment) avoids clobbering credentials the
# user has already exported in the shell — the original unconditionally
# overwrote a real LANGCHAIN_API_KEY with the placeholder below.
os.environ.setdefault('LANGCHAIN_TRACING_V2', "true")
# SECURITY: '1123' is a placeholder. Never commit a real API key — supply it
# via the environment or a secrets manager.
os.environ.setdefault('LANGCHAIN_API_KEY', '1123')

# Chat model used by every chain in this script.
model = ChatOpenAI(model='gpt-4-turbo')

# Load the source data: a single blog post.
# The SoupStrainer keeps only the title, header and body sections of the page,
# discarding navigation, footer, etc. before parsing.
page_filter = bs4.SoupStrainer(class_=('post-header', 'post-title', 'post-content'))

loader = WebBaseLoader(
    web_path=['https://lilianweng.github.io/posts/2023-06-23-agent/'],
    bs_kwargs={'parse_only': page_filter},
)
docs = loader.load()

# One Document per URL — expect 1 here.
print(len(docs))

# Split the large document into chunks of up to 1000 characters,
# with 200 characters of overlap between consecutive chunks so that
# context is not lost at chunk boundaries.
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
splitters = splitter.split_documents(docs)

# Debug output: show each chunk, separated by a *** marker.
for chunk in splitters:
    print(chunk, end="***\n")

# Store: embed every chunk with OpenAI embeddings and index them in Chroma.
# NOTE(review): this assignment rebinds `vector_stores`, shadowing the name
# imported from `demo3` at the top of the file — the imported object is never
# used afterwards; confirm the import is actually needed.
vector_stores = Chroma.from_documents(documents=splitters, embedding=OpenAIEmbeddings())
# Retriever over the vector store (similarity search over the chunks).
# NOTE(review): `retriver` is a typo for "retriever"; kept as-is because the
# name is referenced further down in this module.
retriver = vector_stores.as_retriever()

# Retrieval: build a RAG chain that stuffs the retrieved chunks into the
# {context} slot of the system prompt and answers the user's {input}.
system_prompt = """
You are an assistant for question-answering tasks.
{context}"""

messages = [
    ("system", system_prompt),
    # MessagesPlaceholder("chat_history"),
    ("human", "{input}"),
]
prompt = ChatPromptTemplate.from_messages(messages)

# chain1: format retrieved documents into the prompt and call the model.
chain1 = create_stuff_documents_chain(model, prompt)
# chain2: fetch relevant chunks with the retriever, then run chain1 on them.
chain2 = create_retrieval_chain(retriver, chain1)

resp = chain2.invoke({'input': "what is task decomposition?"})
print(resp['answer'])

# Sub-chain prompt: given the chat history, rewrite the latest user question
# into a standalone question before retrieval.
# NOTE(review): this system prompt appears truncated mid-sentence — confirm
# the full instruction text against the intended template.
contextualize_q_system_prompt = """
Given a chat history and the latest user question which might reference context"""

retriver_history_temp = ChatPromptTemplate.from_messages(
    [
        ('system', contextualize_q_system_prompt),
        MessagesPlaceholder('chat_history'),
        # FIX: the human slot must be the {input} template variable — the
        # original hard-coded the literal question 'what is ai', so every
        # invocation would ignore the actual user input. This also matches
        # the ("human", "{input}") convention used by the main prompt above.
        ('human', '{input}'),
    ]
)
# 生成 (generation step would go here)