import gradio as gr
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.vectorstores import Clickhouse, ClickhouseSettings
from langchain_community.vectorstores import FAISS
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.runnables import ConfigurableFieldSpec
from langchain.schema import Document
from langchain.prompts import PromptTemplate
from langchain_core.output_parsers import JsonOutputParser
from langchain_core.output_parsers import StrOutputParser
from langchain_pinecone import PineconeVectorStore
from typing_extensions import TypedDict
from typing import Dict, List
import warnings

from lib.gradio_custom_theme import DarkTheme
from lib.graph import build_workflow

warnings.filterwarnings("ignore")

from dotenv import load_dotenv

load_dotenv()


def deploy():
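    """Build the RAG workflow and serve it through a Gradio chat UI with an embedded PDF viewer."""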
    app = build_workflow()
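
    # JavaScript helper injected into the page <head>: openPdf(src) points the
    # <iframe id="opener"> in the right-hand column at the given PDF URL.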
    pdf_open_js = """
    <script>
        function openPdf(src) {
            const opener = document.getElementById("opener");
            opener.src = src;
            console.log("here", src);
            return false;
        }
    </script>
    """

    dark_theme = DarkTheme()

    with gr.Blocks(head=pdf_open_js, fill_height=True, theme=dark_theme) as demo:
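        # Two-column layout: chat history and message box on the left, PDF preview iframe on the right.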
        with gr.Row():
            with gr.Column(scale=1):
                chatbot_rag = gr.Chatbot(
                    label="RAG: llama3 + documents",
                    height=740,
                    sanitize_html=False,
                    show_copy_button=True,
                )
                chat_input = gr.MultimodalTextbox(
                    interactive=True,
                    file_types=None,
                    placeholder="Enter a message...",
                    show_label=False,
                    scale=4,
                )
            with gr.Column(scale=1.5):
                pdf_output = gr.HTML(
                    "<iframe id='opener' width='100%' height='740px' src=''></iframe>"
                )

        # clear = gr.Button("Clear")
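
        # Append the user's message to the chat history and disable the input box
        # while the answer is being generated.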
        def user_rag(history, message):
            if message["text"] is not None:
                history.append((message["text"], None))
            return history, gr.update(value=None, interactive=False)
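
        # Run the RAG graph on the latest user question; the result dict's
        # "generation" field holds the generated answer.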
        def bot_rag(history):
            result = app.invoke({"question": history[-1][0]})
            form_answer = result["generation"].strip()
            history[-1][1] = form_answer
            return history
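
        # On submit: record the user turn, generate the answer, then re-enable the input box.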
        chat_input.submit(
            user_rag, [chatbot_rag, chat_input], [chatbot_rag, chat_input], queue=False
        ).then(bot_rag, chatbot_rag, chatbot_rag).then(
            lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input]
        )
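
    # share=True asks Gradio to create a temporary public link for the app.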
    demo.launch(share=True)


if __name__ == "__main__":
    deploy()