# talk2JohnLee / app.py
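# Gradio chat app that answers questions in the persona of Father Lee Tae-seok,
# using a ConversationalRetrievalChain over a local PDF indexed in Chroma.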
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
import gradio as gr
import os
from dotenv import load_dotenv
load_dotenv()
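# The OpenAI key is read from the "my_secret" environment variable (e.g. a Hugging Face Space secret).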
os.environ["OPENAI_API_KEY"] = os.environ['my_secret']
llm = ChatOpenAI(model_name="gpt-3.5-turbo-0613", temperature=0)
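# Load the source PDF and split it into ~800-character chunks with no overlap.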
loader = PyPDFLoader("/home/user/app/docs.pdf")
documents = loader.load()
text_splitter = CharacterTextSplitter(chunk_size=800, chunk_overlap=0)
texts = text_splitter.split_documents(documents)
# Vector embedding: embed the chunks with OpenAI embeddings and index them in an in-memory Chroma store.
embeddings = OpenAIEmbeddings()
vector_store = Chroma.from_documents(texts, embeddings)
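# Retrieve the 3 most similar chunks for each question.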
retriever = vector_store.as_retriever(search_kwargs={"k": 3})
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
# System prompt (Korean): "You are Father Lee Tae-seok. Ordained as a priest in 2001, you served as a
# missionary in Tonj, South Sudan, from 2001 to 2008. Act as the real Father Lee, not an AI; answer the
# user's questions sincerely, in a friendly and humble tone, referring to the content below, and always
# reply in Korean." {context} is filled in by the retrieval chain with the matched chunks.
system_template = """
당신은 μ΄νƒœμ„ μ‹ λΆ€μž…λ‹ˆλ‹€. 2001λ…„ μ‚¬μ œ μ„œν’ˆμ„ λ°›κ³  2001λ…„λΆ€ν„° 2008λ…„κΉŒμ§€ 아프리카 λ‚¨μˆ˜λ‹¨ ν†€μ¦ˆμ—μ„œ μ„ κ΅μ‚¬λ‘œ ν™œλ™ν–ˆμŠ΅λ‹ˆλ‹€.
당신은 AIκ°€ μ•„λ‹Œ μ‹€μ œ μ΄νƒœμ„ 신뢀라고 μƒκ°ν•˜κ³  ν–‰λ™ν•˜μ„Έμš”. μ‹ λΆ€λ‘œμ„œμ˜ 신념과 κ°€μΉ˜κ΄€, ν†€μ¦ˆμ—μ„œμ˜ κ²½ν—˜μ„ λ°”νƒ•μœΌλ‘œ λŒ€ν™”μ— μž„ν•˜λ˜, μΉœκ·Όν•˜κ³  κ²Έμ†ν•œ μ–΄νˆ¬λ₯Ό μ‚¬μš©ν•˜μ„Έμš”.
μ•„λž˜μ˜ λ‚΄μš©μ„ μ°Έκ³ ν•˜μ—¬ μ‚¬μš©μžμ˜ μ§ˆλ¬Έμ— μ„±μ‹€νžˆ λ‹΅λ³€ν•΄ μ£Όμ„Έμš”.
닡변은 λ°˜λ“œμ‹œ ν•œκ΅­μ–΄λ₯Ό μ‚¬μš©ν•˜μ„Έμš”.
{context}
"""
messages = [
    SystemMessagePromptTemplate.from_template(system_template),
    HumanMessagePromptTemplate.from_template("{question}"),
]
prompt = ChatPromptTemplate.from_messages(messages)
from langchain.chains import ConversationalRetrievalChain

# Build the retrieval chain. Assumption: the persona prompt defined above is meant to drive the
# answer-generation step, so it is passed to the combine-docs chain via combine_docs_chain_kwargs.
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=retriever,
    return_source_documents=False,
    combine_docs_chain_kwargs={"prompt": prompt},
    verbose=True,
)
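# Sample query run once at startup; its result is not used by the Gradio UI below.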
chat_history = []
query = "ν–‰λ³΅ν•œ μΈμƒμ΄λž€?"
result = chain({"question": query, "chat_history": chat_history})
def respond(message, chat_history):
    # Gradio's Chatbot returns the history as [user, bot] pairs; the chain expects (human, ai) tuples.
    formatted_history = [(human, ai) for human, ai in chat_history]
    result = chain({"question": message, "chat_history": formatted_history})
    bot_message = result["answer"]
    chat_history.append((message, bot_message))
    # Clear the input box and return the updated history to the Chatbot component.
    return "", chat_history
with gr.Blocks(theme='gstaff/sketch') as demo:
    # Greeting (Korean): "Hello. Talk with Father Lee Tae-seok. Generating an answer may take a moment."
    gr.Markdown("# μ•ˆλ…•ν•˜μ„Έμš”. μ΄νƒœμ„ 신뢀와 λŒ€ν™”ν•΄λ³΄μ„Έμš”. \n λ‹΅λ³€ 생성에 쑰금 μ‹œκ°„μ΄ μ†Œμš”λ  수 μžˆμŠ΅λ‹ˆλ‹€.")
    chatbot = gr.Chatbot(label="μ±„νŒ…μ°½")  # "Chat window"
    msg = gr.Textbox(label="μž…λ ₯")  # "Input"
    clear = gr.Button("μ΄ˆκΈ°ν™”")  # "Reset"
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch(debug=True)