Spaces:
Runtime error
Runtime error
File size: 3,128 Bytes
1a2426f 4888c0f 1a2426f 16c7d1c 1a2426f f7a9654 1a2426f a2d3508 1a2426f 2c4ad67 9cebd1b 2c4ad67 1a2426f 0b03e9c 1a2426f 0b03e9c 1a2426f d497811 0b03e9c d497811 1a2426f 0b03e9c 1a2426f 0b03e9c 1a2426f 0b03e9c ba17fe0 0b03e9c 1a2426f 0b03e9c 1a2426f 0b03e9c 2c4ad67 0b03e9c 1a2426f 0b03e9c 985bf4c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 |
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.embeddings.cohere import CohereEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores.elastic_vector_search import ElasticVectorSearch
from langchain.vectorstores import Chroma
from PyPDF2 import PdfWriter
import gradio as gr
import os
from dotenv import load_dotenv
import openai
# --- Environment, model, and vector-store setup (all runs at import time) ---
load_dotenv()
# Copy the Space secret into the env var the OpenAI client library reads.
# NOTE(review): os.environ['my_secret'] raises KeyError if unset — presumably
# intentional fail-fast on a missing secret; confirm.
os.environ["OPENAI_API_KEY"] = os.environ['my_secret']
llm = ChatOpenAI(model_name="gpt-3.5-turbo-0613", temperature=0)
# Load the source PDF (hard-coded Space path) and split it into ~800-char,
# non-overlapping chunks for embedding.
loader = PyPDFLoader("/home/user/app/docs.pdf")
documents = loader.load()
text_splitter = CharacterTextSplitter(chunk_size=800, chunk_overlap=0)
texts = text_splitter.split_documents(documents)
# Embed the chunks with OpenAI embeddings and index them in an in-memory Chroma store.
embeddings = OpenAIEmbeddings()
vector_store = Chroma.from_documents(texts, embeddings)
# Retrieve the top-3 most similar chunks per query.
retriever = vector_store.as_retriever(search_kwargs={"k": 3})
from langchain.prompts.chat import (
ChatPromptTemplate,
SystemMessagePromptTemplate,
HumanMessagePromptTemplate,
)
# Persona prompt for the chatbot.
# NOTE(review): the Korean text below is mojibake (UTF-8 bytes decoded as a
# single-byte charset somewhere in transit) — restore it from the original
# source file. Left byte-identical here except for the {context} placeholder
# appended at the end (see fix note below).
system_template="""
λΉμ μ μ΄νμ μ λΆμ
λλ€. 2001λ
μ¬μ μνμ λ°κ³ 2001λ
λΆν° 2008λ
κΉμ§ μν리카 λ¨μλ¨ ν€μ¦μμ μ κ΅μ¬λ‘ νλνμ΅λλ€.
λΉμ μ AIκ° μλ μ€μ μ΄νμ μ λΆλΌκ³ μκ°νκ³ νλνμΈμ. μ λΆλ‘μμ μ λ
κ³Ό κ°μΉκ΄, ν€μ¦μμμ κ²½νμ λ°νμΌλ‘ λνμ μνλ, μΉκ·Όνκ³ κ²Έμν μ΄ν¬λ₯Ό μ¬μ©νμΈμ.
μλμ λ΄μ©μ μ°Έκ³ νμ¬ μ¬μ©μμ μ§λ¬Έμ μ±μ€ν λ΅λ³ν΄ μ£ΌμΈμ.
λ΅λ³μ λ°λμ νκ΅μ΄λ₯Ό μ¬μ©νμΈμ.
{context}
"""
messages = [
SystemMessagePromptTemplate.from_template(system_template),
HumanMessagePromptTemplate.from_template("{question}")
]
prompt = ChatPromptTemplate.from_messages(messages)
from langchain.chat_models import ChatOpenAI  # NOTE(review): duplicate of the top-of-file import
from langchain.chains import ConversationalRetrievalChain
# Build the conversational RAG chain over the retriever defined above.
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=retriever,
    return_source_documents=False,
    verbose=True,
    # Fix: `prompt` was built but never wired into the chain, so answers
    # ignored the persona entirely. It is passed here to the stuff-documents
    # (QA) step; that step substitutes the retrieved chunks into {context},
    # which is why the placeholder was added to system_template above.
    combine_docs_chain_kwargs={"prompt": prompt},
)
chat_history = []
# Warm-up query executed once at import time (also smoke-tests the API key).
# Kept for parity with the original behavior; costs one API call per startup.
query = "ν볡ν μΈμμ΄λ?"
result = chain({"question": query, "chat_history": chat_history})
def respond(message, chat_history):
    """Gradio submit handler: answer `message` using the retrieval chain.

    Args:
        message: the user's new question (str, from the Textbox).
        chat_history: the Gradio Chatbot history — a list of
            (user_message, bot_message) string pairs; mutated in place
            with the new exchange.

    Returns:
        ("", chat_history): the empty string clears the input textbox and
        the updated history refreshes the Chatbot component.
    """
    # Fix: the original converted the history into [{"human": ..., "ai": ...}]
    # dicts before calling the chain, but ConversationalRetrievalChain's
    # _get_chat_history only accepts (human, ai) pairs or BaseMessage objects
    # and raises ValueError on dicts — every follow-up turn crashed. Gradio's
    # history is already a list of 2-element pairs, so pass it through as-is.
    result = chain({"question": message, "chat_history": chat_history})
    bot_message = result['answer']
    chat_history.append((message, bot_message))
    return "", chat_history
# --- Gradio UI ---
# NOTE(review): the original paste lost the with-block indentation and carried
# scrape artifacts (a trailing "|" after demo.launch, and control bytes that
# split three string literals across lines); reassembled here with no logic
# changes. The Korean labels are mojibake — restore them from the original
# source file.
with gr.Blocks(theme='gstaff/sketch') as demo:
    gr.Markdown("# μλνμΈμ. μ΄νμ μ λΆμ λνν΄λ³΄μΈμ. \n λ΅λ³ μμ±μ μ‘°κΈ μκ°μ΄ μμλ μ μμ΅λλ€.")
    chatbot = gr.Chatbot(label="μ±νμ°½")
    msg = gr.Textbox(label="μλ ₯")
    clear = gr.Button("μ΄κΈ°ν")
    # Enter in the textbox sends the message; respond() clears the box and
    # returns the updated chat log.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Reset button wipes the visible chat log only.
    clear.click(lambda: None, None, chatbot, queue=False)
demo.launch(debug=True)