|
|
|
import os |
|
from dotenv import load_dotenv |
|
import openai |
|
import streamlit as st |
|
from langchain_openai import ChatOpenAI |
|
from langchain.chains import RetrievalQA |
|
from langchain_openai import OpenAIEmbeddings |
|
from langchain_community.vectorstores import FAISS |
|
|
|
# Load environment variables (OPENAI_API_KEY, ...) from a local .env file.
load_dotenv()

# Fail fast with an actionable message instead of a bare KeyError when the
# key is missing from both the environment and the .env file.
_api_key = os.environ.get("OPENAI_API_KEY")
if not _api_key:
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Add it to your environment or .env file."
    )
# Set the legacy module-level key as well; ChatOpenAI/OpenAIEmbeddings also
# read OPENAI_API_KEY from the environment directly.
openai.api_key = _api_key

# Chat model used for answering.
# NOTE(review): gpt-3.5-turbo-16k-0613 has been retired by OpenAI — confirm
# availability or migrate to a current model.
MODEL_NAME = "gpt-3.5-turbo-16k-0613"

# Sampling temperature; 0.9 is fairly creative for a retrieval-QA bot —
# consider lowering for more factual answers.
MODEL_TEMPERATURE = 0.9
|
|
|
|
|
# --- UI header -----------------------------------------------------------
st.title("Welcome to TOKAI Chatbot.ver8 🔰")
st.markdown("お困りになっていることを教えてください。")

# --- Retrieval setup -----------------------------------------------------
# Load the prebuilt FAISS index from disk. allow_dangerous_deserialization
# is required because FAISS indexes are pickle-based; only load indexes you
# built yourself (pickle can execute arbitrary code on untrusted files).
# (Renamed from the misspelled `vectoreStore` to snake_case.)
vector_store = FAISS.load_local(
    "faiss_index/",
    OpenAIEmbeddings(),
    allow_dangerous_deserialization=True,
)

# Fetch the 3 most similar document chunks for each query.
retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 3})
|
|
|
|
|
# Create the per-session chat history on first run; session_state is a
# MutableMapping, so setdefault initializes the key exactly once.
st.session_state.setdefault("messages", [])

# Replay the conversation so far — Streamlit reruns the whole script on
# every interaction, so the transcript must be redrawn each time.
for past in st.session_state.messages:
    role, content = past["role"], past["content"]
    with st.chat_message(role):
        st.markdown(content)
|
|
|
|
|
# --- Chat loop -----------------------------------------------------------
if prompt := st.chat_input("ここに質問事項を入力してください。"):

    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Build the retrieval-augmented QA chain. "stuff" inserts the retrieved
    # chunks directly into the model's prompt context.
    qa = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(temperature=MODEL_TEMPERATURE, model_name=MODEL_NAME),
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=False,
    )

    # Constrain answers to the Tokai University course-catalog domain.
    query = f"東海大学の授業要覧以外に関する質問には答えないでください。次の質問に答えてください。:{prompt}"

    # Chain.run() is deprecated since LangChain 0.1 (this file already uses
    # 0.1-era APIs such as langchain_openai and allow_dangerous_deserialization);
    # invoke() is the supported entry point and returns a dict keyed "result".
    res = qa.invoke({"query": query})["result"]

    # Record and render the assistant's reply.
    st.session_state.messages.append({"role": "assistant", "content": res})
    with st.chat_message("assistant"):
        st.markdown(res)