from LLM import LLM

import streamlit as st


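# Usage note (assumption: this file is saved as app.py and the local LLM module
# providing LLM, get_chat(), and answerStoreHistory() is on the import path):
#   streamlit run app.py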
def format_chat_history(chat_history):
    formatted_history = ""
    for speaker, message in chat_history:
        formatted_history += f"{speaker}: {message}\n"
    return formatted_history


def main():
    st.title("LLM Chat")

    model = "gpt2"
    llm = LLM(model)

    context = (
        "You are a helpful assistant in a school. "
        "You are helping a student with his homework."
    )

    # Streamlit reruns this script on every interaction, so keep the chat
    # object and its history in session_state rather than in local variables
    # inside an infinite loop.
    if "chat" not in st.session_state:
        st.session_state.chat = llm.get_chat(context=context)
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("User:")
    if st.button("Send") and user_input:
        st.session_state.chat_history.append(("User", user_input))
        bot_response = st.session_state.chat.answerStoreHistory(qn=user_input)
        st.session_state.chat_history.append(("Bot", bot_response))

    # st.text_area has no readonly argument; disabled=True makes it read-only.
    st.text_area(
        "Chat History:",
        value=format_chat_history(st.session_state.chat_history),
        disabled=True,
    )


if __name__ == "__main__":
    main()