import streamlit as st
from streamlit_chat import message as st_message
from transformers import AutoTokenizer, AutoModelForCausalLM
import string
import random
import requests
import os

def get_models():
    # it may be necessary for other frameworks to cache the model;
    # PyTorch seems to keep an internal state of the conversation
    model_name = "deepparag/Aeona"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model
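
# A minimal sketch, assuming a Streamlit release that provides st.cache_resource
# (older releases used st.experimental_singleton): caching the loader keeps the
# tokenizer and weights in memory across reruns instead of reloading them on
# every fallback call. The name get_cached_models is hypothetical and is not
# part of the original app.
@st.cache_resource
def get_cached_models():
    model_name = "deepparag/Aeona"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model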
if "history" not in st.session_state: | |
st.session_state.history = [] | |
st.session_state.aimlId="huggingface_space_"+''.join(random.choices(string.ascii_uppercase + | |
string.digits, k = 10)) | |

def generate_answer():
    user_message = st.session_state.input_text
    # Ask the AIML service first.
    response = requests.post(
        os.environ["AIML"]
        + f"?test=test&id={st.session_state.aimlId}&text={st.session_state.input_text}"
    ).content.decode("utf-8")
    # Markers that mean the AIML service had no usable answer.
    error_markers = ("idk", "<oob>", "Something is wrong with my", "AIML", "Index", "<html>")
    if not any(marker in response for marker in error_markers):
        message_bot = response.replace("<br/>", "\n")
    else:
        # Fall back to the DialoGPT-based model.
        tokenizer, model = get_models()
        inputs = tokenizer(st.session_state.input_text + tokenizer.eos_token, return_tensors="pt")
        result = model.generate(**inputs, max_length=1000)
        message_bot = tokenizer.decode(result[0], skip_special_tokens=True)
    st.session_state.history.append({"message": user_message, "is_user": True})
    st.session_state.history.append({"message": message_bot, "is_user": False})
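
# A minimal sketch, assuming the AIML endpoint accepts the same query parameters:
# passing them through requests' params= keyword URL-encodes the user text, so
# spaces, '&' or '#' in a message cannot break the query string. The helper name
# ask_aiml is hypothetical and is not wired into generate_answer above.
def ask_aiml(text):
    response = requests.post(
        os.environ["AIML"],
        params={"test": "test", "id": st.session_state.aimlId, "text": text},
    )
    return response.content.decode("utf-8")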
st.title("Talk with the Aeona!") | |
st.write("Aeona hopes to become an AI which as human as possible with goal of becoming your friend.") | |
st.write("To do this we hope to combine a AI which uses the dialoggpt-2 framework and discord messages") | |
st.write("The input will start out by going to an AIML chatbot based on a modified version of ALICE") | |
st.write("If the AIML has not valid answer it will proceed to use the AI") | |
st.write("The ai right now is focused mainly on discord and you can invite the bot here: https://www.aeona.xyz/") | |
st.text_input("Chat with Aeona", key="input_text", on_change=generate_answer) | |
for chat in st.session_state.history: | |
st_message(**chat) # unpacking | |
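
# A minimal sketch, assuming the usual DialoGPT-style multi-turn pattern: earlier
# turns are joined with eos_token so the model sees the conversation so far, and
# only the newly generated tokens are decoded, so the reply does not echo the
# prompt. The name generate_with_history is hypothetical and is not wired into
# generate_answer above.
def generate_with_history(user_text, history_texts):
    tokenizer, model = get_models()
    prompt = tokenizer.eos_token.join(history_texts + [user_text]) + tokenizer.eos_token
    inputs = tokenizer(prompt, return_tensors="pt")
    output = model.generate(
        **inputs,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the tokens generated after the prompt.
    new_tokens = output[0, inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)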