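"""Streamlit demo for the Aeona chatbot.

User input is first sent to an AIML backend (a modified ALICE). If that reply
looks like an error or a non-answer, a DialoGPT-based model
(deepparag/Aeona-Beta) generates the response instead.
"""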
import os
import random
import string

import requests
import streamlit as st
from streamlit_chat import message as st_message
from transformers import AutoTokenizer, AutoModelForCausalLM

@st.experimental_singleton
def get_models():
    # Cache the tokenizer and model so they are loaded only once per process,
    # not on every Streamlit rerun.
    model_name = "deepparag/Aeona-Beta"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model


if "history" not in st.session_state:
    st.session_state.history = []
    st.session_state.aimlId="huggingface_space_"+''.join(random.choices(string.ascii_uppercase +
                             string.digits, k = 10))




def generate_answer():
    user_message = st.session_state.input_text

    # Ask the AIML backend first; its URL comes from the AIML environment variable.
    # Passing the query string via `params` lets requests URL-encode the user text.
    response = requests.post(
        os.environ["AIML"],
        params={"test": "test", "id": st.session_state.aimlId, "text": user_message},
    ).content.decode("utf-8")

    # Markers that indicate the AIML reply is an error page or a non-answer.
    fallback_markers = ("idk", "<oob>", "Something is wrong with my", "AIML", "Index", "<html>")
    if not any(marker in response for marker in fallback_markers):
        message_bot = response.replace("<br/>", "\n")
    else:
        # Fall back to the DialoGPT-style model.
        tokenizer, model = get_models()
        inputs = tokenizer(user_message + tokenizer.eos_token, return_tensors="pt")
        result = model.generate(**inputs, max_length=1000, pad_token_id=tokenizer.eos_token_id)
        # Decode only the newly generated tokens, not the echoed user prompt.
        message_bot = tokenizer.decode(
            result[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True
        )

    st.session_state.history.append({"message": user_message, "is_user": True})
    st.session_state.history.append({"message": message_bot, "is_user": False})

st.title("Talk with the Aeona!")
st.write("Aeona hopes to become an AI which as human as possible with goal of becoming your friend.")
st.write("To do this we hope to combine a AI which uses the dialoggpt-2 framework and discord messages")
st.write("The input will start out by going to an AIML chatbot based on a modified version of ALICE")
st.write("If the AIML has not valid answer it will proceed to use the AI")
st.write("The ai right now is focused mainly on discord and you can invite the bot here: https://aeona.xyz/")

st.text_input("Chat with Aeona", key="input_text", on_change=generate_answer)

for chat in st.session_state.history:
    st_message(**chat)  # unpack each {"message", "is_user"} dict into a chat bubble
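
# A minimal way to run this demo locally, assuming the file is saved as app.py and
# that streamlit, streamlit-chat, transformers, torch, and requests are installed.
# The AIML environment variable must point to the AIML backend; the URL below is
# only a hypothetical placeholder:
#
#   export AIML="https://example.com/aiml-endpoint"
#   streamlit run app.py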