LamaAl committed on
Commit 6910d80
1 Parent(s): 052389d

Create app.py

Files changed (1)
  1. app.py +39 -0
app.py ADDED
@@ -0,0 +1,39 @@
+ import streamlit as st
+ from streamlit_chat import message as st_message
+ from transformers import BlenderbotTokenizer
+ from transformers import BlenderbotForConditionalGeneration
+
+
+ @st.experimental_singleton
+ def get_models():
+     # Cache the tokenizer and model as a singleton so they are loaded
+     # only once, not on every Streamlit rerun.
+     model_name = "facebook/blenderbot-400M-distill"
+     tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
+     model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
+     return tokenizer, model
+
+
+ if "history" not in st.session_state:
+     st.session_state.history = []
+
+ st.title("Hello Chatbot")
+
+
+ def generate_answer():
+     tokenizer, model = get_models()
+     user_message = st.session_state.input_text
+     inputs = tokenizer(user_message, return_tensors="pt")
+     result = model.generate(**inputs)
+     message_bot = tokenizer.decode(
+         result[0], skip_special_tokens=True
+     )  # skip_special_tokens drops the <s> and </s> markers
+
+     st.session_state.history.append({"message": user_message, "is_user": True})
+     st.session_state.history.append({"message": message_bot, "is_user": False})
+
+
+ st.text_input("Talk to the bot", key="input_text", on_change=generate_answer)
+
+ for chat in st.session_state.history:
+     st_message(**chat)  # unpack each history dict into the chat message component
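
For a quick check of the model outside Streamlit, a minimal standalone sketch like the one below exercises the same tokenizer/model pair that generate_answer() uses (assuming transformers and torch are installed; the file name sanity_check.py and the test prompt are arbitrary). The app itself is started with streamlit run app.py.

# sanity_check.py - standalone sketch, not part of this commit
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

model_name = "facebook/blenderbot-400M-distill"
tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

# Tokenize a test prompt and generate a reply, mirroring generate_answer().
inputs = tokenizer("Hello, how are you today?", return_tensors="pt")
reply_ids = model.generate(**inputs)
print(tokenizer.decode(reply_ids[0], skip_special_tokens=True))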