Pippoz committed
Commit
2742c1d
1 Parent(s): a62c911

adding chat bot

Files changed (3):
  1. app.py +2 -1
  2. pages/chat.py +42 -0
  3. requirements.txt +2 -1
app.py CHANGED
@@ -1,7 +1,7 @@
  import streamlit as st
  # Custom imports
  from multipage import MultiPage
- from pages import text_gen, fill_mask, home_page, translation # import your pages here
+ from pages import text_gen, fill_mask, home_page, translation, chat # import your pages here

  # Create an instance of the app
  app = MultiPage()
@@ -9,6 +9,7 @@ app = MultiPage()

  # Add all your applications (pages) here
  app.add_page("Home Page", home_page.app)
+ app.add_page("Chat Bot", chat.app)
  app.add_page("Mask Fill", fill_mask.app)
  app.add_page("Text Generation", text_gen.app)
  app.add_page("Translation", translation.app)
pages/chat.py ADDED
@@ -0,0 +1,42 @@
+ import streamlit as st
+ from streamlit_chat import message as st_message
+ from transformers import BlenderbotTokenizer
+ from transformers import BlenderbotForConditionalGeneration
+
+
+ def app():
+     @st.experimental_singleton
+     def get_models():
+         # it may be necessary for other frameworks to cache the model
+         # seems pytorch keeps an internal state of the conversation
+         model_name = "facebook/blenderbot-400M-distill"
+         tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
+         model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
+         return tokenizer, model
+
+
+     if "history" not in st.session_state:
+         st.session_state.history = []
+
+     st.title("IoT Chat Robot")
+     st.write('Just chat with a friendly and smart AI')
+     st.markdown('## ')
+
+
+     def generate_answer():
+         tokenizer, model = get_models()
+         user_message = st.session_state.input_text
+         inputs = tokenizer(st.session_state.input_text, return_tensors="pt")
+         result = model.generate(**inputs)
+         message_bot = tokenizer.decode(
+             result[0], skip_special_tokens=True
+         )  # .replace("<s>", "").replace("</s>", "")
+
+         st.session_state.history.append({"message": user_message, "is_user": True})
+         st.session_state.history.append({"message": message_bot, "is_user": False})
+
+
+     st.text_input("Hi, nice to meet you AI!", key="input_text", on_change=generate_answer)
+
+     for chat in st.session_state.history:
+         st_message(**chat)  # unpacking
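As a quick sanity check of the model round-trip the new page relies on, the same Blenderbot calls can be exercised outside Streamlit; a minimal sketch, with an arbitrary example prompt:

from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

# Same model name and generate/decode flow as pages/chat.py, minus the Streamlit state.
model_name = "facebook/blenderbot-400M-distill"
tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

inputs = tokenizer("Hi, nice to meet you AI!", return_tensors="pt")
reply_ids = model.generate(**inputs)
print(tokenizer.decode(reply_ids[0], skip_special_tokens=True))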
requirements.txt CHANGED
@@ -4,4 +4,5 @@ altair
  pandas
  sacremoses
  sentencepiece
- torch
+ torch
+ streamlit_chat
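To try the new page locally, installing the updated dependencies with pip install -r requirements.txt and launching the app with streamlit run app.py (presumably still the entry point) should be enough; the first chat message will also trigger the Blenderbot model download via from_pretrained.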