gabriel lopez committed on
Commit
40f8a3f
1 Parent(s): 0e1a80a

working app

Browse files
Files changed (4) hide show
  1. Pipfile +14 -0
  2. Pipfile.lock +0 -0
  3. app.py +17 -15
  4. requirements.txt +2 -2
Pipfile ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [[source]]
2
+ url = "https://pypi.org/simple"
3
+ verify_ssl = true
4
+ name = "pypi"
5
+
6
+ [packages]
7
+ gradio = "==3.10.1"
8
+ tensorflow = "==2.5.0"
9
+ transformers = ">=4.21.0"
10
+
11
+ [dev-packages]
12
+
13
+ [requires]
14
+ python_version = "3.9"
Pipfile.lock ADDED
The diff for this file is too large to render. See raw diff
 
app.py CHANGED
@@ -3,28 +3,25 @@ import tensorflow as tf
3
  import gradio as gr
4
 
5
  TITLE = "DialoGPT -- Chatbot"
6
- DESCRIPTION = "<center>Have funny/existencial dialogs with non-human entities</center>"
 
 
 
7
  EXAMPLES = [
8
- ["How will the world end?"],
9
- ["Does the universe have a purpose?"],
10
- ["Is the universe infinite?"],
11
- ["Was Einstein right about time being relative?"],
12
- ["What is Pythagoras theorem?"],
13
- ["What is the meaning of life?"],
14
  ]
15
  ARTICLE = r"""<center>
16
- This application allowsa you to talk with a machine.
17
- In the back-end I'm using the DialoGPT model from microsoft.<br>
18
- This model extends GPT2 towards the conversational neural response generetion domain.<br>
19
- ArXiv paper: https://arxiv.org/abs/1911.00536<br>
20
- Done by dr. Gabriel Lopez<br>
21
  For more please visit: <a href='https://sites.google.com/view/dr-gabriel-lopez/home'>My Page</a><br>
22
  </center>"""
23
 
24
  tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
25
  model = TFAutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
26
 
27
-
28
  def chat_with_bot(user_input, chat_history_and_input=[]):
29
  emb_user_input = tokenizer.encode(
30
  user_input + tokenizer.eos_token, return_tensors="tf"
@@ -45,9 +42,14 @@ def chat_with_bot(user_input, chat_history_and_input=[]):
45
  )
46
  return bot_response, chat_history_and_input
47
 
 
 
 
 
 
48
  gr.Interface(
49
- inputs=["text", "state"],
50
- outputs=["text", "state"],
51
  examples=EXAMPLES,
52
  title=TITLE,
53
  description=DESCRIPTION,
 
3
  import gradio as gr
4
 
5
  TITLE = "DialoGPT -- Chatbot"
6
+ DESCRIPTION = """<center>This application allows you to talk with a machine.
7
+ In the back-end is using the DialoGPT model from Microsoft.<br>
8
+ This model extends GPT2 towards the conversational neural response generetion domain.<br>
9
+ You can also see the <a href="https://arxiv.org/abs/1911.00536">ArXiv paper</a><br></center>"""
10
  EXAMPLES = [
11
+ ["What is your favorite videogame?"],
12
+ ["What do you do for work?"],
13
+ ["What are your hobbies?"],
14
+ ["What is your favorite food?"],
 
 
15
  ]
16
  ARTICLE = r"""<center>
17
+ Done by dr. Gabriel Lopez<br>
 
 
 
 
18
  For more please visit: <a href='https://sites.google.com/view/dr-gabriel-lopez/home'>My Page</a><br>
19
  </center>"""
20
 
21
  tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
22
  model = TFAutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
23
 
24
+ # interaction function
25
  def chat_with_bot(user_input, chat_history_and_input=[]):
26
  emb_user_input = tokenizer.encode(
27
  user_input + tokenizer.eos_token, return_tensors="tf"
 
42
  )
43
  return bot_response, chat_history_and_input
44
 
45
+
46
+ # gradio interface
47
+ in_text = gr.Textbox(value="How was the class?", label="Start chatting!")
48
+ out_text = gr.Textbox(value="", label="Chatbot response:")
49
+
50
  gr.Interface(
51
+ inputs=[in_text, "state"],
52
+ outputs=[out_text, "state"],
53
  examples=EXAMPLES,
54
  title=TITLE,
55
  description=DESCRIPTION,
requirements.txt CHANGED
@@ -1,3 +1,3 @@
1
- gradio==3.10.1
2
- tensorflow==2.4.1
3
  transformers==4.24.0
 
 
 
 
 
1
  transformers==4.24.0
2
+ gradio==3.10.1
3
+ tensorflow==2.5.0