paragon-analytics committed
Commit 2e1d56a
1 Parent(s): 1b6ea30

Update app.py

Files changed (1)
  1. app.py +74 -48
app.py CHANGED
@@ -1,6 +1,7 @@
 # Import packages:
 
 import numpy as np
+import pandas as pd
 import matplotlib.pyplot as plt
 import re
 # tensorflow imports:
@@ -10,16 +11,7 @@ from tensorflow.keras import losses
 from tensorflow.keras import layers
 from tensorflow.keras.layers.experimental import preprocessing
 from tensorflow.keras.optimizers import RMSprop
-# # keras imports:
-from keras.models import Model
-from keras.layers import LSTM, Activation, Dense, Dropout, Input, Embedding, RepeatVector, TimeDistributed
-from keras.preprocessing.text import Tokenizer
-from keras_preprocessing import sequence
-from tensorflow.keras.utils import to_categorical
-from keras.callbacks import EarlyStopping
-from keras.models import Sequential
-from keras import layers
-from keras.backend import clear_session
+
 import pickle
 import gradio as gr
 import yake
@@ -28,6 +20,15 @@ from spacy import displacy
 import streamlit as st
 import spacy_streamlit
 nlp = spacy.load('en_core_web_sm')
+import torch
+import tensorflow as tf
+from transformers import RobertaTokenizer, RobertaModel, AutoModelForSequenceClassification, TFAutoModelForSequenceClassification
+from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+tokenizer = AutoTokenizer.from_pretrained("paragon-analytics/roberta_persuade")
+model = AutoModelForSequenceClassification.from_pretrained("paragon-analytics/roberta_persuade")
+
+# para_tokenizer = AutoTokenizer.from_pretrained("paragon-analytics/t5_para")
+# para_model = AutoModelForSeq2SeqLM.from_pretrained("paragon-analytics/t5_para")
 
 kw_extractor = yake.KeywordExtractor()
 custom_kw_extractor = yake.KeywordExtractor(lan="en", n=2, dedupLim=0.2, top=10, features=None)
@@ -35,22 +36,32 @@ custom_kw_extractor = yake.KeywordExtractor(lan="en", n=2, dedupLim=0.2, top=10,
 max_words = 2000
 max_len = 111
 
+from transformers_interpret import SequenceClassificationExplainer
+cls_explainer = SequenceClassificationExplainer(
+    model,
+    tokenizer)
+
 # load the model from disk
-filename = 'lstm_model.sav'
-lmodel = pickle.load(open(filename, 'rb'))
+#filename = 'resil_lstm_model.sav'
+#lmodel = pickle.load(open(filename, 'rb'))
 
 # load the model from disk
-filename = 'tokenizer.pickle'
-tok = pickle.load(open(filename, 'rb'))
+#filename = 'tokenizer.pickle'
+#tok = pickle.load(open(filename, 'rb'))
 
 def process_final_text(text):
     X_test = str(text).lower()
-    l = []
-    l.append(X_test)
-    test_sequences = tok.texts_to_sequences(l)
-    test_sequences_matrix = sequence.pad_sequences(test_sequences,maxlen=max_len)
-    lstm_prob = lmodel.predict(test_sequences_matrix.tolist()).flatten()
-    lstm_pred = np.where(lstm_prob>=0.5,1,0)
+    #l = []
+    #l.append(X_test)
+    #test_sequences = tok.texts_to_sequences(l)
+    #test_sequences_matrix = sequence.pad_sequences(test_sequences,maxlen=max_len)
+    #lstm_prob = lmodel.predict(test_sequences_matrix.tolist()).flatten()
+    #lstm_pred = np.where(lstm_prob>=0.5,1,0)
+
+    encoded_input = tokenizer(X_test, return_tensors='pt')
+    output = model(**encoded_input)
+    scores = output[0][0].detach().numpy()
+    scores = tf.nn.softmax(scores)
 
     # Get Keywords:
     keywords = custom_kw_extractor.extract_keywords(X_test)
@@ -80,36 +91,50 @@ def process_final_text(text):
         + sp_html
         + ""
         )
-    return {"Persuasive": float(lstm_prob[0]), "Non-Persuasive": 1-float(lstm_prob[0])},keywords,NER
 
-def main(prob1,prob2,sol1,sol2,inv,act):
-    text = str(prob1) + " " + str(prob2) + " " + str(sol1) + " " + str(sol2) + " " + str(inv) + " " + str(act)
+    # Transformer Interpret:
+    word_attributions = cls_explainer(X_test)
+    letter = []
+    score = []
+    for i in word_attributions:
+        if i[1]>0.5:
+            a = "++"
+        elif (i[1]<=0.5) and (i[1]>0.1):
+            a = "+"
+        elif (i[1]>=-0.5) and (i[1]<-0.1):
+            a = "-"
+        elif i[1]<-0.5:
+            a = "--"
+        else:
+            a = "NA"
+
+        letter.append(i[0])
+        score.append(a)
+
+    word_attributions = [(letter[i], score[i]) for i in range(0, len(letter))]
+
+    # # Paraphraser:
+    # batch = para_tokenizer(X_test, return_tensors='pt')
+    # generated_ids = para_model.generate(batch['input_ids'])
+    # para_list = para_tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
+
+    return {"Persuasive": float(scores.numpy()[1]), "Non-Persuasive": float(scores.numpy()[0])},keywords,NER,word_attributions
+
+def main(prob1):
+    text = str(prob1)
     obj = process_final_text(text)
-    return obj[0],obj[1],obj[2]
+    return obj[0],obj[1],obj[2],obj[3]
 
-title = "Welcome to **PersuAID** 🪐"
+title = "Welcome to **Persuade** 🪐"
 description1 = """
-It is difficult to write persuasive product descriptions. It could take time and money to create a good one. PersuAID uses a template to organize your thoughts when writing a persuasive product description. It's AI model is trained on tens of thousands of product descriptions. Why don't you give it a try? Just add your text and hit Create & Analyze ✨
-"""
-
-description2 = """
-Although encouraged, you don't have to fill all the boxes. Just try the ones that matter to you. After getting your first score, modify your answers and hit Create & Analyze again 🤞
-"""
+This app takes text (up to a few sentences) and predicts to what extent the text contains persuasive content."""
 
 with gr.Blocks(title=title) as demo:
     gr.Markdown(f"## {title}")
-    gr.Markdown("""![marketing](file/marketing.jpg)""")
     gr.Markdown(description1)
     gr.Markdown("""---""")
-    gr.Markdown(description2)
-    gr.Markdown("""---""")
-    prob1 = gr.Textbox(label="A great product description is the one that solves an important problem. What is the single, most important problem that your product solves?",lines=2, placeholder="Type it here ...")
-    prob2 = gr.Textbox(label="Are there any other problems your product solves? If none, leave blank.",lines=2, placeholder="Type them here, ...")
-    sol1 = gr.Textbox(label="What solution does your product offer to the main problem you described first?",lines=2, placeholder="Type your answer here ...")
-    sol2 = gr.Textbox(label="How does your product solves the problem?",lines=2, placeholder="Type your answer here ...")
-    inv = gr.Textbox(label="Now it's time to invite your audience to take action. Encourage them to try your product 💖",lines=2, placeholder="Type your invitation here ...")
-    act = gr.Textbox(label="And finally, describe (short sentence) what action you want them to take. This could also be your product description as a punchline 💪",lines=2, placeholder="Type the desired action/ outcome/ punchline here ...")
-    submit_btn = gr.Button("Create & Analyze")
+    prob1 = gr.Textbox(label="Enter Your Text Here:",lines=2, placeholder="Type it here ...")
+    submit_btn = gr.Button("Analyze")
     #text = gr.Textbox(label="Text:",lines=2, placeholder="Please enter text here ...")
     #submit_btn2 = gr.Button("Analyze")
 
@@ -119,17 +144,18 @@ with gr.Blocks(title=title) as demo:
         color_map={"+++": "royalblue","++": "cornflowerblue",
        "+": "lightsteelblue", "NA":"white"})
     NER = gr.HTML(label = 'NER:')
+    intp =gr.HighlightedText(label="Word Scores",
+        combine_adjacent=False).style(color_map={"++": "darkgreen","+": "green",
+        "--": "darkred",
+        "-": "red", "NA":"white"})
 
     submit_btn.click(
         main,
-        [prob1,prob2,sol1,sol2,inv,act],
-        [label,impplot,NER], api_name="PrsTalk"
+        [prob1],
+        [label,impplot,NER,intp], api_name="ResText"
    )
 
-
-
-    gr.Markdown("### Click on any of the examples below to see how it works:")
-    gr.Examples([["It is difficult to write persuasive product descriptions.", "It could take time and money to create a good one.", "PersuAID uses a template to organize your thoughts when writing a persuasive product description.", "It's AI model is trained on tens of thousands of product descriptions.", "Why don't you give it a try?", "Just add your text and hit Create & Analyze ✨"], ["What is performance?", "Zero to Sixty or Sixty to Zero? How a car performs a quarter mile or a quarter century? Is performance about the joy of driving or the importance of surviving?",
-    "To us performance is not about doing one thing well ...", "it is about doing everything well .. because in the end everything matters.", "Performance without compromise.", "That is what drives you..... Mercedes Benz"], ["Talking to your friends about their problems with drugs and alcohol might not be easy.", "", "", "", "Courage.", "The anti-drug."], ["When experiencing depression, I couldn't get out of bed or focus.", "My goals and dreams drifted away.", "I got my life back through the help of a support group of people who had been where I was.", "They helped me see that my life wasn't limited that I could still achieve my dreams.","We've been there.", "We can help. Call 800-826-3632 to learn more."], ["Up to 6 million homeless animals enter shelters nationwide every year.", "Some abandoned, some sick or injured they're just looking for a place to call home.", "When you rescue a shelter animal you won't just be making a difference in their life, you could be making a difference in your own.", "", "Visit your local animal shelter today and save a life.", "HumaneSociety.org"], ["You always dreamed of living in a place like this,", "But the land can be challenging.", "That's where our expertise comes in.", "Your John Deere dealer will find the best solution to get the most for your money and the most out of your property...", "Because we know you love your land...every last bit of it.", "That's how we run... and nothin' runs like a Deere."]], [prob1,prob2,sol1,sol2,inv,act], [label,impplot,NER], main, cache_examples=True)
+    gr.Markdown("### Click on any of the examples below to see to what extent they contain resilience messaging:")
+    gr.Examples([["Please stay at home and avoid unnecessary trips."],["Please stay at home and avoid unnecessary trips. We will survive this."],["We will survive this."],["Watch today’s news briefing with the latest updates on COVID-19 in Connecticut."],["So let's keep doing what we know works. Let's stay strong, and let's beat this virus. I know we can, and I know we can come out stronger on the other side."],["It is really wonderful how much resilience there is in human nature. Let any obstructing cause, no matter what, be removed in any way, even by death, and we fly back to first principles of hope and enjoyment."],["Resilience is accepting your new reality, even if it’s less good than the one you had before. You can fight it, you can do nothing but scream about what you’ve lost, or you can accept that and try to put together something that’s good."],["You survived all of the days you thought you couldn't, never underestimate your resilience."],["Like tiny seeds with potent power to push through tough ground and become mighty trees, we hold innate reserves of unimaginable strength. We are resilient."]], [prob1], [label,impplot,NER,intp], main, cache_examples=True)
 
 demo.launch()
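
For reference, the inference path this commit wires up can be exercised outside the Gradio UI. The sketch below is a minimal, standalone approximation that reuses only calls already present in the diff (the paragon-analytics/roberta_persuade tokenizer and model, tf.nn.softmax over the logits, and transformers_interpret's SequenceClassificationExplainer with the same attribution buckets). The score_text wrapper and the __main__ example are illustrative additions, not part of app.py, and the label order (index 1 = "Persuasive") simply follows the assumption the diff itself makes.

# Standalone sketch of the RoBERTa-based scoring added in this commit.
# Assumes torch, tensorflow, transformers, and transformers-interpret are installed.
import tensorflow as tf
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from transformers_interpret import SequenceClassificationExplainer

tokenizer = AutoTokenizer.from_pretrained("paragon-analytics/roberta_persuade")
model = AutoModelForSequenceClassification.from_pretrained("paragon-analytics/roberta_persuade")
cls_explainer = SequenceClassificationExplainer(model, tokenizer)

def score_text(text):
    # Hypothetical helper, not in app.py: mirrors process_final_text() minus
    # the YAKE keyword and spaCy NER steps.
    x = str(text).lower()

    # Classification head -> softmax over the two classes, as in the diff.
    encoded_input = tokenizer(x, return_tensors='pt')
    output = model(**encoded_input)
    logits = output[0][0].detach().numpy()
    probs = tf.nn.softmax(logits).numpy()

    # Per-token attributions, bucketed with the same thresholds app.py feeds
    # to the gr.HighlightedText component.
    buckets = []
    for token, attribution in cls_explainer(x):
        if attribution > 0.5:
            label = "++"
        elif 0.1 < attribution <= 0.5:
            label = "+"
        elif -0.5 <= attribution < -0.1:
            label = "-"
        elif attribution < -0.5:
            label = "--"
        else:
            label = "NA"
        buckets.append((token, label))

    return {"Persuasive": float(probs[1]), "Non-Persuasive": float(probs[0])}, buckets

if __name__ == "__main__":
    scores, attributions = score_text("Please stay at home and avoid unnecessary trips.")
    print(scores)
    print(attributions)

Running this should print a two-class probability dict and a list of (token, bucket) pairs, i.e. the same values the updated app returns for its label and "Word Scores" outputs.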