Rainsilves committed on
Commit 71da2de
1 Parent(s): 619c74f

added translation and summarization

app.py → Text-Generation.py RENAMED
File without changes
pages/Text-to-Text.py ADDED
@@ -0,0 +1,160 @@
+ import re
+ import string
+ import streamlit as st
+ import torch
+ from torch.nn import functional as F
+ from transformers import (AutoModelForCausalLM, AutoModelForQuestionAnswering,
+                           AutoModelForSeq2SeqLM,
+                           AutoModelForSequenceClassification, AutoTokenizer,
+                           GPT2Tokenizer, LogitsProcessor, LogitsProcessorList,
+                           pipeline, top_k_top_p_filtering)
+
+
+
+ st.set_page_config(page_title="Gadsby")
+ st.title("Gadsby - Constrained Text G̶e̶n̶e̶r̶a̶t̶i̶o̶n̶ to Text with Transformers")
+ st.image("https://upload.wikimedia.org/wikipedia/commons/1/1d/Gadsby_%28book_cover%29.jpg")
+ st.caption("The inspiration for this space: https://en.wikipedia.org/wiki/Gadsby_(novel)")
+
+
+
+ form = st.sidebar.form("choose_settings")
+ form.header("Main Settings")
+
+ model_name = form.text_area("Enter the name of the pre-trained model from transformers that we are using for Text-to-Text", value = "google/pegasus-cnn_dailymail")
+ form.caption("This will download a new model, so it may take a while or even break if the model is too large")
+ mode = form.selectbox("What kind of constrained generation are we doing?", ["lipogram", "reverse_lipogram", "e-prime", "rhopalism", "length_constrained", "greater_than_length", "Pangram", "rhopalism-lipogram"])
+ form.caption("Lipograms mean that a letter (or substring) is not allowed in the generated string; reverse lipograms force a letter to appear in the generated string")
+
+ if mode == "lipogram":
+     naughty_strings_list = st.text_area("Enter the list of strings that you don't want in each word, separated by a space", value = "E e")
+     naughty_strings = naughty_strings_list.split(" ")
+ elif mode == "e-prime":
+     e_prime_string = """be being been am is isn't are aren't was wasn't were weren't i'm you're we're they're he's she's it's there's here's where's how's what's who's that's aint isnt arent wasnt werent im youre were theyre hes shes its theres heres wheres hows whats whos thats aint Be Being Been Am Is Isn't Are Aren't Was Wasn't Were Weren't I'm You're We're They're He's She's It's There's Here's Where's How's What's Who's That's Aint Isnt Arent Wasnt Werent Im Youre Were Theyre Hes Shes Its Theres Heres Wheres Hows Whats Whos Thats Aint BE BEING BEEN AM IS ISN'T ARE AREN'T WAS WASN'T WERE WEREN'T I'M YOU'RE WE'RE THEY'RE HE'S SHE'S IT'S THERE'S HERE'S WHERE'S HOW'S WHAT'S WHO'S THAT'S AINT ISNT ARENT WASNT WERENT IM YOURE WERE THEYRE HES SHES ITS THERES HERES WHERES HOWS WHATS WHOS THATS AINT"""
+     st.caption("The default word list forces the language model to generate English without using any form of the verb 'to be'")
+     naughty_strings_list = st.text_area("Enter the list of strings that you don't want to be generated (exact match)", value = e_prime_string)
+     naughty_strings = naughty_strings_list.split(" ")
+ elif mode == "reverse_lipogram":
+     nice_strings_list = st.text_area("Enter the list of strings that you DO want in each word, separated by a space", value = "t T")
+     nice_strings = nice_strings_list.split(" ")
+ elif mode == "rhopalism":
+     length_constraint = form.number_input("Enter the length that the rhopalism should start with", value = 1)
+     st.caption("Rhopalisms are usually reliable, but sometimes you need to try generating two or three times for a perfect one")
+ elif mode == "rhopalism-lipogram":
+     naughty_strings_list = st.text_area("Enter the list of strings that you don't want in each word, separated by a space", value = "E e")
+     naughty_strings = naughty_strings_list.split(" ")
+     length_constraint = form.number_input("Enter the length that the rhopalism should start with", value = 1)
+     st.caption("Rhopalisms are usually reliable, but sometimes you need to try generating two or three times for a perfect one")
+ else:
+     length_constraint = form.number_input("Enter the length that each word should be restricted to (or greater/less than)", value = 5) + 1
+
+
+ length = form.number_input("Select how long you want the generated text to be", value = 100)
+ number_of_tokens_to_sample = form.number_input("Select how many tokens we want to search through when we do the filtering", value = 25000)
+ form.caption("Setting this to higher numbers will improve the experience but will slow generation down. Low numbers may cause lots of blank or failed generations")
+ temperature = form.number_input("How spicy/interesting do we want our model's output to be", value = 0.10, min_value = 0.0)
+ form.caption("Setting this higher decreases the likelihood of high-probability words and increases the likelihood of low-probability (and presumably more interesting) words")
+ form.caption("For more details on what these settings mean, see here: https://huggingface.co/blog/how-to-generate")
+
+ sequence = st.text_area("Enter a custom prompt", value = "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct.")
+ decoded_sequence = ""
+
+ form.form_submit_button("Generate some Constrained Text!")
+
+
+ with st.spinner("Please wait while the model loads:"):
+     tokenizer = AutoTokenizer.from_pretrained(model_name)
+     model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+     model.config.pad_token_id = model.config.eos_token_id
+
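+ # Helpers and state for the constraint modes: a small palindrome check, the rhopalism
+ # word-length counter, the pangram letter set, and the decoder prompt seeded with "<pad>".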
+ def isPalindrome(s):
+     return s == s[::-1]
+
+
+ if mode == "rhopalism" or mode == "rhopalism-lipogram":
+     rhopalism_len = length_constraint
+
+
+
+ nice_strings_pangram = list(string.ascii_lowercase)
+
+ decoder_input_ids = tokenizer.encode("<pad>", add_special_tokens=False, return_tensors="pt")
+
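+ # get_next_word_without_e() scores candidate next tokens with the seq2seq model, samples a
+ # large pool of candidates, and returns the first one that satisfies the selected constraint
+ # (despite its name, it handles every mode, not just e-lipograms).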
+ def get_next_word_without_e():
+     input_ids = tokenizer.encode(sequence, return_tensors="pt")
+     # get the next-token logits at the last decoder position
+
+     next_token_candidates_logits = model(input_ids = input_ids, decoder_input_ids = decoder_input_ids)[0][:, -1, :]
+     if temperature != 1.0:
+         next_token_candidates_logits = next_token_candidates_logits / temperature
+     # keep only the top_k candidates (top_p expects a probability, so 1.0 leaves nucleus filtering off)
+     filtered_next_token_candidates_logits = top_k_top_p_filtering(next_token_candidates_logits, top_k=number_of_tokens_to_sample, top_p=1.0)
+     # turn the filtered logits into a probability distribution and sample from it
+     probs = F.softmax(filtered_next_token_candidates_logits, dim=-1)
+     next_token_candidates = torch.multinomial(probs, num_samples=number_of_tokens_to_sample) ## number_of_tokens_to_sample random samples
+     for candidate_string in next_token_candidates:
+         for candidate in candidate_string:
+             resulting_string = tokenizer.decode(candidate, skip_special_tokens=True)
+             ### Constrained text generation starts HERE
+             ## Lipogram - no naughty strings allowed
+             if mode == "lipogram" or mode == "e-prime":
+                 if all(naughty_string not in resulting_string for naughty_string in naughty_strings): ## return the first candidate containing none of the naughty strings
+                     return resulting_string, candidate
+             ## Reverse lipogram - must use something from nice_strings
+             elif mode == "reverse_lipogram":
+                 if any(nice_string in resulting_string for nice_string in nice_strings):
+                     return resulting_string, candidate
+             ## Length constraints
+             elif mode == "length_constrained":
+                 ## Seems reliable if length is greater than 4
+                 if len(resulting_string) == length_constraint:
+                     return resulting_string, candidate
+             elif mode == "greater_than_length":
+                 ## Only sort of works
+                 if len(resulting_string) >= length_constraint:
+                     return resulting_string, candidate
+             elif mode == "rhopalism":
+                 ## Mostly works
+                 if len(resulting_string) == rhopalism_len:
+                     return resulting_string, candidate
+             elif mode == "Pangram":
+                 if any(c in nice_strings_pangram for c in resulting_string):
+                     return resulting_string, candidate
+             elif mode == "rhopalism-lipogram":
+                 if len(resulting_string) == rhopalism_len:
+                     if all(naughty_string not in resulting_string for naughty_string in naughty_strings):
+                         return resulting_string, candidate
+
+
+     # no candidate satisfied the constraint
+     return " ", None
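+ # The loop below requests one constrained word per step until `length` words have been
+ # generated; for the rhopalism modes the target word length grows by one at each step.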
+
+
+ new_sequence = ""
+
+ j = 0
+ i = length
+ while i > 0:
+     new_word, new_candidate = get_next_word_without_e()
+     if new_candidate is not None:
+         decoder_input_ids = torch.cat([decoder_input_ids, new_candidate.view(1, -1)], axis=-1)
+     if new_word.endswith(" "):
+         new_sequence = new_sequence + new_word
+     else:
+         new_sequence = new_sequence + new_word + " "
+     if mode == "rhopalism" or mode == "rhopalism-lipogram":
+         rhopalism_len += 1
+     i = i - 1
+     if mode == "Pangram":
+         for character in sequence:
+             if character in nice_strings_pangram:
+                 nice_strings_pangram.remove(character)
+     j += 1
+
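+ # Show the generated text decoded from the accepted decoder tokens.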
+ st.write("GENERATED SEQUENCE: ")
+ #st.write(new_sequence)
+ st.write(tokenizer.decode(decoder_input_ids[0], skip_special_tokens=True))
+ #st.write(nice_strings_pangram)
+