AyushSingh127 committed
Commit: fba9090
Parent: 49bb472

Upload 11 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tweet_model/variables/variables.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text
acronym.json ADDED
@@ -0,0 +1 @@
+ {"aka":"also known as","asap":"as soon as possible","brb":"be right back","btw":"by the way","dob":"date of birth","faq":"frequently asked questions","fyi":"for your information","idk":"i don't know","idc":"i don't care","iirc":"if i recall correctly","imo":"in my opinion","irl":"in real life","lmk":"let me know","lol":"laugh out loud","ngl":"not gonna lie","noyb":"none of your business","nvm":"never mind","ofc":"of course","omg":"oh my god","pfa":"please find attached","rofl":"rolling on the floor laughing","stfu":"shut the fuck up","tba":"to be announced","tbc":"to be continued","tbd":"to be determined","tbh":"to be honest","ttyl":"talk to you later","wtf":"what the fuck","wth":"what the heck"}
app.py ADDED
@@ -0,0 +1,155 @@
+ # Imports
+ import re
+ import string
+
+ import nltk
+ import numpy as np
+ import pandas as pd
+ import streamlit as st
+ import tensorflow as tf
+ from nltk.stem import WordNetLemmatizer
+ from nltk.tokenize import RegexpTokenizer
+ from PIL import Image
+
+ # Download WordNet once if it is not already available
+ try:
+     nltk.data.find("corpora/wordnet.zip")
+ except LookupError:
+     nltk.download("wordnet")
+
+ # Load lookup files (acronyms, contractions, stop words)
+ try:
+     acronyms_dict, contractions_dict, stops
+ except NameError:
+     acronyms_dict = pd.read_json("acronym.json", typ="series")
+     contractions_dict = pd.read_json("contraction.json", typ="series")
+     stops = list(pd.read_csv("stop_words.csv").values.flatten())
+
+ # Tokenizer used throughout preprocessing
+ regexp = RegexpTokenizer(r"[\w']+")
+
+ def preprocess(text):
+     text = text.lower()   # lowercase
+     text = text.strip()   # leading/trailing whitespace
+
+     # Remove HTML tags
+     html = re.compile(r"<.*?>")
+     text = html.sub(r"", text)
+
+     # Remove emoji
+     emoji_pattern = re.compile(
+         "["
+         u"\U0001F600-\U0001F64F"  # emoticons
+         u"\U0001F300-\U0001F5FF"  # symbols & pictographs
+         u"\U0001F680-\U0001F6FF"  # transport & map symbols
+         u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
+         u"\U00002702-\U000027B0"
+         u"\U000024C2-\U0001F251"
+         "]+",
+         flags=re.UNICODE,
+     )
+     text = emoji_pattern.sub(r"", text)
+
+     # Remove URLs (strings beginning with http(s):// or www.)
+     http = r"https?://\S+|www\.\S+"
+     text = re.sub(r"({})".format(http), "", text)
+
+     # Remove @twitter usernames
+     text = re.sub(r"@[\w_]+", "", text)
+
+     # Remove punctuation and digits, keeping "'" and "-" for now
+     punct_str = string.punctuation + string.digits
+     punct_str = punct_str.replace("'", "")
+     punct_str = punct_str.replace("-", "")
+     text = text.translate(str.maketrans("", "", punct_str))
+
+     # Replace "-" with a space
+     text = text.replace("-", " ")
+
+     # Substitute acronyms
+     words = []
+     for word in regexp.tokenize(text):
+         if word in acronyms_dict.index:
+             words = words + acronyms_dict[word].split()
+         else:
+             words = words + word.split()
+     text = " ".join(words)
+
+     # Substitute contractions
+     words = []
+     for word in regexp.tokenize(text):
+         if word in contractions_dict.index:
+             words = words + contractions_dict[word].split()
+         else:
+             words = words + word.split()
+     text = " ".join(words)
+
+     # Remove remaining punctuation (including "'")
+     text = text.translate(str.maketrans("", "", string.punctuation))
+
+     # Lemmatization
+     lemmatizer = WordNetLemmatizer()
+     text = " ".join(lemmatizer.lemmatize(word) for word in regexp.tokenize(text))
+
+     # Stop-word removal
+     text = " ".join(word for word in regexp.tokenize(text) if word not in stops)
+
+     # Keep only ASCII letters and spaces
+     allowed = string.ascii_letters + " "
+     text = "".join(ch for ch in text if ch in allowed)
+
+     # Remove words containing a character repeated three or more times in a row
+     text = re.sub(r"\b\w*?(.)\1{2,}\w*\b", "", text).strip()
+
+     # Remove words with fewer than 3 characters
+     text = re.sub(r"\b\w{1,2}\b", "", text)
+
+     return text
+
+
+ # Frontend
+ st.write("# Disaster Tweet Prediction")
+
+ img = Image.open("disaster.jpg")
+ st.image(img, width=500)
+
+ tweet = st.text_input(label="Tweet", placeholder="Enter or paste your tweet here")
+
+ # Load the model once and cache it across reruns
+ @st.cache_resource
+ def cache_model(model_name):
+     model = tf.keras.models.load_model(model_name)
+     return model
+
+ model = cache_model("tweet_model")
+
+ if len(tweet) > 0:
+     clean_tweet = preprocess(tweet)
+     y_pred = model.predict([clean_tweet])
+     y_pred_num = int(np.round(y_pred)[0][0])
+
+     if y_pred_num == 0:
+         st.write(f"## Non-Disaster tweet with disaster probability {y_pred[0][0] * 100:.2f}%")
+     else:
+         st.write(f"## Disaster tweet with disaster probability {y_pred[0][0] * 100:.2f}%")
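As a rough sanity check of the preprocessing above (illustrative only; sample is a made-up input, and the exact output depends on the lookup files shipped in this commit):

# Assumes app.py's preprocess() and its lookup files are present.
sample = "OMG!! Fire near the house, idk what to do http://t.co/xyz @someone"
print(preprocess(sample))
# The URL, @mention, punctuation, and digits are stripped; "omg"/"idk" are
# expanded via acronym.json; stop words and words under 3 characters are dropped.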
contraction.json ADDED
@@ -0,0 +1 @@
+ {"'aight":"alright","ain't":"are not","amn't":"am not","arencha":"are not you","aren't":"are not","'bout":"about","can't":"cannot","cap'n":"captain","'cause":"because","'cept":"except","could've":"could have","couldn't":"could not","couldn't've":"could not have","dammit":"damn it","daren't":"dare not","daresn't":"dare not","dasn't":"dare not","didn't":"did not","doesn't":"does not","doin'":"doing","don't":"do not","dunno":"do not know","d'ye":"do you","e'en":"even","e'er":"ever","'em":"them","everybody's":"everybody is","everyone's":"everyone is","fo'c'sle":"forecastle","finna":"fixing to","'gainst":"against","g'day":"good day","gimme":"give me","giv'n":"given","gonna":"going to","gon't":"go not","gotcha":"got you","gotta":"got to","gtg":"got to go","hadn't":"had not","had've":"had have","hasn't":"has not","haven't":"have not","he'd":"he had","he'll":"he shall","helluva":"hell of a","he's":"he is","here's":"here is","he've":"he have","how'd":"how would","howdy":"how do you do","how'll":"how will","how're":"how are","how's":"how is","i'd":"i had","i'd've":"i would have","i'll":"i shall","i'm":"i am","imma":"i am about to","i'm'a":"i am about to","i'm'o":"i am going to","innit":"is it not","ion":"i do not","i've":"i have","isn't":"is not","it'd":"it would","it'll":"it shall","it's":"it is","iunno":"i do not know","kinda":"kind of","let's":"let us","li'l":"little","ma'am":"madam","mayn't":"may not","may've":"may have","methinks":"me thinks","mightn't":"might not","might've":"might have","mustn't":"must not","mustn't've":"must not have","must've":"must have","'neath":"beneath","needn't":"need not","nal":"and all","ne'er":"never","o'clock":"of the clock","o'er":"over","ol'":"old","oughtn't":"ought not","'round":"around","'s":"is","shalln't":"shall not","shan't":"shall not","she'd":"she had","she'll":"she shall","she's":"she is","should've":"should have","shouldn't":"should not","shouldn't've":"should not have","somebody's":"somebody is","someone's":"someone is","something's":"something is","so're":"so are","so's":"so is","so've":"so have","that'll":"that shall","that're":"that are","that's":"that is","that'd":"that would","there'd":"there had","there'll":"there shall","there're":"there are","there's":"there is","these're":"these are","these've":"these have","they'd":"they had","they'll":"they shall","they're":"they are","they've":"they have","this's":"this is","those're":"those are","those've":"those have","'thout":"without","'til":"until","'tis":"it is","to've":"to have","'twas":"it was","'tween":"between","'twhere":"it were","wanna":"want to","wasn't":"was not","we'd":"we had","we'd've":"we would have","we'll":"we shall","we're":"we are","we've":"we have","weren't":"were not","whatcha":"what are you","what'd":"what did","what'll":"what shall","what're":"what are","what's":"what is","what've":"what have","when's":"when is","where'd":"where did","where'll":"where shall","where're":"where are","where's":"where is","where've":"where have","which'd":"which had","which'll":"which shall","which're":"which are","which's":"which is","which've":"which have","who'd":"who would","who'd've":"who would have","who'll":"who shall","who're":"who are","who's":"who is","who've":"who have","why'd":"why did","why're":"why are","why's":"why is","willn't":"will not","won't":"will not","wonnot":"will not","would've":"would have","wouldn't":"would not","wouldn't've":"would not have","y'all":"you all","y'all'd've":"you all would have","y'all'd'n't've":"you all would not have","y'all're":"you all are","y'all'ren't":"you all are not","y'at":"you at","yes'm":"yes madam","yessir":"yes sir","you'd":"you had","you'll":"you shall","you're":"you are","you've":"you have","aight":"alright","aint":"are not","amnt":"am not","arent":"are not","cant":"cannot","cause":"because","couldve":"could have","couldnt":"could not","couldntve":"could not have","darent":"dare not","daresnt":"dare not","dasnt":"dare not","didnt":"did not","doesnt":"does not","doin":"doing","dont":"do not","eer":"ever","everybodys":"everybody is","everyones":"everyone is","gday":"good day","givn":"given","gont":"go not","hadnt":"had not","hadve":"had have","hasnt":"has not","havent":"have not","hed":"he had","hell":"he shall","hes":"he is","heve":"he have","howd":"how did","howll":"how will","howre":"how are","hows":"how is","idve":"i would have","ill":"i shall","im":"i am","ima":"i am about to","imo":"i am going to","ive":"i have","isnt":"is not","itd":"it would","itll":"it shall","its":"it is","lets":"let us","lil":"little","maam":"madam","maynt":"may not","mayve":"may have","mightnt":"might not","mightve":"might have","mustnt":"must not","mustntve":"must not have","mustve":"must have","neednt":"need not","neer":"never","oclock":"of the clock","oer":"over","ol":"old","oughtnt":"ought not","shallnt":"shall not","shant":"shall not","shed":"she had","shell":"she shall","shes":"she is","shouldve":"should have","shouldnt":"should not","shouldntve":"should not have","somebodys":"somebody is","someones":"someone is","somethings":"something is","thatll":"that shall","thatre":"that are","thatd":"that would","thered":"there had","therell":"there shall","therere":"there are","theres":"there is","thesere":"these are","theseve":"these have","theyd":"they had","theyll":"they shall","theyre":"they are","theyve":"they have","thiss":"this is","thosere":"those are","thoseve":"those have","tis":"it is","tove":"to have","twas":"it was","wasnt":"was not","wed":"we had","wedve":"we would have","were":"we are","weve":"we have","werent":"were not","whatd":"what did","whatll":"what shall","whatre":"what are","whats":"what is","whatve":"what have","whens":"when is","whered":"where did","wherell":"where shall","wherere":"where are","wheres":"where is","whereve":"where have","whichd":"which had","whichll":"which shall","whichre":"which are","whichs":"which is","whichve":"which have","whod":"who would","whodve":"who would have","wholl":"who shall","whore":"who are","whos":"who is","whove":"who have","whyd":"why did","whyre":"why are","whys":"why is","wont":"will not","wouldve":"would have","wouldnt":"would not","wouldntve":"would not have","yall":"you all","yalldve":"you all would have","yallre":"you all are","youd":"you had","youll":"you shall","youre":"you are","youve":"you have","'re":"are","thats":"that is"}
disaster.jpg ADDED
requirments.txt.txt ADDED
@@ -0,0 +1,7 @@
+ streamlit==1.24.1
+ Pillow==9.4.0
+ tensorflow==2.13.0
+ nltk==3.7
+ pandas==1.5.3
+ numpy==1.23.5
+ regex==2022.7.9
stop_words.csv ADDED
@@ -0,0 +1,451 @@
+ 0
+ nowhere
+ everything
+ via
+ two
+ hereafter
+ him
+ both
+ by
+ d
+ did
+ how
+ nevertheless
+ in case
+ anything
+ once
+ doesn
+ in order that
+ 's
+ ourselves
+ several
+ whether or not
+ least
+ above
+ û
+ was
+ ten
+ for
+ becoming
+ less
+ than
+ herself
+ next
+ you'll
+ wasn
+ other
+ outside
+ as
+ our
+ whole
+ her
+ i
+ yourselves
+ as soon as
+ namely
+ within
+ cannot
+ hasn
+ almost
+ should
+ 've
+ those
+ hers
+ each
+ now
+ wasn't
+ whereupon
+ themselves
+ needn't
+ well
+ n
+ wherever
+ ve
+ shouldn
+ became
+ doesn't
+ none
+ their
+ between
+ name
+ onto
+ so
+ front
+ per
+ ‘re
+ over
+ nothing
+ aboard
+ couldn't
+ still
+ ã
+ ‘s
+ more
+ v
+ that'll
+ us
+ mostly
+ ûª
+ c
+ somewhere
+ this
+ we
+ can
+ ca
+ been
+ nobody
+ too
+ supposing
+ hundred
+ fifty
+ whenever
+ doing
+ ‘d
+ former
+ shan
+ as much as
+ towards
+ seem
+ besides
+ during
+ toward
+ concerning
+ anyone
+ into
+ made
+ again
+ wouldn't
+ you've
+ ’d
+ hereby
+ its
+ below
+ others
+ isn't
+ except
+ down
+ on
+ 're
+ four
+ like
+ y
+ any
+ using
+ 'm
+ back
+ else
+ because
+ who
+ ûªm
+ am
+ being
+ twenty
+ aren
+ had
+ it's
+ she
+ formerly
+ quite
+ three
+ in
+ do
+ itself
+ something
+ r
+ amid
+ them
+ call
+ whose
+ hasn't
+ in the event that
+ anywhere
+ therein
+ around
+ of
+ sometime
+ ûó
+ due
+ put
+ unlike
+ near
+ won't
+ there
+ didn't
+ may
+ no
+ and
+ a
+ enough
+ that
+ isn
+ forty
+ s
+ t
+ anyway
+ haven't
+ u
+ shan't
+ despite
+ along
+ meanwhile
+ by the time
+ would
+ seeming
+ only if
+ might
+ e
+ one
+ could
+ if
+ somehow
+ having
+ it
+ anyhow
+ indeed
+ beneath
+ even
+ full
+ hence
+ you
+ seems
+ ‘ve
+ x
+ while
+ you're
+ whither
+ unless
+ his
+ used
+ provided that
+ as if
+ please
+ is
+ further
+ top
+ go
+ behind
+ noone
+ myself
+ are
+ out
+ eleven
+ with
+ must
+ have
+ therefore
+ they
+ afterwards
+ whom
+ amount
+ z
+ n't
+ various
+ ‘m
+ also
+ p
+ mustn
+ f
+ inside
+ ‘ll
+ upon
+ will
+ always
+ together
+ really
+ twelve
+ whether
+ ì
+ as though
+ neither
+ perhaps
+ ûò
+ which
+ thence
+ didn
+ ma
+ under
+ ûªve
+ everywhere
+ much
+ give
+ part
+ were
+ b
+ empty
+ thru
+ same
+ herein
+ needn
+ moreover
+ beforehand
+ many
+ or
+ often
+ not
+ å
+ o
+ should've
+ where
+ bottom
+ all
+ already
+ latter
+ before
+ your
+ six
+ otherwise
+ my
+ ûªs
+ don't
+ someone
+ becomes
+ minus
+ whatever
+ has
+ just
+ regarding
+ nor
+ weren
+ mustn't
+ since
+ ’re
+ now that
+ show
+ elsewhere
+ through
+ he
+ mightn
+ up
+ such
+ j
+ keep
+ 'll
+ done
+ m
+ ’ve
+ till
+ beside
+ amongst
+ theirs
+ only
+ does
+ whereafter
+ latterly
+ sometimes
+ an
+ why
+ get
+ mine
+ until
+ w
+ the
+ without
+ although
+ me
+ thus
+ some
+ ûªre
+ across
+ but
+ ’m
+ k
+ most
+ ll
+ five
+ off
+ very
+ everyone
+ among
+ n‘t
+ haven
+ l
+ first
+ from
+ underneath
+ throughout
+ weren't
+ whereby
+ become
+ after
+ hadn
+ ’s
+ take
+ what
+ lest
+ won
+ see
+ as long as
+ eight
+ sixty
+ though
+ seemed
+ ours
+ you'd
+ thereby
+ at
+ against
+ whereas
+ every
+ ain
+ make
+ ûówe
+ beyond
+ say
+ own
+ rather
+ g
+ even though
+ himself
+ thereupon
+ yours
+ aren't
+ shouldn't
+ q
+ however
+ fifteen
+ alone
+ last
+ versus
+ mightn't
+ here
+ hadn't
+ another
+ yourself
+ thereafter
+ about
+ to
+ whence
+ wherein
+ wouldn
+ whoever
+ serious
+ side
+ round
+ n’t
+ then
+ ’ll
+ considering
+ plus
+ h
+ re
+ either
+ yet
+ even if
+ nine
+ when
+ move
+ be
+ couldn
+ 'd
+ following
+ ever
+ ï
+ ûï
+ third
+ don
+ she's
+ few
+ these
+ ìñ
+ hereupon
+ never
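Note that app.py reads this file with pd.read_csv, so the first line ("0") is treated as the column header and is not itself used as a stop word. A quick illustrative check:

import pandas as pd

# Flatten the single-column CSV into a plain Python list, as app.py does.
stops = list(pd.read_csv("stop_words.csv").values.flatten())
print(len(stops))          # expected 450: 451 lines minus the header row
print("nowhere" in stops)  # True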
tweet_model/fingerprint.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dbc847804566269a4753f5e1d54a6121fc9fa197219bb176581fc97c93352772
+ size 55
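The tweet_model entries here are Git LFS pointer files rather than the binaries themselves: three "key value" lines giving the spec version, the sha256 object ID, and the payload size in bytes. A minimal sketch for reading one (parse_lfs_pointer is a hypothetical helper, assuming the three-line layout shown above):

def parse_lfs_pointer(path: str) -> dict:
    # Each pointer line is "key value"; oid carries a "sha256:<hex>" prefix.
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

info = parse_lfs_pointer("tweet_model/fingerprint.pb")
print(info["oid"], int(info["size"]))  # sha256:dbc8... 55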
tweet_model/keras_metadata.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25c16a1606b2ab8dd224e89758af5d8ad5d9c22089321516f164e62ab2ec2bf1
+ size 7583
tweet_model/saved_model.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d59fdbb8ccdab13ea1460bc145f10f7c6641bbb4c8854038dec4d9bbba7fc6ca
+ size 8895117
tweet_model/variables/variables.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7936125c6eda8d065b6a39e3e258bf772a402962e62bc07ab416c35d5639e414
+ size 1027644334
tweet_model/variables/variables.index ADDED
Binary file (13.3 kB).