liamebs committed
Commit fbccc42
1 Parent(s): 148af61

Misc. changes

Files changed (1)
  1. app.py +1 -200
app.py CHANGED
@@ -13,52 +13,16 @@ import io
from IPython.display import Image, display, HTML
from PIL import Image
import base64
-
- # removed dotenv and hf key requirements to see how HF Spaces handles it
-
- # In[2]:
+ import gradio as gr


# Helper function
import requests, json

- # Summarization endpoint
- from transformers import pipeline
-
- get_completion = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
-
- def summarize(input):
-     output = get_completion(input)
-     return output[0]['summary_text']
-
-
-
- # ## Building a Named Entity Recognition app
-
- # We are using this [Inference Endpoint](https://huggingface.co/inference-endpoints) for `dslim/bert-base-NER`, a 108M parameter BERT model fine-tuned for the NER task.
-
- # ### How about running it locally?
- #
- # ```py
- # from transformers import pipeline
- #
- # get_completion = pipeline("ner", model="dslim/bert-base-NER")
- #
- # def ner(input):
- #     output = get_completion(input)
- #     return {"text": input, "entities": output}
- #
- # ```
-
- # In[8]:
-
from transformers import pipeline
get_completion = pipeline("ner", model="dslim/bert-base-NER")


- # In[12]:
-
-
def ner(input):
    output = get_completion(input)
    return {"text": input, "entities": output}
@@ -74,166 +38,3 @@ demo = gr.Interface(fn=ner,
                    examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
demo.launch()

-
- # ### Adding a helper function to merge tokens
-
- # In[17]:
-
-
- def merge_tokens(tokens):
-     merged_tokens = []
-     for token in tokens:
-         if merged_tokens and token['entity'].startswith('I-') and merged_tokens[-1]['entity'].endswith(token['entity'][2:]):
-             # If current token continues the entity of the last one, merge them
-             last_token = merged_tokens[-1]
-             last_token['word'] += token['word'].replace('##', '')
-             last_token['end'] = token['end']
-             last_token['score'] = (last_token['score'] + token['score']) / 2
-         else:
-             # Otherwise, add the token to the list
-             merged_tokens.append(token)
-
-     return merged_tokens
-
- def ner(input):
-     output = get_completion(input)
-     merged_tokens = merge_tokens(output)
-     return {"text": input, "entities": merged_tokens}
-
- gr.close_all()
- demo = gr.Interface(fn=ner,
-                     inputs=[gr.Textbox(label="Text to find entities", lines=2)],
-                     outputs=[gr.HighlightedText(label="Text with entities")],
-                     title="NER with dslim/bert-base-NER",
-                     description="Find entities using the `dslim/bert-base-NER` model under the hood!",
-                     allow_flagging="never",
-                     examples=["My name is Andrew, I'm building DeeplearningAI and I live in California", "My name is Poli, I live in Vienna and work at HuggingFace"])
-
- demo.launch()
-
-
- # In[15]:
-
-
- gr.close_all()
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
-
- # In[ ]:
-
-
-
-
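Net effect of the two hunks: the summarization endpoint, the leftover notebook-cell markers, and the duplicate merged-token interface are removed, and a single `import gradio as gr` is added (hence the +1 -200 summary above). Pieced together from the context and added lines, the post-commit app.py reduces to roughly the sketch below. The `gr.Interface(...)` arguments sitting between the two hunks (new lines 29-37) are not visible in this diff, so the ones shown here are borrowed from the removed duplicate block and should be read as an assumption, not as the file's actual contents.

```py
# Rough reconstruction of app.py after this commit, assembled from the
# context and added lines of the diff. Anything outside the two hunks
# (e.g. `import io` and the exact gr.Interface arguments) is assumed.
from IPython.display import Image, display, HTML
from PIL import Image
import base64
import gradio as gr

# Helper function
import requests, json

from transformers import pipeline
get_completion = pipeline("ner", model="dslim/bert-base-NER")

def ner(input):
    output = get_completion(input)
    return {"text": input, "entities": output}

demo = gr.Interface(fn=ner,
                    # the arguments below are borrowed from the removed block; the diff does not show them
                    inputs=[gr.Textbox(label="Text to find entities", lines=2)],
                    outputs=[gr.HighlightedText(label="Text with entities")],
                    title="NER with dslim/bert-base-NER",
                    allow_flagging="never",
                    examples=["My name is Andrew and I live in California", "My name is Poli and work at HuggingFace"])
demo.launch()
```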
 
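The only non-trivial logic this commit drops is the `merge_tokens` helper. The raw `ner` pipeline emits one dict per WordPiece sub-token, tagged `B-*`/`I-*` with `##` marking word continuations, and the helper folded each `I-` continuation back into the preceding entity: concatenating the word pieces, extending the character span, and averaging the scores. A minimal, self-contained illustration of that behaviour (the helper is copied from the removed lines; the token dicts are hand-written to mimic the pipeline's output shape, with made-up scores):

```py
# merge_tokens as removed by this commit, exercised on hand-written tokens
# shaped like the raw output of pipeline("ner", model="dslim/bert-base-NER").
def merge_tokens(tokens):
    merged_tokens = []
    for token in tokens:
        if merged_tokens and token['entity'].startswith('I-') and merged_tokens[-1]['entity'].endswith(token['entity'][2:]):
            # Continuation of the previous entity: append the sub-word, extend the span, average the score
            last_token = merged_tokens[-1]
            last_token['word'] += token['word'].replace('##', '')
            last_token['end'] = token['end']
            last_token['score'] = (last_token['score'] + token['score']) / 2
        else:
            # Otherwise, start a new entity
            merged_tokens.append(token)
    return merged_tokens

tokens = [
    {'entity': 'B-PER', 'score': 0.998, 'index': 4, 'word': 'Po',   'start': 11, 'end': 13},
    {'entity': 'I-PER', 'score': 0.996, 'index': 5, 'word': '##li', 'start': 13, 'end': 15},
]
print(merge_tokens(tokens))
# -> [{'entity': 'B-PER', 'score': 0.997, 'index': 4, 'word': 'Poli', 'start': 11, 'end': 15}]
```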