liamebs committed on
Commit
dacb921
1 Parent(s): a799ad1

Initial applications setup

Files changed (2)
  1. app.py +99 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,99 @@
+ #!/usr/bin/env python
+ # coding: utf-8
+
+ # # L1: NLP tasks with a simple interface 🗞️
+
+ # Load your HF API key and relevant Python libraries.
+
+ # In[1]:
+
+
+ import os
+ import io
+ from IPython.display import Image, display, HTML
+ from PIL import Image
+ import base64
+
+ # removed dotenv and hf key requirements to see how HF Spaces handles it
+
+ # In[2]:
+
+
+ # Helper function
+ import requests, json
+
+ # Summarization endpoint
+ from transformers import pipeline
+
+ get_completion = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
+
+ def summarize(input):
+     output = get_completion(input)
+     return output[0]['summary_text']
+
+
+ # ## Building a text summarization app
+
+ # Here we are using an [Inference Endpoint](https://huggingface.co/inference-endpoints) for `sshleifer/distilbart-cnn-12-6`, a 306M-parameter model distilled from `facebook/bart-large-cnn`.
+
+ # ### How about running it locally?
+ # The code would look very similar if you were running it locally instead of from an API. The same is true for all the models in the rest of the course; make sure to check the [Pipelines](https://huggingface.co/docs/transformers/main_classes/pipelines) documentation page.
+ #
+ # ```py
+ # from transformers import pipeline
+ #
+ # get_completion = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
+ #
+ # def summarize(input):
+ #     output = get_completion(input)
+ #     return output[0]['summary_text']
+ #
+ # ```
+
+ # In[3]:
+
+
+ text = ('''The tower is 324 metres (1,063 ft) tall, about the same height
+ as an 81-storey building, and the tallest structure in Paris.
+ Its base is square, measuring 125 metres (410 ft) on each side.
+ During its construction, the Eiffel Tower surpassed the Washington
+ Monument to become the tallest man-made structure in the world,
+ a title it held for 41 years until the Chrysler Building
+ in New York City was finished in 1930. It was the first structure
+ to reach a height of 300 metres. Due to the addition of a broadcasting
+ aerial at the top of the tower in 1957, it is now taller than the
+ Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the
+ Eiffel Tower is the second tallest free-standing structure in France
+ after the Millau Viaduct.''')
+
+ get_completion(text)
+
+
+ # ### Getting started with Gradio `gr.Interface`
+ #
+ # #### How about running it locally?
+ # The code would look very similar if you were running it locally. Simply remove all the parameters in the launch method:
+ #
+ # ```py
+ # demo.launch()
+ # ```
+
+
+
+ import gradio as gr
+
+ def summarize(input):
+     output = get_completion(input)
+     return output[0]['summary_text']
+
+ gr.close_all()
+ demo = gr.Interface(fn=summarize,
+                     inputs=[gr.Textbox(label="Text to summarize", lines=6)],
+                     outputs=[gr.Textbox(label="Result", lines=3)],
+                     title="Text summarization with distilbart-cnn",
+                     description="Summarize any text using the `sshleifer/distilbart-cnn-12-6` model under the hood!"
+                     )
+ demo.launch()
+
+ gr.close_all()
+
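The comments in app.py above still reference the course's hosted [Inference Endpoint](https://huggingface.co/inference-endpoints) setup, while the committed code loads the summarization pipeline locally. For context only, here is a minimal sketch of what an API-backed helper might look like instead; the `API_URL`, the `HF_API_TOKEN` environment variable, and the `get_completion_api`/`summarize_api` names are illustrative assumptions, not part of this commit.

```py
# Illustrative only -- not part of this commit. Calls the hosted
# Inference API rather than loading the model with transformers.
import os
import requests

API_URL = "https://api-inference.huggingface.co/models/sshleifer/distilbart-cnn-12-6"
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "")  # assumed env var name

def get_completion_api(inputs):
    # The API returns a list such as [{"summary_text": "..."}]
    response = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {HF_API_TOKEN}"},
        json={"inputs": inputs},
    )
    response.raise_for_status()
    return response.json()

def summarize_api(text):
    return get_completion_api(text)[0]["summary_text"]
```

With that variant, the dotenv/API-key handling this commit removed would be needed again, for example as a Space secret.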
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ transformers
+ IPython
+ torch
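requirements.txt lists only transformers, IPython, and torch; gradio itself (and Pillow for the `from PIL import Image` line) are presumably supplied by the Space's Gradio SDK image, so a fully local run would likely need those installed as well. Once `demo.launch()` is up, the Interface can also be exercised from Python; below is a minimal sketch using `gradio_client`, assuming the default local address and the default `/predict` endpoint name (not part of this commit).

```py
# Illustrative only -- query the locally launched demo programmatically.
from gradio_client import Client

client = Client("http://127.0.0.1:7860")  # Gradio's default local address
result = client.predict(
    "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building.",
    api_name="/predict",  # default endpoint for a single gr.Interface
)
print(result)  # the summary text returned by summarize()
```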