cybertrapped sharpbai committed
Commit 9dbf814 (0 parents)

Duplicate from sharpbai/text_generation


Co-authored-by: Tim Bai <sharpbai@users.noreply.huggingface.co>

Files changed (7)
  1. .gitattributes +31 -0
  2. DESCRIPTION.md +1 -0
  3. README.md +11 -0
  4. app.py +22 -0
  5. requirements.txt +3 -0
  6. run.ipynb +1 -0
  7. run.py +22 -0
.gitattributes ADDED
@@ -0,0 +1,31 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
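
These are the stock LFS rules that Hugging Face repositories start with: each pattern routes matching files (model weights, archives, serialized arrays, and so on) through Git LFS instead of storing them directly in Git. They are the same kind of entries that git lfs track writes; for example, running git lfs track "*.bin" in a checkout appends the *.bin line shown above.
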
DESCRIPTION.md ADDED
@@ -0,0 +1 @@
+ This text generation demo takes in input text and returns generated text. It uses the Transformers library to set up the model and has two examples.
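
For reference, the Transformers call that the demo wraps is a one-liner around the text-generation pipeline. A minimal sketch of the same call outside of Gradio, assuming gpt2 and the settings used in run.py below:

    # Sketch only, not part of the commit: the pipeline returns a list of dicts,
    # one per generated sequence, each with a "generated_text" key.
    from transformers import pipeline

    generator = pipeline("text-generation", model="gpt2")
    result = generator("The Moon's orbit around Earth has",
                       max_length=30, num_return_sequences=1)
    print(result[0]["generated_text"])
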
README.md ADDED
@@ -0,0 +1,11 @@
+ ---
+ title: text_generation
+ emoji: 🔥
+ colorFrom: indigo
+ colorTo: indigo
+ sdk: gradio
+ sdk_version: 3.33.1
+ app_file: run.py
+ pinned: false
+ duplicated_from: sharpbai/text_generation
+ ---
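
This header is the Spaces configuration: sdk: gradio with sdk_version: 3.33.1 selects the runtime, app_file: run.py is the script the Space executes on startup, and duplicated_from records the Space this one was copied from.
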
app.py ADDED
@@ -0,0 +1,22 @@
+ import gradio as gr
+ from transformers import pipeline
+
+ generator = pipeline('text-generation', model='gpt2')
+
+ def generate(text):
+     result = generator(text, max_length=256, num_return_sequences=1)
+     return result[0]["generated_text"]
+
+ examples = [
+     ["The Moon's orbit around Earth has"],
+     ["The smooth Borealis basin in the Northern Hemisphere covers 40%"],
+ ]
+
+ demo = gr.Interface(
+     fn=generate,
+     inputs=gr.inputs.Textbox(lines=5, label="Input Text"),
+     outputs=gr.outputs.Textbox(label="Generated Text"),
+     examples=examples
+ )
+
+ demo.launch()
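
Run locally with python app.py; once the GPT-2 weights are downloaded, Gradio serves the interface on a local URL (http://127.0.0.1:7860 by default). The gr.inputs.Textbox / gr.outputs.Textbox namespaces are the older Gradio component style; they still work on the pinned 3.33.1 but are deprecated there. A sketch of the equivalent Interface with the plain component classes:

    # Sketch only, not part of the commit: the same Interface written with the
    # plain component classes (gr.Textbox), which Gradio 3.x also accepts.
    # Reuses generate and examples as defined in app.py above.
    demo = gr.Interface(
        fn=generate,
        inputs=gr.Textbox(lines=5, label="Input Text"),
        outputs=gr.Textbox(label="Generated Text"),
        examples=examples,
    )
    demo.launch()
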
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ git+https://github.com/huggingface/transformers
+ gradio
+ torch
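
To reproduce the environment locally, these can be installed with pip install -r requirements.txt; note that transformers is taken from the GitHub repository's default branch rather than a tagged PyPI release, so the exact version depends on when the install runs.
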
run.ipynb ADDED
@@ -0,0 +1 @@
+ {"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: text_generation\n", "### This text generation demo takes in input text and returns generated text. It uses the Transformers library to set up the model and has two examples.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio git+https://github.com/huggingface/transformers gradio torch"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "from transformers import pipeline\n", "\n", "generator = pipeline('text-generation', model='gpt2')\n", "\n", "def generate(text):\n", " result = generator(text, max_length=30, num_return_sequences=1)\n", " return result[0][\"generated_text\"]\n", "\n", "examples = [\n", " [\"The Moon's orbit around Earth has\"],\n", " [\"The smooth Borealis basin in the Northern Hemisphere covers 40%\"],\n", "]\n", "\n", "demo = gr.Interface(\n", " fn=generate,\n", " inputs=gr.inputs.Textbox(lines=5, label=\"Input Text\"),\n", " outputs=gr.outputs.Textbox(label=\"Generated Text\"),\n", " examples=examples\n", ")\n", "\n", "demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
run.py ADDED
@@ -0,0 +1,22 @@
+ import gradio as gr
+ from transformers import pipeline
+
+ generator = pipeline('text-generation', model='gpt2')
+
+ def generate(text):
+     result = generator(text, max_length=30, num_return_sequences=1)
+     return result[0]["generated_text"]
+
+ examples = [
+     ["The Moon's orbit around Earth has"],
+     ["The smooth Borealis basin in the Northern Hemisphere covers 40%"],
+ ]
+
+ demo = gr.Interface(
+     fn=generate,
+     inputs=gr.inputs.Textbox(lines=5, label="Input Text"),
+     outputs=gr.outputs.Textbox(label="Generated Text"),
+     examples=examples
+ )
+
+ demo.launch()
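
run.py is identical to app.py except for max_length (30 here versus 256 in app.py). Because the README sets app_file: run.py, this shorter-generation variant is the one the hosted Space actually serves; run.ipynb packages the same code as a notebook.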