king007 ankrgyl commited on
Commit
cd82b64
0 Parent(s):

Duplicate from impira/flan-playground

Browse files

Co-authored-by: Ankur Goyal <ankrgyl@users.noreply.huggingface.co>

Files changed (7) hide show
  1. .flake8 +3 -0
  2. .gitignore +5 -0
  3. .isort.cfg +6 -0
  4. .pre-commit-config.yaml +29 -0
  5. README.md +11 -0
  6. app.py +33 -0
  7. requirements.txt +2 -0
.flake8 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ [flake8]
2
+ max-line-length = 119
3
+ ignore = E402, E203, E501, W503
.gitignore ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ venv
2
+ *.swo
3
+ *.swp
4
+ *.pyc
5
+ flagged_data_points
.isort.cfg ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ [settings]
2
+ line_length=119
3
+ multi_line_output=3
4
+ use_parentheses=true
5
+ lines_after_imports=2
6
+ include_trailing_comma=True
.pre-commit-config.yaml ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ repos:
2
+ - repo: "https://github.com/pre-commit/pre-commit-hooks"
3
+ rev: v4.3.0
4
+ hooks:
5
+ - id: check-yaml
6
+ - id: end-of-file-fixer
7
+ - id: trailing-whitespace
8
+ - repo: "https://github.com/psf/black"
9
+ rev: 22.6.0
10
+ hooks:
11
+ - id: black
12
+ files: ./
13
+ - repo: "https://github.com/PyCQA/isort"
14
+ rev: 5.10.1
15
+ hooks:
16
+ - id: isort
17
+ args:
18
+ - --settings-path
19
+ - .isort.cfg
20
+ files: ./
21
+ - repo: https://github.com/codespell-project/codespell
22
+ rev: v2.2.1
23
+ hooks:
24
+ - id: codespell
25
+
26
+ - repo: https://github.com/pre-commit/mirrors-prettier
27
+ rev: v2.7.1
28
+ hooks:
29
+ - id: prettier
README.md ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Flan Playground
3
+ emoji: 🍮
4
+ colorFrom: green
5
+ colorTo: green
6
+ sdk: gradio
7
+ sdk_version: 3.2
8
+ app_file: app.py
9
+ pinned: true
10
+ duplicated_from: impira/flan-playground
11
+ ---
app.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import pipeline
2
+ import gradio as gr
3
+
4
# Cache of already-constructed pipelines, keyed by model size ("small", "base", ...).
# Models are expensive to download and load, so each is built at most once.
PIPELINES = {}


def build_pipeline(size):
    """Return a memoized text2text-generation pipeline for google/flan-t5-<size>.

    Args:
        size: Model size suffix, e.g. "small", "base", "large", "xl".

    Returns:
        The cached transformers pipeline for that size (max_length=256),
        constructing and caching it on first use.
    """
    # NOTE: no `global` statement needed — PIPELINES is mutated, never rebound.
    if size not in PIPELINES:
        PIPELINES[size] = pipeline(
            "text2text-generation", model=f"google/flan-t5-{size}", max_length=256
        )
    return PIPELINES[size]
16
+
17
+
18
def greet(input_text, size):
    """Run the user's prompt through the flan-t5 model of the chosen size.

    Args:
        input_text: The raw task/prompt text entered by the user.
        size: Model size suffix passed through to build_pipeline().

    Returns:
        The generated text from the first (and only) pipeline result.
    """
    results = build_pipeline(size)(input_text)
    return results[0]["generated_text"]
21
+
22
+
23
# Gradio UI: a two-line prompt box plus a model-size selector, wired to greet().
prompt_box = gr.Textbox(lines=2, placeholder="Enter your task text...")
size_selector = gr.Radio(choices=["small", "base", "large", "xl"], value="base")
output_box = gr.Textbox(lines=2)

demo = gr.Interface(
    fn=greet,
    inputs=[prompt_box, size_selector],
    outputs=[output_box],
)

if __name__ == "__main__":
    demo.launch()
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ torch
2
+ transformers==4.22.2