harry85 committed
Commit c57403a · verified · 1 Parent(s): 488eae7

Upload 4 files

Files changed (4)
  1. Dockerfile +27 -0
  2. README.md +4 -4
  3. app.py +25 -0
  4. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,27 @@
+ # Use the official Python 3.9 image
+ FROM python:3.9
+
+ # Set the working directory to /code
+ WORKDIR /code
+
+ # Copy the requirements file into the container at /code
+ COPY ./requirements.txt /code/requirements.txt
+
+ # Install the dependencies listed in requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ # Set up a new user named "user" with user ID 1000
+ RUN useradd -m -u 1000 user
+ # Switch to the "user" user
+ USER user
+ # Set home to the user's home directory
+ ENV HOME=/home/user \
+     PATH=/home/user/.local/bin:$PATH
+
+ # Set the working directory to the user's home directory
+ WORKDIR $HOME/app
+
+ # Copy the current directory contents into the container at $HOME/app, setting the owner to the user
+ COPY --chown=user . $HOME/app
+
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
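The final CMD line is what the Space actually runs: uvicorn serving app:app on port 7860, the default application port for Docker Spaces. For iterating outside the container, a minimal local entry point could look like the sketch below (run_local.py is a hypothetical helper, not part of this commit; it simply mirrors the CMD arguments).

# run_local.py - hypothetical helper mirroring the container CMD
# (uvicorn app:app --host 0.0.0.0 --port 7860); not part of this commit.
import uvicorn

if __name__ == "__main__":
    # reload=True is convenient while editing locally; the container runs without it.
    uvicorn.run("app:app", host="0.0.0.0", port=7860, reload=True)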
README.md CHANGED
@@ -1,8 +1,8 @@
  ---
- title: LLM Text Generation
- emoji: 👀
- colorFrom: red
- colorTo: pink
+ title: Text Generation
+ emoji: 🌍
+ colorFrom: green
+ colorTo: yellow
  sdk: docker
  pinned: false
  license: mit
app.py ADDED
@@ -0,0 +1,25 @@
+ from fastapi import FastAPI
+ from transformers import pipeline
+
+
+ # NOTE - we configure docs_url to serve the interactive docs at the root path
+ # of the app. This way, we can use the docs as a landing page for the app on Spaces.
+ app = FastAPI(docs_url="/")
+
+ pipe = pipeline("text2text-generation", model="google/flan-t5-small")
+
+
+ @app.get("/")
+ def greet_json():
+     return {"message": "working..."}
+
+
+ @app.get("/generate")
+ def generate(text: str):
+     """
+     Using the text2text-generation pipeline from `transformers`, generate text
+     from the given input text. The model used is `google/flan-t5-small`, which
+     can be found [here](https://huggingface.co/google/flan-t5-small).
+     """
+     output = pipe(text)
+     return {"output": output[0]["generated_text"]}
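Once the container is running, the /generate endpoint can be exercised with the requests package already pinned in requirements.txt. A minimal client sketch, assuming the app is reachable on localhost:7860 as in the Dockerfile CMD (client_example.py and the prompt text are illustrative only):

# client_example.py - hypothetical usage sketch, not part of this commit.
import requests

# Query the /generate endpoint; the prompt below is just an example input.
response = requests.get(
    "http://localhost:7860/generate",
    params={"text": "Translate English to French: Hello, world!"},
)
response.raise_for_status()
print(response.json()["output"])  # the generated_text returned by the pipeline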
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ fastapi==0.74.*
+ requests==2.27.*
+ uvicorn[standard]==0.17.*
+ sentencepiece==0.1.*
+ torch==1.11.*
+ transformers==4.*
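The wildcard pins (==X.Y.*) hold each dependency to a version prefix while still allowing newer releases within it, so the exact versions are resolved at image build time. A quick sanity check inside the built image can confirm what actually got installed; the sketch below (check_versions.py, a hypothetical helper not in this commit) uses importlib.metadata from the standard library.

# check_versions.py - hypothetical helper, not part of this commit.
# Prints the installed version of each dependency pinned in requirements.txt.
from importlib.metadata import PackageNotFoundError, version

for package in ("fastapi", "requests", "uvicorn", "sentencepiece", "torch", "transformers"):
    try:
        print(f"{package}=={version(package)}")
    except PackageNotFoundError:
        print(f"{package} is not installed")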