IsmaelMousa committed on
Commit
6352a01
1 Parent(s): cfb8232

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +21 -0
  2. main.py +25 -0
  3. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Python base image matching the pinned versions in requirements.txt.
FROM python:3.9

WORKDIR /app

# Copy and install dependencies first so this layer is cached
# independently of application-code changes.
COPY requirements.txt /app/requirements.txt

RUN pip install --upgrade pip --no-cache-dir && \
    pip install --no-cache-dir -r /app/requirements.txt

# Create an unprivileged runtime user. -m creates /home/user so that
# $HOME, pip's user site (~/.local) and the Hugging Face model cache
# (~/.cache) are writable — without it, the transformers pipeline
# fails at runtime when it tries to download the model.
RUN useradd -m user

USER user

ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

# Copy the application, owned by the runtime user.
COPY --chown=user . $HOME/app

# 7860 is the conventional Hugging Face Spaces port.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
main.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI
from transformers import pipeline

# FastAPI application exposing a minimal text-generation HTTP API.
app = FastAPI()
# Module-level text-generation pipeline (GPT-2 large, PyTorch backend).
# NOTE: constructed eagerly at import time, so server startup blocks
# until the model is downloaded and loaded.
pipe = pipeline(task="text-generation", model="gpt2-large", framework="pt")
6
+
7
+
8
+ @app.get("/")
9
+ def root():
10
+ """
11
+ Returns home page.
12
+ """
13
+ return {"message": "Hello Ismael"}
14
+
15
+
16
+ @app.get("/generate")
17
+ def generate(text: str):
18
+ """
19
+ Using the text-generation pipeline from `transformers`, generate text
20
+ from the given input text. The model used is `openai-community/gpt2-large`, which
21
+ can be found [here](<https://huggingface.co/openai-community/gpt2-large>).
22
+ """
23
+ output = pipe(text)
24
+
25
+ return {"output": output[0]["generated_text"]}
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi==0.74.*
2
+ requests==2.27.*
3
+ uvicorn[standard]==0.17.*
4
+ sentencepiece==0.1.*
5
+ torch==1.11.*
6
+ transformers==4.*