MohamedSaeed-dev committed
Commit 362b7c9
1 Parent(s): 208371b

Upload 3 files

Files changed (3)
  1. Dockerfile +25 -0
  2. app.py +19 -0
  3. requirements.txt +4 -0
Dockerfile ADDED
@@ -0,0 +1,25 @@
+ # Use an official Python runtime as a parent image
+ FROM python:3.9-slim
+
+ # Set the working directory in the container
+ WORKDIR /code
+
+ # Copy the requirements file into the container at /code
+ COPY ./requirements.txt /code/requirements.txt
+
+ # Install any needed packages specified in requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ RUN useradd user
+
+ USER user
+
+ ENV HOME=/home/user \
+     PATH=/home/user/.local/bin:$PATH
+
+ WORKDIR $HOME/app
+
+ COPY --chown=user . $HOME/app
+
+ # Run the FastAPI app with uvicorn when the container launches
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
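For quick local testing outside the container, the start-up performed by the CMD line above can be reproduced from Python. The snippet below is only a sketch and not part of the commit; it assumes uvicorn and the other packages from requirements.txt are installed locally, and the file name run_local.py is hypothetical.

# run_local.py -- hypothetical helper, not part of this commit.
# Mirrors the Dockerfile CMD: uvicorn app:app --host 0.0.0.0 --port 7860
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app:app", host="0.0.0.0", port=7860)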
app.py ADDED
@@ -0,0 +1,19 @@
+ from fastapi import FastAPI
+ from transformers import pipeline
+
+ app = FastAPI()
+
+
+ pipe = pipeline("text-generation", model="MohamedSaeed-dev/gemma-2b-1500steps")
+
+ @app.post("/generate/")
+ async def generate_text(text: str):
+     messages = [
+         {"role": "user", "content": text},
+     ]
+     output = pipe(messages)
+     return {"output": output[0]["generated_text"]}
+
+ @app.get("/")
+ async def root():
+     return {"message": "Welcome to the LLM API!"}
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ fastapi
+ uvicorn
+ torch
+ transformers