# syntax=docker/dockerfile:1

# Base image using the specified Python version
ARG PYTHON_VERSION=3.12.3
FROM python:${PYTHON_VERSION}-slim AS base

# Set environment variables to prevent Python from writing .pyc files
# and to keep Python from buffering stdout and stderr
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Set working directory inside the container
WORKDIR /app

# Install TensorFlow and Keras (TensorFlow 2.16 bundles Keras 3, so tf-keras is not needed as a separate package)
# Using a single RUN command and --no-cache-dir to keep the image small
RUN pip install --no-cache-dir tensorflow==2.16.1 keras

# Create a non-privileged user to run the application,
# following Docker's guidance on not running containers as root
ARG UID=10001
RUN adduser --disabled-password --gecos "" --home "/nonexistent" \
    --shell "/sbin/nologin" --no-create-home --uid "${UID}" appuser

# Prepare environment for Transformers and NLTK
# Creating cache directories with appropriate permissions
ENV TRANSFORMERS_CACHE=/tmp/.cache/huggingface \
    NLTK_DATA=/tmp/nltk_data
RUN mkdir -p $TRANSFORMERS_CACHE $NLTK_DATA && chmod -R 777 $TRANSFORMERS_CACHE $NLTK_DATA

# Copy only the requirements file and install dependencies first,
# so this layer stays cached unless requirements.txt itself changes
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
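# For reference, a hypothetical requirements.txt consistent with this image
# (an ASGI app served by uvicorn, plus Transformers and NLTK) might look like
# the following; the real file comes from the build context and may differ:
#
#   fastapi
#   uvicorn[standard]
#   transformers
#   nltk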

# Copy the rest of the application source code into the container
COPY . .

# Switch to the non-privileged user for running the application
USER appuser

# Expose the port that the application will listen on
EXPOSE 7860

# Command to run the application using uvicorn
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]