FROM python:3.9
WORKDIR /app
# Install system dependencies: git and build-essential to fetch and compile llama.cpp, wget to download the model
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    wget \
    && rm -rf /var/lib/apt/lists/*
# Clone llama.cpp
RUN git clone https://github.com/ggerganov/llama.cpp.git /app/llama.cpp
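# Download the Q4_0-quantized GGUF of StableLM 2 Zephyr 1.6B into llama.cpp's models directory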
RUN cd /app/llama.cpp/models && \
    wget -O stablelm-2-zephyr-1_6b-Q4_0.gguf "https://huggingface.co/stabilityai/stablelm-2-zephyr-1_6b/resolve/main/stablelm-2-zephyr-1_6b-Q4_0.gguf?download=true"
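# Build the llama.cpp binaries with a parallel make (newer llama.cpp revisions have replaced the Makefile with CMake, so pinning a commit may be needed for this step to keep working)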
RUN cd /app/llama.cpp && \
    make -j
# Create a virtual environment and put it on PATH (the in-image equivalent of activating it)
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
# Set up a new user named "user" with user ID 1000
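# (Hugging Face Docker Spaces run the container as user ID 1000, so the user is created with that UID)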
RUN useradd -m -u 1000 user
# Switch to the "user" user
USER user
# Set home to the user's home directory and configure Python and Gradio via environment variables
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    PYTHONPATH=$HOME/app \
    PYTHONUNBUFFERED=1 \
    GRADIO_ALLOW_FLAGGING=never \
    GRADIO_NUM_PORTS=1 \
    GRADIO_SERVER_NAME=0.0.0.0 \
    GRADIO_THEME=huggingface \
    SYSTEM=spaces
# Set the working directory to the user's home directory
WORKDIR $HOME/app
# Copy the current directory contents into the container at $HOME/app, setting the owner to the user
COPY --chown=user . $HOME/app
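# Launch the app; app.py is assumed to start the Gradio interface configured by the GRADIO_* variables above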
CMD ["python", "app.py"]