# Start from the TGI base image
FROM ghcr.io/huggingface/text-generation-inference:1.3 AS base
# Create a non-root user with UID 1000
RUN useradd -m -u 1000 -s /bin/bash jupyteruser
# Switch to the non-root user
USER jupyteruser
# Set working directory
WORKDIR /home/jupyteruser
# Install JupyterLab and Vim plugins into the user's home (~/.local),
# since the non-root user cannot write to the system site-packages
RUN pip install --user jupyterlab jupyterlab-vim==0.15.1 jupyterlab-vimrc
# Add local python bin directory to PATH
ENV PATH="/home/jupyteruser/.local/bin:${PATH}"
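# Optional: uncomment to verify during the build that the user-level install
# is reachable now that ~/.local/bin is on PATH
# RUN jupyter lab --version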
# Copy any necessary files (if needed)
# COPY --chown=jupyteruser:jupyteruser your-files /home/jupyteruser/your-destination
# AWS SageMaker-compatible image
# (this stage mirrors the sagemaker stage of the upstream TGI Dockerfile)
FROM base AS sagemaker
COPY sagemaker-entrypoint.sh entrypoint.sh
RUN chmod +x entrypoint.sh
ENTRYPOINT ["./entrypoint.sh"]
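# sagemaker-entrypoint.sh is copied from the build context and is not shown
# here; a minimal sketch (an assumption, not the actual script) would map
# SageMaker-style environment variables onto text-generation-launcher flags:
#   #!/bin/bash
#   if [[ -n "${HF_MODEL_ID}" ]]; then ARGS="--model-id ${HF_MODEL_ID}"; fi
#   if [[ -n "${SM_NUM_GPUS}" ]]; then ARGS="${ARGS} --num-shard ${SM_NUM_GPUS}"; fi
#   text-generation-launcher --port 8080 ${ARGS}
# Build this variant with: docker build --target sagemaker -t tgi-sagemaker .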
# Final image
FROM base
# Switch to the non-root user
USER jupyteruser
# Set working directory
WORKDIR /home/jupyteruser
# Add local python bin directory to PATH
ENV PATH="/home/jupyteruser/.local/bin:${PATH}"
# Add JupyterLab entrypoint
ENTRYPOINT ["jupyter", "lab", "--ip=0.0.0.0", "--NotebookApp.token=''", "--port", "7860", "--no-browser"]
# Optional: Set CMD to launch TGI or any other command
#CMD ["text-generation-launcher", "--json-output"]
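# Example usage (the image name "tgi-jupyter" is illustrative):
#   docker build -t tgi-jupyter .
#   docker run --gpus all -p 7860:7860 tgi-jupyter
# JupyterLab then listens on port 7860 with token authentication disabled,
# so only expose it on a trusted network.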