# Builder stage
# FROM ubuntu:latest
# # Update packages and install curl and gnupg
# RUN apt-get update && apt-get install -y \
#     curl \
#     gnupg
# # Add NVIDIA package repositories
# RUN curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
#     && echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list
# # Install NVIDIA container toolkit (check for updated methods or URLs for Ubuntu jammy)
# RUN apt-get update && apt-get install -y nvidia-container-toolkit || true
# # Install application
# RUN curl https://ollama.ai/install.sh | sh
# # The alternative install below pins an older release to work around an embedding bug:
# # RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh
# # Create the directory and give appropriate permissions
# RUN mkdir -p /.ollama && chmod 777 /.ollama
# WORKDIR /.ollama
# # Copy the entry point script
# COPY entrypoint.sh /entrypoint.sh
# RUN chmod +x /entrypoint.sh
# # Set the entry point script as the default command
# ENTRYPOINT ["/entrypoint.sh"]
# CMD ["ollama", "serve"]
# # Set the model as an environment variable (this can be overridden)
# ENV model=${model}
# Use the official Ollama Docker image as the base image
FROM ollama/ollama:latest
# Install Python and pip (apt-get is preferred over apt in scripts)
RUN apt-get update && apt-get install -y python3 python3-pip
# litellm[proxy] pulls in the base litellm package, so one install suffices
RUN pip install 'litellm[proxy]'
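# LiteLLM's proxy can expose an OpenAI-compatible API in front of Ollama.
# A hypothetical invocation (model name and port are assumptions, not taken
# from this repo's entrypoint.sh):
#   litellm --model ollama/llama2 --host 0.0.0.0 --port 7860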
# Create a directory for Ollama data (world-writable so the container can
# run under an arbitrary non-root UID)
RUN mkdir -p /.ollama && chmod -R 777 /.ollama
WORKDIR /.ollama
# Copy the entry point script and make it executable
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
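# entrypoint.sh is not shown in this file. A minimal sketch of what it is
# assumed to do (run the CMD argument, then optionally pull ${model}); the
# actual script may differ:
#   #!/bin/sh
#   ollama "$@" &                     # CMD supplies "serve"
#   sleep 5                           # give the server a moment to start
#   [ -n "$model" ] && ollama pull "$model"
#   wait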
# Run the entry point script when the container starts
ENTRYPOINT ["/entrypoint.sh"]
# Set the model as a build argument and environment variable (ARG is needed
# for ${model} to resolve at build time; it can still be overridden at run time)
ARG model
ENV model=${model}
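# Hypothetical build/run usage (image tag and model names are placeholders):
#   docker build --build-arg model=llama2 -t ollama-litellm .
#   docker run -p 7860:7860 -e model=mistral ollama-litellm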
# Expose the server port (7860; note Ollama's own default is 11434)
EXPOSE 7860
# Default argument passed to the entry point script ("serve" starts Ollama)
CMD ["serve"]