# Base image
FROM ubuntu:latest
# Install dependencies
RUN apt-get update && apt-get install -y \
    curl \
    gnupg \
    && curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
    && echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list \
    && apt-get update \
    && apt-get install -y nvidia-container-toolkit \
    && rm -rf /var/lib/apt/lists/*
# Install Ollama
RUN curl -fsSL https://ollama.ai/install.sh | sh
# Create the directory and give appropriate permissions
RUN mkdir -p /.ollama && chmod 777 /.ollama
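# (The world-writable permissions are presumably needed because the Space runs
# the container as a non-root user; a narrower chown to a dedicated user would
# also work.)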
WORKDIR /.ollama
# Copy the entry point script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
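# entrypoint.sh itself is not included in this file. A minimal sketch of what
# such a script might contain (assuming it starts the server passed via CMD and
# pulls the model named by $model; the details here are illustrative):
#   #!/bin/sh
#   "$@" &                 # start "ollama serve" in the background
#   sleep 5                # give the server a moment to come up
#   ollama pull "$model"   # fetch the configured model
#   wait                   # keep the server process in the foreground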
# Set the entry point script as the default command
ENTRYPOINT ["/entrypoint.sh"]
# Set default arguments for the entry point script
CMD ["ollama", "serve"]
# Set the model as an environment variable (this can be overridden)
ENV model=llama2
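# Example override at run time (model name is illustrative):
#   docker run -e model=mistral <image>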
# Expose the server port (Hugging Face Spaces expects the app on 7860)
EXPOSE 7860
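# Note: Ollama listens on 127.0.0.1:11434 by default, so for the server to be
# reachable on the exposed port the entrypoint (or an ENV line) is expected to
# set something like OLLAMA_HOST=0.0.0.0:7860.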