|
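# CUDA development base image (Ubuntu 22.04); override CUDA_IMAGE at build time to change the CUDA version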
ARG CUDA_IMAGE="12.1.1-devel-ubuntu22.04" |
|
FROM nvidia/cuda:${CUDA_IMAGE} |
|
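# Bind the server to all interfaces so it is reachable from outside the container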
ENV HOST=0.0.0.0
|
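# Create a non-root user (UID 1000)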
RUN useradd -m -u 1000 user |
|
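# Work out of the app directory and copy in the sources, owned by the non-root user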
WORKDIR /home/user/app |
COPY --link --chown=1000 ./ /home/user/app |
|
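# Build tools, Python, and the OpenCL/CLBlast/OpenBLAS libraries needed to compile llama-cpp-python,
# plus registration of the NVIDIA OpenCL ICD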
RUN apt-get update && apt-get upgrade -y \
    && apt-get install -y git git-lfs build-essential \
    python3 python3-pip gcc wget \
    ocl-icd-opencl-dev opencl-headers clinfo \
    libclblast-dev libopenblas-dev \
    && mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
|
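# Build llama.cpp CUDA kernels for all supported GPU architectures and enable cuBLAS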
ENV CUDA_DOCKER_ARCH=all |
ENV LLAMA_CUBLAS=1 |
|
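# Python dependencies for the API server and scheduler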
RUN python3 -m pip install --no-cache-dir --upgrade \
    pip pytest cmake scikit-build setuptools \
    fastapi uvicorn sse-starlette pydantic-settings starlette-context \
    huggingface-hub==0.14.1 flask apscheduler
|
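# Compile llama-cpp-python from source with cuBLAS; AVX2/F16C/FMA are disabled
# for compatibility with CPUs that lack those instruction sets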
RUN CMAKE_ARGS="-DLLAMA_CUBLAS=ON -DLLAMA_AVX2=OFF -DLLAMA_F16C=OFF -DLLAMA_FMA=OFF" \
    FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
|
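# Git identity used for commits made from inside the container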
RUN git config --global user.email "amatenkov@ntr.ai" \
    && git config --global user.name "Andrew Matenkov"
|
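# 7860 is the default application port for Hugging Face Spaces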
EXPOSE 7860 |
|
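# Start the application module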
CMD ["python3", "-m", "app"]