File size: 741 Bytes
bc13612
291e202
050ba43
291e202
 
 
 
 
5711d0b
 
291e202
 
0ace2cd
 
291e202
8276f8e
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
FROM python:3.11.6-bullseye

# Build toolchain + OpenBLAS backend for llama.cpp.
# apt-get (not apt) has a script-stable CLI; update+install share one layer so
# the package index is never stale; --no-install-recommends and the list
# cleanup keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
      cmake \
      libopenblas-dev \
    && rm -rf /var/lib/apt/lists/*

# NOTE(review): unpinned clone — the build is not reproducible; pin a tag or
# commit once a known-good revision is chosen. --depth 1 only speeds the fetch.
RUN git clone --depth 1 https://github.com/ggerganov/llama.cpp /llama.cpp

# WORKDIR creates missing directories itself, so no separate `mkdir build` is
# needed; absolute paths avoid depending on the base image's default cwd.
WORKDIR /llama.cpp/build

# Configure with OpenBLAS and build Release binaries in one logical step.
# NOTE(review): -DLLAMA_NATIVE=ON compiles for the *build* machine's CPU; the
# resulting image can die with "illegal instruction" on older hosts — confirm
# the build and deploy hardware match before keeping this flag.
RUN cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS -DLLAMA_NATIVE=ON \
 && cmake --build . --config Release

# The server binary and the model files it loads all live here (see CMD).
WORKDIR /llama.cpp/build/bin

# Fetch the multimodal projector and the quantized model next to the server.
# The q6 model is deliberately saved under the f16 name so CMD below resolves.
# NOTE(review): no checksum verification — an upstream repo change would
# silently alter this image; consider verifying with sha256sum in this layer.
RUN wget https://huggingface.co/nisten/obsidian-3b-multimodal-q6-gguf/resolve/main/mmproj-obsidian-f16.gguf -O mmproj-model-f16.gguf \
 && wget https://huggingface.co/nisten/obsidian-3b-multimodal-q6-gguf/resolve/main/obsidian-q6.gguf -O ggml-model-f16.gguf

# Drop root for runtime; port 7860 is unprivileged and the root-owned model
# files / server binary are world-readable by default, so a plain system user
# suffices. Stable numeric UID for runtimes that enforce runAsNonRoot.
RUN useradd --system --uid 10001 llama
USER llama

# Documentation only (does not publish the port) — the server binds 7860.
EXPOSE 7860

CMD ["./server", "-m", "ggml-model-f16.gguf", "--mmproj", "mmproj-model-f16.gguf", "--host", "0.0.0.0", "--port", "7860", "-c", "2048", "--batch-size", "1024", "--verbose"]