File size: 709 Bytes
f99e672
 
 
 
 
 
 
 
57b0171
 
389688e
f99e672
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
#!/bin/bash
# Download the Command-R-Plus GGUF model shards and launch the llama.cpp server.
#
# Environment variables (all optional):
#   N_GPU_LAYERS - layers to offload to GPU (default 10; used by the commented flag below)
#   CONTEXT      - context window size (default 32768; used by the commented flag below)
#   PORT         - HTTP port the server listens on (default 7860)
#   MODEL_PATH   - model file to load (default: first shard downloaded below)
set -euo pipefail

N_GPU_LAYERS="${N_GPU_LAYERS:-10}"
PORT="${PORT:-7860}"
CONTEXT="${CONTEXT:-32768}"
# Default to the first shard of the split GGUF actually downloaded below
# (the original pointed the server at a Q6_K file that was never fetched).
MODEL_PATH="${MODEL_PATH:-/data/models/command-r-plus-Q5_K_M-00001-of-00002.gguf}"

# Fetch both shards of the split model; under `set -e` a failed download aborts
# the script instead of starting the server against a missing/partial file.
huggingface-cli download "pmysl/c4ai-command-r-plus-GGUF" command-r-plus-Q5_K_M-00001-of-00002.gguf --local-dir /data/models
huggingface-cli download "pmysl/c4ai-command-r-plus-GGUF" command-r-plus-Q5_K_M-00002-of-00002.gguf --local-dir /data/models

cd /app || exit 1

# GPU offload and context flags kept disabled, as in the original:
#   --n-gpu-layers "$N_GPU_LAYERS"
#   -c "$CONTEXT"
./server -m "$MODEL_PATH" --port "$PORT" --host 0.0.0.0 --path "/app/public"