Commit 8f8ff22 by iandennismiller (parent: 9374608)

    remove prompt caching

bin/llama.sh: +1 -1
@@ -47,7 +47,6 @@ function llama_interactive {
     llama \
         --n-gpu-layers 1 \
         --model "$LLAMA_MODELS_PATH/$LLAMA_MODEL_NAME" \
-        --prompt-cache "$LLAMA_CACHE_PATH/${LLAMA_MODEL_NAME//[\/\.]/-}-${LLAMA_CONTEXT_SIZE}.cache" \
         --file "$(get_model_prompt $LLAMA_MODEL_NAME)" \
         --in-prefix "$(get_model_prefix $LLAMA_TEMPLATE)" \
         --in-suffix "$(get_model_suffix $LLAMA_TEMPLATE)" \
@@ -70,6 +69,7 @@ function llama_interactive {
         --escape \
         --log-disable

+    # --prompt-cache "$LLAMA_CACHE_PATH/${LLAMA_MODEL_NAME//[\/\.]/-}-${LLAMA_CONTEXT_SIZE}.cache" \
     # --temp "$LLAMA_TEMPERATURE" \
     # --top-p "$LLAMA_TOP_P" \
     # --top-k "$LLAMA_TOP_K" \
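For reference, a minimal sketch of how the removed (now commented-out) --prompt-cache path expands. The values assigned to LLAMA_CACHE_PATH, LLAMA_MODEL_NAME, and LLAMA_CONTEXT_SIZE below are illustrative assumptions, not taken from the repository:

    #!/usr/bin/env bash
    # Example values only; the real ones come from the script's environment.
    LLAMA_CACHE_PATH="$HOME/.cache/llama"
    LLAMA_MODEL_NAME="TheBloke/model-7b.Q4_K_M.gguf"
    LLAMA_CONTEXT_SIZE=4096

    # ${LLAMA_MODEL_NAME//[\/\.]/-} replaces every "/" and "." with "-",
    # yielding a filesystem-safe cache file name keyed by model and context size.
    echo "$LLAMA_CACHE_PATH/${LLAMA_MODEL_NAME//[\/\.]/-}-${LLAMA_CONTEXT_SIZE}.cache"
    # -> $HOME/.cache/llama/TheBloke-model-7b-Q4_K_M-gguf-4096.cache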