Space Message
- Dockerfile +1 -1
- default.json +1 -0
Dockerfile
CHANGED
@@ -8,5 +8,5 @@ RUN git clone https://github.com/lostruins/koboldcpp /opt/koboldcpp
 WORKDIR /opt/koboldcpp
 RUN make LLAMA_OPENBLAS=1 LLAMA_CUBLAS=1 LLAMA_PORTABLE=1
 RUN wget -O model.ggml $MODEL
-CMD /bin/python3 ./koboldcpp.py --model model.ggml $ADDITIONAL --port 7860 --hordeconfig $MODEL_NAME 1 1
+CMD /bin/python3 ./koboldcpp.py --model model.ggml $ADDITIONAL --port 7860 --hordeconfig $MODEL_NAME 1 1 --preloadstory default.json
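The only functional change is the new `--preloadstory default.json` flag, which makes KoboldCpp serve the bundled save file (added below) to the Lite UI when it first loads. A minimal sketch for checking that the story is actually being served, assuming a placeholder Space URL and that the build exposes KoboldCpp's `/api/extra/preloadstory` route (present in recent releases; treat both as assumptions):

```python
import requests

# Placeholder URL; substitute the real Space endpoint shown on its page.
SPACE_URL = "https://example-user-koboldcpp.hf.space"

# Recent KoboldCpp builds return the preloaded story JSON at this route
# (assumption: the Space runs such a build with --preloadstory set).
resp = requests.get(f"{SPACE_URL}/api/extra/preloadstory", timeout=30)
resp.raise_for_status()

story = resp.json()
print(story.get("welcome", "(no welcome message set)"))
```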
default.json
ADDED
@@ -0,0 +1 @@
+{"gamestarted":true,"prompt":"","memory":"","authorsnote":"","anotetemplate":"[Author's note: <|>]","actions":[],"actions_metadata":{},"worldinfo":[],"wifolders_d":{},"wifolders_l":[],"extrastopseq":"","anotestr":320,"wisearchdepth":0,"wiinsertlocation":0,"savedsettings":null,"savedaestheticsettings":null, "welcome":"This Huggingface Space can be used as a KoboldAI or OpenAI API by using the custom endpoint link above.\nWant your own? Duplicate this space, and remove the additional parameters if you run on the CPU tier. (You can customize this message and the default settings in default.json)"}
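The welcome text says the Space can be driven as a KoboldAI or OpenAI-style API. A rough sketch of both calls, assuming the placeholder Space URL below and the standard KoboldCpp routes (`/api/v1/generate` and the OpenAI-compatible `/v1/completions`); parameters are illustrative only:

```python
import requests

SPACE_URL = "https://example-user-koboldcpp.hf.space"  # placeholder endpoint

# KoboldAI-style generate API implemented by KoboldCpp.
kobold = requests.post(
    f"{SPACE_URL}/api/v1/generate",
    json={"prompt": "Once upon a time", "max_length": 80, "temperature": 0.7},
    timeout=120,
)
print(kobold.json()["results"][0]["text"])

# OpenAI-compatible completions route also served by KoboldCpp.
openai_style = requests.post(
    f"{SPACE_URL}/v1/completions",
    json={"prompt": "Once upon a time", "max_tokens": 80, "temperature": 0.7},
    timeout=120,
)
print(openai_style.json()["choices"][0]["text"])
```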