Update app.py
app.py CHANGED
@@ -38,8 +38,8 @@ def get_system_tokens(model):
     return get_message_tokens(model, **system_message)
 
 
-repo_name = "IlyaGusev/
-model_name = "ggml-model-
+repo_name = "IlyaGusev/saiga2_13b_gguf"
+model_name = "ggml-model-q4_K.gguf"
 
 snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)
 
@@ -100,7 +100,7 @@ with gr.Blocks(
 ) as demo:
     favicon = '<img src="https://cdn.midjourney.com/b88e5beb-6324-4820-8504-a1a37a9ba36d/0_1.png" width="48px" style="display: inline">'
     gr.Markdown(
-        f"""<h1><center>{favicon}Saiga2 13B
+        f"""<h1><center>{favicon}Saiga2 13B GGUF Q4_K</center></h1>
 
 This is a demo of a **Russian**-speaking LLaMA2-based model. If you are interested in other languages, please check other models, such as [MPT-7B-Chat](https://huggingface.co/spaces/mosaicml/mpt-7b-chat).
 
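
The commit points the Space at the GGUF repository IlyaGusev/saiga2_13b_gguf and the single quantized file ggml-model-q4_K.gguf, and updates the page title to match. A minimal sketch of how the updated constants fit together, assuming the Space loads the file with llama-cpp-python (the loader and its parameters are illustrative and not part of this diff):

```python
# Minimal sketch, not the full app.py: download the one GGUF file named in the
# commit and load it. Only repo_name, model_name and the snapshot_download(...)
# call come from the diff; the llama-cpp-python loader and n_ctx are assumptions.
from huggingface_hub import snapshot_download
from llama_cpp import Llama

repo_name = "IlyaGusev/saiga2_13b_gguf"
model_name = "ggml-model-q4_K.gguf"

# allow_patterns restricts the download to the single quantized file
# instead of pulling the whole model repository.
snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)

# Hypothetical loading step: GGUF is the format llama.cpp reads after GGML was deprecated.
model = Llama(model_path=model_name, n_ctx=2048)
```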