Upload README.md
README.md CHANGED
@@ -186,7 +186,7 @@ CT_METAL=1 pip install ctransformers>=0.2.24 --no-binary ctransformers
 from ctransformers import AutoModelForCausalLM
 
 # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system.
-llm = AutoModelForCausalLM.from_pretrained("
+llm = AutoModelForCausalLM.from_pretrained("TheBloke/WizardCoder-Python-34B-V1.0-GGUF", model_file="wizardcoder-python-34b-v1.0.q4_K_M.gguf", model_type="llama", gpu_layers=50)
 
 print(llm("AI is going to"))
 ```
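
For reference, a runnable sketch of the snippet as it reads after this change, assuming `ctransformers>=0.2.24` is installed and the GGUF file can be downloaded from the Hugging Face Hub; adjust `gpu_layers` to match your hardware:

```
from ctransformers import AutoModelForCausalLM

# Set gpu_layers to the number of layers to offload to GPU.
# Set to 0 if no GPU acceleration is available on your system.
llm = AutoModelForCausalLM.from_pretrained(
    "TheBloke/WizardCoder-Python-34B-V1.0-GGUF",
    model_file="wizardcoder-python-34b-v1.0.q4_K_M.gguf",
    model_type="llama",
    gpu_layers=50,
)

print(llm("AI is going to"))
```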