isayahc committed on
Commit
50a222c
1 Parent(s): 91bdf3d

Update app.py

Browse files

initialized config sooner

Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -13,15 +13,6 @@ import gradio as gr
13
  # local_llm = AutoModel.from_pretrained("TheBloke/zephyr-7B-beta-GGUF")
14
 
15
 
16
- local_llm = CTransformers(
17
- model = "TheBloke/zephyr-7B-beta-GGUF",
18
- model_file = "zephyr-7b-beta.Q4_0.gguf",
19
- model_type="mistral",
20
- lib="avx2", #for CPU use
21
- **config
22
- )
23
-
24
-
25
  config = {
26
  "max_new_token": 1024,
27
  "repetition_penalty": 1.1,
@@ -32,6 +23,18 @@ config = {
32
  "threads": int(os.cpu_count() / 2),
33
  }
34
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  llm_init = CTransformers(model=local_llm, model_type="mistral", lib="avx2", **config)
36
 
37
  prompt_template = """Use the following piece of information to answers the question asked by the user.
 
13
  # local_llm = AutoModel.from_pretrained("TheBloke/zephyr-7B-beta-GGUF")
14
 
15
 
 
 
 
 
 
 
 
 
 
16
  config = {
17
  "max_new_token": 1024,
18
  "repetition_penalty": 1.1,
 
23
  "threads": int(os.cpu_count() / 2),
24
  }
25
 
26
+
27
+ local_llm = CTransformers(
28
+ model = "TheBloke/zephyr-7B-beta-GGUF",
29
+ model_file = "zephyr-7b-beta.Q4_0.gguf",
30
+ model_type="mistral",
31
+ lib="avx2", #for CPU use
32
+ **config
33
+ )
34
+
35
+
36
+
37
+
38
  llm_init = CTransformers(model=local_llm, model_type="mistral", lib="avx2", **config)
39
 
40
  prompt_template = """Use the following piece of information to answers the question asked by the user.