Monster committed on
Commit 6571183
Parent: e6a1bdb

Update app.py

Files changed (1): app.py (+25, -15)
app.py CHANGED
@@ -12,12 +12,23 @@ llm = Llama(model_path="./ggml-model-q4_1.bin")
 
 
 ins = '''Below is an instruction that describes a task. Write a response that appropriately completes the request.
+
 ### Instruction:
 {}
 
 ### Response:
 '''
 
+ins_inp = '''Below is an instruction that describes a task. Write a response that appropriately completes the request.
+### Instruction:
+{}
+
+### Input:
+{}
+
+### Response:
+'''
+
 theme = gr.themes.Monochrome(
     primary_hue="indigo",
     secondary_hue="blue",
@@ -26,20 +37,14 @@ theme = gr.themes.Monochrome(
     font=[gr.themes.GoogleFont("Open Sans"), "ui-sans-serif", "system-ui", "sans-serif"],
 )
 
-
-
-
-# def generate(instruction):
-#     response = llm(ins.format(instruction))
-#     response = response['choices'][0]['text']
-#     result = ""
-#     for word in response.split(" "):
-#         result += word + " "
-#         yield result
-
-def generate(instruction):
+def generate(
+    instruction
+    input=None,
+    temperature=0.1,
+    top_p=0.75,
+    top_k=40,):
     result = ""
-    for x in llm(ins.format(instruction), stop=['### Instruction:', '### End'], stream=True):
+    for x in llm(ins.format(instruction), stop=['### Instruction:', '### End'], stream=True, temperature=temperature, top_p=top_p, top_k=top_k):
         result += x['choices'][0]['text']
         yield result
 
@@ -123,7 +128,12 @@ with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
 
     with gr.Row():
         with gr.Column(scale=3):
-            instruction = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input")
+            instruction = gr.Textbox(lines=2, placeholder="Enter your question here", label="Question", elem_id="q-input")
+            input = gr.components.Textbox(lines=2, label="Input", placeholder="none")
+            temperature = gr.components.Slider(minimum=0, maximum=1, value=0.1, label="Temperature"),
+            top_p = gr.components.Slider(minimum=0, maximum=1, value=0.75, label="Top p"),
+            top_k = gr.components.Slider(minimum=0, maximum=100, step=1, value=40, label="Top k")
+
 
         with gr.Box():
             gr.Markdown("**Answer**")
@@ -139,7 +149,7 @@ with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
 
 
 
-    submit.click(generate, inputs=[instruction], outputs=[output])
+    submit.click(generate, inputs=[instruction, input, temperature, top_p, top_k], outputs=[output])
     instruction.submit(generate, inputs=[instruction], outputs=[output])
 
 demo.queue(concurrency_count=1).launch(debug=True)
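
As shown in the diff, the new code has a few rough edges: the `def generate(` signature appears without a comma after `instruction`, the trailing commas on the Temperature and Top p sliders bind those names to one-element tuples instead of Slider components (which breaks `inputs=[...]`), the new `ins_inp` template is defined but never formatted, and `instruction.submit` still passes only the question while `submit.click` now passes five inputs. A minimal corrected sketch of the same change follows; it is not part of commit 6571183, it reuses `llm`, `ins`, `ins_inp`, `submit`, and `output` from the existing app.py, and it assumes llama-cpp-python's `Llama.__call__` accepts `temperature`, `top_p`, and `top_k` alongside `stop` and `stream`:

    # Sketch only, not part of this commit: cleaned-up generate() and UI wiring.
    def generate(instruction, input=None, temperature=0.1, top_p=0.75, top_k=40):
        # Use the "### Input:" template only when the optional Input box is filled;
        # the committed code always formats `ins` and ignores `input`.
        prompt = ins_inp.format(instruction, input) if input else ins.format(instruction)
        result = ""
        for x in llm(
            prompt,
            stop=['### Instruction:', '### End'],
            stream=True,
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
        ):
            result += x['choices'][0]['text']
            yield result

    # Inside `with gr.Column(scale=3):` in app.py; no trailing commas, so each
    # name is a real component that can be passed to inputs=[...].
    instruction = gr.Textbox(lines=2, placeholder="Enter your question here", label="Question", elem_id="q-input")
    input = gr.Textbox(lines=2, label="Input", placeholder="none")
    temperature = gr.Slider(minimum=0, maximum=1, value=0.1, label="Temperature")
    top_p = gr.Slider(minimum=0, maximum=1, value=0.75, label="Top p")
    top_k = gr.Slider(minimum=0, maximum=100, step=1, value=40, label="Top k")

    # Wire both triggers identically so pressing Enter behaves like the button.
    submit.click(generate, inputs=[instruction, input, temperature, top_p, top_k], outputs=[output])
    instruction.submit(generate, inputs=[instruction, input, temperature, top_p, top_k], outputs=[output])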