Spaces: Runtime error
Dawoodthouseef committed
Commit • c09235b
1 Parent(s): ae44536
Update app.py

app.py CHANGED
@@ -12,13 +12,16 @@ from huggingface_hub import hf_hub_download
 
 # Set gpu_layers to the number of layers to offload to GPU. Set to 0 if no GPU acceleration is available on your system.
 model = AutoModelForCausalLM.from_pretrained("TheBloke/Mistral-7B-Instruct-v0.1-GGUF", model_file="mistral-7b-instruct-v0.1.Q5_K_S.gguf", model_type="mistral", gpu_layers=0)
-ins = '''[INST] <<SYS>>
+"""ins = '''[INST] <<SYS>>
 You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.
 If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.
 <</SYS>>
 {} [/INST]
 '''
-
+"""
+ins=""" System:Your Helpful Assistant
+{User}:{question}
+{bot_name}:"""
 theme = gr.themes.Monochrome(
     primary_hue="indigo",
     secondary_hue="blue",
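Note on the hunk above: the old [INST]/<<SYS>> Mistral prompt is now wrapped in an unassigned module-level string (effectively commented out), and the new ins template introduces the placeholders {User}, {question} and {bot_name}. The response() change in the next hunk, however, formats the template with a user_name keyword, so even with a syntactically valid call str.format() would raise KeyError: 'User'. A minimal sketch of a template whose placeholders line up with the keyword arguments actually passed (the field names here are illustrative, not taken from the commit):

# Sketch only: placeholder names must match the keyword arguments given to str.format().
ins = """System: You are a helpful assistant.
{user_name}: {question}
{bot_name}:"""

# Example call with matching keyword arguments.
prompt = ins.format(user_name="Alice", bot_name="Assistant", question="What does gpu_layers control?")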
@@ -26,8 +29,8 @@ theme = gr.themes.Monochrome(
     radius_size=gr.themes.sizes.radius_sm,
     font=[gr.themes.GoogleFont("Open Sans"), "ui-sans-serif", "system-ui", "sans-serif"],
 )
-def response(question):
-    res = model(ins.format(question))
+def response(user_name,bot_name,question):
+    res = model(ins.format(user_name=user_name,bot_name,bot_namequestion=question))
     yield res
 
 
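Note on the hunk above: the new response() body is not valid Python. In ins.format(user_name=user_name,bot_name,bot_namequestion=question) a positional argument follows a keyword argument, which is a SyntaxError raised as soon as app.py is loaded, consistent with the Runtime error status shown at the top of the page (bot_namequestion is also not a placeholder in the template). A corrected sketch, assuming the placeholder names used in the template sketch above; model and ins come from the surrounding app.py:

# Sketch only: pass every placeholder as a keyword argument.
def response(user_name, bot_name, question):
    prompt = ins.format(user_name=user_name, bot_name=bot_name, question=question)
    res = model(prompt)
    yield res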
@@ -107,6 +110,8 @@ with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
     )
 
     with gr.Row():
+        user_name = gr.Textbox(placeholder="Enter your Name", label="User Name", elem_id="user-input")
+        bot_name = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input")
         with gr.Column(scale=3):
             instruction = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input")
 
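Note on the hunk above: the new bot_name textbox reuses the placeholder, label and elem_id of the question box (q-input), so the UI shows two fields labelled "Question" and two components share one elem_id. A sketch with distinct labels and ids (the wording is illustrative):

# Sketch only: give each new input its own label and elem_id.
user_name = gr.Textbox(placeholder="Enter your name", label="User Name", elem_id="user-input")
bot_name = gr.Textbox(placeholder="Enter the assistant's name", label="Bot Name", elem_id="bot-input")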
@@ -116,7 +121,7 @@ with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
             submit = gr.Button("Generate", variant="primary")
             gr.Examples(
                 examples=examples,
-                inputs=[instruction],
+                inputs=[user_name,bot_name,instruction],
                 cache_examples=True,
                 fn=process_example,
                 outputs=[output],
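Note on the hunk above: gr.Examples now takes three inputs while cache_examples=True is kept, so each row of examples needs three values and process_example must accept three arguments; Gradio caches example outputs by running the function over the example rows when the app is built, so a mismatch fails at startup. Neither examples nor process_example appears in this diff, so the following only sketches shapes that would be consistent with the new inputs list (all names and values are hypothetical):

# Hypothetical example rows and wrapper, matching [user_name, bot_name, instruction].
examples = [
    ["Alice", "Assistant", "Explain what a GGUF model file is."],
]

def process_example(user_name, bot_name, question):
    # Drain the response() generator and return the last chunk.
    for text in response(user_name, bot_name, question):
        pass
    return text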
@@ -124,7 +129,7 @@ with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
 
 
 
-    submit.click(response, inputs=[instruction], outputs=[output])
-    instruction.submit(response, inputs=[instruction], outputs=[output])
+    submit.click(response, inputs=[use_name,bot_name,instruction], outputs=[output])
+    instruction.submit(response, inputs=[use_name,bot_name,instruction], outputs=[output])
 
 demo.queue(concurrency_count=1).launch(debug=False)
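Note on the hunk above: both event handlers reference use_name, which is never defined (the textbox is user_name), so building the Blocks context would raise NameError even after the SyntaxError noted earlier is fixed. Corrected wiring, under the same assumptions as the earlier sketches:

# Sketch only: the typo use_name corrected to user_name.
submit.click(response, inputs=[user_name, bot_name, instruction], outputs=[output])
instruction.submit(response, inputs=[user_name, bot_name, instruction], outputs=[output])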