Monster committed
Commit
4dad17b
1 Parent(s): f10213f

Create app.py

Files changed (1)
  1. app.py +153 -0
app.py ADDED
@@ -0,0 +1,153 @@
from __future__ import annotations
from typing import Iterable
import gradio as gr
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes
import subprocess

from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp import LlamaRAMCache

# Fetch the 4-bit quantized GGML weights from the Hub into the working directory.
hf_hub_download(repo_id="TheBloke/Llama-2-7B-chat-GGML", filename="llama-2-7b-chat.ggmlv3.q4_K_M.bin", local_dir=".")

llm = Llama(model_path="./llama-2-7b-chat.ggmlv3.q4_K_M.bin")

# Keep a 2 GiB RAM cache of evaluated prompt prefixes so repeated prompts are faster.
cache = LlamaRAMCache(capacity_bytes=2 << 30)

llm.set_cache(cache)


# Llama 2 chat prompt template; the user's question is substituted for {}.
ins = '''[INST] <<SYS>>
You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature.
If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.
<</SYS>>
{} [/INST]
'''

theme = gr.themes.Monochrome(
    primary_hue="indigo",
    secondary_hue="blue",
    neutral_hue="slate",
    radius_size=gr.themes.sizes.radius_sm,
    font=[gr.themes.GoogleFont("Open Sans"), "ui-sans-serif", "system-ui", "sans-serif"],
)


# def generate(instruction):
#     response = llm(ins.format(instruction))
#     response = response['choices'][0]['text']
#     result = ""
#     for word in response.split(" "):
#         result += word + " "
#         yield result

# Stream tokens from the model, yielding the growing answer so the UI updates incrementally.
def generate(instruction):
    result = ""
    for x in llm(ins.format(instruction), stop=['USER:'], stream=True):
        result += x['choices'][0]['text']
        yield result


examples = [
    "Instead of making a peanut butter and jelly sandwich, what else could I combine peanut butter with in a sandwich? Give five ideas",
    "How do I make a campfire?",
    "Explain to me the difference between nuclear fission and fusion.",
    "I'm selling my Nikon D-750, write a short blurb for my ad."
]

# Used by gr.Examples: run the generator to completion and return only the final answer.
def process_example(args):
    for x in generate(args):
        pass
    return x

css = ".generating {visibility: hidden}"

# Based on the gradio theming guide and borrowed from https://huggingface.co/spaces/shivi/dolly-v2-demo
class SeafoamCustom(Base):
    def __init__(
        self,
        *,
        primary_hue: colors.Color | str = colors.emerald,
        secondary_hue: colors.Color | str = colors.blue,
        neutral_hue: colors.Color | str = colors.blue,
        spacing_size: sizes.Size | str = sizes.spacing_md,
        radius_size: sizes.Size | str = sizes.radius_md,
        font: fonts.Font
        | str
        | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Quicksand"),
            "ui-sans-serif",
            "sans-serif",
        ),
        font_mono: fonts.Font
        | str
        | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("IBM Plex Mono"),
            "ui-monospace",
            "monospace",
        ),
    ):
        super().__init__(
            primary_hue=primary_hue,
            secondary_hue=secondary_hue,
            neutral_hue=neutral_hue,
            spacing_size=spacing_size,
            radius_size=radius_size,
            font=font,
            font_mono=font_mono,
        )
        super().set(
            button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
            button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
            button_primary_text_color="white",
            button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
            block_shadow="*shadow_drop_lg",
            button_shadow="*shadow_drop_lg",
            input_background_fill="zinc",
            input_border_color="*secondary_300",
            input_shadow="*shadow_drop",
            input_shadow_focus="*shadow_drop_lg",
        )


seafoam = SeafoamCustom()


with gr.Blocks(theme=seafoam, analytics_enabled=False, css=css) as demo:
    with gr.Column():
        gr.Markdown(
            """ ## Meta's Llama 2 7B-chat GGML

            4bit (q4_K_M)

            Type in the box below and click the button to generate answers to your most pressing questions!
            """
        )

        with gr.Row():
            with gr.Column(scale=3):
                instruction = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input")

                with gr.Box():
                    gr.Markdown("**Answer**")
                    output = gr.Markdown(elem_id="q-output")
                submit = gr.Button("Generate", variant="primary")
                gr.Examples(
                    examples=examples,
                    inputs=[instruction],
                    cache_examples=False,
                    fn=process_example,
                    outputs=[output],
                )

    submit.click(generate, inputs=[instruction], outputs=[output])
    instruction.submit(generate, inputs=[instruction], outputs=[output])

demo.queue(concurrency_count=1).launch(debug=False)
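
A minimal sketch of exercising the same model file and prompt template directly, without the Gradio UI. It assumes the GGML-era llama-cpp-python API used above, that app.py has already downloaded llama-2-7b-chat.ggmlv3.q4_K_M.bin into the working directory, and uses a shortened, illustrative system prompt.

# Sanity check outside the UI (assumptions as stated above).
from llama_cpp import Llama

prompt = '''[INST] <<SYS>>
You are a helpful assistant.
<</SYS>>
How do I make a campfire? [/INST]
'''

llm = Llama(model_path="./llama-2-7b-chat.ggmlv3.q4_K_M.bin")

# Single non-streaming completion; app.py's generate() makes the same call with stream=True.
out = llm(prompt, stop=["USER:"], max_tokens=256)
print(out["choices"][0]["text"])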