shivi committed on
Commit
4d75b37
1 Parent(s): 71fc9aa

Upload 4 files

Files changed (4)
  1. app.py +199 -0
  2. dolly.jpg +0 -0
  3. examples.csv +11 -0
  4. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,199 @@
+ from __future__ import annotations
+ from typing import Iterable
+ import gradio as gr
+ from gradio.themes.base import Base
+ from gradio.themes.utils import colors, fonts, sizes
+ import torch
+ from transformers import pipeline
+ import pandas as pd
+
+ # Load the Dolly 2.0 instruction-following pipeline once at startup.
+ instruct_pipeline = pipeline(model="databricks/dolly-v2-12b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
+
+
+ def run_pipeline(prompt):
+     # The pipeline returns a list of {"generated_text": ...} records; keep only the text.
+     response = instruct_pipeline(prompt)
+     return response[0]["generated_text"]
+
+ def get_user_input(input_question, history):
+     return "", history + [[input_question, None]]
+
+ def get_qa_user_input(input_question, history):
+     return "", history + [[input_question, None]]
+
+ def dolly_chat(history):
+     prompt = history[-1][0]
+     bot_message = run_pipeline(prompt)
+     history[-1][1] = bot_message
+     return history
+
+ def qa_bot(context, history):
+     query = history[-1][0]
+     prompt = f'instruction: {query} \ncontext: {context}'
+     bot_message = run_pipeline(prompt)
+     history[-1][1] = bot_message
+     return history
+
+ def reset_chatbot():
+     return gr.update(value="")
+
+ def load_customer_support_example():
+     df = pd.read_csv("examples.csv")
+     return df['doc'].iloc[0], df['question'].iloc[0]
+
+ def load_databricks_doc_example():
+     df = pd.read_csv("examples.csv")
+     return df['doc'].iloc[1], df['question'].iloc[1]
+
+ # Adapted from https://gradio.app/theming-guide/
+ class SeafoamCustom(Base):
+     def __init__(
+         self,
+         *,
+         primary_hue: colors.Color | str = colors.emerald,
+         secondary_hue: colors.Color | str = colors.blue,
+         neutral_hue: colors.Color | str = colors.blue,
+         spacing_size: sizes.Size | str = sizes.spacing_md,
+         radius_size: sizes.Size | str = sizes.radius_md,
+         font: fonts.Font
+         | str
+         | Iterable[fonts.Font | str] = (
+             fonts.GoogleFont("Quicksand"),
+             "ui-sans-serif",
+             "sans-serif",
+         ),
+         font_mono: fonts.Font
+         | str
+         | Iterable[fonts.Font | str] = (
+             fonts.GoogleFont("IBM Plex Mono"),
+             "ui-monospace",
+             "monospace",
+         ),
+     ):
+         super().__init__(
+             primary_hue=primary_hue,
+             secondary_hue=secondary_hue,
+             neutral_hue=neutral_hue,
+             spacing_size=spacing_size,
+             radius_size=radius_size,
+             font=font,
+             font_mono=font_mono,
+         )
+         super().set(
+             button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
+             button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
+             button_primary_text_color="white",
+             button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
+             block_shadow="*shadow_drop_lg",
+             button_shadow="*shadow_drop_lg",
+             input_background_fill="zinc",
+             input_border_color="*secondary_300",
+             input_shadow="*shadow_drop",
+             input_shadow_focus="*shadow_drop_lg",
+         )
+
+
+ seafoam = SeafoamCustom()
+
+ with gr.Blocks(theme=seafoam) as demo:
+
+     with gr.Row(variant='panel'):
+         with gr.Column():
+             gr.HTML(
+                 """<html><img src='file/dolly.jpg' alt='dolly logo' width=150 height=150 /><br></html>"""
+             )
+         with gr.Column():
+             gr.Markdown("# **<p align='center'>Dolly 2.0: World's First Truly Open Instruction-Tuned LLM</p>**")
+             gr.Markdown("Dolly 2.0 is the first open-source, instruction-following LLM fine-tuned on a human-generated instruction dataset licensed for research and commercial use. It is a 12B-parameter language model based on the EleutherAI pythia model family, fine-tuned exclusively on a new, high-quality, human-generated instruction-following dataset crowdsourced among Databricks employees.")
+
+
+     qa_bot_state = gr.State(value=[])
+
+     with gr.Tabs():
+         with gr.TabItem("Dolly Chat"):
+
+             with gr.Row():
+
+                 with gr.Column():
+                     chatbot = gr.Chatbot(label="Chat History")
+                     input_question = gr.Text(
+                         label="Instruction",
+                         placeholder="Type prompt and hit enter.",
+                     )
+                     clear = gr.Button("Clear", variant="primary")
+
+             with gr.Row():
+                 with gr.Accordion("Show example inputs I can load:", open=False):
+                     gr.Examples(
+                         [
+                             ["Explain to me the difference between nuclear fission and fusion."],
+                             ["Give me a list of 5 science fiction books I should read next."],
+                             ["I'm selling my Nikon D-750, write a short blurb for my ad."],
+                             ["Write a song about sour donuts"],
+                             ["Write a tweet about a new book launch by J.K. Rowling."],
+                         ],
+                         [input_question],
+                         [],
+                         None,
+                         cache_examples=False,
+                     )
+
+         with gr.TabItem("Q&A with Context"):
+
+             with gr.Row():
+
+                 with gr.Column():
+                     input_context = gr.Text(label="Add context here", lines=10)
+
+                 with gr.Column():
+                     qa_chatbot = gr.Chatbot(label="Q&A History")
+                     qa_input_question = gr.Text(
+                         label="Input Question",
+                         placeholder="Type question here and hit enter.",
+                     )
+                     qa_clear = gr.Button("Clear", variant="primary")
+
+             with gr.Row():
+                 with gr.Accordion("Show example inputs I can load:", open=False):
+                     example_1 = gr.Button("Load Customer support example")
+                     example_2 = gr.Button("Load Databricks documentation example")
+
+
+     # Chat tab: append the user turn, then generate the bot reply.
+     input_question.submit(
+         get_user_input,
+         [input_question, chatbot],
+         [input_question, chatbot],
+     ).then(dolly_chat, [chatbot], chatbot)
+
+     clear.click(lambda: None, None, chatbot)
+
+
+     # Q&A tab: append the question, then answer it against the provided context.
+     qa_input_question.submit(
+         get_qa_user_input,
+         [qa_input_question, qa_chatbot],
+         [qa_input_question, qa_chatbot],
+     ).then(qa_bot, [input_context, qa_chatbot], qa_chatbot)
+
+     qa_clear.click(lambda: None, None, qa_chatbot)
+
+     # Reset the Q&A chat history when the input context changes.
+     input_context.change(fn=reset_chatbot, inputs=[], outputs=qa_chatbot)
+
+     example_1.click(
+         load_customer_support_example,
+         [],
+         [input_context, qa_input_question],
+     )
+
+     example_2.click(
+         load_databricks_doc_example,
+         [],
+         [input_context, qa_input_question],
+     )
+
+ if __name__ == "__main__":
+     demo.queue(concurrency_count=1, max_size=100).launch(max_threads=5, debug=True, share=True)
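Note: the chat handlers above all funnel through a single call to `instruct_pipeline`. As a point of reference, a minimal standalone sketch (not part of this commit), assuming the list-of-records return format shown on the Dolly model card:

+ # Hypothetical standalone usage of the same pipeline loaded in app.py.
+ import torch
+ from transformers import pipeline
+
+ generate_text = pipeline(
+     model="databricks/dolly-v2-12b",
+     torch_dtype=torch.bfloat16,
+     trust_remote_code=True,
+     device_map="auto",
+ )
+ result = generate_text("Explain to me the difference between nuclear fission and fusion.")
+ # The pipeline returns a list of dicts; the text lives under "generated_text".
+ print(result[0]["generated_text"])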
dolly.jpg ADDED
examples.csv ADDED
@@ -0,0 +1,11 @@
+ doc,question
+ "I am writing to express my deep disappointment and frustration with the iPhone 14 Pro Max that I recently purchased. As a long-time Apple user and loyal customer, I was excited to upgrade to the latest and greatest iPhone model, but unfortunately, my experience with this device has been nothing short of a nightmare.
+
+ Firstly, I would like to address the issue of battery life on this device. I was under the impression that Apple had made significant improvements to their battery technology, but unfortunately, this has not been my experience. Despite using the phone conservatively, I find that I have to charge it at least twice a day just to ensure it doesn't die on me when I need it the most. This is extremely inconvenient and frustrating, especially when I have to carry around a bulky power bank or constantly hunt for charging outlets.
+
+ Furthermore, I am extremely disappointed with the camera quality on this device. Despite Apple's claims of improved camera technology, I have found that the photos I take on this phone are often blurry or grainy, and the colors are not as vibrant as I would like. This is especially disappointing considering the high price point of the iPhone 14 Pro Max, which is marketed as a premium smartphone with a top-of-the-line camera.
+
+ Overall, I feel as though I have been let down by Apple and their latest iPhone offering. As a loyal customer who has invested a significant amount of money into their products over the years, I expect better from a company that prides itself on innovation and customer satisfaction. I urge Apple to take these concerns seriously and make necessary improvements to the iPhone 14 Pro Max and future models.
+
+ Thank you for your attention to this matter.",Give me a list of the main complaints in this customer support ticket. Do not write a reply.
+ "Databricks SQL Serverless supports serverless compute. Admins can create serverless SQL warehouses (formerly SQL endpoints) that enable instant compute and are managed by Databricks. Serverless SQL warehouses use compute clusters in your Databricks account. Use them with Databricks SQL queries just like you normally would with the original customer-hosted SQL warehouses, which are now called classic SQL warehouses. Databricks changed the name from SQL endpoint to SQL warehouse because, in the industry, endpoint refers to either a remote computing device that communicates with a network that it's connected to, or an entry point to a cloud service. A data warehouse is a data management system that stores current and historical data from multiple sources in a business friendly manner for easier insights and reporting. SQL warehouse accurately describes the full capabilities of this compute resource. If serverless SQL warehouses are enabled for your account, note the following: New SQL warehouses are serverless by default when you create them from the UI. New SQL warehouses are not serverless by default when you create them using the API, which requires that you explicitly specify serverless. You can also create new pro or classic SQL warehouses using either method. You can upgrade a pro or classic SQL warehouse to a serverless SQL warehouse or a classic SQL warehouse to a pro SQL warehouse. You can also downgrade from serverless to pro or classic. This feature only affects Databricks SQL. It does not affect how Databricks Runtime clusters work with notebooks and jobs in the Data Science & Engineering or Databricks Machine Learning workspace environments. Databricks Runtime clusters always run in the classic data plane in your AWS account. See Serverless quotas. If your account needs updated terms of use, workspace admins are prompted in the Databricks SQL UI. If your workspace has an AWS instance profile, you might need to update the trust relationship to support serverless compute, depending on how and when it was created.",What is the default configuration for new DBSQL warehouses?
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ gradio
+ torch
+ transformers
+ accelerate