phoen1x committed
Commit ff2c107
1 Parent(s): 1aa0e55

Update app.py

Files changed (1)
  1. app.py +34 -51
app.py CHANGED
@@ -105,24 +105,28 @@
 
 from huggingface_hub import InferenceClient
 import gradio as gr
-import PyPDF2
+from PyPDF2 import PdfReader
 
 client = InferenceClient(
     "mistralai/Mixtral-8x7B-Instruct-v0.1"
 )
 
-
 def format_prompt(message, history):
-    prompt = "<s>"
-    for user_prompt, bot_response in history:
-        prompt += f"[INST] {user_prompt} [/INST]"
-        prompt += f" {bot_response}</s> "
-    prompt += f"[INST] {message} [/INST]"
-    return prompt
-
-def generate(
-    prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
-):
+    prompt = "<s>"
+    for user_prompt, bot_response in history:
+        prompt += f"[INST] {user_prompt} [/INST]"
+        prompt += f" {bot_response}</s> "
+    prompt += f"[INST] {message} [/INST]"
+    return prompt
+
+def extract_text_from_pdf(pdf_file):
+    text = ""
+    pdf_reader = PdfReader(pdf_file)
+    for page in pdf_reader.pages:
+        text += page.extract_text()
+    return text
+
+def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0, pdf_text=None):
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
@@ -138,6 +142,9 @@ def generate(
     )
 
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
+    if pdf_text:
+        formatted_prompt += pdf_text
+
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
@@ -146,13 +153,13 @@ def generate(
         yield output
     return output
 
-
 additional_inputs=[
     gr.Textbox(
         label="System Prompt",
         max_lines=1,
         interactive=True,
     ),
+    gr.File("file", label="Upload PDF"),
     gr.Slider(
         label="Temperature",
         value=0.9,
@@ -191,45 +198,21 @@ additional_inputs=[
     )
 ]
 
-examples=[["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, ],
-          ["Can you write a short story about a time-traveling detective who solves historical mysteries?", None, None, None, None, None,],
-          ["I'm trying to learn French. Can you provide some common phrases that would be useful for a beginner, along with their pronunciations?", None, None, None, None, None,],
-          ["I have chicken, rice, and bell peppers in my kitchen. Can you suggest an easy recipe I can make with these ingredients?", None, None, None, None, None,],
-          ["Can you explain how the QuickSort algorithm works and provide a Python implementation?", None, None, None, None, None,],
-          ["What are some unique features of Rust that make it stand out compared to other systems programming languages like C++?", None, None, None, None, None,],
-         ]
+examples=[
+    ["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, ],
+    ["Can you write a short story about a time-traveling detective who solves historical mysteries?", None, None, None, None, None,],
+    ["I'm trying to learn French. Can you provide some common phrases that would be useful for a beginner, along with their pronunciations?", None, None, None, None, None,],
+    ["I have chicken, rice, and bell peppers in my kitchen. Can you suggest an easy recipe I can make with these ingredients?", None, None, None, None, None,],
+    ["Can you explain how the QuickSort algorithm works and provide a Python implementation?", None, None, None, None, None,],
+    ["What are some unique features of Rust that make it stand out compared to other systems programming languages like C++?", None, None, None, None, None,],
+]
 
-gr.Interface(
+gr.ChatInterface(
     fn=generate,
-    inputs=[
-        gr.Textbox(
-            label="Prompt",
-            lines=3,
-            placeholder="Start typing here...",
-            default="",
-            example="Can you summarize the content of the document?",
-            type="str"
-        ),
-        gr.Textbox(
-            label="History",
-            lines=3,
-            placeholder="Enter conversation history...",
-            default="",
-            type="str"
-        ),
-        gr.Textbox(
-            label="System Prompt",
-            lines=1,
-            placeholder="Enter system prompt...",
-            default="",
-            type="str"
-        ),
-        gr.File(label="Upload PDF Document", type="upload"),
-        *additional_inputs
-    ],
-    outputs=gr.Textbox(label="Output"),
+    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
+    additional_inputs=additional_inputs,
     title="Mixtral 46.7B",
     examples=examples,
-    allow_flagging=False,
-    allow_screenshot=False
-).launch(show_api=True)
+    concurrency_limit=20,
+).launch(show_api=True)
+
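
For reference, the format_prompt() helper kept by this commit builds the usual Mixtral-Instruct turn layout. Below is a small illustration of the string it produces; the history and message values are invented for the example.

# Illustration only: the prompt string format_prompt() returns for a one-turn
# history plus a new message (values below are invented for the example).
history = [("Hi", "Hello! How can I help?")]
message = "Can you summarize the uploaded document?"

prompt = "<s>"
for user_prompt, bot_response in history:
    prompt += f"[INST] {user_prompt} [/INST]"
    prompt += f" {bot_response}</s> "
prompt += f"[INST] {message} [/INST]"

print(prompt)
# <s>[INST] Hi [/INST] Hello! How can I help?</s> [INST] Can you summarize the uploaded document? [/INST]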
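
Note that, as committed, generate() accepts a pdf_text keyword but nothing shown in the diff calls extract_text_from_pdf, and the gr.File component sits between the System Prompt box and the sliders, so with ChatInterface's positional additional_inputs the upload likely does not land in the pdf_text slot. The following is a minimal sketch of one way the pieces could be wired together; the parameter order, gr.File(type="filepath"), the slider ranges, and the `or ""` guard are assumptions, not the committed behaviour.

# Hypothetical rewiring, not the committed code: accept the uploaded PDF as a
# dedicated additional input, extract its text, and append it to the prompt.
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
import gradio as gr

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def format_prompt(message, history):
    # Same Mixtral-Instruct layout as the committed format_prompt().
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST] {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

def extract_text_from_pdf(pdf_path):
    # PdfReader accepts a path or file-like object; extract_text() may return
    # None for image-only pages, hence the `or ""` guard.
    text = ""
    for page in PdfReader(pdf_path).pages:
        text += page.extract_text() or ""
    return text

def generate(message, history, system_prompt, pdf_file=None, temperature=0.9,
             max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    # With gr.File(type="filepath"), pdf_file is a filesystem path or None.
    pdf_text = extract_text_from_pdf(pdf_file) if pdf_file else ""
    formatted_prompt = format_prompt(f"{system_prompt}, {message}", history) + pdf_text
    stream = client.text_generation(
        formatted_prompt,
        temperature=max(float(temperature), 1e-2),  # mirror the committed clamp
        max_new_tokens=int(max_new_tokens),
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output

gr.ChatInterface(
    fn=generate,
    additional_inputs=[
        gr.Textbox(label="System Prompt", max_lines=1, interactive=True),
        gr.File(label="Upload PDF", type="filepath"),
        gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05),
        gr.Slider(label="Max new tokens", value=256, minimum=1, maximum=1024, step=1),
        gr.Slider(label="Top-p", value=0.95, minimum=0.0, maximum=1.0, step=0.05),
        gr.Slider(label="Repetition penalty", value=1.0, minimum=1.0, maximum=2.0, step=0.05),
    ],
    title="Mixtral 46.7B",
).launch(show_api=True)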