fl399 committed on
Commit
ba3183a
1 Parent(s): 58b14ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -188,14 +188,14 @@ def evaluate(
188
  return output
189
 
190
 
191
- def process_document(image, question, llm, num_shot):
192
  # image = Image.open(image)
193
  inputs = processor_deplot(images=image, text="Generate the underlying data table for the figure below:", return_tensors="pt").to(0, torch.bfloat16)
194
  predictions = model_deplot.generate(**inputs, max_new_tokens=512)
195
  table = processor_deplot.decode(predictions[0], skip_special_tokens=True).replace("<0x0A>", "\n")
196
 
197
  # send prompt+table to LLM
198
- res = evaluate(table, question, llm=llm, num_shot=num_shot)
199
  if llm == "alpaca-lora":
200
  return [table, res.split("A:")[-1]]
201
  else:
@@ -241,7 +241,7 @@ with gr.Blocks(theme=theme) as demo:
241
  output_text = gr.Textbox(lines=8,label="Output")
242
 
243
  gr.Examples(
244
- examples=[["deplot_case_study_m1.png", "What is the sum of numbers of Indonesia and Ireland? Remember to think step by step.", "alpaca-lora"],
245
  ["deplot_case_study_m1.png", "Summarise the chart for me please.", "alpaca-lora"],
246
  ["deplot_case_study_3.png", "By how much did China's growth rate drop? Think step by step.", "alpaca-lora"],
247
  ["deplot_case_study_4.png", "How many papers are submitted in 2020?", "alpaca-lora"],
@@ -249,11 +249,11 @@ with gr.Blocks(theme=theme) as demo:
249
  ["deplot_case_study_4.png", "How many papers are submitted in 2020?", "flan-ul2"],
250
  ["deplot_case_study_4.png", "acceptance rate = # accepted / #submitted . What is the acceptance rate of 2010?", "flan-ul2"],
251
  ["deplot_case_study_m1.png", "Summarise the chart for me please.", "flan-ul2"],
252
- ],
253
- cache_examples=True,
254
- inputs=[input_image, instruction, llm],
255
- outputs=[output_table, output_text],
256
- fn=process_document
257
  )
258
 
259
  gr.Markdown(
 
188
  return output
189
 
190
 
191
+ def process_document(image, question, llm):
192
  # image = Image.open(image)
193
  inputs = processor_deplot(images=image, text="Generate the underlying data table for the figure below:", return_tensors="pt").to(0, torch.bfloat16)
194
  predictions = model_deplot.generate(**inputs, max_new_tokens=512)
195
  table = processor_deplot.decode(predictions[0], skip_special_tokens=True).replace("<0x0A>", "\n")
196
 
197
  # send prompt+table to LLM
198
+ res = evaluate(table, question, llm=llm)
199
  if llm == "alpaca-lora":
200
  return [table, res.split("A:")[-1]]
201
  else:
 
241
  output_text = gr.Textbox(lines=8,label="Output")
242
 
243
  gr.Examples(
244
+ examples=[["deplot_case_study_m1.png", "What is the sum of numbers of Indonesia and Ireland? Remember to think step by step.", "alpaca-lora"],
245
  ["deplot_case_study_m1.png", "Summarise the chart for me please.", "alpaca-lora"],
246
  ["deplot_case_study_3.png", "By how much did China's growth rate drop? Think step by step.", "alpaca-lora"],
247
  ["deplot_case_study_4.png", "How many papers are submitted in 2020?", "alpaca-lora"],
 
249
  ["deplot_case_study_4.png", "How many papers are submitted in 2020?", "flan-ul2"],
250
  ["deplot_case_study_4.png", "acceptance rate = # accepted / #submitted . What is the acceptance rate of 2010?", "flan-ul2"],
251
  ["deplot_case_study_m1.png", "Summarise the chart for me please.", "flan-ul2"],
252
+ ],
253
+ cache_examples=True,
254
+ inputs=[input_image, instruction, llm],
255
+ outputs=[output_table, output_text],
256
+ fn=process_document
257
  )
258
 
259
  gr.Markdown(