visheratin committed
Commit aa179a2
1 Parent(s): 67e4fcc

Update app.py

Files changed (1)
  app.py  +11  -9
app.py CHANGED
@@ -42,14 +42,9 @@ def cached_vision_process(image, max_crops, num_tokens):
     return image_features.to(DEVICE, dtype=DTYPE)
 
 @spaces.GPU(duration=20)
-def answer_question(image, question, max_crops, num_tokens):
+def answer_question(image, question, max_crops, num_tokens, sample, temperature, top_k):
     prompt = f"""<|im_start|>user
 <image>
-{question}<|im_end|>
-<|im_start|>assistant
-"""
-    replace_prompt = f"""<|im_start|>user
-
 {question}<|im_end|>
 <|im_start|>assistant
 """
@@ -65,6 +60,9 @@ def answer_question(image, question, max_crops, num_tokens):
         "use_cache": True,
         "eos_token_id": processor.tokenizer.eos_token_id,
         "pad_token_id": processor.tokenizer.eos_token_id,
+        "temperature": temperature,
+        "sample": sample,
+        "top_k": top_k,
     }
     thread = Thread(target=model.generate, kwargs=generation_kwargs)
     thread.start()
@@ -101,8 +99,12 @@ with gr.Blocks() as demo:
     with gr.Row():
         img = gr.Image(type="pil", label="Upload or Drag an Image")
         output = gr.TextArea(label="Answer")
+    with gr.Row():
+        sample = gr.Checkbox(label="Sample", value=False)
+        temperature = gr.Slider(minimum=0, maximum=1, step=0.1, value=0, label="Temperature")
+        top_k = gr.Slider(minimum=0, maximum=50, step=1, value=0, label="Top-K")
 
-    submit.click(answer_question, [img, prompt, max_crops, num_tokens], output)
-    prompt.submit(answer_question, [img, prompt, max_crops, num_tokens], output)
+    submit.click(answer_question, [img, prompt, max_crops, num_tokens, sample, temperature, top_k], output)
+    prompt.submit(answer_question, [img, prompt, max_crops, num_tokens, sample, temperature, top_k], output)
 
-demo.queue().launch(debug=True, share=True)
+demo.queue().launch(debug=True)
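
For readers following the change: the sketch below shows, in self-contained form, the threaded streaming pattern that the generation_kwargs / Thread(target=model.generate, ...) lines above rely on, and how the new sample, temperature, and top_k values would be forwarded. It is an illustration only, not the Space's actual app.py: the small stand-in model ("gpt2"), the TextIteratorStreamer wiring, and the standard transformers do_sample flag are assumptions on my part; the commit itself passes the flag as "sample", presumably handled by the Space's own model.

# Hedged sketch (not the Space's code): threaded, streaming generation with the
# sampling controls this commit adds. A small stand-in model is used so the
# example is runnable; the Space's custom model/processor are not reproduced.
from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")


def stream_answer(question, sample, temperature, top_k, num_tokens=64):
    inputs = tokenizer(question, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True)
    generation_kwargs = {
        **inputs,
        "max_new_tokens": num_tokens,
        "streamer": streamer,
        "use_cache": True,
        "eos_token_id": tokenizer.eos_token_id,
        "pad_token_id": tokenizer.eos_token_id,
        # Stock transformers generate() expects "do_sample"; the commit passes
        # "sample", presumably consumed by the Space's custom generate().
        "do_sample": sample,
        "temperature": temperature,
        "top_k": top_k,
    }
    # Run generation in a background thread and yield partial text, mirroring
    # the Thread(target=model.generate, kwargs=generation_kwargs) lines in the diff.
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()
    answer = ""
    for chunk in streamer:
        answer += chunk
        yield answer
    thread.join()


if __name__ == "__main__":
    final = ""
    for final in stream_answer("Describe the picture:", sample=True, temperature=0.7, top_k=40):
        pass
    print(final)

On the Gradio side, the new Checkbox and Slider values are simply appended to the inputs list of submit.click and prompt.submit, as the last hunk shows, so they reach answer_question on every request.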