kushinm committed on
Commit 68e19a9 • 1 Parent(s): cd9755a

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -18,6 +18,7 @@ model_id = "Salesforce/blip-image-captioning-base" ## load modelID for BLIP
 blipmodel = BlipForConditionalGeneration.from_pretrained(model_id)
 blipprocessor = BlipProcessor.from_pretrained(model_id)
 
+im_dir = os.path.join(os.getcwd(),'images')
 
 def evaluate_caption(image, caption):
     # # Pre-process image
@@ -59,7 +60,7 @@ with gr.Blocks() as demo:
         # inputs=["image", "text"]
 
     with gr.Column():
-        im = gr.Image(label="Target Image", interactive = False, type="pil",value =f'images/{im_path_str}',height=500)
+        im = gr.Image(label="Target Image", interactive = False, type="pil",value =os.path.join(im_dir,im_path_str),height=500)
         caps = gr.Textbox(label="Player 1 Caption")
         submit_btn = gr.Button("Submit!!")
         # outputs=["text","text"],
@@ -77,4 +78,4 @@ with gr.Blocks() as demo:
     # btn = gr.Button("Flag")
     # btn.click(lambda *args: callback.flag(args), [im, caps, out1, out2], None, preprocess=False)
 
-demo.launch(debug=False)
+demo.launch()
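
For reference, a minimal sketch of how the blipmodel/blipprocessor pair loaded above is typically used to caption an image with the Hugging Face transformers API. The body of evaluate_caption is not shown in this diff, so this is an illustrative assumption rather than the app's actual logic, and the filename example.jpg is hypothetical.

import os
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration

model_id = "Salesforce/blip-image-captioning-base"
blipprocessor = BlipProcessor.from_pretrained(model_id)
blipmodel = BlipForConditionalGeneration.from_pretrained(model_id)

# Resolve the image path the same way the updated app.py does:
# anchored at the working directory instead of a relative string.
im_dir = os.path.join(os.getcwd(), "images")
image = Image.open(os.path.join(im_dir, "example.jpg")).convert("RGB")  # hypothetical file

# Unconditional captioning: encode the image alone, generate token ids,
# then decode them back to text.
inputs = blipprocessor(images=image, return_tensors="pt")
output_ids = blipmodel.generate(**inputs, max_new_tokens=30)
print(blipprocessor.decode(output_ids[0], skip_special_tokens=True))

Building the path with os.path.join(im_dir, ...) keeps the image lookup independent of which directory the app happens to be launched from.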