hysts (HF staff) committed
Commit 64325c9
Parent: 863b4ea

Use gradio_client.file

Files changed (1)
  1. app.py +11 -7
app.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import gradio as gr
 import torch
-from gradio_client import Client
+from gradio_client import Client, file
 
 DESCRIPTION = "# Comparing image captioning models"
 ORIGINAL_SPACE_INFO = """\
@@ -26,7 +26,7 @@ torch.hub.download_url_to_file(
 def generate_caption_git(image_path: str) -> str:
     try:
         client = Client("hysts/image-captioning-with-git")
-        return client.predict(image_path, api_name="/caption")
+        return client.predict(file(image_path), api_name="/caption")
     except Exception:
         gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
         return ""
@@ -35,7 +35,7 @@ def generate_caption_git(image_path: str) -> str:
 def generate_caption_blip(image_path: str) -> str:
     try:
         client = Client("hysts/image-captioning-with-blip")
-        return client.predict(image_path, "A picture of", api_name="/caption")
+        return client.predict(file(image_path), "A picture of", api_name="/caption")
     except Exception:
         gr.Warning("The BLIP-large Space is currently unavailable. Please try again later.")
         return ""
@@ -45,7 +45,7 @@ def generate_caption_blip2_opt(image_path: str) -> str:
     try:
         client = Client("merve/BLIP2-with-transformers")
         return client.predict(
-            image_path,
+            file(image_path),
             "Beam search",
             1,  # temperature
             1,  # length penalty
@@ -61,7 +61,7 @@ def generate_caption_blip2_t5xxl(image_path: str) -> str:
     try:
         client = Client("hysts/BLIP2")
         return client.predict(
-            image_path,
+            file(image_path),
             "Beam search",
             1,  # temperature
             1,  # length penalty
@@ -81,7 +81,7 @@ def generate_caption_instructblip(image_path: str) -> str:
     try:
         client = Client("hysts/InstructBLIP")
         return client.predict(
-            image_path,
+            file(image_path),
             "Describe the image.",
             "Beam search",
             5,  # beam size
@@ -101,7 +101,11 @@ def generate_caption_instructblip(image_path: str) -> str:
 def generate_caption_fuyu(image_path: str) -> str:
     try:
         client = Client("adept/fuyu-8b-demo")
-        return client.predict(image_path, "Generate a coco style caption.\n", fn_index=3)
+        return client.predict(
+            file(image_path),
+            "Generate a coco style caption.\n",
+            fn_index=3,
+        )
     except Exception:
         gr.Warning("The Fuyu-8B Space is currently unavailable. Please try again later.")
         return ""