tombio committed
Commit 39884f7
1 Parent(s): 456dd19

Update app.py

Files changed (1):
  1. app.py +11 -8
app.py CHANGED
@@ -37,8 +37,7 @@ def get_url_im(t):
     return Image.open(BytesIO(response.content))
 
 @torch.no_grad()
-def get_im_c(im_path, clip_model):
-    im = Image.open(im_path).convert("RGB")
+def get_im_c(im, clip_model):
     prompts = preprocess(im).to(device).unsqueeze(0)
     return clip_model.encode_image(prompts).float()
 
@@ -120,15 +119,19 @@ def run(args):
     import gradio
 
     def my_inference_function(person):
-        ex = [
+        inputs = [
             "Image", "Image", "Text/URL", "Nothing", "Nothing",
-            "", "", "central symmetric figure detailed artwork", "", "",
-            "gainsborough.jpeg", "blonder.jpeg", "blonder.jpeg", "blonder.jpeg", "blonder.jpeg",
-            1, 1.35, 1.4, 1, 1,
-            3.0, 1, 0, 30,
+            "", "", "flowers", "", "",
+            Image.open("ex2-1.jpeg.jpeg").convert("RGB"),
+            Image.open("ex2-2.jpeg").convert("RGB"),
+            Image.open("blonder.jpeg").convert("RGB"),
+            person,
+            Image.open("blonder.jpeg").convert("RGB"),
+            1, 1, 1.5, 1.4, 1,
+            3.0, 1, 0, 40,
         ]
 
-        return person
+        return run(inputs)
 
     gradio_interface = gradio.Interface(
         fn=my_inference_function,
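
For context, a minimal sketch (not part of the commit) of how the changed pieces would be used: get_im_c now expects an already-opened PIL image rather than a file path, and my_inference_function feeds the hard-coded example list into run(). The inputs/outputs arguments of the truncated gradio.Interface call are assumptions here, since they are not shown in the diff.

    # Sketch only -- caller-side view of the changed functions; clip_model, run,
    # preprocess and device are assumed to exist elsewhere in app.py.
    from PIL import Image
    import gradio

    # After this commit the caller opens and converts the image itself:
    im = Image.open("blonder.jpeg").convert("RGB")
    embedding = get_im_c(im, clip_model)   # was get_im_c("blonder.jpeg", clip_model)

    # Plausible wiring for the truncated gradio.Interface(...) call:
    gradio_interface = gradio.Interface(
        fn=my_inference_function,          # receives the uploaded `person` image
        inputs=gradio.Image(type="pil"),   # assumption: a PIL image input
        outputs="image",                   # assumption: run(inputs) yields an image
    )
    gradio_interface.launch()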