Zhengyi committed on
Commit
3c43212
1 Parent(s): 5c77e16
Files changed (2) hide show
  1. app.py +3 -4
  2. model/crm/model.py +6 -2
app.py CHANGED
@@ -88,7 +88,7 @@ def preprocess_image(image, background_choice, foreground_ratio, backgroud_color
88
  background = Image.new("RGBA", image.size, (0, 0, 0, 0))
89
  image = Image.alpha_composite(background, image)
90
  else:
91
- image = remove_background(image, rembg_session, force_remove=True)
92
  image = do_resize_content(image, foreground_ratio)
93
  image = expand_to_square(image)
94
  image = add_background(image, backgroud_color)
@@ -198,11 +198,10 @@ with gr.Blocks() as demo:
198
  xyz_ouput = gr.Image(interactive=False, label="Output CCM image")
199
 
200
  output_model = gr.Model3D(
201
- label="Output GLB",
202
  interactive=False,
203
  )
204
- gr.Markdown("Note: The GLB model shown here has a darker lighting and enlarged UV seams. Download for correct results.")
205
- # output_obj = gr.File(interactive=False, label="Output OBJ")
206
 
207
  inputs = [
208
  processed_image,
 
88
  background = Image.new("RGBA", image.size, (0, 0, 0, 0))
89
  image = Image.alpha_composite(background, image)
90
  else:
91
+ image = remove_background(image, rembg_session, force=True)
92
  image = do_resize_content(image, foreground_ratio)
93
  image = expand_to_square(image)
94
  image = add_background(image, backgroud_color)
 
198
  xyz_ouput = gr.Image(interactive=False, label="Output CCM image")
199
 
200
  output_model = gr.Model3D(
201
+ label="Output OBJ",
202
  interactive=False,
203
  )
204
+ gr.Markdown("Note: Use our official code to support higher resolution texture.")
 
205
 
206
  inputs = [
207
  processed_image,
model/crm/model.py CHANGED
@@ -98,8 +98,12 @@ class CRM(nn.Module):
98
  # Expect predicted colors value range from [-1, 1]
99
  colors = (colors * 0.5 + 0.5).clip(0, 1)
100
 
101
- verts = verts[..., [0, 2, 1]].squeeze().cpu().numpy()
102
- faces = faces[..., [2, 1, 0]][..., [0, 2, 1]].squeeze().cpu().numpy()#faces[..., [2, 1, 0]].squeeze().cpu().numpy()
 
 
 
 
103
 
104
  # export the final mesh
105
  with torch.no_grad():
 
98
  # Expect predicted colors value range from [-1, 1]
99
  colors = (colors * 0.5 + 0.5).clip(0, 1)
100
 
101
+ verts = verts[..., [0, 2, 1]]
102
+ verts[..., 0]*= -1
103
+ verts[..., 2]*= -1
104
+ verts = verts.squeeze().cpu().numpy()
105
+ faces = faces[..., [2, 1, 0]][..., [0, 2, 1]]#[..., [1, 0, 2]]
106
+ faces = faces.squeeze().cpu().numpy()#faces[..., [2, 1, 0]].squeeze().cpu().numpy()
107
 
108
  # export the final mesh
109
  with torch.no_grad():