Zhengyi committed on
Commit 5c77e16
1 Parent(s): d56e314

remove cuda

Files changed (3)
  1. app.py +4 -4
  2. inference.py +19 -15
  3. model/crm/model.py +4 -4
app.py CHANGED
@@ -104,8 +104,8 @@ def gen_image(input_image, seed, scale, step):
     np_imgs = np.concatenate(stage1_images, 1)
     np_xyzs = np.concatenate(stage2_images, 1)
 
-    glb_path, obj_path = generate3d(model, np_imgs, np_xyzs, args.device)
-    return Image.fromarray(np_imgs), Image.fromarray(np_xyzs), glb_path, obj_path
+    glb_path = generate3d(model, np_imgs, np_xyzs, args.device)
+    return Image.fromarray(np_imgs), Image.fromarray(np_xyzs), glb_path#, obj_path
 
 
 parser = argparse.ArgumentParser()
@@ -202,7 +202,7 @@ with gr.Blocks() as demo:
             interactive=False,
         )
         gr.Markdown("Note: The GLB model shown here has a darker lighting and enlarged UV seams. Download for correct results.")
-        output_obj = gr.File(interactive=False, label="Output OBJ")
+        # output_obj = gr.File(interactive=False, label="Output OBJ")
 
     inputs = [
         processed_image,
@@ -214,7 +214,7 @@ with gr.Blocks() as demo:
         image_output,
         xyz_ouput,
         output_model,
-        output_obj,
+        # output_obj,
     ]
 

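For orientation, here is a minimal, self-contained Gradio sketch, not the project's actual app.py: gen_image_stub, the seed input, and the component labels are made up. It only illustrates the wiring this diff leaves in place: the click handler now returns three values, so the outputs list holds three components and the OBJ download widget is gone.

# Minimal sketch (hypothetical names), showing that a Gradio click handler must
# return exactly as many values as it has output components. After this commit
# gen_image returns (RGB image, CCM image, mesh path) -- three values.
import numpy as np
import gradio as gr
from PIL import Image

def gen_image_stub(seed):
    # Placeholder standing in for the real pipeline + generate3d() call.
    rgb = Image.fromarray(np.zeros((64, 64, 3), dtype=np.uint8))
    ccm = Image.fromarray(np.zeros((64, 64, 3), dtype=np.uint8))
    mesh_path = None  # the real app returns the path produced by generate3d()
    return rgb, ccm, mesh_path

with gr.Blocks() as demo:
    seed = gr.Number(value=1234, label="seed")
    btn = gr.Button("Generate")
    image_output = gr.Image(interactive=False, label="RGB views")
    xyz_ouput = gr.Image(interactive=False, label="CCM views")
    output_model = gr.Model3D(interactive=False, label="Output mesh")
    btn.click(gen_image_stub, inputs=[seed],
              outputs=[image_output, xyz_ouput, output_model])

if __name__ == "__main__":
    demo.launch()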
inference.py CHANGED
@@ -37,7 +37,7 @@ def generate3d(model, rgb, ccm, device):
     triplane = torch.cat([color,xyz],dim=1).to(device)
     # 3D visualize
     model.eval()
-    glctx = dr.RasterizeGLContext()#dr.RasterizeCudaContext()
+
 
     if model.denoising == True:
         tnew = 20
@@ -67,29 +67,33 @@ def generate3d(model, rgb, ccm, device):
 
 
     from kiui.mesh_utils import clean_mesh
-    verts, faces = clean_mesh(data_config['verts'].squeeze().cpu().numpy().astype(np.float32), data_config['faces'].squeeze().cpu().numpy().astype(np.int32), repair = False, remesh=False, remesh_size=0.005)
+    verts, faces = clean_mesh(data_config['verts'].squeeze().cpu().numpy().astype(np.float32), data_config['faces'].squeeze().cpu().numpy().astype(np.int32), repair = False, remesh=True, remesh_size=0.005, remesh_iters=1)
     data_config['verts'] = torch.from_numpy(verts).cuda().contiguous()
     data_config['faces'] = torch.from_numpy(faces).cuda().contiguous()
 
     start_time = time.time()
     with torch.no_grad():
-        mesh_path_obj = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
-        model.export_mesh_wt_uv(glctx, data_config, mesh_path_obj, "", device, res=(1024,1024), tri_fea_2=triplane_feature2)
-
-        mesh = Mesh.load(mesh_path_obj+".obj", bound=0.9, front_dir="+z")
         mesh_path_glb = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
-        mesh.write(mesh_path_glb+".glb")
+        model.export_mesh(data_config, mesh_path_glb, tri_fea_2 = triplane_feature2)
+
+        # glctx = dr.RasterizeGLContext()#dr.RasterizeCudaContext()
+        # mesh_path_obj = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
+        # model.export_mesh_wt_uv(glctx, data_config, mesh_path_obj, "", device, res=(1024,1024), tri_fea_2=triplane_feature2)
+
+        # mesh = Mesh.load(mesh_path_obj+".obj", bound=0.9, front_dir="+z")
+        # mesh_path_glb = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
+        # mesh.write(mesh_path_glb+".glb")
 
-        # mesh_obj2 = trimesh.load(mesh_path_glb+".glb", file_type='glb')
-        # mesh_path_obj2 = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
-        # mesh_obj2.export(mesh_path_obj2+".obj")
+        # # mesh_obj2 = trimesh.load(mesh_path_glb+".glb", file_type='glb')
+        # # mesh_path_obj2 = tempfile.NamedTemporaryFile(suffix=f"", delete=False).name
+        # # mesh_obj2.export(mesh_path_obj2+".obj")
 
-        with zipfile.ZipFile(mesh_path_obj+'.zip', 'w') as myzip:
-            myzip.write(mesh_path_obj+'.obj', mesh_path_obj.split("/")[-1]+'.obj')
-            myzip.write(mesh_path_obj+'.png', mesh_path_obj.split("/")[-1]+'.png')
-            myzip.write(mesh_path_obj+'.mtl', mesh_path_obj.split("/")[-1]+'.mtl')
+        # with zipfile.ZipFile(mesh_path_obj+'.zip', 'w') as myzip:
+        #     myzip.write(mesh_path_obj+'.obj', mesh_path_obj.split("/")[-1]+'.obj')
+        #     myzip.write(mesh_path_obj+'.png', mesh_path_obj.split("/")[-1]+'.png')
+        #     myzip.write(mesh_path_obj+'.mtl', mesh_path_obj.split("/")[-1]+'.mtl')
 
     end_time = time.time()
     elapsed_time = end_time - start_time
     print(f"uv takes {elapsed_time}s")
-    return mesh_path_glb+".glb", mesh_path_obj+'.zip'
+    return mesh_path_glb+".obj"
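The CUDA/OpenGL dependency goes away because the UV-texture path (export_mesh_wt_uv plus nvdiffrast's RasterizeGLContext) is commented out in favour of a plain vertex-colored export. Below is a rough, self-contained sketch of that export pattern, with toy geometry invented for illustration; it mirrors the trimesh calls that CRM.export_mesh now makes, not the project code itself.

# Toy verts/faces/colors for illustration only; in the repo they come from the
# model. The pattern: build a vertex-colored trimesh and write it straight to
# .obj, so no rasterizer (and hence no CUDA/OpenGL context) is needed.
import tempfile
import numpy as np
import trimesh

verts = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float32)
faces = np.array([[0, 1, 2], [0, 2, 3], [0, 3, 1], [1, 3, 2]], dtype=np.int32)
colors = np.random.rand(len(verts), 3)   # per-vertex colors in [0, 1]

out_path = tempfile.NamedTemporaryFile(suffix="", delete=False).name
mesh = trimesh.Trimesh(verts, faces, vertex_colors=colors, process=False)
mesh.export(out_path + ".obj")           # the kind of path generate3d now returns
print(out_path + ".obj")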
model/crm/model.py CHANGED
@@ -89,7 +89,7 @@ class CRM(nn.Module):
         _, verts, faces = self.renderer(data, pred_sdf, deformation, tet_verts, tet_indices, weight= weight)
         return verts[0].unsqueeze(0), faces[0].int()
 
-    def export_mesh(self, data, out_dir, ind, device=None, tri_fea_2 = None):
+    def export_mesh(self, data, out_dir, tri_fea_2 = None):
         verts = data['verts']
         faces = data['faces']
 
@@ -98,13 +98,13 @@
         # Expect predicted colors value range from [-1, 1]
         colors = (colors * 0.5 + 0.5).clip(0, 1)
 
-        verts = verts.squeeze().cpu().numpy()
-        faces = faces[..., [2, 1, 0]].squeeze().cpu().numpy()
+        verts = verts[..., [0, 2, 1]].squeeze().cpu().numpy()
+        faces = faces[..., [2, 1, 0]][..., [0, 2, 1]].squeeze().cpu().numpy()#faces[..., [2, 1, 0]].squeeze().cpu().numpy()
 
         # export the final mesh
         with torch.no_grad():
             mesh = trimesh.Trimesh(verts, faces, vertex_colors=colors, process=False) # important, process=True leads to seg fault...
-            mesh.export(out_dir / f'{ind}.obj')
+            mesh.export(f'{out_dir}.obj')
 
     def export_mesh_wt_uv(self, ctx, data, out_dir, ind, device, res, tri_fea_2=None):
110