jacklangerman dmytromishkin committed on
Commit
11827d2
1 Parent(s): a90c35f

metric-fix (#3)


- Added a fix to the metric: corrected an index mismatch (see the sketch below) and added zero-mean normalization (2633f6b0f66a8fa4edb6e5a9c77ca55b004ebc71)
- update metric (4a7e4e02fe8c2cbceb48e1c646c4d02996523634)
- update constants (5cd2bb760d54caa704cd189d2a204a6ff9eb31a7)
- Cleaned up and added a diameter-based cv cost (d7cb5e40aa3a01ee8d5358eca2258e6799b99d41)
- Merge branch 'metric-fix' into pr/1 (4a295c28cfbba070667a9d7edc736fb30c96bd1b)
- tweak docs (57535bbad2d6fc63d5fba90159e1fb47170d42c0)
- tweak docs more (c3c7e12032769b9469ca9e4f9ada6830d219be7f)


Co-authored-by: Dmytro Mishkin <dmytromishkin@users.noreply.huggingface.co>
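The index-mismatch fix referenced in the first bullet changes how predicted edges are re-expressed in ground-truth vertex indices after the bipartite matching: linear_sum_assignment returns row_ind over predicted vertices and col_ind over their matched ground-truth vertices, and edge endpoints must be looked up through row_ind and mapped via col_ind. A minimal sketch of that mapping, using toy coordinates that are illustrative and not from the dataset:

import numpy as np
from scipy.optimize import linear_sum_assignment
from scipy.spatial.distance import cdist

# Toy vertices (illustrative only).
pd_vertices = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
gt_vertices = np.array([[0.1, 0.0, 0.0], [0.0, 1.1, 0.0], [1.0, 0.1, 0.0]])

row_ind, col_ind = linear_sum_assignment(cdist(pd_vertices, gt_vertices))
# row_ind[i] indexes a predicted vertex and col_ind[i] its matched ground-truth vertex,
# so a predicted edge (a, b) is looked up through row_ind and mapped via col_ind:
pd_edge = (0, 2)
mapped = tuple(int(col_ind[np.where(row_ind == v)[0][0]]) for v in pd_edge)
print(mapped)  # (0, 1): predicted edge (0, 2) expressed in ground-truth vertex indices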

Files changed (4)
  1. hoho/vis.py +3 -2
  2. hoho/wed.py +72 -19
  3. requirements.txt +7 -5
  4. setup.py +1 -1
hoho/vis.py CHANGED
@@ -133,7 +133,8 @@ def create_image_grid(images, target_length=312, num_per_row=2):
     return grid_img
 
 
-import matplotlib
+import matplotlib.pyplot as plt
+
 def visualize_depth(depth, min_depth=None, max_depth=None, cmap='rainbow'):
     depth = np.array(depth)
 
@@ -148,7 +149,7 @@ def visualize_depth(depth, min_depth=None, max_depth=None, cmap='rainbow'):
     depth = np.clip(depth, 0, 1)
 
     # Use the matplotlib colormap to convert the depth to an RGB image
-    cmap = matplotlib.cm.get_cmap(cmap)
+    cmap = plt.get_cmap(cmap)
     depth_image = (cmap(depth) * 255).astype(np.uint8)
 
     # Convert the depth image to a PIL image
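The change above swaps matplotlib.cm.get_cmap, which has been deprecated (and removed in recent matplotlib releases), for plt.get_cmap, so the colormap lookup keeps working on newer installs. A minimal usage sketch of visualize_depth, assuming hoho is installed; the synthetic depth array is illustrative, and saving the result assumes the function returns the PIL image mentioned in its trailing comment:

import numpy as np
from hoho.vis import visualize_depth

depth = np.random.rand(120, 160) * 10.0  # synthetic depth map, arbitrary units (illustrative)
img = visualize_depth(depth, min_depth=0.0, max_depth=10.0, cmap='rainbow')
img.save('depth_vis.png')  # assumes the PIL image built at the end of the function is returned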
hoho/wed.py CHANGED
@@ -2,43 +2,94 @@ from scipy.spatial.distance import cdist
 from scipy.optimize import linear_sum_assignment
 import numpy as np
 
-def compute_WED(pd_vertices, pd_edges, gt_vertices, gt_edges, cv=1.0, ce=1.0, normalized=True, squared=False):
+
+def preregister_mean_std(verts_to_transform, target_verts, single_scale=True):
+    mu_target = target_verts.mean(axis=0)
+    mu_in = verts_to_transform.mean(axis=0)
+    std_target = np.std(target_verts, axis=0)
+    std_in = np.std(verts_to_transform, axis=0)
+
+    if np.any(std_in == 0):
+        std_in[std_in == 0] = 1
+    if np.any(std_target == 0):
+        std_target[std_target == 0] = 1
+    if np.any(np.isnan(std_in)):
+        std_in[np.isnan(std_in)] = 1
+    if np.any(np.isnan(std_target)):
+        std_target[np.isnan(std_target)] = 1
+
+    if single_scale:
+        std_target = np.linalg.norm(std_target)
+        std_in = np.linalg.norm(std_in)
+
+    transformed_verts = (verts_to_transform - mu_in) / std_in
+    transformed_verts = transformed_verts * std_target + mu_target
+
+    return transformed_verts
+
+
+def compute_WED(pd_vertices, pd_edges, gt_vertices, gt_edges, cv=-1, ce=1.0, normalized=True, preregister=True, single_scale=True):
+    '''The function computes the Wireframe Edge Distance (WED) between two graphs.
+    pd_vertices: list of predicted vertices
+    pd_edges: list of predicted edges
+    gt_vertices: list of ground truth vertices
+    gt_edges: list of ground truth edges
+    cv: vertex cost (the cost in centimeters of missing a vertex; default is -1, which means 1/4 of the diameter of the ground truth mesh)
+    ce: edge cost (multiplier of the edge length for edge deletion and insertion, default is 1.0)
+    normalized: if True, the WED is normalized by the total length of the ground truth edges
+    preregister: if True, the predicted vertices have their mean and scale matched to the ground truth vertices
+    '''
+
+    # Vertex coordinates are in centimeters. When cv and ce are set to 100.0 and 1.0 respectively,
+    # missing a vertex is equivalent to predicting it 1 meter away from the ground truth vertex.
+    # This is equivalent to setting cv=1 and ce=1 when the vertex coordinates are in meters.
+    # When a negative cv value is set (the default behavior), cv is reset to 1/4 of the diameter of the ground truth wireframe.
+
     pd_vertices = np.array(pd_vertices)
     gt_vertices = np.array(gt_vertices)
+
+    diameter = cdist(gt_vertices, gt_vertices).max()
+
+    if cv < 0:
+        cv = diameter / 4.0
+        # Cost of adding or deleting a vertex is set to 1/4 of the diameter of the ground truth mesh
+
+    # Step 0: Prenormalize / preregister
+    if preregister:
+        pd_vertices = preregister_mean_std(pd_vertices, gt_vertices, single_scale=single_scale)
+
+
     pd_edges = np.array(pd_edges)
-    gt_edges = np.array(gt_edges)
+    gt_edges = np.array(gt_edges)
 
     # Step 1: Bipartite Matching
-    if squared:
-        distances = cdist(pd_vertices, gt_vertices, metric='sqeuclidean')
-    else:
-        distances = cdist(pd_vertices, gt_vertices, metric='euclidean')
-
+    distances = cdist(pd_vertices, gt_vertices, metric='euclidean')
     row_ind, col_ind = linear_sum_assignment(distances)
+
 
     # Step 2: Vertex Translation
-
-    if squared:
-        translation_costs = cv * np.sqrt(np.sum(distances[row_ind, col_ind]))
-    else:
-        translation_costs = cv * np.sum(distances[row_ind, col_ind])
+    translation_costs = np.sum(distances[row_ind, col_ind])
 
     # Additional: Vertex Deletion
     unmatched_pd_indices = set(range(len(pd_vertices))) - set(row_ind)
-    deletion_costs = cv * len(unmatched_pd_indices)  # Assuming a fixed cost for vertex deletion
+    deletion_costs = cv * len(unmatched_pd_indices)
 
     # Step 3: Vertex Insertion
     unmatched_gt_indices = set(range(len(gt_vertices))) - set(col_ind)
-    insertion_costs = cv * len(unmatched_gt_indices)  # Assuming a fixed cost for vertex insertion
+    insertion_costs = cv * len(unmatched_gt_indices)
 
     # Step 4: Edge Deletion and Insertion
-    updated_pd_edges = [(row_ind[np.where(col_ind == edge[0])[0][0]], row_ind[np.where(col_ind == edge[1])[0][0]]) for edge in pd_edges if edge[0] in col_ind and edge[1] in col_ind]
-    pd_edges_set = set(map(tuple, updated_pd_edges))
-    gt_edges_set = set(map(tuple, gt_edges))
+    updated_pd_edges = [(col_ind[np.where(row_ind == edge[0])[0][0]], col_ind[np.where(row_ind == edge[1])[0][0]]) for edge in pd_edges if edge[0] in row_ind and edge[1] in row_ind]
+    pd_edges_set = set(map(tuple, [set(edge) for edge in updated_pd_edges]))
+    gt_edges_set = set(map(tuple, [set(edge) for edge in gt_edges]))
+
 
     # Delete edges not in ground truth
     edges_to_delete = pd_edges_set - gt_edges_set
-    deletion_edge_costs = ce * sum(np.linalg.norm(pd_vertices[edge[0]] - pd_vertices[edge[1]]) for edge in edges_to_delete)
+
+    vert_tf = [np.where(col_ind == v)[0][0] if v in col_ind else 0 for v in range(len(gt_vertices))]
+    deletion_edge_costs = ce * sum(np.linalg.norm(pd_vertices[vert_tf[edge[0]]] - pd_vertices[vert_tf[edge[1]]]) for edge in edges_to_delete)
+
 
     # Insert missing edges from ground truth
     edges_to_insert = gt_edges_set - pd_edges_set
@@ -46,9 +97,11 @@ def compute_WED(pd_vertices, pd_edges, gt_vertices, gt_edges, cv=1.0, ce=1.0, no
 
     # Step 5: Calculation of WED
     WED = translation_costs + deletion_costs + insertion_costs + deletion_edge_costs + insertion_edge_costs
+
 
     if normalized:
         total_length_of_gt_edges = np.linalg.norm((gt_vertices[gt_edges[:, 0]] - gt_vertices[gt_edges[:, 1]]), axis=1).sum()
         WED = WED / total_length_of_gt_edges
-
+
+    # print ("Total length", total_length_of_gt_edges)
     return WED
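To make the new defaults concrete: with cv=-1 the vertex insertion/deletion cost becomes one quarter of the ground-truth diameter, preregister=True zero-means and rescales the prediction before matching, and normalized=True divides the total cost by the summed length of the ground-truth edges. A minimal usage sketch of the updated compute_WED on a toy square wireframe (coordinates and edge lists below are illustrative, not from the dataset):

import numpy as np
from hoho.wed import compute_WED

# Toy ground-truth wireframe: a 100 cm x 100 cm square.
gt_vertices = np.array([[0.0, 0.0, 0.0], [100.0, 0.0, 0.0], [100.0, 100.0, 0.0], [0.0, 100.0, 0.0]])
gt_edges = [[0, 1], [1, 2], [2, 3], [3, 0]]

# A shifted, slightly perturbed prediction that is missing one of the four edges.
pd_vertices = gt_vertices + 50.0 + np.array([[1.0, 0.0, 0.0], [0.0, 2.0, 0.0], [1.0, 1.0, 0.0], [0.0, 0.0, 0.0]])
pd_edges = [[0, 1], [1, 2], [2, 3]]

wed = compute_WED(pd_vertices, pd_edges, gt_vertices, gt_edges)  # defaults: cv=-1, ce=1.0, normalized, preregistered
print(f"WED: {wed:.3f}")  # 0.0 for a perfect prediction; grows with vertex and edge errors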
requirements.txt CHANGED
@@ -1,8 +1,10 @@
+datasets
+ipywidgets
+matplotlib
 numpy
 pillow
-webdataset
-trimesh
-scipy
-datasets
+plotly
 pycolmap
-plotly
+scipy
+trimesh
+webdataset
setup.py CHANGED
@@ -6,7 +6,7 @@ with open('requirements.txt') as f:
     required = f.read().splitlines()
 
 setup(name='hoho',
-      version='0.0.2',
+      version='0.0.3',
       description='Tools and utilites for the HoHo Dataset and S23DR Competition',
       url='usm3d.github.io',
       author='Jack Langerman, Dmytro Mishkin, S23DR Orgainizing Team',