2up1down committed on
Commit
d479d7b
1 Parent(s): f6b0eeb

Upload 3 files


with cv fallback

Files changed (3)
  1. app.py +121 -34
  2. classical.py +159 -0
  3. keypoints-best.pt +2 -2
app.py CHANGED
@@ -7,7 +7,9 @@ from google.cloud import vision
 _api_key = os.environ["API_KEY"]
 _project_id = os.environ["PROJECT_ID"]
 client = vision.ImageAnnotatorClient(client_options={"quota_project_id": _project_id, "api_key": _api_key})
+# client = vision.ImageAnnotatorClient()
 
+AngTol = 10
 import math
 from scipy.spatial import KDTree
 import io
@@ -15,7 +17,9 @@ from time import time
 from PIL import Image, ImageDraw, ImageFilter
 import numpy as np
 import cv2
-
+import sys
+sys.path.insert(0, ".")
+import classical
 from typing import Union
 
 modelPh = r'corners-best.pt'
@@ -161,7 +165,7 @@ def median_point_of_bounding_box(x1, y1, x2, y2, x3, y3, x4, y4):
 
 def to_numeric(text:str):
     try:
-        return float(text)
+        return float(text.replace(",","."))
     except:
         pass
     return None
@@ -189,7 +193,8 @@ def result_as_validvalue(contents:list[dict])->tuple[list[dict], list[str]]:
 
 distance = lambda a,b : np.sqrt(np.square(np.array(a)-np.array(b)).sum())
 
-def determine_ocr_neighbors(center, valid:list[dict])->tuple[ list, float ]:
+def determine_ocr_neighbors(keypoints, valid:list[dict], nearestIx)->tuple[ list, float ]:
+    center = np.array(keypoints["center"])
     def cosangle(a,b):
         na = np.linalg.norm(a)
         nb = np.linalg.norm(b)
@@ -199,6 +204,8 @@ def determine_ocr_neighbors(center, valid:list[dict])->tuple[ list, float ]:
     # compute angles between values
     values = [valid[0]]
     values[0]["dang"] = 0
+
+    values[0]["ds"] = distance(center, values[0]["mid"])
     rates = []
     angS = 0
     for v in valid[1:]:
@@ -207,26 +214,44 @@ def determine_ocr_neighbors(center, valid:list[dict])->tuple[ list, float ]:
         a = np.array(values[-1]["mid"]) - center
         b = np.array(v["mid"]) - center
         ang,_ = cosangle(a,b)
-        # if _ <0:
-        #     Warning(f"skipping {u['value']} rot:{_}")
-        #     continue
+        u["rot"] = _
         angS += ang
         u["dang"] = ang
+        # u["ddir"] = rot # counter clockwise?
        u["dvda"] = u["dv"] / ang
        rates.append(u["dvda"])
+        #
+        # u["ds"] = distance(values[-1]["mid"], u["mid"])
+        u["ds"] = distance(center, u["mid"])
         values.append(u)
 
+    if nearestIx[0]==0:
+        rates.insert(0, rates[0])
+
     rates = np.array(rates)
+    # filter outlier rate
+    # ix = np.bitwise_and(rates> np.quantile(rates, 0.05) , rates<np.quantile(rates, 0.95))
+    # rate = rates[ix].mean()
     meanAng = angS/len(valid)
     if len(rates)>=6:
         ix = np.bitwise_and(rates> np.quantile(rates, 0.05) , rates<np.quantile(rates, 0.95))
         if not np.all(~ix):
             rates = rates[ix]
-            rate = rates.mean()
+        rate = rates.mean()
+    elif len(nearestIx)==2:
+        n = [nearestIx[0], nearestIx[1]]
+        rank = np.hstack([np.arange(0,n[0]+1)[::-1], np.arange(n[1],len(rates))-n[1]]).astype(float)
+        weights = np.exp(-2*rank)
+        weights /= weights.sum()
+        rate = np.average(rates, weights=weights)
+    elif len(nearestIx)==1:
+        rate = rates[nearestIx[0]]
+
     rate, meanAng
     return values, rate
 
 
+
 def vec_angle(v1, v2)->tuple[float, bool]:
     vector1 = v1/np.linalg.norm(v1)
     vector2 = v2/np.linalg.norm(v2)
@@ -236,38 +261,83 @@ def vec_angle(v1, v2)->tuple[float, bool]:
 
 def angles_from_tip(keypoints, values, nearestIx):
     center = keypoints["center"]
-    tip = keypoints["tip"] - center
-    v = values[nearestIx[0]]
-    a = v["mid"] - center
-    ang = vec_angle(a,tip)
-    cumsum = 0
-    for i in range(nearestIx[0],-1,-1):
-        values[i]["before"] = abs(ang)+cumsum
-        cumsum += values[i]["dang"]
-
-    v = values[nearestIx[1]]
-    a = v["mid"] - center
-    ang = vec_angle(a,tip)
-    values[nearestIx[1]]["dang"] = 0
-    cumsum = 0
-    for i in range(nearestIx[1], len(values)):
-        cumsum -= values[i]["dang"]
-        values[i]["before"] = -abs(ang)+cumsum
+    tip = keypoints["tip"] - center
+    N = len(nearestIx)
+    start = nearestIx[0]
+    if N==2 or (N==1 and nearestIx[0]==len(values)-1):
+        v = values[start]
+        a = v["mid"] - center
+        ang = vec_angle(a,tip)
+        cumsum = 0
+        for i in range(start,-1,-1):
+            values[i]["before"] = abs(ang)+cumsum
+            cumsum += values[i]["dang"]
+
+    if N==2 or (N==1 and nearestIx[0]==0):
+        if N==1:
+            start = nearestIx[0]
+        else:
+            start = nearestIx[1]
+
+        v = values[start]
+        a = v["mid"] - center
+        ang = vec_angle(a,tip)
+
+        values[start]["dang"] = 0
+        cumsum = 0
+        for i in range(start, len(values)):
+            cumsum -= values[i]["dang"]
+            values[i]["before"] = -abs(ang)+cumsum
 
     return values
 
 
+
 def sort_clockwise_with_start(coordinates, x_center, y_center, starting_index):
     angles = [math.atan2(y - y_center, x - x_center) for x, y in coordinates]
     sorted_indices = sorted(range(len(angles)), key=lambda i: (angles[i] - angles[starting_index] + 2 * math.pi) % (2 * math.pi))
     return sorted_indices, angles
 
 def remove_nonrange_value(valid):
-    meanArea = np.mean([e["apchar"] for e in valid])
+    # meanArea = np.mean([e["apchar"] for e in valid])
+    meanArea = np.mean([e["apchar"] for e in valid if "apchar" in e])
     cutoff = 0.5
-    valid = list(filter(lambda e: abs(e["apchar"]-meanArea)/meanArea < cutoff, valid))
+    # valid = list(filter(lambda e: abs(e["apchar"]-meanArea)/meanArea < cutoff, valid))
+    valid = list(filter(lambda e: True if e["text"]=="tip" else abs(e["apchar"]-meanArea)/meanArea < cutoff, valid))
     return valid
 
+
+
+def check_tip(img, keypoints):
+
+    lines = classical.get_needle_line(np.array(img))
+    if lines is None or len(lines)==0:
+        return False
+    # lines = lines.squeeze()
+    if lines.ndim==1:
+        lines = np.expand_dims(lines,axis=0)
+    # nearest line to center,
+    dist2 = lambda a,b: (a[0]-b[0])**2 + (a[1]-b[1])**2
+    center = keypoints["center"]
+    ds = [ min(dist2(center, e[:2]), dist2(center, e[2:])) for e in lines] # closest line to center
+    ix= np.argsort(ds)
+    ix, ds
+    l = lines[ix][0]
+    a = np.array([l[0]-l[2], l[1]-l[3]])
+    a
+    tip = keypoints["tip"] - center
+    ang = vec_angle(a, tip)
+    if abs(ang) > AngTol:
+        # furthest point from center is tip
+        if dist2(l[:2],center) > dist2(l[2:],center):
+            keypoints["tip"] = l[:2]
+        else:
+            keypoints["tip"] = l[2:]
+        print("new point ", keypoints["tip"])
+        return True
+    return False
+
+
 def get_needle_value(img, keypoints):
 
     tic2 = time()
@@ -282,22 +352,35 @@ def get_needle_value(img, keypoints):
     valid.append({"text":"tip", "mid":keypoints["tip"]})
     ix,an = sort_clockwise_with_start([e["mid"] for e in valid],*keypoints["center"], 0)
     valid = [valid[i] for i in ix]
-    assert valid[-1]["text"]!="tip" and valid[0]["text"]!="tip", "failed to properly detect tip"
+    # assert valid[-1]["text"]!="tip" and valid[0]["text"]!="tip", "failed to properly detect tip"
+    valid = remove_nonrange_value(valid)
+
+    i=0
     nearestIx=[]
     for i,v in enumerate(valid):
         if "tip"==v["text"]:
             nearestIx = [i-1,i]
             valid.pop(i)
             break
+    if len(valid)==nearestIx[1] or -1==nearestIx[0]:
+        # nearestIx[1] = 0 # tip is out of bounds
+        tip = keypoints["tip"] - keypoints["center"]
+        b = valid[0]["mid"] - keypoints["center"]
+        a = valid[-1]["mid"] - keypoints["center"]
+        if abs(vec_angle(tip,a)) < abs(vec_angle(tip, b)):
+            nearestIx = [len(valid)-1]
+        else:
+            nearestIx = [0]
+    # nearest to
     nearestIx = np.array(nearestIx)
-    valid = remove_nonrange_value(valid)
 
     center = np.array(keypoints["center"])
-    values, rate = determine_ocr_neighbors(center, valid)
+    values, rate = determine_ocr_neighbors(keypoints, valid, nearestIx)
     assert len(values)>=2, "failed to find at least 2 OCR values"
 
     # import pandas as pd
     # print(pd.DataFrame.from_dict(values))
+    # print(nearestIx)
 
     # tree = KDTree([v["mid"] for v in values])
     # # find bounding ocr values of tip
@@ -329,7 +412,7 @@ def get_needle_value(img, keypoints):
     # print(f"total took: {toc-tic:.1g}")
     tipValues = np.array(tipValues)
 
-    debug(img, contents, keypoints)
+    # debug(img, contents, keypoints)
 
     startValue= float(values[0]["value"])
     tipvalue= round(float(tipValues[nearestIx].mean()),2)
@@ -365,30 +448,34 @@ def debug(img, contents, keypoints):
 
 
 def predict(img, detect_gauge_first):
+    KPs = []
     if detect_gauge_first:
         model0 = get_load_PhModel()
         results = model0.predict(img)
-        phimgs,_ = get_corners(results, img)
+        phimgs,KPs = get_corners(results, img)
         if len(phimgs)==0:
             raise gr.Error("no gauge found")
     else:
         phimgs = [img.copy()]
 
     payloads = []
-    for phimg in phimgs:
+    for i,phimg in enumerate(phimgs):
         model = get_load_KpModel()
         phimg = preprocessImg(phimg)
         results = model.predict(phimg)
         keypoints = get_keypoints(results)
-
         angle2tip, totalAngle = calculate_sweep_angles(keypoints)
-
+        angReplaced = check_tip(phimg, keypoints)
        phimg = phimg.filter(ImageFilter.UnsharpMask(radius=3))
         payload = get_needle_value(phimg, keypoints)
         payload["angleToTip"] = round(float(angle2tip),2)
+        if angReplaced:
+            payload["angleToTip"] = None
        payload["totalAngle"] = round(float(totalAngle),2)
         for k,v in payload.items():
             print(k, type(v),v)
+        if len(KPs)>i:
+            payload["bbox"] = {k:v.astype(int).tolist() for k,v in KPs[i].items()}
         payloads.append(payload)
 
     return payloads
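Note on the new rate estimate in determine_ocr_neighbors: when fewer than six value-per-angle rates are available and the tip is bracketed by two OCR readings, the change averages the rates with exp(-2*rank) weights, where rank grows with distance from the readings adjacent to the tip, so nearby rates dominate. A minimal standalone sketch with made-up inputs (rates and nearestIx below are illustrative, not data from this commit):

import numpy as np

rates = np.array([10.0, 10.2, 9.8, 10.1])   # hypothetical value/angle rates between consecutive readings
nearestIx = [1, 2]                           # hypothetical: tip sits between readings 1 and 2
n = [nearestIx[0], nearestIx[1]]
rank = np.hstack([np.arange(0, n[0] + 1)[::-1],
                  np.arange(n[1], len(rates)) - n[1]]).astype(float)   # [1, 0, 0, 1]
weights = np.exp(-2 * rank)                  # rates next to the tip get weight 1.0
weights /= weights.sum()
rate = np.average(rates, weights=weights)    # ~10.0, dominated by the two rates nearest the tip
print(rank, weights.round(3), round(rate, 3))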
classical.py ADDED
@@ -0,0 +1,159 @@
+import cv2
+import numpy as np
+linelen = lambda l: np.sqrt((l[0]-l[2])**2 + (l[1]-l[3])**2)
+import math
+
+class HoughBundler:
+    def __init__(self,min_distance=5,min_angle=2):
+        self.min_distance = min_distance
+        self.min_angle = min_angle
+
+    def get_orientation(self, line):
+        orientation = math.atan2(abs((line[3] - line[1])), abs((line[2] - line[0])))
+        return math.degrees(orientation)
+
+    def check_is_line_different(self, line_1, groups, min_distance_to_merge, min_angle_to_merge):
+        for group in groups:
+            for line_2 in group:
+                if self.get_distance(line_2, line_1) < min_distance_to_merge:
+                    orientation_1 = self.get_orientation(line_1)
+                    orientation_2 = self.get_orientation(line_2)
+                    if abs(orientation_1 - orientation_2) < min_angle_to_merge:
+                        group.append(line_1)
+                        return False
+        return True
+
+    def distance_point_to_line(self, point, line):
+        px, py = point
+        x1, y1, x2, y2 = line
+
+        def line_magnitude(x1, y1, x2, y2):
+            line_magnitude = math.sqrt(math.pow((x2 - x1), 2) + math.pow((y2 - y1), 2))
+            return line_magnitude
+
+        lmag = line_magnitude(x1, y1, x2, y2)
+        if lmag < 0.00000001:
+            distance_point_to_line = 9999
+            return distance_point_to_line
+
+        u1 = (((px - x1) * (x2 - x1)) + ((py - y1) * (y2 - y1)))
+        u = u1 / (lmag * lmag)
+
+        if (u < 0.00001) or (u > 1):
+            #// closest point does not fall within the line segment, take the shorter distance
+            #// to an endpoint
+            ix = line_magnitude(px, py, x1, y1)
+            iy = line_magnitude(px, py, x2, y2)
+            if ix > iy:
+                distance_point_to_line = iy
+            else:
+                distance_point_to_line = ix
+        else:
+            # Intersecting point is on the line, use the formula
+            ix = x1 + u * (x2 - x1)
+            iy = y1 + u * (y2 - y1)
+            distance_point_to_line = line_magnitude(px, py, ix, iy)
+
+        return distance_point_to_line
+
+    def get_distance(self, a_line, b_line):
+        dist1 = self.distance_point_to_line(a_line[:2], b_line)
+        dist2 = self.distance_point_to_line(a_line[2:], b_line)
+        dist3 = self.distance_point_to_line(b_line[:2], a_line)
+        dist4 = self.distance_point_to_line(b_line[2:], a_line)
+
+        return min(dist1, dist2, dist3, dist4)
+
+    def merge_lines_into_groups(self, lines):
+        groups = [] # all lines groups are here
+        # first line will create new group every time
+        groups.append([lines[0]])
+        # if line is different from existing groups, create a new group
+        for line_new in lines[1:]:
+            if self.check_is_line_different(line_new, groups, self.min_distance, self.min_angle):
+                groups.append([line_new])
+
+        return groups
+
+    def merge_line_segments(self, lines):
+        orientation = self.get_orientation(lines[0])
+
+        if(len(lines) == 1):
+            return np.block([[lines[0][:2], lines[0][2:]]])
+
+        points = []
+        for line in lines:
+            points.append(line[:2])
+            points.append(line[2:])
+        if 45 < orientation <= 90:
+            #sort by y
+            points = sorted(points, key=lambda point: point[1])
+        else:
+            #sort by x
+            points = sorted(points, key=lambda point: point[0])
+
+        p0 = np.array(points[:2]).mean(axis=0)
+        p1 = np.array(points[-2:]).mean(axis=0)
+        return np.block([[p0,p1]]).astype(int)
+        # return np.block([[points[0],points[-1]]])
+
+    def process_lines(self, lines):
+        lines_horizontal = []
+        lines_vertical = []
+
+        for line_i in [l[0] for l in lines]:
+            orientation = self.get_orientation(line_i)
+            # if vertical
+            if 45 < orientation <= 90:
+                lines_vertical.append(line_i)
+            else:
+                lines_horizontal.append(line_i)
+
+        lines_vertical = sorted(lines_vertical , key=lambda line: line[1])
+        lines_horizontal = sorted(lines_horizontal , key=lambda line: line[0])
+        merged_lines_all = []
+
+        # for each cluster in vertical and horizontal lines leave only one line
+        for i in [lines_horizontal, lines_vertical]:
+            if len(i) > 0:
+                groups = self.merge_lines_into_groups(i)
+                merged_lines = []
+                for group in groups:
+                    merged_lines.append(self.merge_line_segments(group))
+                merged_lines_all.extend(merged_lines)
+
+        return np.asarray(merged_lines_all)
+
+
+def get_needle_line(im)->list:
+    # fn = "./0.png"
+    # # load grayscale image
+    # im = cv2.imread(fn)
+    gray_im = cv2.cvtColor(im, cv2.COLOR_RGB2GRAY)
+    sz = 640
+    h,w,_ = im.shape
+    _hf = h/sz
+    _wf = w/sz
+    gray_im = cv2.resize(gray_im, (sz,sz))
+
+    blur = cv2.GaussianBlur(gray_im, (0,0), 5)
+
+    # edges = cv2.Canny(blur, 50, 100)
+    edges = cv2.Canny(blur, 20, 60)
+
+    rectKernel = cv2.getStructuringElement(cv2.MORPH_RECT, (11, 19))
+    hat = cv2.morphologyEx(edges, cv2.MORPH_BLACKHAT, rectKernel)
+    # hat = cv2.morphologyEx(edges, cv2.MORPH_TOPHAT, rectKernel)
+    # k = cv2.getStructuringElement(cv2.MORPH_ERODE, (13,13))
+    k = None
+    hat = cv2.erode(hat,k, iterations=2)
+
+    minLineLength = 60
+    maxLineGap = 10
+    plines = cv2.HoughLinesP(image=edges, rho=3, theta=np.pi / 180, threshold=10,minLineLength=minLineLength, maxLineGap=maxLineGap) # rho is set to 3 to detect more lines, easier to get more then filter them out later
+    if len(plines)<=1:
+        return plines.squeeze() * [_wf, _hf, _wf, _hf]
+
+    bundler = HoughBundler(min_distance=120,min_angle=5)
+    clines = bundler.process_lines(plines)
+    return clines.squeeze() * [_wf, _hf, _wf, _hf]
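The "cv fallback" in the commit message refers to check_tip in app.py, which uses this helper to replace the model-predicted needle tip when the detected Hough line disagrees with it by more than AngTol. A minimal usage sketch of classical.get_needle_line on its own (the file name and drawing step are illustrative, not part of the commit); it returns candidate needle-line segments as [x1, y1, x2, y2] scaled back to the original image size:

import cv2
import numpy as np
import classical

img = cv2.imread("gauge.jpg")                 # hypothetical test image (BGR)
rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)    # get_needle_line converts RGB -> gray internally
lines = classical.get_needle_line(rgb)
if lines is not None and len(lines) > 0:      # check_tip in app.py guards the same way
    for x1, y1, x2, y2 in np.atleast_2d(lines).astype(int):
        cv2.line(img, (x1, y1), (x2, y2), (0, 0, 255), 2)
cv2.imwrite("needle_lines.png", img)          # inspect the detected segments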
keypoints-best.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d583485a30cd58e986231e7a02b84ce86e117d7eb48d4b5a901e4bada55319ac
-size 6408962
+oid sha256:8b9b6ac6b3e5dd73a4e00af18365b13e0607cb8e4b00cfd9189e005b86103124
+size 6409410