lowercaseonly
committed on
Commit
•
62030e0
1
Parent(s):
6d0530f
Added Loading/Statistics/Preprocessing Scripts and Class Info Files
Browse files- classes.json +80 -0
- classes_color.json +73 -0
- classes_discontinuous.json +19 -0
- classes_ports.json +29 -0
- consistency.py +269 -0
- loader.py +227 -0
- properties.json +126 -0
- segmentation.py +348 -0
- utils.py +132 -0
classes.json
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"__background__": 0,
|
3 |
+
|
4 |
+
"text": 1,
|
5 |
+
"junction": 2,
|
6 |
+
"crossover": 3,
|
7 |
+
"terminal": 4,
|
8 |
+
"gnd": 5,
|
9 |
+
"vss": 6,
|
10 |
+
|
11 |
+
"voltage.dc": 7,
|
12 |
+
"voltage.ac": 8,
|
13 |
+
"voltage.battery": 9,
|
14 |
+
|
15 |
+
"resistor": 10,
|
16 |
+
"resistor.adjustable": 11,
|
17 |
+
"resistor.photo": 12,
|
18 |
+
|
19 |
+
"capacitor.unpolarized": 13,
|
20 |
+
"capacitor.polarized": 14,
|
21 |
+
"capacitor.adjustable": 15,
|
22 |
+
|
23 |
+
"inductor": 16,
|
24 |
+
"inductor.ferrite": 17,
|
25 |
+
"inductor.coupled": 18,
|
26 |
+
"transformer": 19,
|
27 |
+
|
28 |
+
"diode": 20,
|
29 |
+
"diode.light_emitting": 21,
|
30 |
+
"diode.thyrector": 22,
|
31 |
+
"diode.zener": 23,
|
32 |
+
|
33 |
+
"diac": 24,
|
34 |
+
"triac": 25,
|
35 |
+
"thyristor": 26,
|
36 |
+
"varistor": 27,
|
37 |
+
|
38 |
+
"transistor.bjt": 28,
|
39 |
+
"transistor.fet": 29,
|
40 |
+
"transistor.photo": 30,
|
41 |
+
|
42 |
+
"operational_amplifier": 31,
|
43 |
+
"operational_amplifier.schmitt_trigger": 32,
|
44 |
+
"optocoupler": 33,
|
45 |
+
|
46 |
+
"integrated_circuit": 34,
|
47 |
+
"integrated_circuit.ne555": 35,
|
48 |
+
"integrated_circuit.voltage_regulator": 36,
|
49 |
+
|
50 |
+
"xor": 37,
|
51 |
+
"and": 38,
|
52 |
+
"or": 39,
|
53 |
+
"not": 40,
|
54 |
+
"nand": 41,
|
55 |
+
"nor": 42,
|
56 |
+
|
57 |
+
"probe": 43,
|
58 |
+
"probe.current": 44,
|
59 |
+
"probe.voltage": 45,
|
60 |
+
|
61 |
+
"switch": 46,
|
62 |
+
"relay": 47,
|
63 |
+
|
64 |
+
"socket": 48,
|
65 |
+
"fuse": 49,
|
66 |
+
|
67 |
+
"speaker": 50,
|
68 |
+
"motor": 51,
|
69 |
+
"lamp": 52,
|
70 |
+
"microphone": 53,
|
71 |
+
"antenna": 54,
|
72 |
+
"crystal": 55,
|
73 |
+
|
74 |
+
"mechanical": 56,
|
75 |
+
"magnetic": 57,
|
76 |
+
"optical": 58,
|
77 |
+
"block": 59,
|
78 |
+
|
79 |
+
"unknown": 60
|
80 |
+
}
|
classes_color.json
ADDED
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"__background__": [0,0,0],
|
3 |
+
|
4 |
+
"text": [255,0,0],
|
5 |
+
"junction": [0,255,0],
|
6 |
+
"crossover": [0,0,255],
|
7 |
+
"terminal": [255,255,0],
|
8 |
+
"gnd": [0,255,255],
|
9 |
+
"vss": [255,0,255],
|
10 |
+
|
11 |
+
"voltage.dc": [192,192,192],
|
12 |
+
"voltage.ac": [128,128,128],
|
13 |
+
"voltage.battery": [128,0,0],
|
14 |
+
|
15 |
+
"resistor": [128,128,0],
|
16 |
+
"resistor.adjustable": [0,128,0],
|
17 |
+
"resistor.photo": [128,0,128],
|
18 |
+
|
19 |
+
"capacitor.unpolarized": [0,128,128],
|
20 |
+
"capacitor.polarized": [0,0,128],
|
21 |
+
"capacitor.adjustable": [200,200,200],
|
22 |
+
|
23 |
+
"inductor": [165,42,42],
|
24 |
+
"inductor.ferrite": [100,100,100],
|
25 |
+
"transformer": [178,34,34],
|
26 |
+
|
27 |
+
"diode": [255,127,80],
|
28 |
+
"diode.light_emitting": [240,128,128],
|
29 |
+
"diode.thyrector": [233,150,122],
|
30 |
+
"diode.zener": [255,160,122],
|
31 |
+
|
32 |
+
"diac": [255,140,0],
|
33 |
+
"triac": [255,165,0],
|
34 |
+
"thyristor": [184,134,11],
|
35 |
+
"varistor": [154,205,50],
|
36 |
+
|
37 |
+
"transistor.bjt": [124,252,0],
|
38 |
+
"transistor.fet": [143,188,143],
|
39 |
+
"transistor.photo": [0,255,127],
|
40 |
+
|
41 |
+
"operational_amplifier": [46,139,87],
|
42 |
+
"operational_amplifier.schmitt_trigger": [47,79,79],
|
43 |
+
"optocoupler": [176,224,230],
|
44 |
+
|
45 |
+
"integrated_circuit": [70,130,180],
|
46 |
+
"integrated_circuit.ne555": [100,149,237],
|
47 |
+
"integrated_circuit.voltage_regulator": [0,191,255],
|
48 |
+
|
49 |
+
"xor": [30,144,255],
|
50 |
+
"and": [135,206,235],
|
51 |
+
"or": [25,25,112],
|
52 |
+
"not": [138,43,226],
|
53 |
+
"nand": [75,0,130],
|
54 |
+
"nor": [147,112,219],
|
55 |
+
|
56 |
+
"probe.current": [139,0,139],
|
57 |
+
"probe.voltage": [218,112,214],
|
58 |
+
|
59 |
+
"switch": [219,112,147],
|
60 |
+
"relay": [250,235,215],
|
61 |
+
|
62 |
+
"socket": [210,105,30],
|
63 |
+
"fuse": [244,164,96],
|
64 |
+
|
65 |
+
"speaker": [188,143,143],
|
66 |
+
"motor": [255,218,185],
|
67 |
+
"lamp": [255,240,245],
|
68 |
+
"microphone": [255,239,213],
|
69 |
+
"antenna": [112,128,144],
|
70 |
+
"crystal": [230,230,250],
|
71 |
+
|
72 |
+
"unknown": [240,255,240]
|
73 |
+
}
|
classes_discontinuous.json
ADDED
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[
|
2 |
+
"capacitor.unpolarized",
|
3 |
+
"text",
|
4 |
+
"gnd",
|
5 |
+
"vss",
|
6 |
+
"voltage.battery",
|
7 |
+
"resistor.photo",
|
8 |
+
"capacitor.unpolarized",
|
9 |
+
"capacitor.polarized",
|
10 |
+
"capacitor.adjustable",
|
11 |
+
"inductor.ferrite",
|
12 |
+
"transformer",
|
13 |
+
"diode.light_emitting",
|
14 |
+
"thyristor",
|
15 |
+
"transistor.photo",
|
16 |
+
"switch",
|
17 |
+
"relay",
|
18 |
+
"crystal"
|
19 |
+
]
|
classes_ports.json
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"text": [],
|
3 |
+
"voltage.dc": [{"name": "negative", "position": [0.5, 1]}, {"name": "positive", "position": [0.5, 0]}],
|
4 |
+
"voltage.ac": [{"name": "connector", "position": [0.5, 1]}, {"name": "connector", "position": [0.5, 0]}],
|
5 |
+
"voltage.battery": [{"name": "negative", "position": [0.5, 1]}, {"name": "positive", "position": [0.5, 0]}],
|
6 |
+
|
7 |
+
"resistor": [{"name": "connector", "position": [0, 0.5]}, {"name": "connector", "position": [1, 0.5]}],
|
8 |
+
|
9 |
+
"capacitor.unpolarized": [{"name": "connector", "position": [0, 0.5]}, {"name": "connector", "position": [1, 0.5]}],
|
10 |
+
"capacitor.polarized": [{"name": "negative", "position": [1, 0.5]}, {"name": "positive", "position": [0, 0.5]}],
|
11 |
+
|
12 |
+
"inductor": [{"name": "connector", "position": [0, 0.5]}, {"name": "connector", "position": [1, 0.5]}],
|
13 |
+
|
14 |
+
"diode":[{"name": "cathode", "position": [1, 0.5]}, {"name": "anode", "position": [0, 0.5]}],
|
15 |
+
"diode.light_emitting": [{"name": "cathode", "position": [1, 0.5]}, {"name": "anode", "position": [0, 0.5]}],
|
16 |
+
"diode.thyrector": [{"name": "cathode", "position": [1, 0.5]}, {"name": "anode", "position": [0, 0.5]}],
|
17 |
+
"diode.zener":[{"name": "cathode", "position": [1, 0.5]}, {"name": "anode", "position": [0, 0.5]}],
|
18 |
+
|
19 |
+
"transistor.bjt":[{"name": "base", "position": [0, 0.5]}, {"name": "collector", "position": [0.7, 1]}, {"name": "emitter", "position": [0.7, 0]}],
|
20 |
+
"transistor.fet":[{"name": "gate", "position": [0, 0.5]}, {"name": "source", "position": [0.5, 1]}, {"name": "drain", "position": [0.5, 0]}],
|
21 |
+
"transistor.photo":[{"name": "emitter", "position": [0.5, 1]}, {"name": "collector", "position": [0.5, 0]}],
|
22 |
+
|
23 |
+
"xor": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.25]}, {"name": "input", "position": [0, 0.75]}],
|
24 |
+
"and": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.25]}, {"name": "input", "position": [0, 0.75]}],
|
25 |
+
"or": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.25]}, {"name": "input", "position": [0, 0.75]}],
|
26 |
+
"not": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.5]}],
|
27 |
+
"nand": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.25]}, {"name": "input", "position": [0, 0.75]}],
|
28 |
+
"nor": [{"name": "output", "position": [1, 0.5]}, {"name": "input", "position": [0, 0.25]}, {"name": "input", "position": [0, 0.75]}]
|
29 |
+
}
|
consistency.py
ADDED
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""consistency.py: Integrity Check, Correction by Mapping for Annotation Class, Metadata Cleaning, Statistics"""
|
2 |
+
|
3 |
+
# System Imports
|
4 |
+
import os
|
5 |
+
import sys
|
6 |
+
import re
|
7 |
+
|
8 |
+
# Project Imports
|
9 |
+
from loader import load_classes, load_properties, read_dataset, write_dataset, file_name
|
10 |
+
|
11 |
+
# Third-Party Imports
|
12 |
+
import matplotlib.pyplot as plt
|
13 |
+
import numpy as np
|
14 |
+
|
15 |
+
__author__ = "Johannes Bayer, Shabi Haider"
|
16 |
+
__copyright__ = "Copyright 2021-2023, DFKI"
|
17 |
+
__license__ = "CC"
|
18 |
+
__version__ = "0.0.2"
|
19 |
+
__email__ = "johannes.bayer@dfki.de"
|
20 |
+
__status__ = "Prototype"
|
21 |
+
|
22 |
+
|
23 |
+
|
24 |
+
|
25 |
+
# Edit this lookup table for relabeling purposes.
# Maps faulty/legacy class labels (keys) to their corrected names (values);
# intended to be passed as the `recover` argument of consistency().
# NOTE(review): read_check_write() currently calls consistency() WITHOUT this
# table, so no recovery mapping is applied — confirm whether it should be wired in.
MAPPING_LOOKUP = {
    "integrated_cricuit": "integrated_circuit",
    "zener": "diode.zener"
}
|
30 |
+
|
31 |
+
|
32 |
+
def consistency(db: list, classes: dict, recover: dict = None) -> tuple:
    """Checks Whether Annotation Classes are in provided Classes Dict and Attempts Recovery

    Args:
        db: List of sample dicts, each holding "bboxes", "polygons" and "points" lists.
        classes: Map of valid class names (only the keys are used).
        recover: Optional map of faulty class names to corrected names;
                 matching annotations are relabeled in place.

    Returns:
        Tuple of counters: (total, ok, mapped, faulty, rotation, text).
    """

    # A mutable default argument ({}) is shared across calls; use None-sentinel instead
    recover = {} if recover is None else recover
    total, ok, mapped, faulty, rotation, text = 0, 0, 0, 0, 0, 0

    for sample in db:
        for bbox in sample["bboxes"] + sample["polygons"] + sample["points"]:
            total += 1

            if bbox["class"] in classes:
                ok += 1

            if bbox["class"] in recover:
                bbox["class"] = recover[bbox["class"]]  # relabel in place
                mapped += 1

            if bbox["class"] not in classes and bbox["class"] not in recover:
                print(f"Can't recover faulty label in {file_name(sample)}: {bbox['class']}")
                faulty += 1

            # .get() instead of [] — "points" entries produced by read_labelme
            # carry no "rotation"/"text" keys and would raise KeyError otherwise
            if bbox.get("rotation") is not None:
                rotation += 1

            if bbox["class"] == "text" and bbox.get("text") is None:
                print(f"Missing Text in {file_name(sample)} -> {bbox['xmin']}, {bbox['ymin']}")

            if bbox.get("text") is not None:
                if bbox["text"].strip() != bbox["text"]:
                    # Message typo fixed: "leading of trailing" -> "leading or trailing"
                    print(f"Removing leading or trailing spaces from: {bbox['text']}")
                    bbox["text"] = bbox["text"].strip()

                if bbox["class"] != "text":
                    print(f"Text string outside Text BB in {file_name(sample)}: {bbox['class']}: {bbox['text']}")

                text += 1

    return total, ok, mapped, faulty, rotation, text
|
69 |
+
|
70 |
+
|
71 |
+
def consistency_circuit(db: list, classes: dict) -> None:
    """Checks whether the Amount of Annotation per Class is Consistent Among the Samples of a Circuits"""

    print("BBox Inconsistency Report:")

    # Per-sample class histogram, keyed by (circuit, drawing, picture)
    counts = {}
    for sample in db:
        key = (sample["circuit"], sample["drawing"], sample["picture"])
        counts[key] = {cls: sum(1 for bbox in sample["bboxes"] if bbox["class"] == cls)
                       for cls in classes}

    # A circuit is inconsistent for a class when its samples disagree on the count
    for circuit in {sample["circuit"] for sample in db}:
        circuit_keys = [key for key in counts if key[0] == circuit]
        for cls in classes:
            check = [counts[key][cls] for key in circuit_keys]
            if len(set(check)) > 1:
                print(f"  Circuit {circuit}: {cls}: {check}")
|
85 |
+
|
86 |
+
|
87 |
+
def circuit_annotations(db: list, classes: dict) -> None:
    """Plots the Annotations per Sample and Class

    Draws one line for the overall bounding box count per sample ("all") plus
    one line per class, on a log-scaled y axis. Blocks on plt.show().
    """

    fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 6))
    axes.plot([len(sample["bboxes"]) for sample in db], label="all")

    # One curve per class: bounding box count of that class per sample
    for cls in classes:
        axes.plot([len([annotation for annotation in sample["bboxes"]
                        if annotation["class"] == cls]) for sample in db], label=cls)

    plt.minorticks_on()
    # Major ticks every 8 samples, minor ticks shifted by 4 between them
    axes.set_xticks(np.arange(0, len(db)+1, step=8))
    axes.set_xticks(np.arange(0, len(db), step=8)+4, minor=True)
    axes.grid(axis='x', linestyle='solid')
    axes.grid(axis='x', linestyle='dotted', alpha=0.7, which="minor")

    plt.title("Class Distribution in Samples")
    plt.xlabel("Image Sample")
    plt.ylabel("BB Annotation Count")

    plt.yscale('log')
    # Legend placed outside the axes to the right
    plt.legend(ncol=2, loc='center left', bbox_to_anchor=(1.0, 0.5))
    plt.show()
|
110 |
+
|
111 |
+
|
112 |
+
def class_distribution(db: list, classes: dict) -> None:
    """Plots the Class Distribution over the Dataset

    Counts every annotation (bboxes, polygons and points) per class across all
    samples and shows them as a log-scaled bar chart with in-bar count labels.
    Blocks on plt.show().
    """

    class_nbrs = np.arange(len(classes))
    # Count across all three annotation kinds, per class
    class_counts = [sum([len([bbox for bbox in sample["bboxes"] + sample["polygons"] + sample["points"]
                              if bbox["class"] == cls])
                         for sample in db]) for cls in classes]

    bars = plt.bar(class_nbrs, class_counts)
    plt.xticks(class_nbrs, labels=classes, rotation=90)
    plt.yscale('log')
    plt.title("Class Distribution")
    plt.xlabel("Class")
    plt.ylabel("BB Annotation Count")

    # Write each bar's count vertically just below its top edge
    for rect in bars:
        height = rect.get_height()
        plt.annotate('{}'.format(height),
                     xy=(rect.get_x() + rect.get_width() / 2, height),
                     xytext=(0, -3), textcoords="offset points", ha='center', va='top', rotation=90)

    plt.show()
|
134 |
+
|
135 |
+
|
136 |
+
def class_sizes(db: list, classes: dict) -> None:
    """Plots a Box Plot of the Bounding Box Sizes per Class

    The size of a bounding box is the length of its larger side
    (max of width and height). Blocks on plt.show().
    """

    plt.title('BB Sizes')
    plt.boxplot([[max(bbox["xmax"]-bbox["xmin"], bbox["ymax"]-bbox["ymin"])
                  for sample in db for bbox in sample["bboxes"] if bbox["class"] == cls]
                 for cls in classes])
    # boxplot positions are 1-based, hence the +1
    class_nbrs = np.arange(len(classes))+1
    plt.xticks(class_nbrs, labels=classes, rotation=90)
    plt.show()
|
146 |
+
|
147 |
+
|
148 |
+
def image_count(drafter: int = None, segmentation: bool = False) -> int:
    """Counts the Raw Images or Segmentation Maps in the Dataset

    Walks the current working directory; counts files under folders whose path
    contains "segmentation" (or "annotation"), optionally restricted to one drafter.
    """

    target = "segmentation" if segmentation else "annotation"
    count = 0

    for root, _, files in os.walk("."):
        if target in root and (not drafter or f"drafter_{drafter}{os.sep}" in root):
            count += len(files)

    return count
|
155 |
+
|
156 |
+
|
157 |
+
def read_check_write(classes: dict, drafter: int = None, segmentation: bool = False):
    """Reads Annotations, Checks Consistency with Provided Classes
    Writes Corrected Annotations Back and Returns the Annotations

    Prints a summary report to stdout and returns the loaded dataset (list of
    sample dicts).
    """

    db = read_dataset(drafter=drafter, segmentation=segmentation)
    # NOTE(review): consistency() is called without a `recover` mapping, so the
    # module-level MAPPING_LOOKUP table is never applied — confirm intent.
    ann_total, ann_ok, ann_mapped, ann_faulty, ann_rot, ann_text = consistency(db, classes)
    # Write back immediately, persisting any in-place corrections
    write_dataset(db, segmentation=segmentation)

    print("")
    print("  Class and File Consistency Report")
    print("  -------------------------------------")
    print(f"Annotation Type: {'Polygon' if segmentation else 'Bounding Box'}")
    print(f"Class Label Count: {len(classes)}")
    print(f"Raw Image Files: {image_count(drafter=drafter, segmentation=segmentation)}")
    print(f"Processed Annotation Files: {len(db)}")
    print(f"Total Annotation Count: {ann_total}")
    print(f"Consistent Annotations: {ann_ok}")
    print(f"Faulty Annotations (no recovery): {ann_faulty}")
    print(f"Corrected Annotations by Mapping: {ann_mapped}")
    print(f"Annotations with Rotation: {ann_rot}")
    print(f"Annotations with Text: {ann_text}")

    return db
|
180 |
+
|
181 |
+
|
182 |
+
def text_statistics(db: list, plot_unique_labels: bool = False):
    """Generates and Plots Statistics on Text Classes

    Prints counts of text annotations/labels/characters, plots character
    frequencies and text-length distribution, then classifies labels against
    the regex patterns from properties.json and plots per-pattern match counts.

    Args:
        db: List of sample dicts with "bboxes" annotation lists.
        plot_unique_labels: If True, classify only unique label strings.
    """

    print("")
    print("  Text Statistics")
    print("---------------------")

    text_bbs = len([bbox for sample in db for bbox in sample["bboxes"] if bbox["class"] == "text"])
    print(f"Text BB Annotations: {text_bbs}")

    # All non-None text strings, regardless of annotation class
    text_labels = [bbox["text"] for sample in db for bbox in sample["bboxes"] if bbox["text"] is not None]
    print(f"Overall Text Label Count: {len(text_labels)}")

    text_labels_unique = set(text_labels)
    print(f"Unique Text Label Count: {len(text_labels_unique)}")

    print(f"Total Character Count: {sum([len(text_label) for text_label in text_labels])}")

    print("\nSet of all characters occurring in all text labels:")
    char_set = set([char_set for label in text_labels_unique for char_set in label])
    chars = sorted(list(char_set))
    print(chars)

    # Occurrence count of each character across ALL labels (not just unique ones)
    char_nbrs = np.arange(len(chars))
    char_counts = [sum([len([None for text_char in text_label if text_char == char])
                        for text_label in text_labels])
                   for char in chars]
    plt.bar(char_nbrs, char_counts)
    plt.xticks(char_nbrs, chars)
    plt.title("Character Frequencies")
    plt.xlabel("Character")
    plt.ylabel("Overall Count")
    plt.show()
    print("\nCharacter Frequencies:")
    # NOTE(review): this prints the RECIPROCAL of each count (1/count), not a
    # frequency in the usual sense — confirm whether count/total was intended.
    print({char: 1/char_count for char, char_count in zip(chars, char_counts)})

    # NOTE(review): max() raises ValueError when there are no text labels at all.
    max_text_len = max([len(text_label) for text_label in text_labels])
    text_lengths = np.arange(max_text_len)+1
    text_count_by_length = [len([None for text_label in text_labels if len(text_label) == text_length])
                            for text_length in text_lengths]
    plt.bar(text_lengths, text_count_by_length)
    plt.xticks(text_lengths, rotation=90)
    plt.title("Text Length Distribution")
    plt.xlabel("Character Count")
    plt.ylabel("Annotation Count")
    plt.show()

    text_instances = text_labels_unique if plot_unique_labels else text_labels
    text_classes_names = []
    text_classes_instances = []

    # Bucket labels by the first-matching regex pattern from properties.json
    for text_class in load_properties():
        text_classes_names.append(text_class["name"])
        text_classes_instances.append([text_instance for text_instance in text_instances
                                       if re.match(text_class["regex"], text_instance)])

    # Everything not matched by any pattern ends up in "Unclassified"
    text_classified = [text for text_class_instances in text_classes_instances for text in text_class_instances]
    text_classes_names.append("Unclassified")
    text_classes_instances.append([text_instance for text_instance in text_instances
                                   if text_instance not in text_classified])

    for text_class_name, text_class_instances in zip(text_classes_names, text_classes_instances):
        print(f"\n{text_class_name}:")
        print(sorted(list(set(text_class_instances))))

    plt.bar(text_classes_names, [len(text_class_instances) for text_class_instances in text_classes_instances])
    plt.title('Count of matching pattern')
    plt.xlabel('Regex')
    plt.ylabel('No. of text matched')
    plt.xticks(rotation=90)
    plt.tight_layout()
    plt.show()
|
254 |
+
|
255 |
+
|
256 |
+
|
257 |
+
if __name__ == "__main__":
    # Optional single CLI argument restricts processing to one drafter
    drafter_selected = int(sys.argv[1]) if len(sys.argv) == 2 else None
    classes = load_classes()

    # Check/correct annotations, then produce the statistics plots
    #db_poly = read_check_write(classes, drafter_selected, segmentation=True)
    db_bb = read_check_write(classes, drafter_selected)

    class_sizes(db_bb, classes)
    circuit_annotations(db_bb, classes)
    class_distribution(db_bb, classes)
    #class_distribution(db_poly, classes)
    consistency_circuit(db_bb, classes)
    text_statistics(db_bb)
|
loader.py
ADDED
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""loader.py: Load and Store Functions for the Dataset"""
|
2 |
+
|
3 |
+
# System Imports
|
4 |
+
import os, sys
|
5 |
+
from os.path import join, realpath
|
6 |
+
import json
|
7 |
+
import xml.etree.ElementTree as ET
|
8 |
+
from lxml import etree
|
9 |
+
|
10 |
+
# Third Party Imports
|
11 |
+
import cv2
|
12 |
+
|
13 |
+
__author__ = "Johannes Bayer"
|
14 |
+
__copyright__ = "Copyright 2022-2023, DFKI"
|
15 |
+
__license__ = "CC"
|
16 |
+
__version__ = "0.0.1"
|
17 |
+
__email__ = "johannes.bayer@dfki.de"
|
18 |
+
__status__ = "Prototype"
|
19 |
+
|
20 |
+
|
21 |
+
|
22 |
+
def load_classes() -> dict:
    """Returns the List of Classes as Encoding Map (class name -> integer label)

    Reads classes.json from the current working directory.
    """

    # json.load parses straight from the stream (no intermediate read());
    # explicit encoding avoids locale-dependent decoding
    with open("classes.json", encoding="utf-8") as classes_file:
        return json.load(classes_file)
|
27 |
+
|
28 |
+
|
29 |
+
def load_classes_ports() -> dict:
    """Reads Symbol Library from File

    Parses classes_ports.json from the current working directory.
    """

    with open("classes_ports.json") as json_file:
        raw_text = json_file.read()

    return json.loads(raw_text)
|
34 |
+
|
35 |
+
|
36 |
+
def load_properties() -> list:
    """Loads the Properties RegEx File

    Returns the parsed content of properties.json: a JSON array of
    {"name": ..., "regex": ...} entries. (The previous "-> dict" annotation
    was incorrect — callers iterate this as a list of dicts.)
    """

    with open("properties.json", encoding="utf-8") as json_file:
        return json.load(json_file)
|
41 |
+
|
42 |
+
|
43 |
+
def _sample_info_from_path(path: str) -> tuple:
|
44 |
+
"""Extracts Sample Metadata from File Path"""
|
45 |
+
|
46 |
+
drafter, _, file_name = os.path.normpath(path).split(os.sep)[-3:]
|
47 |
+
circuit, drawing, picture = file_name.split("_")
|
48 |
+
picture, suffix = picture.split(".")
|
49 |
+
return drafter.split("_")[1], int(circuit[1:]), int(drawing[1:]), int(picture[1:]), suffix
|
50 |
+
|
51 |
+
|
52 |
+
def file_name(sample: dict) -> str:
    """Return the Raw Image File Name of a Sample"""

    stem = "C{0}_D{1}_P{2}".format(sample['circuit'], sample['drawing'], sample['picture'])
    return stem + "." + sample['format']
|
56 |
+
|
57 |
+
|
58 |
+
def read_pascal_voc(path: str) -> dict:
    """Reads the Content of a Pascal VOC Annotation File

    Returns a sample dict with metadata (drafter, circuit, drawing, picture,
    format, width, height) and the "bboxes" list; "polygons" and "points" are
    always empty for Pascal VOC input.
    """

    root = ET.parse(path).getroot()
    circuit, drawing, picture = root.find("filename").text.split("_")
    # drafter number comes from the "drafter_<N>" folder two levels up
    drafter = int(os.path.normpath(path).split(os.sep)[-3].split("_")[1])

    return {"drafter": drafter,
            "circuit": int(circuit[1:]),
            "drawing": int(drawing[1:]),
            "picture": int(picture.split(".")[0][1:]),
            "format": picture.split(".")[-1],
            "width": int(root.find("size/width").text),
            "height": int(root.find("size/height").text),  # was int(int(...)): redundant double cast removed
            "bboxes": [{"class": annotation.find("name").text,
                        "xmin": int(annotation.find("bndbox/xmin").text),
                        "xmax": int(annotation.find("bndbox/xmax").text),
                        "ymin": int(annotation.find("bndbox/ymin").text),
                        "ymax": int(annotation.find("bndbox/ymax").text),
                        # rotation and text are optional elements
                        "rotation": int(annotation.find("bndbox/rotation").text) if annotation.find("bndbox/rotation") is not None else None,
                        "text": annotation.find("text").text if annotation.find("text") is not None else None}
                       for annotation in root.findall('object')],
            "polygons": [], "points": []}
|
81 |
+
|
82 |
+
|
83 |
+
def write_pascal_voc(sample: dict) -> None:
    """Writes a Sample's Content to a Pascal VOC Annotation File

    The output path is derived from the sample's drafter/circuit/drawing/
    picture metadata: ./drafter_<D>/annotations/C<c>_D<d>_P<p>.xml
    Assumes that target folder already exists.
    """

    root = etree.Element("annotation")
    etree.SubElement(root, "folder").text = "images"
    etree.SubElement(root, "filename").text = file_name(sample)
    etree.SubElement(root, "path").text = join(".", f"drafter_{sample['drafter']}", "images", file_name(sample))
    src = etree.SubElement(root, "source")
    etree.SubElement(src, "database").text = "CGHD"
    size = etree.SubElement(root, "size")
    etree.SubElement(size, "width").text = str(sample["width"])
    etree.SubElement(size, "height").text = str(sample["height"])
    etree.SubElement(size, "depth").text = "3"
    etree.SubElement(root, "segmented").text = "0"

    for bbox in sample["bboxes"]:
        xml_obj = etree.SubElement(root, "object")
        etree.SubElement(xml_obj, "name").text = bbox["class"]
        etree.SubElement(xml_obj, "pose").text = "Unspecified"
        etree.SubElement(xml_obj, "truncated").text = "0"
        etree.SubElement(xml_obj, "difficult").text = "0"
        xml_bbox = etree.SubElement(xml_obj, "bndbox")

        for elem in ["xmin", "ymin", "xmax", "ymax"]:
            etree.SubElement(xml_bbox, elem).text = str(bbox[elem])

        # rotation: written only when present; text: written only when non-empty
        # NOTE(review): truthiness drops an empty-string text — confirm intended
        if bbox["rotation"] is not None:
            etree.SubElement(xml_bbox, "rotation").text = str(bbox["rotation"])

        if bbox["text"]:
            etree.SubElement(xml_obj, "text").text = bbox["text"]

    etree.indent(root, space="\t")
    etree.ElementTree(root).write(join(".", f"drafter_{sample['drafter']}", "annotations",
                                       f"C{sample['circuit']}_D{sample['drawing']}_P{sample['picture']}.xml"),
                                  pretty_print=True)
|
119 |
+
|
120 |
+
|
121 |
+
def read_labelme(path: str) -> dict:
    """Reads and Returns Geometric Objects from a LabelME JSON File

    Returns a sample dict with metadata plus "polygons" and "points" lists;
    "bboxes" is always empty for LabelMe input.
    NOTE(review): 'drafter' here is the string returned by
    _sample_info_from_path, whereas read_pascal_voc stores an int — confirm
    whether downstream code relies on either type.
    """

    with open(path) as json_file:
        json_data = json.load(json_file)

    drafter, circuit, drawing, picture, _ = _sample_info_from_path(path)
    # File format is taken from the referenced image, not the annotation path
    suffix = json_data['imagePath'].split(".")[-1]

    return {'img_path': json_data['imagePath'], 'drafter': drafter, 'circuit': circuit,
            'drawing': drawing, 'picture': picture, 'format': suffix,
            'height': json_data['imageHeight'], 'width': json_data['imageWidth'], 'bboxes': [],
            'polygons': [{'class': shape['label'], 'points': shape['points'],
                          'rotation': shape.get('rotation', None),
                          'text': shape.get('text', None),
                          'group': shape.get('group_id', None)}
                         for shape in json_data['shapes']
                         if shape['shape_type'] == "polygon"],
            # a LabelMe point is stored as a single-coordinate shape
            'points': [{'class': shape['label'], 'points': shape['points'][0],
                        'group': shape['group_id'] if 'group_id' in shape else None}
                       for shape in json_data['shapes']
                       if shape['shape_type'] == "point"]}
143 |
+
|
144 |
+
|
145 |
+
def write_labelme(geo_data: dict, path: str = None) -> None:
    """Writes Geometric Objects to a LabelMe JSON File

    Args:
        geo_data: Sample dict as produced by read_labelme.
        path: Target file; when omitted, derived from the sample's
              drafter/circuit/drawing/picture metadata.
    """

    if not path:
        path = join(f"drafter_{geo_data['drafter']}", "instances",
                    f"C{geo_data['circuit']}_D{geo_data['drawing']}_P{geo_data['picture']}.json")

    with open(path, 'w') as json_file:
        json.dump({'version': '3.16.7', 'flags': {}, 'lineColor': [0, 255, 0, 128], 'fillColor': [255, 0, 0, 128],
                   'imagePath': geo_data['img_path'], 'imageData': None,
                   'imageHeight': geo_data['height'], 'imageWidth': geo_data['width'],
                   'shapes': [{'label': polygon['class'], 'line_color': None, 'fill_color': None,
                               'points': polygon['points'],
                               **({'group_id': polygon['group']} if polygon['group'] is not None else {}),
                               # 'is not None' (not truthiness) so a valid rotation of 0 is preserved,
                               # consistent with the rotation handling in write_pascal_voc
                               **({'rotation': polygon['rotation']} if polygon.get('rotation') is not None else {}),
                               **({'text': polygon['text']} if polygon.get('text', None) else {}),
                               'shape_type': 'polygon', 'flags': {}}
                              for polygon in geo_data['polygons']] +
                             [{'label': point['class'], 'points': [[point['points'][0], point['points'][1]]],
                               'group_id': point['group'], 'shape_type': 'point', 'flags': {}}
                              for point in geo_data['points']]},
                  json_file, indent=2)
|
167 |
+
|
168 |
+
|
169 |
+
def read_dataset(drafter: int = None, circuit: int = None, segmentation=False, folder: str = None) -> list:
    """Reads all BB Annotation Files from Folder Structure
    This Method can be invoked from Anywhere, can be restricted to a specified drafter
    and can be use for both BB and Polygon Annotations. Alternative annotation sub-folder
    can be specified to read processed ground truth."""

    # Dataset root = directory containing this file
    db_root = os.sep.join(realpath(__file__).split(os.sep)[:-1])
    parser = read_labelme if segmentation else read_pascal_voc
    sub_folder = folder if folder else ("instances" if segmentation else "annotations")

    samples = [parser(join(root, file_name))
               for root, _, files in os.walk(db_root)
               for file_name in files
               if sub_folder in root and
                  (not circuit or f"C{circuit}_" in file_name) and
                  (not drafter or f"drafter_{drafter}{os.sep}" in root)]

    # Sort by a tuple key: the previous circuit*100+drawing*10+picture weighting
    # produced wrong orderings as soon as a drawing or picture index reached 10
    return sorted(samples, key=lambda sample: (sample["circuit"], sample["drawing"], sample["picture"]))
|
184 |
+
|
185 |
+
|
186 |
+
def write_dataset(db: list, segmentation=False) -> None:
    """Writes a Dataset

    Each sample is written either as a LabelMe JSON (segmentation=True)
    or as a Pascal VOC XML annotation file.
    """

    for sample in db:
        if segmentation:
            write_labelme(sample)
        else:
            write_pascal_voc(sample)
|
191 |
+
|
192 |
+
|
193 |
+
def read_images(**kwargs) -> list:
    """Loads Images and BB Annotations and returns them as a List of (image, sample) Pairs

    Keyword arguments are forwarded to read_dataset (drafter, circuit,
    segmentation, folder).
    """

    # Dataset root = directory containing this file
    db_root = os.sep.join(realpath(__file__).split(os.sep)[:-1])

    # NOTE(review): cv2.imread returns None for missing/unreadable files and
    # that None is passed through unchecked — confirm images always exist.
    return [(cv2.imread(join(db_root, f"drafter_{sample['drafter']}", "images", file_name(sample))), sample)
            for sample in read_dataset(**kwargs)]
|
200 |
+
|
201 |
+
|
202 |
+
def read_snippets(**kwargs):
    """Loads Image Snippets and BBoxes and returns them as List of Pairs

    Crops each bounding box region out of its source image; keyword arguments
    are forwarded to read_images/read_dataset.
    """

    return [(img[bbox['ymin']:bbox['ymax'], bbox['xmin']:bbox['xmax']], bbox)
            for img, annotations in read_images(**kwargs)
            for bbox in annotations['bboxes']]
|
212 |
+
|
213 |
+
|
214 |
+
if __name__ == "__main__":
    """Sample Loader Usage, Dumps All Text Snippets of (Selectable or All) Drafter to Test Folder"""

    # os.mkdir crashed with FileExistsError on a second run; makedirs+exist_ok is idempotent
    os.makedirs("test", exist_ok=True)
    # Optional single CLI argument restricts the dump to one drafter
    args = {'drafter': int(sys.argv[1])} if len(sys.argv) == 2 else {}

    for nbr, (snippet, bbox) in enumerate(read_snippets(**args)):
        if bbox['class'] == "text" and bbox.get("text", ""):
            # Undo 90/270 degree annotation rotations before export
            if bbox['rotation'] == 90:
                snippet = cv2.rotate(snippet, cv2.ROTATE_90_CLOCKWISE)
            if bbox['rotation'] == 270:
                snippet = cv2.rotate(snippet, cv2.ROTATE_90_COUNTERCLOCKWISE)

            cv2.imwrite(join("test", f"{bbox['text']}.{nbr}.png"), snippet)
|
properties.json
ADDED
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[
|
2 |
+
{
|
3 |
+
"name": "Resistor Name",
|
4 |
+
"regex": "^R[0-9]+[Ω]$"
|
5 |
+
},
|
6 |
+
{
|
7 |
+
"name": "Resistance",
|
8 |
+
"regex": "[0-9]*([,.][0-9]+)?(\\s*Ω|\\s*kΩ|\\s*KΩ|ohms|\\s*M|K|R|k|\\s*meg|Ohms|(\\s*ohm|\\s*Ohm|m|M))+?[0-9]*"
|
9 |
+
},
|
10 |
+
{
|
11 |
+
"name": "Capacitor Name",
|
12 |
+
"regex": "^C[0-9]+(F|µF)$"
|
13 |
+
},
|
14 |
+
{
|
15 |
+
"name": "Capacity",
|
16 |
+
"regex":"[0-9]*([,.][0-9]+)?[KunµpPfFm][a-zA-Z]?[0-9]*"
|
17 |
+
},
|
18 |
+
{
|
19 |
+
"name": "Inductivity Name",
|
20 |
+
"regex": "^(nH|µH|mH|mh)$"
|
21 |
+
},
|
22 |
+
{
|
23 |
+
"name": "Inductivity",
|
24 |
+
"regex": "^[0-9]+(?:[,.][0-9]+)?(nH|µH|mH|mh)$"
|
25 |
+
},
|
26 |
+
{
|
27 |
+
"name": "Frequency Name",
|
28 |
+
"regex": "(?:m|MHz|k|K|G|hz|KHz|Hz)$"
|
29 |
+
},
|
30 |
+
{
|
31 |
+
"name": "Frequency",
|
32 |
+
"regex": "[0-9]+(?:[,.][0-9]+)?(?:m|MHz|k|K|G|hz|Hz|HZ)$"
|
33 |
+
},
|
34 |
+
{
|
35 |
+
"name": "Wattage Name",
|
36 |
+
"regex": "^(?:KW|W|Watt|Watts)$"
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"name": "Wattage",
|
40 |
+
"regex": "^[0-9]+(?:[\\/,.][0-9]+)?(?:w|KW|\\s*W|watt|\\s*Watt|Watts|\\s*WATTS)$"
|
41 |
+
},
|
42 |
+
{
|
43 |
+
"name": "Current Name",
|
44 |
+
"regex": "(A|mA|MA|UA)$"
|
45 |
+
},
|
46 |
+
{
|
47 |
+
"name": "Current",
|
48 |
+
"regex": "^[0-9]+(?:[,.][0-9]+)?(A|µA|mA|VA|MA)$"
|
49 |
+
},
|
50 |
+
{
|
51 |
+
"name": "Voltage Name",
|
52 |
+
"regex": "(v|V|VCD|V\\s*AC|AC|DC|VCC|VDD)$"
|
53 |
+
},
|
54 |
+
{
|
55 |
+
"name": "Voltage",
|
56 |
+
"regex": "^[-|+|...|0-9]+(?:[,.][0-9]+)?(?:V|v|\\s*V|\\s*volt|uV|uv|VOLT|Volt|kV|Volts|nV|nv|KV|VDC|V\\s*AC|[\\dVv]*[,\\dA-Z]*[A-Z\\s*]*-[\\s*\\d]*[,\\dA-Z]*)$"
|
57 |
+
},
|
58 |
+
{
|
59 |
+
"name": "AC Name",
|
60 |
+
"regex": "^(?:V|VDC|VAC|ac|AC)$"
|
61 |
+
},
|
62 |
+
{
|
63 |
+
"name": "AC",
|
64 |
+
"regex": "^[0-9]+(?:[,.][0-9]+)?(?:V|AC|VDC|\\s*V\\s*AC)$"
|
65 |
+
},
|
66 |
+
{
|
67 |
+
"name": "DC Name",
|
68 |
+
"regex": "^(?:V|VDC|VAC|DC|Dc)$"
|
69 |
+
},
|
70 |
+
{
|
71 |
+
"name": "DC",
|
72 |
+
"regex": "^[-|+|0-9]+(?:[,.][0-9]+)?(?:V|\\s*VDC|vdc|VAC)$"
|
73 |
+
},
|
74 |
+
{
|
75 |
+
"name": "Component Name",
|
76 |
+
"regex": "[0-9\\(]*[\\s*a-zA-Z]+[\\)]*"
|
77 |
+
},
|
78 |
+
{
|
79 |
+
"name": "Name",
|
80 |
+
"class": "resistor",
|
81 |
+
"regex": "^R[0-9]+$"
|
82 |
+
},
|
83 |
+
{
|
84 |
+
"name": "Name",
|
85 |
+
"regex": "^VR[0-9]+$"
|
86 |
+
},
|
87 |
+
{
|
88 |
+
"name": "Name",
|
89 |
+
"regex": "^C[0-9]+$"
|
90 |
+
},
|
91 |
+
{
|
92 |
+
"name": "Name",
|
93 |
+
"regex": "^Q[0-9]+$"
|
94 |
+
},
|
95 |
+
{
|
96 |
+
"name": "Name",
|
97 |
+
"regex": "^D[0-9]+$"
|
98 |
+
},
|
99 |
+
{
|
100 |
+
"name": "Name",
|
101 |
+
"regex": "^L[0-9]+$"
|
102 |
+
},
|
103 |
+
{
|
104 |
+
"name": "Name",
|
105 |
+
"regex": "^(SW|Sw)[0-9]+$"
|
106 |
+
},
|
107 |
+
{
|
108 |
+
"name": "voltage-type",
|
109 |
+
"regex": "(VCC|VDD)$"
|
110 |
+
},
|
111 |
+
{
|
112 |
+
"name": "Device",
|
113 |
+
"class": "transistor.bjt",
|
114 |
+
"regex": "^[0-9]?[A-Z]+[0-9]+$"
|
115 |
+
},
|
116 |
+
{
|
117 |
+
"name": "Taper",
|
118 |
+
"class": "resistor.adjustable",
|
119 |
+
"regex": "(log|lin)$"
|
120 |
+
},
|
121 |
+
{
|
122 |
+
"name": "Pin",
|
123 |
+
"regex": "^[0-9]{1,2}$"
|
124 |
+
}
|
125 |
+
|
126 |
+
]
|
segmentation.py
ADDED
@@ -0,0 +1,348 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""segmentation.py: Toolkit for Generation of Instance Segmentation Material"""
|
2 |
+
|
3 |
+
# System Imports
|
4 |
+
import sys
|
5 |
+
import os
|
6 |
+
from os.path import join, exists
|
7 |
+
import json
|
8 |
+
from math import dist
|
9 |
+
|
10 |
+
# Project Imports
|
11 |
+
from loader import read_pascal_voc, read_labelme, write_labelme, load_classes_ports
|
12 |
+
from utils import transform, associated_keypoints
|
13 |
+
|
14 |
+
# Third-Party Imports
|
15 |
+
import cv2
|
16 |
+
import numpy as np
|
17 |
+
|
18 |
+
__author__ = "Amit Kumar Roy"
|
19 |
+
__copyright__ = "Copyright 2022-2023, DFKI"
|
20 |
+
__credits__ = ["Amit Kumar Roy", "Johannes Bayer"]
|
21 |
+
__license__ = "CC"
|
22 |
+
__version__ = "0.0.1"
|
23 |
+
__email__ = "johannes.bayer@dfki.de"
|
24 |
+
__status__ = "Prototype"
|
25 |
+
|
26 |
+
|
27 |
+
|
28 |
+
def binary_to_multi_seg_map(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str,
                            color_map: dict) -> None:
    """Creates a Multi Class Segmentation File from a Binary Segmentation File and an Coarse Instance Polygon File

    Args:
        drafter: drafter directory (e.g. "drafter_12")
        sample: sample file name without suffix
        suffix: image file suffix of the binary segmentation map
        source_folder: folder (below drafter) holding the LabelME polygon files
        target_folder: folder (below drafter) the multi-class map is written to
        color_map: class name -> BGR color triple used to paint each polygon
    """

    bin_seg_map = cv2.imread(join(drafter, "segmentation", f"{sample}.{suffix}"))
    # Snap near-black pixels to pure black so the later bitwise ops are clean
    bin_seg_map[np.all(bin_seg_map <= (10, 10, 10), axis=-1)] = (0, 0, 0)
    shape_mask = np.ones(bin_seg_map.shape, dtype=np.uint8)*255
    geo_data = read_labelme(join(drafter, source_folder, f"{sample}.json"))

    # Paint polygons large-to-small (by bounding-box area) so that smaller
    # shapes drawn later are not hidden below larger ones
    for shape in sorted(geo_data["polygons"],
                        key=lambda shape: -(max([p[0] for p in shape['points']])-min([p[0] for p in shape['points']])) *
                                           (max([p[1] for p in shape['points']])-min([p[1] for p in shape['points']]))):
        cv2.fillPoly(shape_mask,
                     pts=[np.array(shape["points"], dtype=np.int32)],
                     color=color_map[shape["class"]])

    # Keep class colors only where the binary map has foreground strokes
    multi_seg_map = cv2.bitwise_and(cv2.bitwise_not(bin_seg_map), shape_mask)

    # Mark every connector keypoint with a white X (40px diagonal cross)
    for point in geo_data['points']:
        if point['class'] == "connector":
            x, y = point['points']
            cv2.line(multi_seg_map, (int(x-20), int(y-20)), (int(x+20), int(y+20)), (255, 255, 255), 2)
            cv2.line(multi_seg_map, (int(x-20), int(y+20)), (int(x+20), int(y-20)), (255, 255, 255), 2)

    cv2.imwrite(join(drafter, target_folder, f"{sample}.png"), multi_seg_map)
|
53 |
+
|
54 |
+
|
55 |
+
def generate_keypoints(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str,
                       keep_polygons: bool = True, margin=3) -> None:
    """Generates Connector Keypoints, optionally discarding existing polygons

    Connector candidates are found as blobs where a polygon's outline
    intersects the foreground strokes of the binary segmentation map.

    Args:
        margin: padding (px) added around a polygon's bounding box when cropping
        keep_polygons: if False, only the generated keypoints are written out
    """

    bin_seg_map = cv2.imread(join(drafter, "segmentation", f"{sample}.{suffix}"), cv2.IMREAD_GRAYSCALE)
    _, bin_seg_map = cv2.threshold(bin_seg_map, 127, 255, cv2.THRESH_BINARY_INV)  # foreground -> white
    geo_data = read_labelme(join(drafter, source_folder, f"{sample}.json"))
    # Blob detector tuned to report every small white blob (all filters off)
    detector_params = cv2.SimpleBlobDetector_Params()
    detector_params.minArea = 3
    detector_params.minDistBetweenBlobs = 3
    detector_params.minThreshold = 10
    detector_params.maxThreshold = 255
    detector_params.blobColor = 255
    detector_params.filterByArea = False
    detector_params.filterByCircularity = False
    detector_params.filterByConvexity = False
    detector_params.filterByInertia = False
    detector = cv2.SimpleBlobDetector_create(detector_params)
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))  # NOTE(review): unused — left over from earlier morphology step?

    # Erase text regions so letters are not mistaken for connector blobs
    for nbr, shape in enumerate(geo_data["polygons"]):
        if shape['class'] == "text":
            cv2.fillPoly(bin_seg_map, pts=[np.array(shape["points"], dtype=np.int32)], color=[0, 0, 0])

    for nbr, shape in enumerate(geo_data["polygons"]):
        shape['group'] = nbr  # group id links keypoints to their polygon

        if shape['class'] != "text" and shape['class'] != "wire":
            # Crop the polygon's (padded, image-clipped) bounding box
            x_min = max(int(min([p[0] for p in shape['points']]))-margin, 0)
            x_max = min(int(max([p[0] for p in shape['points']]))+margin, bin_seg_map.shape[1])
            y_min = max(int(min([p[1] for p in shape['points']]))-margin, 0)
            y_max = min(int(max([p[1] for p in shape['points']]))+margin, bin_seg_map.shape[0])
            cropout = bin_seg_map[y_min:y_max, x_min:x_max]
            # Draw the polygon outline (in crop coordinates) and intersect it
            # with the strokes: blobs of overlap are connector candidates
            shape_mask = np.zeros((y_max-y_min, x_max-x_min), dtype=np.uint8)
            cv2.polylines(shape_mask, pts=[np.array(shape["points"]-np.array([[x_min, y_min]]), dtype=np.int32)],
                          isClosed=True, color=[255, 255, 255], thickness=2)
            intersect_map = cv2.bitwise_and(cropout, shape_mask)
            keypoints = detector.detect(intersect_map)
            # Store detections back in full-image coordinates
            geo_data['points'] += [{'class': "connector", 'points': (keypoint.pt[0]+x_min, keypoint.pt[1]+y_min),
                                    'group': nbr} for keypoint in keypoints]

    # Wires adopt the connectors lying on (or within 4px outside) their outline
    for shape in geo_data["polygons"]:
        if shape['class'] == "wire":
            wire_connectors = [point["points"] for point in geo_data['points']
                               if cv2.pointPolygonTest(np.array(shape["points"]), np.array(point['points']), True) > -4]

            # A wire is expected to connect exactly two endpoints
            if len(wire_connectors) != 2:
                print(f" Anomaly Wire Connector Count: {len(wire_connectors)} -> {shape['points'][0]}")

            geo_data['points'] += [{'class': "connector", 'points': (point[0], point[1]),
                                    'group': shape['group']} for point in wire_connectors]

    geo_data['polygons'] = geo_data['polygons'] if keep_polygons else []
    write_labelme(geo_data, join(drafter, target_folder, f"{sample}.json"))
|
109 |
+
|
110 |
+
|
111 |
+
def generate_wires(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str) -> None:
    """Generates wire polygons

    Erases all annotated component polygons from the binary segmentation map;
    the remaining foreground strokes are treated as wires and their outer
    contours are appended as new "wire" polygons.
    """

    geo_data = read_labelme(join(drafter, source_folder, f"{sample}.json"))
    bin_seg_map = cv2.imread(join(drafter, "segmentation", f"{sample}.{suffix}"), cv2.IMREAD_GRAYSCALE)
    _, bin_seg_map = cv2.threshold(bin_seg_map, 127, 255, cv2.THRESH_BINARY_INV)  # foreground -> white

    # Blank out everything already explained by an annotated polygon
    for nbr, shape in enumerate(geo_data["polygons"]):
        cv2.fillPoly(bin_seg_map, pts=[np.array(shape["points"], dtype=np.int32)], color=[0, 0, 0])

    cntrs = cv2.findContours(bin_seg_map, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    contours = cntrs[0] if len(cntrs) == 2 else cntrs[1]  # handles both 2- and 3-tuple OpenCV return signatures

    for contour in contours:
        if len(contour) > 3:  # a valid polygon needs more than 3 contour points
            geo_data['polygons'] += [{'class': "wire", 'points': np.squeeze(contour).tolist(), 'group': None}]

    write_labelme(geo_data, join(drafter, target_folder, f"{sample}.json"))
|
129 |
+
|
130 |
+
|
131 |
+
def pascalvoc_to_labelme(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str,
                         keep_existing_json: bool = True) -> None:
    """Converts a Bounding Box (Rectangle) Annotation File to an Instance Mask (Polygon) File.

    With its default configuration this is a no-op on a consistently
    populated dataset (existing JSON files are kept)."""

    target_path = join(drafter, target_folder, f"{sample}.json")

    if keep_existing_json and exists(target_path):
        print(" -> SKIP (already exists)")
        return None

    annotation = read_pascal_voc(join(drafter, source_folder, f"{sample}.xml"))
    annotation['points'] = []                                                  # no keypoints yet in the segmentation scenario
    annotation['img_path'] = join("..", "segmentation", f"{sample}.{suffix}")  # point to the binary segmentation map

    # Every rectangle becomes a four-corner polygon; rotation and text are dropped
    annotation['polygons'] = [{'class': bbox['class'],
                               'group': None,
                               'points': [[bbox['xmin'], bbox['ymin']],
                                          [bbox['xmax'], bbox['ymin']],
                                          [bbox['xmax'], bbox['ymax']],
                                          [bbox['xmin'], bbox['ymax']]]}
                              for bbox in annotation['bboxes']]
    write_labelme(annotation, target_path)
|
152 |
+
|
153 |
+
|
154 |
+
def labelme_raw_image(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str) -> None:
    """Resets the Source Image of a LabelME file to the Raw Image"""

    geo_data = read_labelme(join(drafter, source_folder, f"{sample}.json"))
    geo_data['img_path'] = join("..", "images", f"{sample}.{suffix}")  # point back to the unprocessed input image
    write_labelme(geo_data, join(drafter, target_folder, f"{sample}.json"))
|
160 |
+
|
161 |
+
|
162 |
+
def convex_hull(thresh_img: np.ndarray, polygon: np.ndarray) -> list:
    """Calculates the Convex Hull of a Binary Image, falling back to Polygon"""

    found = cv2.findContours(thresh_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    contours = found[0] if len(found) == 2 else found[1]  # 2- vs. 3-tuple OpenCV return signature
    relevant = [c for c in contours if cv2.contourArea(c) > 10]  # ignore tiny speckle contours

    if not relevant:
        return polygon.tolist()  # nothing usable found -> keep original polygon

    return np.squeeze(cv2.convexHull(np.vstack(relevant))).tolist()
|
175 |
+
|
176 |
+
|
177 |
+
def refine_polygons(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str,
                    classes_discontinuous: list) -> None:
    """Main Function for Polygon Refinement

    Replaces each coarse (non-wire) polygon by the outline of the actual
    strokes it covers in the binary segmentation map: a convex hull for
    classes with discontinuous drawings, otherwise the longest contour.
    """

    geo_data = read_labelme(join(drafter, source_folder, f"{sample}.json"))
    img = cv2.imread(join(drafter, "segmentation", f"{sample}.{suffix}"))
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Otsu binarization, inverted: strokes become white foreground
    (_, img) = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)
    background_mask = np.zeros(img.shape, dtype=np.uint8)

    for shape in geo_data['polygons']:
        if shape["class"] != "wire":
            polygon = np.array(shape["points"], dtype=np.int32)
            # Mask out everything except the current polygon's interior
            mask_single_components = cv2.fillPoly(background_mask, pts=[polygon], color=(255, 255, 255))
            bitwise_and_result = cv2.bitwise_and(img, mask_single_components)
            background_mask = np.zeros(img.shape, dtype=np.uint8)  # fresh mask for next iteration

            if shape["class"] in classes_discontinuous:
                # Discontinuous symbols (separate strokes) -> enclose all of them
                hull_list = convex_hull(bitwise_and_result, polygon)
                shape['points'] = hull_list

            else:
                contours, _ = cv2.findContours(bitwise_and_result, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

                if contours:
                    # Longest contour (most points) is taken as the symbol outline
                    contour = max(contours, key=len)
                    contour = np.squeeze(contour)
                    contour_list = contour.tolist()
                    shape['points'] = contour_list

                else:
                    print(f" !!! WARNING: Empty Polygon: {shape['group']} !!!")

    write_labelme(geo_data, join(drafter, target_folder, f"{sample}.json"))
|
211 |
+
|
212 |
+
|
213 |
+
def bounding_box(points):
    """Returns the axis-aligned bounding box [xmin, ymin, xmax, ymax] of a point list"""

    xs = [point[0] for point in points]
    ys = [point[1] for point in points]
    return [min(xs), min(ys), max(xs), max(ys)]
|
219 |
+
|
220 |
+
|
221 |
+
def overlap(bbox1, bbox2):
    """Checks whether two boxes intersect.

    bbox1 is a dict with xmin/ymin/xmax/ymax keys,
    bbox2 a [xmin, ymin, xmax, ymax] list as returned by bounding_box()."""

    disjoint_x = bbox1["xmin"] > bbox2[2] or bbox1["xmax"] < bbox2[0]
    disjoint_y = bbox1["ymin"] > bbox2[3] or bbox1["ymax"] < bbox2[1]

    return not (disjoint_x or disjoint_y)
|
230 |
+
|
231 |
+
|
232 |
+
def find_closest_points(list1, list2):
    """For each point in list1, picks the nearest point of list2 (Euclidean distance).

    Returns the picked points (as lists) in list1's order.  A point of list2
    may be picked multiple times, so the result is in general not a
    permutation of list2.  list2 must be non-empty.

    Fix: the original computed `math.sqrt(...)`, but this module only does
    `from math import dist`, so every call raised NameError.  `dist` gives
    the same Euclidean distance directly.
    """

    # min() keeps the first point in case of ties, matching the original
    # strict-less-than comparison
    return [list(min(list2, key=lambda candidate: dist(point, candidate)))
            for point in list1]
|
244 |
+
|
245 |
+
|
246 |
+
def connector_type_assignment(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str) -> int:
    """Connector Point to Port Type Assignment by Geometric Transformation Matching

    Matches each symbol polygon to its bounding-box annotation, projects the
    class's canonical port positions into the image via the box's rotation
    and extent, and renames each detected connector after its nearest port.

    Returns:
        Number of symbols whose detected connector count does not match the
        expected port count of their class.
    """

    bboxes = read_pascal_voc(join(drafter, "annotations", f"{sample}.xml"))
    instances = read_labelme(join(drafter, source_folder, f"{sample}.json"))
    classes_ports = load_classes_ports()
    bad_connector_symbols = 0

    for shape in instances["polygons"]:
        if shape["class"] in classes_ports.keys():
            connectors = associated_keypoints(instances, shape)  # keypoints sharing this polygon's group id
            cls_ports = classes_ports[shape["class"]]
            # BB of same class that overlaps the polygon's extent
            bboxes_match = [bbox for bbox in bboxes['bboxes']
                            if overlap(bbox, bounding_box(shape["points"])) and bbox['class'] == shape['class']]

            if len(cls_ports) != len(connectors):
                print(f" Bad Connector Count: {shape['class']} {shape['points'][0]} -> {len(cls_ports)} vs. {len(connectors)}")
                bad_connector_symbols += 1

            # Without exactly one matching BB, rotation/text can't be recovered
            if len(bboxes_match) != 1:
                print(f" No BB for Polygon: {shape['class']} {shape['points'][0]}")
                continue

            if bboxes_match[0]["rotation"] is None:
                print(f" Missing Rotation in BB: {shape['class']} {shape['points'][0]}")
                bboxes_match[0]["rotation"] = 0  # fall back to unrotated

            # Project canonical (unit-space) ports into image coordinates
            cls_ports_transformed = [transform(port, bboxes_match[0]) for port in cls_ports]

            # Each detected connector adopts the name of its nearest port
            for con in connectors:
                closest = sorted(cls_ports_transformed,
                                 key=lambda cls_port: dist(cls_port['position'], con['points']))[0]
                con['class'] = f"connector.{closest['name']}"

            # Propagate BB metadata onto the polygon
            shape['rotation'] = bboxes_match[0]['rotation']
            shape['text'] = bboxes_match[0]['text']

    write_labelme(instances, join(drafter, target_folder, f"{sample}.json"))
    return bad_connector_symbols
|
285 |
+
|
286 |
+
|
287 |
+
def pipeline(drafter: str, sample: str, suffix: str, source_folder: str, target_folder: str, **kwargs) -> int:
    """Standard Workflow: wires -> keypoints -> polygon refinement -> raw image reset -> port assignment.

    All intermediate results are written to (and re-read from) target_folder.

    Returns:
        The bad-connector-symbol count reported by connector_type_assignment.
    """

    generate_wires(drafter, sample, suffix, source_folder, target_folder)
    generate_keypoints(drafter, sample, suffix, target_folder, target_folder)
    refine_polygons(drafter, sample, suffix, target_folder, target_folder, kwargs["classes_discontinuous"])
    labelme_raw_image(drafter, sample, suffix, target_folder, target_folder)
    return connector_type_assignment(drafter, sample, suffix, target_folder, target_folder)
|
295 |
+
|
296 |
+
|
297 |
+
def execute(function: callable, source_folder: str, target_folder: str, drafter: str, info_msg: str, **kwargs):
    """Walks through the Dataset and applies the specified Function

    Iterates the selected drafter folder (or all "drafter_*" folders in the
    working directory), creates the target folder if missing, and calls
    `function` once per sample found in the drafter's "segmentation" folder.
    Integer return values are accumulated as the bad-connector-symbol count.
    """

    bad_connector_symbols = 0

    for drafter_dir in [f"drafter_{drafter}"] if drafter else sorted(next(os.walk('.'))[1]):
        if drafter_dir.startswith("drafter_"):

            if not os.path.isdir(join(drafter_dir, target_folder)):
                os.mkdir(join(drafter_dir, target_folder))

            for sample_raw in sorted(next(os.walk(join(drafter_dir, "segmentation")))[2]):
                sample, suffix = sample_raw.split(".")  # assumes exactly one dot in the file name
                print(f"{info_msg} for: {drafter_dir} -> {sample}")
                res = function(drafter_dir, sample, suffix, source_folder, target_folder, **kwargs)
                if res is not None:  # only connector_type_assignment/pipeline report a count
                    bad_connector_symbols += res

    print(f"Overall Symbols with incorrect Connector Count: {bad_connector_symbols}")
|
316 |
+
|
317 |
+
|
318 |
+
if __name__ == "__main__":
    # CLI entry point: `python segmentation.py COMMAND [DRAFTER [TARGET [SOURCE]]]`

    with open("classes_discontinuous.json") as f:
        classes_discontinuous = json.load(f)  # classes whose symbols consist of separate strokes

    with open('classes_color.json') as f:
        color_map = json.load(f)  # class name -> segmentation map color

    # command -> [function, default source folder, default target folder, log message, extra kwargs]
    commands = {"transform": [pascalvoc_to_labelme, "annotations", "instances", "Transforming BBs -> Masks", {}],
                "wire": [generate_wires, "instances", "wires", "Generating Wires", {}],
                "keypoint": [generate_keypoints, "instances", "keypoints", "Generating Keypoints", {}],
                "create": [binary_to_multi_seg_map, "instances", "segmentation_multi_class",
                           "Generating Multi-Class Segmentation Map", {"color_map": color_map}],
                "refine": [refine_polygons, "instances", "instances_refined", "Refining Polygons",
                           {"classes_discontinuous": classes_discontinuous}],
                "reset": [labelme_raw_image, "instances_refined", "instances_refined",
                          "Resetting Source Image", {}],
                "assign": [connector_type_assignment, "instances_refined", "instances_refined",
                           "Assigning Connector Types", {}],
                "pipeline": [pipeline, "instances", "instances_refined", "Processing",
                             {"classes_discontinuous": classes_discontinuous}]}

    if len(sys.argv) > 1 and sys.argv[1] in commands:
        fun, source, target, info, paras = commands[sys.argv[1]]
        # Optional positional overrides: drafter, then target, then source
        drafter = sys.argv[2] if len(sys.argv) > 2 else ""
        target = sys.argv[3] if len(sys.argv) > 3 else target
        source = sys.argv[4] if len(sys.argv) > 4 else source
        execute(fun, source, target, drafter, info, **paras)

    else:
        print(f"Invalid command. Must be one of: {list(commands.keys())}")
|
utils.py
ADDED
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""utils.py: Helper Functions to keep this Repo Standalone"""
|
2 |
+
|
3 |
+
# System Imports
|
4 |
+
from math import sin, cos, radians
|
5 |
+
|
6 |
+
__author__ = "Johannes Bayer"
|
7 |
+
__copyright__ = "Copyright 2023, DFKI"
|
8 |
+
__license__ = "CC"
|
9 |
+
__version__ = "0.0.1"
|
10 |
+
__email__ = "johannes.bayer@dfki.de"
|
11 |
+
__status__ = "Prototype"
|
12 |
+
|
13 |
+
|
14 |
+
def shift(p, q):
    """Shifts a Point by another point (component-wise addition in 2D)"""

    x, y = p[0], p[1]
    dx, dy = q[0], q[1]
    return [x + dx, y + dy]
|
18 |
+
|
19 |
+
|
20 |
+
def rotate(p, angle):
    """Rotates a Point by an Angle (radians) around the origin"""

    c, s = cos(angle), sin(angle)
    return [c * p[0] - s * p[1],
            s * p[0] + c * p[1]]
|
25 |
+
|
26 |
+
|
27 |
+
def scale(p, scale_x, scale_y):
    """Scales a Point in two Dimensions"""

    factors = (scale_x, scale_y)
    return [p[axis] * factors[axis] for axis in range(2)]
|
31 |
+
|
32 |
+
|
33 |
+
def transform(port, bb):
    """Transforms a Point from Unit Space (classes ports) to Global Bounding Box (image)"""

    width = bb["xmax"] - bb["xmin"]
    height = bb["ymax"] - bb["ymin"]
    center = [(bb["xmin"] + bb["xmax"]) / 2, (bb["ymin"] + bb["ymax"]) / 2]

    point = shift(port['position'], (-.5, -0.5))   # Normalize: [0.0, 1.0]^2 -> [-0.5, 0.5]^2
    point = scale(point, 1.0, -1.0)                # Flip vertical axis
    point = rotate(point, -radians(bb['rotation']))
    point = scale(point, width, height)            # Stretch to the box's extent
    point = shift(point, center)                   # Move to the box's center

    return {"name": port['name'], "position": point}
|
43 |
+
|
44 |
+
|
45 |
+
def associated_keypoints(instances, shape):
    """Returns the points with same group id as the provided polygon"""

    group = shape["group"]
    return [point for point in instances["points"]
            if point["class"] == "connector" and point["group"] == group]
|
50 |
+
|
51 |
+
|
52 |
+
def poly_to_bb():
    """Converts a Polygon to a Bounding Box — placeholder, not implemented yet (returns None)"""
    pass
|
54 |
+
|
55 |
+
def IoU(bb1, bb2):
    """Intersection over Union of two Bounding Boxes.

    Both boxes are dicts with 'xmin', 'ymin', 'xmax', 'ymax' keys (the bbox
    format used throughout this repo).  Returns a float in [0.0, 1.0];
    disjoint boxes yield 0.0.

    Fix: the original was a stub that always returned 1 (intersection and
    union both hard-coded to 1).
    """

    inter_width = min(bb1["xmax"], bb2["xmax"]) - max(bb1["xmin"], bb2["xmin"])
    inter_height = min(bb1["ymax"], bb2["ymax"]) - max(bb1["ymin"], bb2["ymin"])

    if inter_width <= 0 or inter_height <= 0:
        return 0.0  # no overlap at all

    intersection = inter_width * inter_height
    area1 = (bb1["xmax"] - bb1["xmin"]) * (bb1["ymax"] - bb1["ymin"])
    area2 = (bb2["xmax"] - bb2["xmin"]) * (bb2["ymax"] - bb2["ymin"])
    union = area1 + area2 - intersection

    return intersection/union
|
62 |
+
|
63 |
+
|
64 |
+
if __name__ == "__main__":
    # Script mode: transfer text/rotation annotations from a source Pascal VOC
    # file to a target one by greedily matching text box centers.
    # Usage: python utils.py source.xml target.xml

    import sys
    from loader import read_pascal_voc, write_pascal_voc
    import numpy as np
    import random

    if len(sys.argv) == 3:
        source = sys.argv[1]
        target = sys.argv[2]

        # Only text boxes take part in the matching
        ann1, ann2 = [[bbox for bbox in read_pascal_voc(path)['bboxes'] if bbox['class'] == "text"]
                      for path in [source, target]]

        if not len(ann1) == len(ann2):

            print(f"Warning: Unequal Text Count ({len(ann1)} vs. {len(ann2)}), cropping..")
            consensus = min(len(ann1), len(ann2))
            ann1 = ann1[:consensus]
            ann2 = ann2[:consensus]

        # Box centers of both annotation sets
        x1 = np.array([(bbox['xmin']+bbox['xmax'])/2 for bbox in ann1])
        y1 = np.array([(bbox['ymin']+bbox['ymax'])/2 for bbox in ann1])
        x2 = np.array([(bbox['xmin']+bbox['xmax'])/2 for bbox in ann2])
        y2 = np.array([(bbox['ymin']+bbox['ymax'])/2 for bbox in ann2])
        # Rescale source centers into the target's coordinate range
        x1 = ((x1-np.min(x1))/(np.max(x1)-np.min(x1))) * (np.max(x2)-np.min(x2)) + np.min(x2)
        y1 = ((y1-np.min(y1))/(np.max(y1)-np.min(y1))) * (np.max(y2)-np.min(y2)) + np.min(y2)
        # Pairwise distance matrix; the diagonal is the cost of the current assignment
        dist = np.sqrt((x1-x2[np.newaxis].T)**2 + (y1-y2[np.newaxis].T)**2)
        indices_1 = np.arange(len(ann1))
        indices_2 = np.arange(len(ann2))
        print(np.sum(np.diagonal(dist)))  # assignment cost before optimization

        # Randomized local search: repeatedly swap rows/columns to shrink the
        # diagonal sum (greedy pick half the time, random pick the other half)
        for i in range(10000):
            if random.random() > 0.5:
                max_dist_pos = np.argmax(np.diagonal(dist))  # Mitigate Largest Cost

            else:
                max_dist_pos = random.randint(0, len(ann1)-1)

            if np.min(dist[max_dist_pos, :]) < np.min(dist[:, max_dist_pos]):
                min_dist_pos = np.argmin(dist[max_dist_pos, :])
                dist[:, [max_dist_pos, min_dist_pos]] = dist[:, [min_dist_pos, max_dist_pos]]  # Swap Columns
                indices_1[[max_dist_pos, min_dist_pos]] = indices_1[[min_dist_pos, max_dist_pos]]

            else:
                min_dist_pos = np.argmin(dist[:, max_dist_pos])
                dist[[max_dist_pos, min_dist_pos], :] = dist[[min_dist_pos, max_dist_pos], :]  # Swap Rows
                indices_2[[max_dist_pos, min_dist_pos]] = indices_2[[min_dist_pos, max_dist_pos]]

        print(np.sum(np.diagonal(dist)))  # assignment cost after optimization

        wb = read_pascal_voc(target)  # full target annotation to be written back

        for i in range(len(ann1)):
            ann2[indices_2[i]]['text'] = ann1[indices_1[i]]['text']
            # Re-identify the matched box in the full annotation by exact coordinates
            bbox_match = [bbox for bbox in wb['bboxes']
                          if bbox['xmin'] == ann2[indices_2[i]]['xmin'] and
                          bbox['xmax'] == ann2[indices_2[i]]['xmax'] and
                          bbox['ymin'] == ann2[indices_2[i]]['ymin'] and
                          bbox['ymax'] == ann2[indices_2[i]]['ymax']]

            if len(bbox_match) == 1:
                bbox_match[0]['text'] = ann1[indices_1[i]]['text']
                bbox_match[0]['rotation'] = ann1[indices_1[i]]['rotation']

        write_pascal_voc(wb)

    else:
        print("Args: source target")
|