File size: 3,785 Bytes
9223079
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e15a186
 
 
9223079
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e15a186
 
 
9223079
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
from pathlib import Path
import argparse

from . import colmap_from_nvm
from ... import extract_features, match_features, triangulation
from ... import pairs_from_covisibility, pairs_from_retrieval, localize_sfm


# The nine capture conditions of the RobotCar Seasons dataset; each is a
# sub-directory of images/ and a prefix used for retrieval and localization.
CONDITIONS = (
    "dawn dusk night night-rain overcast-summer "
    "overcast-winter rain snow sun"
).split()


def generate_query_list(dataset, image_dir, path):
    """Write a query list with per-camera SIMPLE_RADIAL intrinsics.

    Args:
        dataset: Root path of the RobotCar dataset; must contain
            ``intrinsics/{left,right,rear}_intrinsics.txt``.
        image_dir: Directory whose ``*.jpg`` files (searched recursively)
            become the query images.
        path: Output text file; one line per query image:
            ``<relative path> SIMPLE_RADIAL <w> <h> <f> <cx> <cy> <k>``.

    Raises:
        ValueError: If an intrinsics file reports fx != fy, which is
            incompatible with the single-focal SIMPLE_RADIAL model.
    """
    # All RobotCar Seasons images share this fixed resolution.
    h, w = 1024, 1024
    intrinsics_filename = "intrinsics/{}_intrinsics.txt"
    cameras = {}
    for side in ["left", "right", "rear"]:
        with open(dataset / intrinsics_filename.format(side), "r") as f:
            fx = f.readline().split()[1]
            fy = f.readline().split()[1]
            cx = f.readline().split()[1]
            cy = f.readline().split()[1]
        # SIMPLE_RADIAL has a single focal length. Compare numerically
        # (not as raw strings, where "400" != "400.0") and raise rather
        # than assert, since asserts are stripped under `python -O`.
        if float(fx) != float(fy):
            raise ValueError(
                f"{side} camera has fx != fy ({fx} vs {fy}), "
                "incompatible with the SIMPLE_RADIAL model."
            )
        # COLMAP SIMPLE_RADIAL params: f, cx, cy, k; distortion k is set to 0.
        params = ["SIMPLE_RADIAL", w, h, fx, cx, cy, 0.0]
        cameras[side] = [str(p) for p in params]

    # Paths are made relative to the parent of image_dir so they keep the
    # condition-name prefix, e.g. "night/left/xxx.jpg".
    queries = sorted(image_dir.glob("**/*.jpg"))
    queries = [str(q.relative_to(image_dir.parents[0])) for q in queries]

    # The camera is identified by the immediate parent folder (left/right/rear).
    out = [[q] + cameras[Path(q).parent.name] for q in queries]
    with open(path, "w") as f:
        f.write("\n".join(map(" ".join, out)))


# Command-line interface: dataset/output locations and pair-list sizes.
parser = argparse.ArgumentParser()
parser.add_argument("--dataset", type=Path, default="datasets/robotcar",
                    help="Path to the dataset, default: %(default)s")
parser.add_argument("--outputs", type=Path, default="outputs/robotcar",
                    help="Path to the output directory, default: %(default)s")
parser.add_argument("--num_covis", type=int, default=20,
                    help="Number of image pairs for SfM, default: %(default)s")
parser.add_argument("--num_loc", type=int, default=20,
                    help="Number of image pairs for loc, default: %(default)s")
args = parser.parse_args()

# Setup the paths
dataset = args.dataset
images = dataset / "images/"

outputs = args.outputs  # where everything will be saved
outputs.mkdir(exist_ok=True, parents=True)
# Template path: the {condition} placeholder is filled once per condition.
query_list = outputs / "{condition}_queries_with_intrinsics.txt"
# SIFT model converted from the released NVM reconstruction.
sift_sfm = outputs / "sfm_sift"
# Reference model re-triangulated with SuperPoint+SuperGlue matches.
reference_sfm = outputs / "sfm_superpoint+superglue"
sfm_pairs = outputs / f"pairs-db-covis{args.num_covis}.txt"
loc_pairs = outputs / f"pairs-query-netvlad{args.num_loc}.txt"
# Final pose estimates for the benchmark.
results = (
    outputs / f"RobotCar_hloc_superpoint+superglue_netvlad{args.num_loc}.txt"
)

# pick one of the configurations for extraction and matching
retrieval_conf = extract_features.confs["netvlad"]
feature_conf = extract_features.confs["superpoint_aachen"]
matcher_conf = match_features.confs["superglue"]

# Write one "<image> <camera params>" query list per capture condition,
# filling the {condition} placeholder of the query_list template path.
for condition in CONDITIONS:
    generate_query_list(
        dataset, images / condition, str(query_list).format(condition=condition)
    )

# Extract local features for all images (database and queries).
features = extract_features.main(feature_conf, images, outputs, as_half=True)

# Convert the released NVM model (plus its database) into a COLMAP model.
colmap_from_nvm.main(
    dataset / "3D-models/all-merged/all.nvm",
    dataset / "3D-models/overcast-reference.db",
    sift_sfm,
)
# Pair database images by covisibility in the SIFT model, then match the
# learned features across those pairs.
pairs_from_covisibility.main(sift_sfm, sfm_pairs, num_matched=args.num_covis)
sfm_matches = match_features.main(
    matcher_conf, sfm_pairs, feature_conf["output"], outputs
)

# Build the reference model from the SIFT model and the new matches
# (presumably re-triangulating with the SIFT poses fixed — see
# triangulation module for the exact contract).
triangulation.main(
    reference_sfm, sift_sfm, images, sfm_pairs, features, sfm_matches
)

# Retrieve candidate database images for each query via NetVLAD descriptors.
global_descriptors = extract_features.main(retrieval_conf, images, outputs)
# TODO: do per location and per camera
pairs_from_retrieval.main(
    global_descriptors,
    loc_pairs,
    args.num_loc,
    query_prefix=CONDITIONS,
    db_model=reference_sfm,
)
loc_matches = match_features.main(
    matcher_conf, loc_pairs, feature_conf["output"], outputs
)

# Localize the queries of all conditions at once (the "*" wildcard stands in
# for the condition name) and write the pose estimates to `results`.
localize_sfm.main(
    reference_sfm,
    Path(str(query_list).format(condition="*")),
    loc_pairs,
    features,
    loc_matches,
    results,
    covisibility_clustering=False,
    prepend_camera_name=True,
)