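"""Localization pipeline for the Cambridge Landmarks dataset.

Builds a SuperPoint+SuperGlue reference model on top of the ground-truth SIFT
model of each scene, localizes the query images against it, and evaluates the
estimated poses.

Example invocation (the module path is assumed from the relative imports):
    python -m hloc.pipelines.Cambridge.pipeline --scenes ShopFacade
"""
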
from pathlib import Path
import argparse

from .utils import create_query_list_with_intrinsics, scale_sfm_images, evaluate
from ... import extract_features, match_features, pairs_from_covisibility
from ... import triangulation, localize_sfm, pairs_from_retrieval, logger

SCENES = ["KingsCollege", "OldHospital", "ShopFacade", "StMarysChurch", "GreatCourt"]


def run_scene(images, gt_dir, outputs, results, num_covis, num_loc):
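    # Paths: the dataset provides a reference SfM model and a query list;
    # all derived files are written to the per-scene output directory.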
    ref_sfm_sift = gt_dir / "model_train"
    test_list = gt_dir / "list_query.txt"

    outputs.mkdir(exist_ok=True, parents=True)
    ref_sfm = outputs / "sfm_superpoint+superglue"
    ref_sfm_scaled = outputs / "sfm_sift_scaled"
    query_list = outputs / "query_list_with_intrinsics.txt"
    sfm_pairs = outputs / f"pairs-db-covis{num_covis}.txt"
    loc_pairs = outputs / f"pairs-query-netvlad{num_loc}.txt"
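
    # Feature extraction and matching configurations: SuperPoint keypoints,
    # SuperGlue matching, and NetVLAD global descriptors for retrieval.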
    feature_conf = {
        "output": "feats-superpoint-n4096-r1024",
        "model": {
            "name": "superpoint",
            "nms_radius": 3,
            "max_keypoints": 4096,
        },
        "preprocessing": {
            "grayscale": True,
            "resize_max": 1024,
        },
    }
    matcher_conf = match_features.confs["superglue"]
    retrieval_conf = extract_features.confs["netvlad"]
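
    # Write the query list with intrinsics (from the ground-truth cameras)
    # and collect the sequence prefixes of the query images.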
    create_query_list_with_intrinsics(
        gt_dir / "empty_all", query_list, test_list, ext=".txt", image_dir=images
    )
    with open(test_list, "r") as f:
        query_seqs = {q.split("/")[0] for q in f.read().rstrip().split("\n")}
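
    # Compute NetVLAD global descriptors and retrieve, for each query,
    # the top-k database images as localization pairs.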
    global_descriptors = extract_features.main(retrieval_conf, images, outputs)
    pairs_from_retrieval.main(
        global_descriptors,
        loc_pairs,
        num_loc,
        db_model=ref_sfm_sift,
        query_prefix=query_seqs,
    )
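
    # Extract local features, build database pairs from covisibility in the
    # reference model, match them, and re-triangulate the model with
    # SuperPoint+SuperGlue matches (after scaling the SIFT model to the images).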
    features = extract_features.main(feature_conf, images, outputs, as_half=True)
    pairs_from_covisibility.main(ref_sfm_sift, sfm_pairs, num_matched=num_covis)
    sfm_matches = match_features.main(
        matcher_conf, sfm_pairs, feature_conf["output"], outputs
    )
    scale_sfm_images(ref_sfm_sift, ref_sfm_scaled, images)
    triangulation.main(
        ref_sfm, ref_sfm_scaled, images, sfm_pairs, features, sfm_matches
    )
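
    # Match local features for the retrieved query-database pairs.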
    loc_matches = match_features.main(
        matcher_conf, loc_pairs, feature_conf["output"], outputs
    )
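
    # Estimate the query camera poses against the triangulated model.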
    localize_sfm.main(
        ref_sfm,
        query_list,
        loc_pairs,
        features,
        loc_matches,
        results,
        covisibility_clustering=False,
        prepend_camera_name=True,
    )


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--scenes", default=SCENES, choices=SCENES, nargs="+")
    parser.add_argument("--overwrite", action="store_true")
    parser.add_argument(
        "--dataset",
        type=Path,
        default="datasets/cambridge",
        help="Path to the dataset, default: %(default)s",
    )
    parser.add_argument(
        "--outputs",
        type=Path,
        default="outputs/cambridge",
        help="Path to the output directory, default: %(default)s",
    )
    parser.add_argument(
        "--num_covis",
        type=int,
        default=20,
        help="Number of image pairs for SfM, default: %(default)s",
    )
    parser.add_argument(
        "--num_loc",
        type=int,
        default=10,
        help="Number of image pairs for loc, default: %(default)s",
    )
    args = parser.parse_args()

    gt_dirs = args.dataset / "CambridgeLandmarks_Colmap_Retriangulated_1024px"

    all_results = {}
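    # Run each scene, skipping scenes whose results already exist unless --overwrite is set.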
    for scene in args.scenes:
        logger.info(f'Working on scene "{scene}".')
        results = args.outputs / scene / "results.txt"
        if args.overwrite or not results.exists():
            run_scene(
                args.dataset / scene,
                gt_dirs / scene,
                args.outputs / scene,
                results,
                args.num_covis,
                args.num_loc,
            )
        all_results[scene] = results
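
    # Evaluate the estimated query poses against the ground truth of each scene.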
    for scene in args.scenes:
        logger.info(f'Evaluate scene "{scene}".')
        evaluate(
            gt_dirs / scene / "empty_all",
            all_results[scene],
            gt_dirs / scene / "list_query.txt",
            ext=".txt",
        )