Commit d6f0eb9

style: pre-commit fixes
Parent: e2a19a9

12 files changed: +28 / -32 lines

imcui/hloc/extract_features.py

Lines changed: 1 addition & 1 deletion
@@ -523,7 +523,7 @@ def main(
     overwrite: bool = False,
 ) -> Path:
     logger.info(
-        "Extracting local features with configuration:" f"\n{pprint.pformat(conf)}"
+        f"Extracting local features with configuration:\n{pprint.pformat(conf)}"
     )
 
     dataset = ImageDataset(image_dir, conf["preprocessing"], image_list)
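
This change only joins two implicitly concatenated string literals into a single f-string; the logged message is unchanged. A minimal sketch of the equivalence, using a made-up conf dict purely for illustration:

import pprint

conf = {"output": "feats-example"}  # placeholder config, not from the repo
# Adjacent string literals are concatenated at compile time, so the old
# two-literal form and the new single f-string produce the same message.
old = "Extracting local features with configuration:" f"\n{pprint.pformat(conf)}"
new = f"Extracting local features with configuration:\n{pprint.pformat(conf)}"
assert old == new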

imcui/hloc/extractors/dog.py

Lines changed: 2 additions & 2 deletions
@@ -40,7 +40,7 @@ def _init(self, conf):
         elif conf["descriptor"] == "hardnet":
             self.describe = kornia.feature.HardNet(pretrained=True)
         elif conf["descriptor"] not in ["sift", "rootsift"]:
-            raise ValueError(f'Unknown descriptor: {conf["descriptor"]}')
+            raise ValueError(f"Unknown descriptor: {conf['descriptor']}")
 
         self.sift = None  # lazily instantiated on the first image
         self.dummy_param = torch.nn.Parameter(torch.empty(0))
@@ -104,7 +104,7 @@ def _forward(self, data):
                         patches[start_idx:end_idx]
                     )
         else:
-            raise ValueError(f'Unknown descriptor: {self.conf["descriptor"]}')
+            raise ValueError(f"Unknown descriptor: {self.conf['descriptor']}")
 
         keypoints = torch.from_numpy(keypoints[:, :2])  # keep only x, y
         scales = torch.from_numpy(scales)
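
Both dog.py edits only swap which quote character sits inside versus outside the f-string; the raised error message is identical. A small sketch with a placeholder conf value:

conf = {"descriptor": "rootsift"}  # placeholder value for illustration
# Outer double quotes with a single-quoted key, or the reverse,
# format to exactly the same text.
assert f'Unknown descriptor: {conf["descriptor"]}' == f"Unknown descriptor: {conf['descriptor']}"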

imcui/hloc/extractors/example.py

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ class Example(BaseModel):
 
     def _init(self, conf):
         # set checkpoints paths if needed
-        model_path = example_path / "checkpoints" / f'{conf["model_name"]}'
+        model_path = example_path / "checkpoints" / f"{conf['model_name']}"
         if not model_path.exists():
             logger.info(f"No model found at {model_path}")

imcui/hloc/extractors/sift.py

Lines changed: 1 addition & 1 deletion
@@ -132,7 +132,7 @@ def _init(self, conf):
         else:
             backends = {"opencv", "pycolmap", "pycolmap_cpu", "pycolmap_cuda"}
             raise ValueError(
-                f"Unknown backend: {backend} not in " f"{{{','.join(backends)}}}."
+                f"Unknown backend: {backend} not in {{{','.join(backends)}}}."
             )
         logger.info("Load SIFT model done.")

imcui/hloc/match_dense.py

Lines changed: 5 additions & 5 deletions
@@ -858,7 +858,7 @@ def match_and_assign(
 
     # Invalidate matches that are far from selected bin by reassignment
     if max_kps is not None:
-        logger.info(f'Reassign matches with max_error={conf["max_error"]}.')
+        logger.info(f"Reassign matches with max_error={conf['max_error']}.")
         assign_matches(pairs, match_path, cpdict, max_error=conf["max_error"])
 
 
@@ -1092,7 +1092,7 @@ def main(
     overwrite: bool = False,
 ) -> Path:
     logger.info(
-        "Extracting semi-dense features with configuration:" f"\n{pprint.pformat(conf)}"
+        f"Extracting semi-dense features with configuration:\n{pprint.pformat(conf)}"
     )
 
     if features is None:
@@ -1102,17 +1102,17 @@ def main(
         features_q = features
         if matches is None:
             raise ValueError(
-                "Either provide both features and matches as Path" " or both as names."
+                "Either provide both features and matches as Path or both as names."
            )
     else:
         if export_dir is None:
             raise ValueError(
                 "Provide an export_dir if features and matches"
                 f" are not file paths: {features}, {matches}."
             )
-        features_q = Path(export_dir, f'{features}{conf["output"]}.h5')
+        features_q = Path(export_dir, f"{features}{conf['output']}.h5")
         if matches is None:
-            matches = Path(export_dir, f'{conf["output"]}_{pairs.stem}.h5')
+            matches = Path(export_dir, f"{conf['output']}_{pairs.stem}.h5")
 
     if features_ref is None:
         features_ref = []

imcui/hloc/match_features.py

Lines changed: 4 additions & 6 deletions
@@ -267,16 +267,16 @@ def main(
         features_q = features
         if matches is None:
             raise ValueError(
-                "Either provide both features and matches as Path" " or both as names."
+                "Either provide both features and matches as Path or both as names."
             )
     else:
         if export_dir is None:
             raise ValueError(
-                "Provide an export_dir if features is not" f" a file path: {features}."
+                f"Provide an export_dir if features is not a file path: {features}."
             )
         features_q = Path(export_dir, features + ".h5")
         if matches is None:
-            matches = Path(export_dir, f'{features}_{conf["output"]}_{pairs.stem}.h5')
+            matches = Path(export_dir, f"{features}_{conf['output']}_{pairs.stem}.h5")
 
     if features_ref is None:
         features_ref = features_q
@@ -317,9 +317,7 @@ def match_from_paths(
     feature_path_ref: Path,
     overwrite: bool = False,
 ) -> Path:
-    logger.info(
-        "Matching local features with configuration:" f"\n{pprint.pformat(conf)}"
-    )
+    logger.info(f"Matching local features with configuration:\n{pprint.pformat(conf)}")
 
     if not feature_path_q.exists():
         raise FileNotFoundError(f"Query feature file {feature_path_q}.")

imcui/hloc/pipelines/4Seasons/utils.py

Lines changed: 1 addition & 1 deletion
@@ -227,5 +227,5 @@ def evaluate_submission(submission_dir, relocs, ths=[0.1, 0.2, 0.5]):
         recall = [np.mean(error <= th) for th in ths]
         s = f"Relocalization evaluation {submission_dir.name}/{reloc.name}\n"
         s += " / ".join([f"{th:>7}m" for th in ths]) + "\n"
-        s += " / ".join([f"{100*r:>7.3f}%" for r in recall])
+        s += " / ".join([f"{100 * r:>7.3f}%" for r in recall])
         logger.info(s)
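
Here only the whitespace around * inside the replacement field changes; the formatted recall string is the same. A quick check with a placeholder recall value:

r = 0.8765  # placeholder recall value for illustration
# Whitespace inside the replacement-field expression does not change the result.
assert f"{100*r:>7.3f}%" == f"{100 * r:>7.3f}%" == " 87.650%"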

imcui/hloc/pipelines/7Scenes/create_gt_sfm.py

Lines changed: 3 additions & 3 deletions
@@ -111,9 +111,9 @@ def correct_sfm_with_gt_depth(sfm_path, depth_folder_path, output_path):
         new_p3D_ids[new_p3D_ids != -1] = sub_p3D_ids
         img = img._replace(point3D_ids=new_p3D_ids)
 
-        assert len(img.point3D_ids[img.point3D_ids != -1]) == len(
-            scs
-        ), f"{len(scs)}, {len(img.point3D_ids[img.point3D_ids != -1])}"
+        assert len(img.point3D_ids[img.point3D_ids != -1]) == len(scs), (
+            f"{len(scs)}, {len(img.point3D_ids[img.point3D_ids != -1])}"
+        )
         for i, p3did in enumerate(img.point3D_ids[img.point3D_ids != -1]):
             points3D[p3did] = points3D[p3did]._replace(xyz=scs[i])
         images[imgid] = img
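
This hunk re-wraps the assert so the comparison stays on one line and the message is parenthesized, the layout recent autoformatters tend to prefer for long asserts; the condition and message are unchanged. A small sketch with placeholder lists:

scs = [0.1, 0.2, 0.3]  # placeholder corrected points for illustration
point3D_ids = [7, 8, 9]  # placeholder ids for illustration
# Parenthesizing the message keeps this a plain `assert condition, message`;
# the condition itself is untouched, so behavior is identical.
assert len(point3D_ids) == len(scs), (
    f"{len(scs)}, {len(point3D_ids)}"
)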

imcui/hloc/pipelines/Cambridge/utils.py

Lines changed: 1 addition & 1 deletion
@@ -141,5 +141,5 @@ def evaluate(model, results, list_file=None, ext=".bin", only_localized=False):
     threshs_R = [1.0, 2.0, 3.0, 5.0, 2.0, 5.0, 10.0]
     for th_t, th_R in zip(threshs_t, threshs_R):
         ratio = np.mean((errors_t < th_t) & (errors_R < th_R))
-        out += f"\n\t{th_t*100:.0f}cm, {th_R:.0f}deg : {ratio*100:.2f}%"
+        out += f"\n\t{th_t * 100:.0f}cm, {th_R:.0f}deg : {ratio * 100:.2f}%"
     logger.info(out)

imcui/hloc/reconstruction.py

Lines changed: 1 addition & 3 deletions
@@ -92,9 +92,7 @@ def run_reconstruction(
             largest_index = index
             largest_num_images = num_images
     assert largest_index is not None
-    logger.info(
-        f"Largest model is #{largest_index} " f"with {largest_num_images} images."
-    )
+    logger.info(f"Largest model is #{largest_index} with {largest_num_images} images.")
 
     for filename in ["images.bin", "cameras.bin", "points3D.bin"]:
         if (sfm_dir / filename).exists():
