diff --git a/Contributors.md b/Contributors.md
index cddfe747..d474b188 100644
--- a/Contributors.md
+++ b/Contributors.md
@@ -25,4 +25,3 @@ You can request contact information through the project members above, or tag th
| Andy Smith ([@andrewphilipsmith](https://github.com/andrewphilipsmith)) | Research Data Scientist (Turing) | 2022 - 2023 |
| Daniel van Strien ([@davanstrien ](https://github.com/davanstrien)) | Data Librarian (British Library) | 2019-2021 |
| Olivia Vane ([@ov212 ](https://github.com/ov212)) | Research Software Engineer (British Library) | 2019-2021 |
-
diff --git a/mapreader/_version.py b/mapreader/_version.py
index 19b5cac7..40cc6dd5 100644
--- a/mapreader/_version.py
+++ b/mapreader/_version.py
@@ -17,7 +17,7 @@
import re
import subprocess
import sys
-from typing import Callable, Dict
+from typing import Callable
def get_keywords():
@@ -55,8 +55,8 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
+LONG_VERSION_PY: dict[str, str] = {}
+HANDLERS: dict[str, dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
@@ -107,7 +107,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
- print("unable to find command, tried %s" % (commands,))
+ print(f"unable to find command, tried {commands}")
return None, None
stdout = process.communicate()[0].strip().decode()
if process.returncode != 0:
@@ -142,8 +142,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
if verbose:
print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
+ "Tried directories {} but none started with prefix {}".format(str(rootdirs), parentdir_prefix)
)
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -157,7 +156,7 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- with open(versionfile_abs, "r") as fobj:
+ with open(versionfile_abs) as fobj:
for line in fobj:
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
@@ -359,7 +358,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
+ pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
full_tag,
tag_prefix,
)
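
These hunks swap `typing.Dict` for the built-in generics of PEP 585 and `%`-formatting for f-strings. A minimal sketch of the typing pattern, assuming Python 3.9+ (on 3.7/3.8, `from __future__ import annotations` turns every annotation, including module-level ones like these, into strings, so the same spellings still parse):

    from typing import Callable

    # Built-in generics (PEP 585): no typing.Dict import required.
    LONG_VERSION_PY: dict[str, str] = {}
    HANDLERS: dict[str, dict[str, Callable]] = {}
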
diff --git a/mapreader/annotate/utils.py b/mapreader/annotate/utils.py
index d14ea4ff..674c78ff 100644
--- a/mapreader/annotate/utils.py
+++ b/mapreader/annotate/utils.py
@@ -1,11 +1,9 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import os
import random
import sys
-from typing import Dict, List, Optional, Tuple, Union
import matplotlib.pyplot as plt
import numpy as np
@@ -30,7 +28,7 @@
from mapreader import load_patches, loader
-def display_record(record: Tuple[str, str, str, int, int]) -> None:
+def display_record(record: tuple[str, str, str, int, int]) -> None:
"""
Displays an image and optionally, a context image with a patch border.
@@ -175,13 +173,13 @@ def display_record(record: Tuple[str, str, str, int, int]) -> None:
def prepare_data(
df: pd.DataFrame,
- col_names: Optional[List[str]] = None,
- annotation_set: Optional[str] = "001",
- label_col_name: Optional[str] = "label",
- redo: Optional[bool] = False,
- random_state: Optional[Union[int, str]] = "random",
- num_samples: Optional[int] = 100,
-) -> List[List[Union[str, int]]]:
+ col_names: list[str] | None = None,
+ annotation_set: str | None = "001",
+ label_col_name: str | None = "label",
+ redo: bool | None = False,
+ random_state: int | str | None = "random",
+ num_samples: int | None = 100,
+) -> list[list[str | int]]:
"""
Prepare data for image annotation by selecting a subset of images from a
DataFrame.
@@ -265,12 +263,12 @@ def prepare_data(
def annotation_interface(
- data: List,
- list_labels: List,
- list_colors: Optional[List[str]] = None,
- annotation_set: Optional[str] = "001",
- method: Optional[str] = "ipyannotate",
- list_shortcuts: Optional[List[str]] = None,
+ data: list,
+ list_labels: list,
+ list_colors: list[str] | None = None,
+ annotation_set: str | None = "001",
+ method: str | None = "ipyannotate",
+ list_shortcuts: list[str] | None = None,
) -> Annotation:
"""
Create an annotation interface for a list of patches with corresponding
@@ -379,25 +377,25 @@ def prepare_annotation(
userID: str,
task: str,
annotation_tasks_file: str,
- custom_labels: List[str] = None,
- annotation_set: Optional[str] = "001",
- redo_annotation: Optional[bool] = False,
- patch_paths: Optional[Union[str, bool]] = False,
- parent_paths: Optional[str] = False,
- tree_level: Optional[str] = "patch",
- sortby: Optional[str] = None,
- min_alpha_channel: Optional[float] = None,
- min_mean_pixel: Optional[float] = None,
- max_mean_pixel: Optional[float] = None,
- min_std_pixel: Optional[float] = None,
- max_std_pixel: Optional[float] = None,
- context_image: Optional[bool] = False,
- xoffset: Optional[int] = 500,
- yoffset: Optional[int] = 500,
- urlmain: Optional[str] = "https://maps.nls.uk/view/",
- random_state: Optional[Union[str, int]] = "random",
- list_shortcuts: Optional[List[tuple]] = None,
-) -> Dict:
+ custom_labels: list[str] | None = None,
+ annotation_set: str | None = "001",
+ redo_annotation: bool | None = False,
+ patch_paths: str | bool | None = False,
+ parent_paths: str | None = False,
+ tree_level: str | None = "patch",
+ sortby: str | None = None,
+ min_alpha_channel: float | None = None,
+ min_mean_pixel: float | None = None,
+ max_mean_pixel: float | None = None,
+ min_std_pixel: float | None = None,
+ max_std_pixel: float | None = None,
+ context_image: bool | None = False,
+ xoffset: int | None = 500,
+ yoffset: int | None = 500,
+ urlmain: str | None = "https://maps.nls.uk/view/",
+ random_state: str | int | None = "random",
+ list_shortcuts: list[tuple] | None = None,
+) -> dict:
"""Prepare image data for annotation and launch the annotation interface.
Parameters
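
A note on parameters like `custom_labels` above: when the default is `None`, PEP 484 requires the `| None` to be spelled out, since type checkers no longer treat `list[str] = None` as implicitly Optional. A hedged sketch with a hypothetical function, not the project's:

    from __future__ import annotations

    # Hypothetical signature: "| None" is explicit because the default is None.
    def prepare(custom_labels: list[str] | None = None) -> list[str]:
        return custom_labels or []
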
diff --git a/mapreader/classify/classifier.py b/mapreader/classify/classifier.py
index 35dce0af..0543973e 100644
--- a/mapreader/classify/classifier.py
+++ b/mapreader/classify/classifier.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import copy
@@ -9,7 +8,7 @@
import sys
import time
from datetime import datetime
-from typing import Any, Dict, Hashable, Iterable, List, Optional, Tuple, Union
+from typing import Any, Hashable, Iterable
import joblib
import matplotlib.pyplot as plt
@@ -33,14 +32,14 @@
class ClassifierContainer:
def __init__(
self,
- model: Union[str, nn.Module, None],
- dataloaders: Union[Dict[str, DataLoader], None],
- labels_map: Union[Dict[int, str], None],
- device: Optional[str] = "default",
- input_size: Optional[int] = (224, 224),
- is_inception: Optional[bool] = False,
- load_path: Optional[str] = None,
- force_device: Optional[bool] = False,
+ model: str | nn.Module | None,
+ dataloaders: dict[str, DataLoader] | None,
+ labels_map: dict[int, str] | None,
+ device: str | None = "default",
+ input_size: int | tuple | None = (224, 224),
+ is_inception: bool | None = False,
+ load_path: str | None = None,
+ force_device: bool | None = False,
**kwargs,
):
"""
@@ -181,8 +180,8 @@ def generate_layerwise_lrs(
self,
min_lr: float,
max_lr: float,
- spacing: Optional[str] = "linspace",
- ) -> List[Dict]:
+ spacing: str | None = "linspace",
+ ) -> list[dict]:
"""
Calculates layer-wise learning rates for a given set of model
parameters.
@@ -224,11 +223,11 @@ def generate_layerwise_lrs(
def initialize_optimizer(
self,
- optim_type: Optional[str] = "adam",
- params2optimize: Optional[Union[str, Iterable]] = "infer",
- optim_param_dict: Optional[dict] = None,
- add_optim: Optional[bool] = True,
- ) -> Union[torch.optim.Optimizer, None]:
+ optim_type: str | None = "adam",
+ params2optimize: str | Iterable | None = "infer",
+ optim_param_dict: dict | None = None,
+ add_optim: bool | None = True,
+ ) -> torch.optim.Optimizer | None:
"""
Initializes an optimizer for the model and adds it to the classifier
object.
@@ -309,10 +308,10 @@ def add_optimizer(self, optimizer: torch.optim.Optimizer) -> None:
def initialize_scheduler(
self,
- scheduler_type: Optional[str] = "steplr",
- scheduler_param_dict: Optional[dict] = None,
- add_scheduler: Optional[bool] = True,
- ) -> Union[torch.optim.lr_scheduler._LRScheduler, None]:
+ scheduler_type: str | None = "steplr",
+ scheduler_param_dict: dict | None = None,
+ add_scheduler: bool | None = True,
+ ) -> torch.optim.lr_scheduler._LRScheduler | None:
"""
Initializes a learning rate scheduler for the optimizer and adds it to
the classifier object.
@@ -394,7 +393,7 @@ def add_scheduler(self, scheduler: torch.optim.lr_scheduler._LRScheduler) -> Non
self.scheduler = scheduler
def add_criterion(
- self, criterion: Optional[Union[str, nn.modules.loss._Loss]] = "cross entropy"
+ self, criterion: str | nn.modules.loss._Loss | None = "cross entropy"
) -> None:
"""
Add a loss criterion to the classifier object.
@@ -444,8 +443,8 @@ def add_criterion(
def model_summary(
self,
- input_size: Optional[Union[tuple, list]] = None,
- trainable_col: Optional[bool] = False,
+ input_size: tuple | list | None = None,
+ trainable_col: bool | None = False,
**kwargs,
) -> None:
"""
@@ -498,7 +497,7 @@ def model_summary(
)
print(model_summary)
- def freeze_layers(self, layers_to_freeze: Optional[List[str]] = None) -> None:
+ def freeze_layers(self, layers_to_freeze: list[str] | None = None) -> None:
"""
Freezes the specified layers in the neural network by setting
``requires_grad`` attribute to False for their parameters.
@@ -532,7 +531,7 @@ def freeze_layers(self, layers_to_freeze: Optional[List[str]] = None) -> None:
elif (layer[-1] != "*") and (layer == name):
param.requires_grad = False
- def unfreeze_layers(self, layers_to_unfreeze: Optional[List[str]] = None):
+ def unfreeze_layers(self, layers_to_unfreeze: list[str] | None = None):
"""
Unfreezes the specified layers in the neural network by setting
``requires_grad`` attribute to True for their parameters.
@@ -567,7 +566,7 @@ def unfreeze_layers(self, layers_to_unfreeze: Optional[List[str]] = None):
param.requires_grad = True
def only_keep_layers(
- self, only_keep_layers_list: Optional[List[str]] = None
+ self, only_keep_layers_list: list[str] | None = None
) -> None:
"""
Only keep the specified layers (``only_keep_layers_list``) for
@@ -595,9 +594,9 @@ def only_keep_layers(
def inference(
self,
- set_name: Optional[str] = "infer",
- verbose: Optional[bool] = False,
- print_info_batch_freq: Optional[int] = 5,
+ set_name: str | None = "infer",
+ verbose: bool | None = False,
+ print_info_batch_freq: int | None = 5,
):
"""
Run inference on a specified dataset (``set_name``).
@@ -656,14 +655,14 @@ def train_component_summary(self) -> None:
def train(
self,
- phases: Optional[List[str]] = None,
- num_epochs: Optional[int] = 25,
- save_model_dir: Optional[Union[str, None]] = "models",
- verbose: Optional[bool] = False,
- tensorboard_path: Optional[Union[str, None]] = None,
- tmp_file_save_freq: Optional[Union[int, None]] = 2,
- remove_after_load: Optional[bool] = True,
- print_info_batch_freq: Optional[Union[int, None]] = 5,
+ phases: list[str] | None = None,
+ num_epochs: int | None = 25,
+ save_model_dir: str | None = "models",
+ verbose: bool | None = False,
+ tensorboard_path: str | None = None,
+ tmp_file_save_freq: int | None = 2,
+ remove_after_load: bool | None = True,
+ print_info_batch_freq: int | None = 5,
) -> None:
"""
Train the model on the specified phases for a given number of epochs.
@@ -737,13 +736,13 @@ def train(
def train_core(
self,
- phases: Optional[List[str]] = None,
- num_epochs: Optional[int] = 25,
- save_model_dir: Optional[Union[str, None]] = "models",
- verbose: Optional[bool] = False,
- tensorboard_path: Optional[Union[str, None]] = None,
- tmp_file_save_freq: Optional[Union[int, None]] = 2,
- print_info_batch_freq: Optional[Union[int, None]] = 5,
+ phases: list[str] | None = None,
+ num_epochs: int | None = 25,
+ save_model_dir: str | None = "models",
+ verbose: bool | None = False,
+ tensorboard_path: str | None = None,
+ tmp_file_save_freq: int | None = 2,
+ print_info_batch_freq: int | None = 5,
) -> None:
"""
Trains/fine-tunes a classifier for the specified number of epochs on
@@ -1057,7 +1056,7 @@ def calculate_add_metrics(
y_pred,
y_score,
phase: str,
- epoch: Optional[int] = -1,
+ epoch: int | None = -1,
tboard_writer=None,
) -> None:
"""
@@ -1209,7 +1208,7 @@ def _gen_epoch_msg(self, phase: str, epoch_msg: str) -> str:
return epoch_msg
def _add_metrics(
- self, k: Hashable, v: Union[int, float, complex, np.number]
+ self, k: Hashable, v: int | float | complex | np.number
) -> None:
"""
Adds a metric value to a dictionary of metrics tracked during training.
@@ -1240,17 +1239,17 @@ def _add_metrics(
def plot_metric(
self,
- y_axis: List[str],
+ y_axis: list[str],
y_label: str,
- legends: List[str],
- x_axis: Optional[str] = "epoch",
- x_label: Optional[str] = "epoch",
- colors: Optional[List[str]] = 5 * ["k", "tab:red"],
- styles: Optional[List[str]] = 10 * ["-"],
- markers: Optional[List[str]] = 10 * ["o"],
- figsize: Optional[Tuple[int, int]] = (10, 5),
- plt_yrange: Optional[Tuple[float, float]] = None,
- plt_xrange: Optional[Tuple[float, float]] = None,
+ legends: list[str],
+ x_axis: str | None = "epoch",
+ x_label: str | None = "epoch",
+ colors: list[str] | None = 5 * ["k", "tab:red"],
+ styles: list[str] | None = 10 * ["-"],
+ markers: list[str] | None = 10 * ["o"],
+ figsize: tuple[int, int] | None = (10, 5),
+ plt_yrange: tuple[float, float] | None = None,
+ plt_xrange: tuple[float, float] | None = None,
):
"""
Plot the metrics of the classifier object.
@@ -1360,9 +1359,9 @@ def plot_metric(
def _initialize_model(
self,
model_name: str,
- pretrained: Optional[bool] = True,
- last_layer_num_classes: Optional[Union[str, int]] = "default",
- ) -> Tuple[Any, int, bool]:
+ pretrained: bool | None = True,
+ last_layer_num_classes: str | int | None = "default",
+ ) -> tuple[Any, int, bool]:
"""
Initializes a PyTorch model with the option to change the number of
classes in the last layer (``last_layer_num_classes``).
@@ -1458,10 +1457,10 @@ def _initialize_model(
def show_sample(
self,
- set_name: Optional[str] = "train",
- batch_number: Optional[int] = 1,
- print_batch_info: Optional[bool] = True,
- figsize: Optional[Tuple[int, int]] = (15, 10),
+ set_name: str | None = "train",
+ batch_number: int | None = 1,
+ print_batch_info: bool | None = True,
+ figsize: tuple[int, int] | None = (15, 10),
):
"""
Displays a sample of training or validation data in a grid format with
@@ -1530,7 +1529,7 @@ def show_sample(
figsize=figsize,
)
- def print_batch_info(self, set_name: Optional[str] = "train") -> None:
+ def print_batch_info(self, set_name: str | None = "train") -> None:
"""
Print information about a dataset's batches, samples, and batch-size.
@@ -1563,8 +1562,8 @@ def print_batch_info(self, set_name: Optional[str] = "train") -> None:
@staticmethod
def _imshow(
inp: np.ndarray,
- title: Optional[str] = None,
- figsize: Optional[Tuple[int, int]] = (15, 10),
+ title: str | None = None,
+ figsize: tuple[int, int] | None = (15, 10),
) -> None:
"""
Displays an image of a tensor using matplotlib.pyplot.
@@ -1603,11 +1602,11 @@ def _imshow(
def show_inference_sample_results(
self,
label: str,
- num_samples: Optional[int] = 6,
- set_name: Optional[str] = "test",
- min_conf: Optional[Union[None, float]] = None,
- max_conf: Optional[Union[None, float]] = None,
- figsize: Optional[Tuple[int, int]] = (15, 15),
+ num_samples: int | None = 6,
+ set_name: str | None = "test",
+ min_conf: float | None = None,
+ max_conf: float | None = None,
+ figsize: tuple[int, int] | None = (15, 15),
) -> None:
"""
Shows a sample of the results of the inference.
@@ -1698,8 +1697,8 @@ def show_inference_sample_results(
def save(
self,
- save_path: Optional[str] = "default.obj",
- force: Optional[bool] = False,
+ save_path: str | None = "default.obj",
+ force: bool | None = False,
) -> None:
"""
Save the object to a file.
@@ -1757,10 +1756,10 @@ def load_dataset(
self,
dataset: PatchDataset,
set_name: str,
- batch_size: Optional[int] = 16,
- sampler: Optional[Union[Sampler, None]] = None,
- shuffle: Optional[bool] = False,
- num_workers: Optional[int] = 0,
+ batch_size: int | None = 16,
+ sampler: Sampler | None = None,
+ shuffle: bool | None = False,
+ num_workers: int | None = 0,
**kwargs,
) -> None:
"""Creates a DataLoader from a PatchDataset and adds it to the ``dataloaders`` dictionary.
@@ -1797,7 +1796,7 @@ def load_dataset(
def load(
self,
load_path: str,
- force_device: Optional[bool] = False,
+ force_device: bool | None = False,
) -> None:
"""
This function loads the state of a class instance from a saved file
@@ -1928,9 +1927,9 @@ def cprint(self, type_info: str, bc_color: str, text: str) -> None:
def update_progress(
self,
- progress: Union[float, int],
- text: Optional[str] = "",
- barLength: Optional[int] = 30,
+ progress: float | int,
+ text: str | None = "",
+ barLength: int | None = 30,
) -> None:
"""Update the progress bar.
diff --git a/mapreader/classify/classifier_context.py b/mapreader/classify/classifier_context.py
index ef7814af..83ab0ac4 100644
--- a/mapreader/classify/classifier_context.py
+++ b/mapreader/classify/classifier_context.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import copy
@@ -7,8 +6,6 @@
import time
# from tqdm.autonotebook import tqdm
-from typing import Dict, List, Optional, Tuple, Union
-
import matplotlib.pyplot as plt
import numpy as np
import torch
@@ -20,14 +17,14 @@
class ClassifierContextContainer(ClassifierContainer):
def train(
self,
- phases: Optional[List[str]] = None,
- num_epochs: Optional[int] = 25,
- save_model_dir: Optional[Union[str, None]] = "models",
- verbosity_level: Optional[int] = 1,
- tensorboard_path: Optional[Union[str, None]] = None,
- tmp_file_save_freq: Optional[Union[int, None]] = 2,
- remove_after_load: Optional[bool] = True,
- print_info_batch_freq: Optional[Union[int, None]] = 5,
+ phases: list[str] | None = None,
+ num_epochs: int | None = 25,
+ save_model_dir: str | None = "models",
+ verbosity_level: int | None = 1,
+ tensorboard_path: str | None = None,
+ tmp_file_save_freq: int | None = 2,
+ remove_after_load: bool | None = True,
+ print_info_batch_freq: int | None = 5,
) -> None:
"""
Train the model on the specified phases for a given number of epochs.
@@ -99,13 +96,13 @@ def train(
def train_core(
self,
- phases: Optional[List[str]] = None,
- num_epochs: Optional[int] = 25,
- save_model_dir: Optional[Union[str, None]] = "models",
- verbosity_level: Optional[int] = 1,
- tensorboard_path: Optional[Union[str, None]] = None,
- tmp_file_save_freq: Optional[Union[int, None]] = 2,
- print_info_batch_freq: Optional[Union[int, None]] = 5,
+ phases: list[str] | None = None,
+ num_epochs: int | None = 25,
+ save_model_dir: str | None = "models",
+ verbosity_level: int | None = 1,
+ tensorboard_path: str | None = None,
+ tmp_file_save_freq: int | None = 2,
+ print_info_batch_freq: int | None = 5,
) -> None:
"""
Trains/fine-tunes a classifier for the specified number of epochs on
@@ -423,10 +420,10 @@ def train_core(
def show_sample(
self,
- set_name: Optional[str] = "train",
- batch_number: Optional[int] = 1,
- print_batch_info: Optional[bool] = True,
- figsize: Optional[Tuple[int, int]] = (15, 10),
+ set_name: str | None = "train",
+ batch_number: int | None = 1,
+ print_batch_info: bool | None = True,
+ figsize: tuple[int, int] | None = (15, 10),
) -> None:
"""
Displays a sample of training or validation data in a grid format with
@@ -506,9 +503,9 @@ def generate_layerwise_lrs(
self,
min_lr: float,
max_lr: float,
- spacing: Optional[str] = "linspace",
- sep_group_names: List[str] = None,
- ) -> List[Dict]:
+ spacing: str | None = "linspace",
+ sep_group_names: list[str] | None = None,
+ ) -> list[dict]:
"""
Calculates layer-wise learning rates for a given set of model
parameters.
@@ -570,11 +567,11 @@ def generate_layerwise_lrs(
def show_inference_sample_results(
self,
label: str,
- num_samples: Optional[int] = 6,
- set_name: Optional[str] = "train",
- min_conf: Optional[Union[None, float]] = None,
- max_conf: Optional[Union[None, float]] = None,
- figsize: Optional[Tuple[int, int]] = (15, 15),
+ num_samples: int | None = 6,
+ set_name: str | None = "train",
+ min_conf: float | None = None,
+ max_conf: float | None = None,
+ figsize: tuple[int, int] | None = (15, 15),
) -> None:
"""
Shows a sample of the results of the inference.
diff --git a/mapreader/classify/custom_models.py b/mapreader/classify/custom_models.py
index 1996937d..69072594 100644
--- a/mapreader/classify/custom_models.py
+++ b/mapreader/classify/custom_models.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import torch
@@ -30,7 +29,7 @@ def __init__(
fc_layer : nn.Linear
The fully connected layer at the end of the model.
"""
- super(twoParallelModels, self).__init__()
+ super().__init__()
self.features1 = feature1
self.features2 = feature2
self.fc_layer = fc_layer
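
The `super()` change here uses Python 3's zero-argument form, which resolves the class and instance from the enclosing method and is equivalent to the explicit two-argument call in this single-inheritance case. A minimal skeleton, assuming only that the class subclasses `nn.Module`:

    import torch.nn as nn

    class twoParallelModels(nn.Module):
        def __init__(self):
            # Equivalent to super(twoParallelModels, self).__init__()
            super().__init__()
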
diff --git a/mapreader/classify/datasets.py b/mapreader/classify/datasets.py
index d0f2a334..3fee2e4d 100644
--- a/mapreader/classify/datasets.py
+++ b/mapreader/classify/datasets.py
@@ -1,9 +1,8 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import os
-from typing import Callable, Optional, Tuple, Union
+from typing import Callable
import matplotlib.pyplot as plt
import numpy as np
@@ -28,13 +27,13 @@
class PatchDataset(Dataset):
def __init__(
self,
- patch_df: Union[pd.DataFrame, str],
- transform: Union[str, transforms.Compose, Callable],
+ patch_df: pd.DataFrame | str,
+ transform: str | transforms.Compose | Callable,
delimiter: str = ",",
- patch_paths_col: Optional[str] = "image_path",
- label_col: Optional[str] = None,
- label_index_col: Optional[str] = None,
- image_mode: Optional[str] = "RGB",
+ patch_paths_col: str | None = "image_path",
+ label_col: str | None = None,
+ label_index_col: str | None = None,
+ image_mode: str | None = "RGB",
):
"""A PyTorch Dataset class for loading image patches from a DataFrame.
@@ -165,8 +164,8 @@ def __len__(self) -> int:
return len(self.patch_df)
def __getitem__(
- self, idx: Union[int, torch.Tensor]
- ) -> Tuple[torch.Tensor, str, int]:
+ self, idx: int | torch.Tensor
+ ) -> tuple[torch.Tensor, str, int]:
"""
Return the image, its label and the index of that label at the given index in the dataset.
@@ -211,7 +210,7 @@ def __getitem__(
return img, image_label, image_label_index
- def return_orig_image(self, idx: Union[int, torch.Tensor]) -> Image:
+ def return_orig_image(self, idx: int | torch.Tensor) -> Image:
"""
Return the original image associated with the given index.
@@ -251,8 +250,8 @@ def return_orig_image(self, idx: Union[int, torch.Tensor]) -> Image:
def _default_transform(
self,
- t_type: Optional[str] = "train",
- resize: Optional[Union[int, Tuple[int, int]]] = (224, 224),
+ t_type: str | None = "train",
+ resize: int | tuple[int, int] | None = (224, 224),
) -> transforms.Compose:
"""
Returns the default image transformations for the train, test and validation sets as a transforms.Compose.
@@ -330,20 +329,20 @@ def _get_label_index(self, label: str) -> int:
class PatchContextDataset(PatchDataset):
def __init__(
self,
- patch_df: Union[pd.DataFrame, str],
+ patch_df: pd.DataFrame | str,
transform1: str,
transform2: str,
delimiter: str = ",",
- patch_paths_col: Optional[str] = "image_path",
- label_col: Optional[str] = None,
- label_index_col: Optional[str] = None,
- image_mode: Optional[str] = "RGB",
- context_save_path: Optional[str] = "./maps/maps_context",
- create_context: Optional[bool] = False,
- parent_path: Optional[str] = "./maps",
- x_offset: Optional[float] = 1.0,
- y_offset: Optional[float] = 1.0,
- slice_method: Optional[str] = "scale",
+ patch_paths_col: str | None = "image_path",
+ label_col: str | None = None,
+ label_index_col: str | None = None,
+ image_mode: str | None = "RGB",
+ context_save_path: str | None = "./maps/maps_context",
+ create_context: bool | None = False,
+ parent_path: str | None = "./maps",
+ x_offset: float | None = 1.0,
+ y_offset: float | None = 1.0,
+ slice_method: str | None = "scale",
):
"""
A PyTorch Dataset class for loading contextual information about image
@@ -504,12 +503,12 @@ def __init__(
def save_parents(
self,
- processors: Optional[int] = 10,
- sleep_time: Optional[float] = 0.001,
- use_parhugin: Optional[bool] = True,
- parent_delimiter: Optional[str] = "#",
- loc_delimiter: Optional[str] = "-",
- overwrite: Optional[bool] = False,
+ processors: int | None = 10,
+ sleep_time: float | None = 0.001,
+ use_parhugin: bool | None = True,
+ parent_delimiter: str | None = "#",
+ loc_delimiter: str | None = "-",
+ overwrite: bool | None = False,
) -> None:
"""
Save parent patches for all patches in the patch_df.
@@ -570,10 +569,10 @@ def save_parents(
def save_parents_idx(
self,
idx: int,
- parent_delimiter: Optional[str] = "#",
- loc_delimiter: Optional[str] = "-",
- overwrite: Optional[bool] = False,
- return_image: Optional[bool] = False,
+ parent_delimiter: str | None = "#",
+ loc_delimiter: str | None = "-",
+ overwrite: bool | None = False,
+ return_image: bool | None = False,
) -> None:
"""
Save the parents of a specific patch to the specified location.
@@ -719,8 +718,8 @@ def plot_sample(self, idx: int) -> None:
plt.show()
def __getitem__(
- self, idx: Union[int, torch.Tensor]
- ) -> Tuple[torch.Tensor, torch.Tensor, str, int]:
+ self, idx: int | torch.Tensor
+ ) -> tuple[torch.Tensor, torch.Tensor, str, int]:
"""
Retrieves the patch image, the context image and the label at the
given index in the dataset (``idx``).
diff --git a/mapreader/classify/load_annotations.py b/mapreader/classify/load_annotations.py
index d01a1e90..51590a24 100644
--- a/mapreader/classify/load_annotations.py
+++ b/mapreader/classify/load_annotations.py
@@ -1,10 +1,9 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import os
from decimal import Decimal
-from typing import Callable, Optional, Union
+from typing import Callable
import matplotlib.pyplot as plt
import numpy as np
@@ -32,17 +31,17 @@ def __init__(self):
def load(
self,
- annotations: Union[str, pd.DataFrame],
- delimiter: Optional[str] = ",",
- images_dir: Optional[str] = None,
- remove_broken: Optional[bool] = True,
- ignore_broken: Optional[bool] = False,
- id_col: Optional[str] = "image_id",
- patch_paths_col: Optional[str] = "image_path",
- label_col: Optional[str] = "label",
- append: Optional[bool] = True,
- scramble_frame: Optional[bool] = False,
- reset_index: Optional[bool] = False,
+ annotations: str | pd.DataFrame,
+ delimiter: str | None = ",",
+ images_dir: str | None = None,
+ remove_broken: bool | None = True,
+ ignore_broken: bool | None = False,
+ id_col: str | None = "image_id",
+ patch_paths_col: str | None = "image_path",
+ label_col: str | None = "label",
+ append: bool | None = True,
+ scramble_frame: bool | None = False,
+ reset_index: bool | None = False,
):
"""Loads annotations from a csv file or dataframe and can be used to set the ``id_col``, ``patch_paths_col`` and ``label_col`` attributes.
@@ -150,9 +149,9 @@ def load(
def _load_annotations_csv(
self,
annotations: str,
- delimiter: Optional[str] = ",",
- scramble_frame: Optional[bool] = False,
- reset_index: Optional[bool] = False,
+ delimiter: str | None = ",",
+ scramble_frame: bool | None = False,
+ reset_index: bool | None = False,
) -> pd.DataFrame:
"""Loads annotations from a csv file.
@@ -194,8 +193,8 @@ def _load_annotations_csv(
def _check_patch_paths(
self,
- remove_broken: Optional[bool] = True,
- ignore_broken: Optional[bool] = False,
+ remove_broken: bool | None = True,
+ ignore_broken: bool | None = False,
) -> None:
"""
Checks the file paths of annotations and manages broken paths.
@@ -301,12 +300,12 @@ def print_unique_labels(self) -> None:
def review_labels(
self,
- label_to_review: Optional[str] = None,
- chunks: Optional[int] = 8 * 3,
- num_cols: Optional[int] = 8,
- exclude_df: Optional[pd.DataFrame] = None,
- include_df: Optional[pd.DataFrame] = None,
- deduplicate_col: Optional[str] = "image_id",
+ label_to_review: str | None = None,
+ chunks: int | None = 8 * 3,
+ num_cols: int | None = 8,
+ exclude_df: pd.DataFrame | None = None,
+ include_df: pd.DataFrame | None = None,
+ deduplicate_col: str | None = "image_id",
) -> None:
"""
Perform image review on annotations and update labels for a given
@@ -456,7 +455,7 @@ def review_labels(
print("[INFO] Exited.")
- def show_sample(self, label_to_show: str, num_samples: Optional[int] = 9) -> None:
+ def show_sample(self, label_to_show: str, num_samples: int | None = 9) -> None:
"""Show a random sample of images with the specified label (tar_label).
Parameters
@@ -498,13 +497,13 @@ def show_sample(self, label_to_show: str, num_samples: Optional[int] = 9) -> Non
def create_datasets(
self,
- frac_train: Optional[float] = 0.70,
- frac_val: Optional[float] = 0.15,
- frac_test: Optional[float] = 0.15,
- random_state: Optional[int] = 1364,
- train_transform: Optional[Union[str, Compose, Callable]] = "train",
- val_transform: Optional[Union[str, Compose, Callable]] = "val",
- test_transform: Optional[Union[str, Compose, Callable]] = "test",
+ frac_train: float | None = 0.70,
+ frac_val: float | None = 0.15,
+ frac_test: float | None = 0.15,
+ random_state: int | None = 1364,
+ train_transform: str | Compose | Callable | None = "train",
+ val_transform: str | Compose | Callable | None = "val",
+ test_transform: str | Compose | Callable | None = "test",
) -> None:
"""
Splits the dataset into three subsets: training, validation, and test sets (DataFrames) and saves them as a dictionary in ``self.datasets``.
@@ -642,10 +641,10 @@ def create_datasets(
def create_dataloaders(
self,
- batch_size: Optional[int] = 16,
- sampler: Optional[Union[Sampler, str, None]] = "default",
- shuffle: Optional[bool] = False,
- num_workers: Optional[int] = 0,
+ batch_size: int | None = 16,
+ sampler: Sampler | str | None = "default",
+ shuffle: bool | None = False,
+ num_workers: int | None = 0,
**kwargs,
) -> None:
"""Creates a dictionary containing PyTorch dataloaders
diff --git a/mapreader/download/data_structures.py b/mapreader/download/data_structures.py
index 56cb3ad1..9115057b 100644
--- a/mapreader/download/data_structures.py
+++ b/mapreader/download/data_structures.py
@@ -20,7 +20,7 @@ def __init__(self, lat: float, lon: float):
self.lon = lon
def __str__(self):
- return "({lat}, {lon})".format(lat=self.lat, lon=self.lon)
+ return f"({self.lat}, {self.lon})"
def __repr__(self):
return str(self)
@@ -45,7 +45,7 @@ def __init__(self, x: int, y: int, z: int):
self.z = z
def __str__(self):
- return "({z}, {x}, {y})".format(z=self.z, x=self.x, y=self.y)
+ return f"({self.z}, {self.x}, {self.y})"
def __repr__(self):
return str(self)
@@ -85,7 +85,7 @@ def y_range(self):
return range(self.lower_corner.y, self.upper_corner.y + 1)
def __str__(self):
- return "[{p1}x{p2}]".format(p1=self.lower_corner, p2=self.upper_corner)
+ return f"[{self.lower_corner}x{self.upper_corner}]"
def __repr__(self):
return str(self)
diff --git a/mapreader/download/downloader.py b/mapreader/download/downloader.py
index 1e1fd278..e1ede5d0 100644
--- a/mapreader/download/downloader.py
+++ b/mapreader/download/downloader.py
@@ -2,7 +2,6 @@
import os
import shutil
-from typing import Optional, Union
from shapely.geometry import Polygon
@@ -19,7 +18,7 @@ class Downloader:
def __init__(
self,
- download_url: Union[str, list],
+ download_url: str | list,
):
"""Initialise Downloader object.
@@ -111,9 +110,9 @@ def _download_map(self, grid_bb: GridBoundingBox) -> bool:
def download_map_by_polygon(
self,
polygon: Polygon,
- zoom_level: Optional[int] = 14,
- path_save: Optional[str] = "maps",
- overwrite: Optional[bool] = False,
+ zoom_level: int | None = 14,
+ path_save: str | None = "maps",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads a map contained within a polygon.
diff --git a/mapreader/download/downloader_utils.py b/mapreader/download/downloader_utils.py
index ad90d108..f86bdbb2 100644
--- a/mapreader/download/downloader_utils.py
+++ b/mapreader/download/downloader_utils.py
@@ -1,7 +1,6 @@
from __future__ import annotations
import math
-from typing import Tuple
from shapely.geometry import LineString, Polygon, box
@@ -99,19 +98,15 @@ def get_polygon_from_grid_bb(grid_bb: GridBoundingBox):
-------
shapely.Polygon
"""
- lower_corner = grid_bb.lower_corner # SW
- upper_corner = grid_bb.upper_corner # SW
+ lower_corner = grid_bb.lower_corner # SW
+ upper_corner = grid_bb.upper_corner # SW
# for NE corner of upper right tile, do x+1 and y+1
- upper_corner_NE = GridIndex(
- upper_corner.x + 1,
- upper_corner.y + 1,
- upper_corner.z
- )
+ upper_corner_NE = GridIndex(upper_corner.x + 1, upper_corner.y + 1, upper_corner.z)
- SW_coord = get_coordinate_from_index(lower_corner)
+ SW_coord = get_coordinate_from_index(lower_corner)
NE_coord = get_coordinate_from_index(upper_corner_NE)
-
+
polygon = create_polygon_from_latlons(
SW_coord.lat, SW_coord.lon, NE_coord.lat, NE_coord.lon
)
@@ -160,7 +155,7 @@ def get_coordinate_from_index(grid_index: GridIndex) -> Coordinate:
return Coordinate(lat, lon)
-def _get_index_from_coordinate(lon: float, lat: float, z: int) -> Tuple[(int, int)]:
+def _get_index_from_coordinate(lon: float, lat: float, z: int) -> tuple[int, int]:
"""Generate (x,y) tuple from Coordinate latitudes and longitudes.
Returns
@@ -176,7 +171,7 @@ def _get_index_from_coordinate(lon: float, lat: float, z: int) -> Tuple[(int, in
return x, y
-def _get_coordinate_from_index(x: int, y: int, z: int) -> Tuple[(float, float)]:
+def _get_coordinate_from_index(x: int, y: int, z: int) -> tuple[float, float]:
"""Generate (lon, lat) tuple from GridIndex x, y and zoom level (z).
Returns
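
On the return annotations here: `tuple[(int, int)]` and `tuple[int, int]` are the same subscription at runtime, because `a[x, y]` already passes the tuple `(x, y)`; the unparenthesized spelling is the idiomatic one. A quick check:

    # Both spellings build the same generic alias.
    assert tuple[(int, int)] == tuple[int, int]
    assert tuple[(float, float)] == tuple[float, float]
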
diff --git a/mapreader/download/sheet_downloader.py b/mapreader/download/sheet_downloader.py
index 845b96fe..a47c44a7 100644
--- a/mapreader/download/sheet_downloader.py
+++ b/mapreader/download/sheet_downloader.py
@@ -5,7 +5,6 @@
import re
import shutil
from functools import reduce
-from typing import Optional, Union
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
@@ -31,7 +30,7 @@ class SheetDownloader:
def __init__(
self,
metadata_path: str,
- download_url: Union[str, list],
+ download_url: str | list,
) -> None:
"""
Initialize SheetDownloader class
@@ -57,7 +56,7 @@ def __init__(
), "[ERROR] Please pass metadata_path as string."
if os.path.isfile(metadata_path):
- with open(metadata_path, "r") as f:
+ with open(metadata_path) as f:
self.metadata = json.load(f)
self.features = self.metadata["features"]
print(self.__str__())
@@ -99,7 +98,7 @@ def get_polygons(self) -> None:
self.polygons = True
- def get_grid_bb(self, zoom_level: Optional[int] = 14) -> None:
+ def get_grid_bb(self, zoom_level: int | None = 14) -> None:
"""
For each map in metadata, creates a grid bounding box from map polygons and saves to ``features`` dictionary.
@@ -189,9 +188,9 @@ def get_minmax_latlon(self) -> None:
## queries
def query_map_sheets_by_wfs_ids(
self,
- wfs_ids: Union[list, int],
- append: Optional[bool] = False,
- print: Optional[bool] = False,
+ wfs_ids: list | int,
+ append: bool | None = False,
+ print: bool | None = False,
) -> None:
"""
Find map sheets by WFS ID numbers.
@@ -233,9 +232,9 @@ def query_map_sheets_by_wfs_ids(
def query_map_sheets_by_polygon(
self,
polygon: Polygon,
- mode: Optional[str] = "within",
- append: Optional[bool] = False,
- print: Optional[bool] = False,
+ mode: str | None = "within",
+ append: bool | None = False,
+ print: bool | None = False,
) -> None:
"""
Find map sheets which are found within or intersecting with a defined polygon.
@@ -295,8 +294,8 @@ def query_map_sheets_by_polygon(
def query_map_sheets_by_coordinates(
self,
coords: tuple,
- append: Optional[bool] = False,
- print: Optional[bool] = False,
+ append: bool | None = False,
+ print: bool | None = False,
) -> None:
"""
Find maps sheets which contain a defined set of coordinates.
@@ -337,8 +336,8 @@ def query_map_sheets_by_coordinates(
def query_map_sheets_by_line(
self,
line: LineString,
- append: Optional[bool] = False,
- print: Optional[bool] = False,
+ append: bool | None = False,
+ print: bool | None = False,
) -> None:
"""
Find maps sheets which intersect with a line.
@@ -384,9 +383,9 @@ def query_map_sheets_by_line(
def query_map_sheets_by_string(
self,
string: str,
- keys: Union[str, list] = None,
- append: Optional[bool] = False,
- print: Optional[bool] = False,
+ keys: str | list | None = None,
+ append: bool | None = False,
+ print: bool | None = False,
) -> None:
"""
Find map sheets by searching for a string in a chosen metadata field.
@@ -585,9 +584,9 @@ def _save_metadata(
def _download_map_sheets(
self,
features: list,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
):
"""Download map sheets from a list of features.
@@ -614,9 +613,9 @@ def _download_map_sheets(
def download_all_map_sheets(
self,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads all map sheets in metadata.
@@ -641,10 +640,10 @@ def download_all_map_sheets(
def download_map_sheets_by_wfs_ids(
self,
- wfs_ids: Union[list, int],
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ wfs_ids: list | int,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads map sheets by WFS ID numbers.
@@ -692,10 +691,10 @@ def download_map_sheets_by_wfs_ids(
def download_map_sheets_by_polygon(
self,
polygon: Polygon,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- mode: Optional[str] = "within",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ mode: str | None = "within",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads any map sheets which are found within or intersecting with a defined polygon.
@@ -762,9 +761,9 @@ def download_map_sheets_by_polygon(
def download_map_sheets_by_coordinates(
self,
coords: tuple,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads any maps sheets which contain a defined set of coordinates.
@@ -810,9 +809,9 @@ def download_map_sheets_by_coordinates(
def download_map_sheets_by_line(
self,
line: LineString,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads any maps sheets which intersect with a line.
@@ -863,10 +862,10 @@ def download_map_sheets_by_line(
def download_map_sheets_by_string(
self,
string: str,
- keys: Union[str, list] = None,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ keys: str | list | None = None,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Download map sheets by searching for a string in a chosen metadata field.
@@ -924,9 +923,9 @@ def download_map_sheets_by_string(
def download_map_sheets_by_queries(
self,
- path_save: Optional[str] = "maps",
- metadata_fname: Optional[str] = "metadata.csv",
- overwrite: Optional[bool] = False,
+ path_save: str | None = "maps",
+ metadata_fname: str | None = "metadata.csv",
+ overwrite: bool | None = False,
) -> None:
"""
Downloads map sheets saved as query results.
@@ -989,8 +988,8 @@ def hist_published_dates(self, **kwargs) -> None:
def plot_features_on_map(
self,
features: list,
- map_extent: Optional[Union[str, list, tuple]] = None,
- add_id: Optional[bool] = True,
+ map_extent: str | list | tuple | None = None,
+ add_id: bool | None = True,
) -> None:
"""
Plots boundaries of map sheets on a map using ``cartopy`` library, (if available).
@@ -1085,8 +1084,8 @@ def plot_features_on_map(
def plot_all_metadata_on_map(
self,
- map_extent: Optional[Union[str, list, tuple]] = None,
- add_id: Optional[bool] = True,
+ map_extent: str | list | tuple | None = None,
+ add_id: bool | None = True,
) -> None:
"""
Plots boundaries of all map sheets in metadata on a map using ``cartopy`` library (if available).
@@ -1109,8 +1108,8 @@ def plot_all_metadata_on_map(
def plot_queries_on_map(
self,
- map_extent: Optional[Union[str, list, tuple]] = None,
- add_id: Optional[bool] = True,
+ map_extent: str | list | tuple | None = None,
+ add_id: bool | None = True,
) -> None:
"""
Plots boundaries of query results on a map using ``cartopy`` library (if available).
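
Dropping the explicit `"r"` from `open(metadata_path)` (here and in `_version.py`) is behavior-preserving, since text read mode is the default. A sketch with a hypothetical file name; passing `encoding` explicitly is the stricter option when the JSON must be UTF-8:

    import json

    # mode defaults to "r" (text mode); the encoding argument is an assumption
    with open("metadata.json", encoding="utf-8") as f:
        metadata = json.load(f)
    features = metadata["features"]
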
diff --git a/mapreader/download/tile_loading.py b/mapreader/download/tile_loading.py
index 6bbd1f40..96d83924 100644
--- a/mapreader/download/tile_loading.py
+++ b/mapreader/download/tile_loading.py
@@ -4,7 +4,6 @@
import logging
import os
import urllib.request
-from typing import Union
from joblib import Parallel, delayed
from tqdm import tqdm
@@ -21,7 +20,7 @@ class TileDownloader:
def __init__(
self,
tile_servers: list = None,
- img_format: Union[str, None] = None,
+ img_format: str | None = None,
show_progress: bool = False,
):
"""
@@ -99,15 +98,13 @@ def download_tiles(
os.makedirs(self.temp_folder, exist_ok=True)
if not download_in_parallel:
logger.info(
- "Downloading {} tiles sequentially to disk ..".format(
- grid_bb.covered_cells
- )
+ f"Downloading {grid_bb.covered_cells} tiles sequentially to disk .."
)
return self._download_tiles_sequentially(grid_bb)
# download in parallel
logger.info(
- "Downloading {} tiles to disk (in parallel)..".format(grid_bb.covered_cells)
+ f"Downloading {grid_bb.covered_cells} tiles to disk (in parallel).."
)
delayed_downloads = [
delayed(self._download_tile_in_parallel)(
@@ -175,7 +172,7 @@ def _update_progressbar(self, share: float):
print(
"\r",
- "{:3.0f}%".format(share * 100)
+ f"{share * 100:3.0f}%"
+ "|"
+ "■" * visible
+ "□" * invisible
diff --git a/mapreader/download/tile_merging.py b/mapreader/download/tile_merging.py
index 874f437f..d0b3f768 100644
--- a/mapreader/download/tile_merging.py
+++ b/mapreader/download/tile_merging.py
@@ -3,7 +3,6 @@
import logging
import os
-from typing import Union
from PIL import Image
from tqdm import tqdm
@@ -20,9 +19,9 @@
class TileMerger:
def __init__(
self,
- output_folder: Union[str, None] = None,
- img_input_format: Union[str, None] = None,
- img_output_format: Union[str, None] = None,
+ output_folder: str | None = None,
+ img_input_format: str | None = None,
+ img_output_format: str | None = None,
show_progress=False,
):
"""TileMerger object.
@@ -132,19 +131,19 @@ def _load_tile_size(self, grid_bb: GridBoundingBox):
start_image = self._load_image_to_grid_cell(grid_bb.upper_corner)
except FileNotFoundError as err:
logger.warning("Image has missing tiles in upper right corner.")
- raise FileNotFoundError("[ERROR] Image is missing tiles for both lower left and upper right corners.")
-
+ raise FileNotFoundError(
+ "[ERROR] Image is missing tiles for both lower left and upper right corners."
+ )
+
img_size = start_image.size
assert (
img_size[0] == img_size[1]
- ), "Tiles must be quadratic. This tile, however, is rectangular: {}".format(
- img_size
- )
+ ), f"Tiles must be quadratic. This tile, however, is rectangular: {img_size}"
tile_size = img_size[0]
return tile_size
def merge(
- self, grid_bb: GridBoundingBox, file_name: Union[str, None] = None
+ self, grid_bb: GridBoundingBox, file_name: str | None = None
) -> bool:
"""Merges cells contained within GridBoundingBox.
@@ -165,7 +164,7 @@ def merge(
try:
tile_size = self._load_tile_size(grid_bb)
except FileNotFoundError:
- return False # unsuccessful
+ return False # unsuccessful
merged_image = Image.new(
"RGBA", (len(grid_bb.x_range) * tile_size, len(grid_bb.y_range) * tile_size)
@@ -193,16 +192,14 @@ def merge(
if file_name is None:
file_name = self._get_output_name(grid_bb)
- out_path = "{}{}.{}".format(
- self.output_folder, file_name, self.img_output_format[0]
- )
+ out_path = f"{self.output_folder}{file_name}.{self.img_output_format[0]}"
merged_image.save(out_path, self.img_output_format[1])
success = True if os.path.exists(out_path) else False
if success:
logger.info(
- "Merge successful! The image has been stored at '{}'".format(out_path)
+ f"Merge successful! The image has been stored at '{out_path}'"
)
else:
- logger.warning("Merge unsuccessful! '{}' not saved.".format(out_path))
+ logger.warning(f"Merge unsuccessful! '{out_path}' not saved.")
return success
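
The logging messages above move from `.format()` to f-strings, which is behavior-preserving since both interpolate eagerly. Where deferring the work until a record is actually handled matters, logging's `%s` style is the alternative; a sketch with an illustrative path:

    import logging

    logger = logging.getLogger(__name__)
    out_path = "maps/map.png"  # made-up value

    # eager, as in the diff:
    logger.info(f"Merge successful! The image has been stored at '{out_path}'")
    # lazy alternative: interpolated only if a handler emits the record
    logger.info("Merge successful! The image has been stored at '%s'", out_path)
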
diff --git a/mapreader/load/geo_utils.py b/mapreader/load/geo_utils.py
index 37e54a39..04ecc987 100644
--- a/mapreader/load/geo_utils.py
+++ b/mapreader/load/geo_utils.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import numpy as np
@@ -35,7 +34,7 @@ def extractGeoInfo(image_path):
tiff_coord = tuple(tiff_src.bounds)
print(f"[INFO] Shape: {tiff_shape}. \n[INFO] CRS: {tiff_proj}.")
- print("[INFO] Coordinates: %.4f %.4f %.4f %.4f" % tiff_coord)
+ print("[INFO] Coordinates: {:.4f} {:.4f} {:.4f} {:.4f}".format(*tiff_coord))
return tiff_shape, tiff_proj, tiff_coord
@@ -66,7 +65,7 @@ def reproject_geo_info(image_path, target_crs="EPSG:4326", calc_size_in_m=False)
coord = (xmin, ymin, xmax, ymax)
print(f"[INFO] New CRS: {target_crs}")
- print("[INFO] Reprojected coordinates: %.4f %.4f %.4f %.4f" % coord)
+ print("[INFO] Reprojected coordinates: {:.4f} {:.4f} {:.4f} {:.4f}".format(*coord))
height, width, _ = tiff_shape
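
These two prints keep `str.format` with argument unpacking rather than becoming f-strings: an f-string cannot splat a 4-tuple into positional fields, so the nearest f-string form has to index each element. Sketch with made-up coordinates:

    tiff_coord = (-4.3341, 55.7826, -4.1034, 55.8988)  # illustrative values

    # .format unpacks the tuple into the four positional fields:
    print("[INFO] Coordinates: {:.4f} {:.4f} {:.4f} {:.4f}".format(*tiff_coord))
    # the f-string equivalent must index explicitly:
    print(f"[INFO] Coordinates: {tiff_coord[0]:.4f} {tiff_coord[1]:.4f} "
          f"{tiff_coord[2]:.4f} {tiff_coord[3]:.4f}")
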
diff --git a/mapreader/load/images.py b/mapreader/load/images.py
index 65d228f9..9e15b17d 100644
--- a/mapreader/load/images.py
+++ b/mapreader/load/images.py
@@ -9,7 +9,7 @@
import random
import warnings
from glob import glob
-from typing import Dict, List, Literal, Optional, Tuple, Union
+from typing import Literal
import matplotlib.image as mpimg
import matplotlib.patches as patches
@@ -66,11 +66,11 @@ class MapImages:
def __init__(
self,
- path_images: Optional[str] = None,
- file_ext: Optional[Union[str, bool]] = False,
- tree_level: Optional[str] = "parent",
- parent_path: Optional[str] = None,
- **kwargs: Dict,
+ path_images: str | None = None,
+ file_ext: str | bool | None = False,
+ tree_level: str | None = "parent",
+ parent_path: str | None = None,
+ **kwargs: dict,
):
"""Initializes the MapImages class."""
@@ -150,9 +150,9 @@ def __str__(self) -> Literal[""]:
def _images_constructor(
self,
image_path: str,
- parent_path: Optional[str] = None,
- tree_level: Optional[str] = "parent",
- **kwargs: Dict,
+ parent_path: str | None = None,
+ tree_level: str | None = "parent",
+ **kwargs: dict,
) -> None:
"""
Constructs image data from the given image path and parent path and adds it to the ``MapImages`` instance's ``images`` attribute.
@@ -249,7 +249,7 @@ def _check_image_mode(image_path):
)
@staticmethod
- def _convert_image_path(inp_path: str) -> Tuple[str, str, str]:
+ def _convert_image_path(inp_path: str) -> tuple[str, str, str]:
"""
Convert an image path into an absolute path and find basename and directory name.
@@ -270,12 +270,12 @@ def _convert_image_path(inp_path: str) -> Tuple[str, str, str]:
def add_metadata(
self,
- metadata: Union[str, pd.DataFrame],
- index_col: Optional[Union[int, str]] = 0,
- delimiter: Optional[str] = ",",
- columns: Optional[List[str]] = None,
- tree_level: Optional[str] = "parent",
- ignore_mismatch: Optional[bool] = False,
+ metadata: str | pd.DataFrame,
+ index_col: int | str | None = 0,
+ delimiter: str | None = ",",
+ columns: list[str] | None = None,
+ tree_level: str | None = "parent",
+ ignore_mismatch: bool | None = False,
) -> None:
"""
Add metadata information to the images dictionary.
@@ -426,9 +426,9 @@ def add_metadata(
def show_sample(
self,
num_samples: int,
- tree_level: Optional[str] = "patch",
- random_seed: Optional[int] = 65,
- **kwargs: Dict,
+ tree_level: str | None = "patch",
+ random_seed: int | None = 65,
+ **kwargs: dict,
) -> None:
"""
Display a sample of images from a particular level in the image
@@ -475,15 +475,15 @@ def show_sample(
plt.tight_layout()
plt.show()
- def list_parents(self) -> List[str]:
+ def list_parents(self) -> list[str]:
"""Return list of all parents"""
return list(self.parents.keys())
- def list_patches(self) -> List[str]:
+ def list_patches(self) -> list[str]:
"""Return list of all patches"""
return list(self.patches.keys())
- def add_shape(self, tree_level: Optional[str] = "parent") -> None:
+ def add_shape(self, tree_level: str | None = "parent") -> None:
"""
Add a shape to each image in the specified level of the image
hierarchy.
@@ -509,7 +509,7 @@ def add_shape(self, tree_level: Optional[str] = "parent") -> None:
for image_id in image_ids:
self._add_shape_id(image_id=image_id)
- def add_coord_increments(self, verbose: Optional[bool] = False) -> None:
+ def add_coord_increments(self, verbose: bool | None = False) -> None:
"""
Adds coordinate increments to each image at the parent level.
@@ -572,7 +572,7 @@ def add_patch_polygons(self, verbose: bool = False) -> None:
self._add_patch_polygons_id(patch_id, verbose)
def add_center_coord(
- self, tree_level: Optional[str] = "patch", verbose: Optional[bool] = False
+ self, tree_level: str | None = "patch", verbose: bool | None = False
) -> None:
"""
Adds center coordinates to each image at the specified tree level.
@@ -627,7 +627,7 @@ def add_center_coord(
def _add_shape_id(
self,
- image_id: Union[int, str],
+ image_id: int | str,
) -> None:
"""
Add shape (image_height, image_width, image_channels) of the image
@@ -662,7 +662,7 @@ def _add_shape_id(
)
def _add_coord_increments_id(
- self, image_id: Union[int, str], verbose: Optional[bool] = False
+ self, image_id: int | str, verbose: bool | None = False
) -> None:
"""
Add pixel-wise delta longitude (``dlon``) and delta latitude
@@ -801,8 +801,8 @@ def _add_patch_polygons_id(self, image_id: str, verbose: bool = False) -> None:
def _add_center_coord_id(
self,
- image_id: Union[int, str],
- verbose: Optional[bool] = False,
+ image_id: int | str,
+ verbose: bool | None = False,
) -> None:
"""
Calculates and adds center coordinates (longitude as ``center_lon``
@@ -845,10 +845,10 @@ def _add_center_coord_id(
def _calc_pixel_height_width(
self,
- parent_id: Union[int, str],
- method: Optional[str] = "great-circle",
- verbose: Optional[bool] = False,
- ) -> Tuple[Tuple, float, float]:
+ parent_id: int | str,
+ method: str | None = "great-circle",
+ verbose: bool | None = False,
+ ) -> tuple[tuple, float, float]:
"""
Calculate the height and width of each pixel in a given image in meters.
@@ -930,16 +930,16 @@ def _calc_pixel_height_width(
def patchify_all(
self,
- method: Optional[str] = "pixel",
- patch_size: Optional[int] = 100,
- tree_level: Optional[str] = "parent",
- path_save: Optional[str] = None,
- add_to_parents: Optional[bool] = True,
- square_cuts: Optional[bool] = False,
- resize_factor: Optional[bool] = False,
- output_format: Optional[str] = "png",
- rewrite: Optional[bool] = False,
- verbose: Optional[bool] = False,
+ method: str | None = "pixel",
+ patch_size: int | None = 100,
+ tree_level: str | None = "parent",
+ path_save: str | None = None,
+ add_to_parents: bool | None = True,
+ square_cuts: bool | None = False,
+ resize_factor: bool | None = False,
+ output_format: str | None = "png",
+ rewrite: bool | None = False,
+ verbose: bool | None = False,
) -> None:
"""
Patchify all images in the specified ``tree_level`` and (if ``add_to_parents=True``) add the patches to the MapImages instance's ``images`` dictionary.
@@ -1029,12 +1029,12 @@ def _patchify_by_pixel(
image_id: str,
patch_size: int,
path_save: str,
- add_to_parents: Optional[bool] = True,
- square_cuts: Optional[bool] = False,
- resize_factor: Optional[bool] = False,
- output_format: Optional[str] = "png",
- rewrite: Optional[bool] = False,
- verbose: Optional[bool] = False,
+ add_to_parents: bool | None = True,
+ square_cuts: bool | None = False,
+ resize_factor: bool | None = False,
+ output_format: str | None = "png",
+ rewrite: bool | None = False,
+ verbose: bool | None = False,
):
"""Patchify one image and (if ``add_to_parents=True``) add the patch to the MapImages instance's ``images`` dictionary.
@@ -1154,7 +1154,7 @@ def _add_patch_to_parent(self, patch_id: str) -> None:
if patch_id not in self.parents[patch_parent]["patches"]:
self.parents[patch_parent]["patches"].append(patch_id)
- def _make_dir(self, path_make: str, exists_ok: Optional[bool] = True) -> None:
+ def _make_dir(self, path_make: str, exists_ok: bool | None = True) -> None:
"""
Helper method to make directories.
@@ -1165,10 +1165,10 @@ def _make_dir(self, path_make: str, exists_ok: Optional[bool] = True) -> None:
def calc_pixel_stats(
self,
- parent_id: Optional[str] = None,
- calc_mean: Optional[bool] = True,
- calc_std: Optional[bool] = True,
- verbose: Optional[bool] = False,
+ parent_id: str | None = None,
+ calc_mean: bool | None = True,
+ calc_std: bool | None = True,
+ verbose: bool | None = False,
) -> None:
"""
Calculate the mean and standard deviation of pixel values for all
@@ -1254,10 +1254,10 @@ def calc_pixel_stats(
def convert_images(
self,
- save: Optional[bool] = False,
- save_format: Optional[str] = "csv",
- delimiter: Optional[str] = ",",
- ) -> Tuple[pd.DataFrame, pd.DataFrame]:
+ save: bool | None = False,
+ save_format: str | None = "csv",
+ delimiter: str | None = ",",
+ ) -> tuple[pd.DataFrame, pd.DataFrame]:
"""
Convert the ``MapImages`` instance's ``images`` dictionary into pandas
DataFrames for easy manipulation.
@@ -1308,8 +1308,8 @@ def convert_images(
def show_parent(
self,
parent_id: str,
- column_to_plot: Optional[str] = None,
- **kwargs: Dict,
+ column_to_plot: str | None = None,
+ **kwargs: dict,
) -> None:
"""
A wrapper method for `.show()` which plots all patches of a
@@ -1342,21 +1342,21 @@ def show_parent(
def show(
self,
- image_ids: Union[str, List[str]],
- column_to_plot: Optional[str] = None,
- figsize: Optional[tuple] = (10, 10),
- plot_parent: Optional[bool] = True,
- patch_border: Optional[bool] = True,
- border_color: Optional[str] = "r",
- vmin: Optional[float] = None,
- vmax: Optional[float] = None,
- alpha: Optional[float] = 1.0,
- cmap: Optional[str] = "viridis",
- discrete_cmap: Optional[int] = 256,
- plot_histogram: Optional[bool] = False,
- save_kml_dir: Optional[Union[bool, str]] = False,
- image_width_resolution: Optional[int] = None,
- kml_dpi_image: Optional[int] = None,
+ image_ids: str | list[str],
+ column_to_plot: str | None = None,
+ figsize: tuple | None = (10, 10),
+ plot_parent: bool | None = True,
+ patch_border: bool | None = True,
+ border_color: str | None = "r",
+ vmin: float | None = None,
+ vmax: float | None = None,
+ alpha: float | None = 1.0,
+ cmap: str | None = "viridis",
+ discrete_cmap: int | None = 256,
+ plot_histogram: bool | None = False,
+ save_kml_dir: bool | str | None = False,
+ image_width_resolution: int | None = None,
+ kml_dpi_image: int | None = None,
) -> None:
"""
Plot images from a list of `image_ids`.
@@ -1588,8 +1588,8 @@ def _create_kml(
self,
kml_out_path: str,
column_to_plot: str,
- coords: Union[List, Tuple],
- counter: Optional[int] = -1,
+ coords: list | tuple,
+ counter: int | None = -1,
) -> None:
"""Create a KML file.
@@ -1653,11 +1653,11 @@ def _hist_values_array(
def load_patches(
self,
patch_paths: str,
- patch_file_ext: Optional[Union[str, bool]] = False,
- parent_paths: Optional[Union[str, bool]] = False,
- parent_file_ext: Optional[Union[str, bool]] = False,
- add_geo_info: Optional[bool] = False,
- clear_images: Optional[bool] = False,
+ patch_file_ext: str | bool | None = False,
+ parent_paths: str | bool | None = False,
+ parent_file_ext: str | bool | None = False,
+ add_geo_info: bool | None = False,
+ clear_images: bool | None = False,
) -> None:
"""
Loads patch images from the given paths and adds them to the ``images``
@@ -1737,7 +1737,7 @@ def load_patches(
@staticmethod
def detect_parent_id_from_path(
- image_id: Union[int, str], parent_delimiter: Optional[str] = "#"
+ image_id: int | str, parent_delimiter: str | None = "#"
) -> str:
"""
Detect parent IDs from ``image_id``.
@@ -1759,9 +1759,9 @@ def detect_parent_id_from_path(
@staticmethod
def detect_pixel_bounds_from_path(
- image_id: Union[int, str],
+ image_id: int | str,
# border_delimiter="-" # <-- not in use in this method
- ) -> Tuple[int, int, int, int]:
+ ) -> tuple[int, int, int, int]:
"""
Detects borders from the path, assuming the patch is named using the
following format: ``...-min_x-min_y-max_x-max_y-...``
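A worked example of the two static helpers, using an illustrative patch id that follows the documented naming scheme (the exact id layout is an assumption):

from mapreader.load.images import MapImages

patch_id = "patch-0-0-100-100-#map1.png#"
# The parent id is expected between the "#" delimiters:
parent_id = MapImages.detect_parent_id_from_path(patch_id)
# The bounds are expected from the -min_x-min_y-max_x-max_y- run:
bounds = MapImages.detect_pixel_bounds_from_path(patch_id)  # ~ (0, 0, 100, 100)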
@@ -1792,11 +1792,11 @@ def detect_pixel_bounds_from_path(
def load_parents(
self,
- parent_paths: Optional[Union[str, bool]] = False,
- parent_ids: Optional[Union[List[str], str, bool]] = False,
- parent_file_ext: Optional[Union[str, bool]] = False,
- overwrite: Optional[bool] = False,
- add_geo_info: Optional[bool] = False,
+ parent_paths: str | bool | None = False,
+ parent_ids: list[str] | str | bool | None = False,
+ parent_file_ext: str | bool | None = False,
+ overwrite: bool | None = False,
+ add_geo_info: bool | None = False,
) -> None:
"""
Load parent images from file paths (``parent_paths``).
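Under the flattened union, `parent_ids` may be a list of ids, a single id, or `False`; a sketch with hypothetical paths, reusing `my_maps`:

my_maps.load_parents(
    parent_paths="./maps/*.png",          # str | bool | None
    parent_ids=["map1.png", "map2.png"],  # list[str] | str | bool | None
    overwrite=False,
    add_geo_info=False,
)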
@@ -1867,9 +1867,9 @@ def load_parents(
def load_df(
self,
- parent_df: Optional[pd.DataFrame] = None,
- patch_df: Optional[pd.DataFrame] = None,
- clear_images: Optional[bool] = True,
+ parent_df: pd.DataFrame | None = None,
+ patch_df: pd.DataFrame | None = None,
+ clear_images: bool | None = True,
) -> None:
"""
Create a ``MapImages`` instance by loading data from pandas DataFrame(s).
@@ -1904,12 +1904,12 @@ def load_df(
def load_csv(
self,
- parent_path: Optional[str] = None,
- patch_path: Optional[str] = None,
- clear_images: Optional[bool] = False,
- index_col_patch: Optional[int] = 0,
- index_col_parent: Optional[int] = 0,
- delimiter: Optional[str] = ",",
+ parent_path: str | None = None,
+ patch_path: str | None = None,
+ clear_images: bool | None = False,
+ index_col_patch: int | None = 0,
+ index_col_parent: int | None = 0,
+ delimiter: str | None = ",",
) -> None:
"""
Load CSV files containing information about parents and patches,
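A round-trip sketch pairing `convert_images(save=True)` with `load_csv`; the CSV names are assumptions about what the save step wrote:

my_maps.load_csv(
    parent_path="parent_df.csv",  # assumed output of convert_images above
    patch_path="patch_df.csv",
    clear_images=True,            # drop the in-memory images first
    delimiter=",",
)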
@@ -1960,8 +1960,8 @@ def load_csv(
def add_geo_info(
self,
- target_crs: Optional[str] = "EPSG:4326",
- verbose: Optional[bool] = True,
+ target_crs: str | None = "EPSG:4326",
+ verbose: bool | None = True,
) -> None:
"""
Add coordinates (reprojected to EPSG:4326) to all parent images using image metadata.
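The geo-info call itself is one line; `EPSG:4326` is already the default and is spelled out here only for clarity:

my_maps.add_geo_info(target_crs="EPSG:4326", verbose=True)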
@@ -1989,8 +1989,8 @@ def add_geo_info(
def _add_geo_info_id(
self,
image_id: str,
- target_crs: Optional[str] = "EPSG:4326",
- verbose: Optional[bool] = True,
+ target_crs: str | None = "EPSG:4326",
+ verbose: bool | None = True,
) -> None:
"""
Add coordinates (reprojected to EPSG:4326) to an image.
@@ -2078,9 +2078,9 @@ def _get_tree_level(self, image_id: str) -> str:
def save_patches_as_geotiffs(
self,
- rewrite: Optional[bool] = False,
- verbose: Optional[bool] = False,
- crs: Optional[str] = None,
+ rewrite: bool | None = False,
+ verbose: bool | None = False,
+ crs: str | None = None,
) -> None:
"""Save all patches in MapImages instance as geotiffs.
@@ -2104,9 +2104,9 @@ def save_patches_as_geotiffs(
def _save_patch_as_geotiff(
self,
patch_id: str,
- rewrite: Optional[bool] = False,
- verbose: Optional[bool] = False,
- crs: Optional[str] = None,
+ rewrite: bool | None = False,
+ verbose: bool | None = False,
+ crs: str | None = None,
) -> None:
"""Save a patch as a geotiff.
@@ -2183,9 +2183,9 @@ def _save_patch_as_geotiff(
def save_patches_to_geojson(
self,
- geojson_fname: Optional[str] = "patches.geojson",
- rewrite: Optional[bool] = False,
- crs: Optional[str] = None,
+ geojson_fname: str | None = "patches.geojson",
+ rewrite: bool | None = False,
+ crs: str | None = None,
) -> None:
"""Saves patches to a geojson file.
diff --git a/mapreader/load/loader.py b/mapreader/load/loader.py
index aed3f5cf..77838fde 100644
--- a/mapreader/load/loader.py
+++ b/mapreader/load/loader.py
@@ -1,16 +1,13 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
-from typing import Optional, Union
-
from mapreader.load.images import MapImages
def loader(
- path_images: Optional[str] = None,
- tree_level: Optional[str] = "parent",
- parent_path: Optional[str] = None,
+ path_images: str | None = None,
+ tree_level: str | None = "parent",
+ parent_path: str | None = None,
**kwargs: dict,
) -> MapImages:
"""
@@ -53,11 +50,11 @@ def loader(
def load_patches(
patch_paths: str,
- patch_file_ext: Optional[Union[str, bool]] = False,
- parent_paths: Optional[Union[str, bool]] = False,
- parent_file_ext: Optional[Union[str, bool]] = False,
- add_geo_info: Optional[bool] = False,
- clear_images: Optional[bool] = False,
+ patch_file_ext: str | bool | None = False,
+ parent_paths: str | bool | None = False,
+ parent_file_ext: str | bool | None = False,
+ add_geo_info: bool | None = False,
+ clear_images: bool | None = False,
) -> MapImages:
"""
Creates a ``MapImages`` object to manage a collection of image paths and
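The two public entry points whose signatures changed above, in one sketch (globs hypothetical):

from mapreader import load_patches, loader

my_maps = loader("./maps/*.png", tree_level="parent")
my_patches = load_patches(
    "./patches/*.png",
    parent_paths="./maps/*.png",
    add_geo_info=False,
    clear_images=False,
)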
diff --git a/mapreader/process/process.py b/mapreader/process/process.py
index d83df08c..ab38bd26 100644
--- a/mapreader/process/process.py
+++ b/mapreader/process/process.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
try:
@@ -17,12 +16,11 @@
import os
import subprocess
from glob import glob
-from typing import List, Optional, Union
def preprocess_all(
- image_paths: Union[List[str], str], save_preproc_dir: str, **kwds
-) -> List[str]:
+ image_paths: list[str] | str, save_preproc_dir: str, **kwds
+) -> list[str]:
"""
Preprocess all images in a list of file paths or a directory using the
``preprocess`` function and save them to the specified directory.
@@ -61,12 +59,12 @@ def preprocess_all(
def preprocess(
image_path: str,
save_preproc_dir: str,
- dst_crs: Optional[str] = "EPSG:3857",
- crop_prefix: Optional[str] = "preproc_",
- reproj_prefix: Optional[str] = "preproc_tmp_",
- resample_prefix: Optional[str] = "preproc_resample_",
- resize_percent: Optional[int] = 40,
- remove_reproj_file: Optional[bool] = True,
+ dst_crs: str | None = "EPSG:3857",
+ crop_prefix: str | None = "preproc_",
+ reproj_prefix: str | None = "preproc_tmp_",
+ resample_prefix: str | None = "preproc_resample_",
+ resize_percent: int | None = 40,
+ remove_reproj_file: bool | None = True,
) -> str:
"""
Preprocesses an image file by reprojecting it to a new coordinate
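A preprocessing sketch under the new defaults: reproject to EPSG:3857, resample to 40% of the original size, and delete the intermediate reprojection file; paths are hypothetical:

from mapreader.process.process import preprocess, preprocess_all

out_path = preprocess("./maps/map1.tif", save_preproc_dir="./maps_preproc/")
# Or over a list of paths (a directory also works, per the docstring):
out_paths = preprocess_all(
    ["./maps/map1.tif", "./maps/map2.tif"],
    save_preproc_dir="./maps_preproc/",
)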
diff --git a/mapreader/utils/compute_and_save_stats.py b/mapreader/utils/compute_and_save_stats.py
index de6a9071..c4fcc718 100644
--- a/mapreader/utils/compute_and_save_stats.py
+++ b/mapreader/utils/compute_and_save_stats.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import os
diff --git a/mapreader/utils/slice_parallel.py b/mapreader/utils/slice_parallel.py
index 43324713..f467ad0a 100644
--- a/mapreader/utils/slice_parallel.py
+++ b/mapreader/utils/slice_parallel.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
from __future__ import annotations
import glob
diff --git a/versioneer.py b/versioneer.py
index 871a67d3..380e05dd 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -317,7 +317,7 @@
import subprocess
import sys
from pathlib import Path
-from typing import Callable, Dict
+from typing import Callable
have_tomllib = True
if sys.version_info >= (3, 11):
@@ -368,8 +368,7 @@ def get_root():
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
print(
- "Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(my_path), versioneer_py)
+ "Warning: build in {} is using versioneer.py from {}".format(os.path.dirname(my_path), versioneer_py)
)
except NameError:
pass
@@ -419,8 +418,8 @@ class NotThisMethod(Exception):
# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
+LONG_VERSION_PY: dict[str, str] = {}
+HANDLERS: dict[str, dict[str, Callable]] = {}
def register_vcs_handler(vcs, method): # decorator
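The pattern this hunk applies, in miniature: PEP 585 builtin generics replace `typing.Dict`, needing no import beyond `Callable`, and are identical at runtime on Python 3.9+. The `register` helper is illustrative, not versioneer's actual decorator:

from typing import Callable

# Before: Dict[str, Dict[str, Callable]] plus `from typing import Dict`.
HANDLERS: dict[str, dict[str, Callable]] = {}

def register(vcs: str, method: str, func: Callable) -> None:
    # Populate the nested mapping exactly as either spelling allows.
    HANDLERS.setdefault(vcs, {})[method] = func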
@@ -469,7 +468,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
- print("unable to find command, tried %s" % (commands,))
+ print(f"unable to find command, tried {commands}")
return None, None
stdout = process.communicate()[0].strip().decode()
if process.returncode != 0:
@@ -1152,7 +1151,7 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
- with open(versionfile_abs, "r") as fobj:
+ with open(versionfile_abs) as fobj:
for line in fobj:
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
@@ -1354,7 +1353,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
+ pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
full_tag,
tag_prefix,
)
@@ -1406,7 +1405,7 @@ def do_vcs_install(versionfile_source, ipy):
files.append(versioneer_file)
present = False
try:
- with open(".gitattributes", "r") as fobj:
+ with open(".gitattributes") as fobj:
for line in fobj:
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
@@ -1445,8 +1444,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
if verbose:
print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
+ "Tried directories {} but none started with prefix {}".format(str(rootdirs), parentdir_prefix)
)
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -1495,7 +1493,7 @@ def write_to_version_file(filename, versions):
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
- print("set %s to '%s'" % (filename, versions["version"]))
+ print("set {} to '{}'".format(filename, versions["version"]))
def plus_or_dot(pieces):
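A plausible reason this hunk keeps `str.format` while neighbouring hunks use f-strings: before Python 3.12 an f-string cannot reuse its own quote character inside a replacement field, so a dict subscript has to switch quote styles. A sketch with illustrative values:

versions = {"version": "0.1.0"}
filename = "_version.py"

print("set {} to '{}'".format(filename, versions["version"]))  # as in the patch
print(f"set {filename} to '{versions['version']}'")            # pre-3.12-safe alternative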
@@ -1800,7 +1798,7 @@ def get_versions(verbose=False):
try:
ver = versions_from_file(versionfile_abs)
if verbose:
- print("got version from file %s %s" % (versionfile_abs, ver))
+ print(f"got version from file {versionfile_abs} {ver}")
return ver
except NotThisMethod:
pass
@@ -2188,7 +2186,7 @@ def do_setup():
ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
if os.path.exists(ipy):
try:
- with open(ipy, "r") as f:
+ with open(ipy) as f:
old = f.read()
except OSError:
old = ""
@@ -2220,7 +2218,7 @@ def scan_setup_py():
found = set()
setters = False
errors = 0
- with open("setup.py", "r") as f:
+ with open("setup.py") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")