diff --git a/.gitignore b/.gitignore
index deba84b..ea8123f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,7 @@
 cover/*
 MANIFEST
 *__pycache__*
+
+# linting with shared config file
+ruff-github-workflows.toml
+ruff-merged.toml
diff --git a/ruff.toml b/ruff.toml
index 7940626..16a9f06 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,43 +1,47 @@
-# Ruff configuration file: ruff.toml
-
-# Define the required version for Ruff
-required-version = ">=0.7.0"
-
-line-length = 79
-
-# Specify directories or files to be excluded from Ruff linting, in addition to default exclusions
-extend-exclude = [
-    ".git",
-    "__pycache__",
-    ".env",
-    ".venv",
-    "env",
-    "venv",
-    "ENV",
-    "env.bak",
-    "venv.bak",
-    "ctypes",
-    "pydispatch",
-]
-
-# Uncomment the following sections as needed
-
-# [format]
-# Format settings for Ruff (quote-style and indent-style)
-# quote-style = "double"
-# indent-style= "tab"
-
-# [lint]
-# Define linting rules selection and ignore list
-# select = [
-#    "A", # flake8-builtins (A)
-#    "COM", # flake8-commas
-#    "PL", # Pylint
-# ]
-ignore = [
-    "F821", # Undefined name `_`
-]
-
-# [lint.per-file-ignores]
-# Define file-specific linting rule ignores
-# "lib_dop/r_dop_import_lib.py" = ["ERA001", "PLR2004"]
\ No newline at end of file
+lint.ignore = [
+    "A002",
+    "ANN001",
+    "ANN002",
+    "ANN003",
+    "ANN201",
+    "B006",
+    "BLE001",
+    "D100",
+    "D104",
+    "D205",
+    "D401",
+    "E501",
+    "F821",
+    "FBT002",
+    "FBT003",
+    "FURB101",
+    "I001",
+    "INT001",
+    "PERF401",
+    "PLC1901",
+    "PLR0912",
+    "PLR0913",
+    "PLR0914",
+    "PLR0915",
+    "PLR0917",
+    "PLR2004",
+    "PLW2901",
+    "SIM115",
+    "PTH103",
+    "PTH107",
+    "PTH109",
+    "PTH110",
+    "PTH112",
+    "PTH113",
+    "PTH118",
+    "PTH119",
+    "PTH123",
+    "PTH207",
+    "PTH208",
+    "S101",
+    "S113",
+    "S404",
+    "S603",
+    "S604",
+    "S605",
+]
\ No newline at end of file
diff --git a/src/grass_gis_helpers/cleanup.py b/src/grass_gis_helpers/cleanup.py
index 80baca2..b3f5f2a 100644
--- a/src/grass_gis_helpers/cleanup.py
+++ b/src/grass_gis_helpers/cleanup.py
@@ -40,10 +40,9 @@ def general_cleanup(
     orig_region=None,
     rm_mask=False,
 ):
-    """General cleanup function"""
-
+    """General cleanup function."""
     grass.message(_("Cleaning up..."))
-    nulldev = open(os.devnull, "w")
+    nulldev = open(os.devnull, "w", encoding="utf-8")
     kwargs = {"flags": "f", "quiet": True, "stderr": nulldev}
     rm_groups.extend(rm_groups_wo_rasters)
     for rmg in rm_groups:
@@ -52,7 +51,9 @@ def general_cleanup(
     for rmg_wr in rm_groups_w_rasters:
         if grass.find_file(name=rmg_wr, element="group")["file"]:
             group_rasters = grass.parse_command(
-                "i.group", flags="lg", group=rmg_wr
+                "i.group",
+                flags="lg",
+                group=rmg_wr,
             )
             rm_rasters.extend(group_rasters)
             grass.run_command("g.remove", type="group", name=rmg, **kwargs)
@@ -70,14 +71,17 @@ def general_cleanup(
             shutil.rmtree(rmdir)
     if orig_region is not None:
         find_reg = grass.find_file(name=orig_region, element="windows")
-        if "file" in find_reg and find_reg["file"]:
+        if find_reg.get("file"):
             grass.run_command("g.region", region=orig_region)
             grass.run_command(
-                "g.remove", type="region", name=orig_region, **kwargs
+                "g.remove",
+                type="region",
+                name=orig_region,
+                **kwargs,
             )
     for rmreg in rm_regions:
         find_reg = grass.find_file(name=rmreg, element="windows")
-        if "file" in find_reg and find_reg["file"]:
+        if find_reg.get("file"):
             grass.run_command("g.remove", type="region", name=rmreg, **kwargs)
     strds = grass.parse_command("t.list", type="strds", quiet=True)
     stvds = grass.parse_command("t.list", type="stvds", quiet=True)
@@ -134,9 +138,8 @@ def general_cleanup(
                 quiet=True,
                 stderr=nulldev,
             )
-    if rm_mask:
-        if grass.find_file(name="MASK", element="raster")["file"]:
-            grass.run_command("r.mask", flags="r")
+    if rm_mask and grass.find_file(name="MASK", element="raster")["file"]:
+        grass.run_command("r.mask", flags="r")

     # get location size
     get_location_size()
@@ -148,8 +151,9 @@ def general_cleanup(
 def rm_vects(vects):
     """Function to remove clean vector maps
     Args:
-        vects (list): list of vector maps which should be removed"""
-    nuldev = open(os.devnull, "w")
+        vects (list): list of vector maps which should be removed.
+    """
+    nuldev = open(os.devnull, "w", encoding="utf-8")
     kwargs = {"flags": "f", "quiet": True, "stderr": nuldev}
     for rmv in vects:
         if grass.find_file(name=rmv, element="vector")["file"]:
@@ -160,24 +164,24 @@ def reset_region(region):
     """Function to set the region to the given region
     Args:
         region (str): the name of the saved region which should be set and
-                      deleted
+                      deleted.
     """
-    nulldev = open(os.devnull, "w")
+    nulldev = open(os.devnull, "w", encoding="utf-8")
     kwargs = {"flags": "f", "quiet": True, "stderr": nulldev}
-    if region:
-        if grass.find_file(name=region, element="windows")["file"]:
-            grass.run_command("g.region", region=region)
-            grass.run_command("g.remove", type="region", name=region, **kwargs)
+    if region and grass.find_file(name=region, element="windows")["file"]:
+        grass.run_command("g.region", region=region)
+        grass.run_command("g.remove", type="region", name=region, **kwargs)


 def cleaning_tmp_location(original_gisrc, tmp_loc, gisdbase, tmp_gisrc):
-    """Cleaning up things from temporary location
+    """Cleaning up things from temporary location.

     Args:
         original_gisrc (str): The path to the original GISRC file
         tmp_loc (str): The name of the temporary location
         gisdbase (str): The GISDBASE info
         tmp_gisrc (str): The path to the temporary GISRC file
+
     """
     # switch back to original gisrc
     if original_gisrc:
diff --git a/src/grass_gis_helpers/data_import.py b/src/grass_gis_helpers/data_import.py
index 865b027..dde29c4 100644
--- a/src/grass_gis_helpers/data_import.py
+++ b/src/grass_gis_helpers/data_import.py
@@ -35,12 +35,13 @@


 def download_and_import_tindex(tindex_url, output, download_dir):
-    """Download and import tile index from url
+    """Download and import tile index from url.

     Args:
         tindex_url (str): URL of tile index
         output (str): The output name for the tile index
         download_dir (str): The directory where the data should be downloaded
+
     """
     cur_dir = os.getcwd()
     zip_name = os.path.basename(tindex_url)
@@ -71,7 +72,7 @@ def download_and_import_tindex(tindex_url, output, download_dir):

 def get_list_of_tindex_locations(tindex, aoi=None):
     """Select the locations of the tindex which overlap with the AOI or the
-    current region
+    current region.

     Args:
         tindex (str): Name of the tindex vector map
@@ -79,6 +80,7 @@ def get_list_of_tindex_locations(tindex, aoi=None):
     Returns:
         (list): List with locations which overlap with the AOI or the
             current region
+
     """
     tindex_clipped = f"clipped_tindex_vect_{grass.tempname(8)}"
     try:
@@ -98,7 +100,8 @@ def get_list_of_tindex_locations(tindex, aoi=None):
         tiles = [
             val[0]
             for val in grass.vector_db_select(
-                tindex_clipped, columns="location"
+                tindex_clipped,
+                columns="location",
             )["values"].values()
         ]
     finally:
@@ -132,9 +135,11 @@ def import_local_raster_data(
             imported; e.g. for DOP import band_dict = {
                 1: "red", 2: "green", 3: "blue", 4: "nir"
             }
+
     Returns:
         imported_local_data (bool): True if local data imported, otherwise
             False
+
     """
     grass.message(_("Importing local raster data..."))
     imported_local_data = False
@@ -187,7 +192,7 @@ def import_local_raster_data(
             err_m2 = "already exists and will be overwritten"
             if err_m1 in r_import[1].decode():
                 continue
-            elif err_m2 in r_import[1].decode():
+            if err_m2 in r_import[1].decode():
                 pass
             elif r_import[1].decode() != "":
                 grass.fatal(_(r_import[1].decode()))
@@ -218,7 +223,7 @@ def import_local_raster_data(


 def get_xyz_file_infos(xyz_file, separator="space"):
-    """Get the infos of a XYZ file to resolution, bounding box and pixelcenter
+    """Get the infos of a XYZ file to resolution, bounding box and pixelcenter.

     Args:
         xyz_file (str): XYZ file path to import
@@ -227,6 +232,7 @@ def get_xyz_file_infos(xyz_file, separator="space"):
         res (float): Resolution of the XYZ file
         xyz_reg (dict): Dictionary with region of the XYZ file
         shift_needed (bool): Boolean if the XYZ file hat to be shifted
+
     """
     gdalinfo_cmd = ["gdalinfo", xyz_file]
     process = grass.Popen(gdalinfo_cmd, stdout=PIPE, stderr=PIPE)
@@ -236,16 +242,16 @@ def get_xyz_file_infos(xyz_file, separator="space"):
     res = float(stdout.split("Pixel Size = (")[1].split(",")[0])
     # get bbox
     bbox_x1 = float(
-        stdout.split("Upper Left")[1].replace("(", "").split(",")[0].strip()
+        stdout.split("Upper Left")[1].replace("(", "").split(",")[0].strip(),
     )
     bbox_x2 = float(
-        stdout.split("Upper Right")[1].replace("(", "").split(",")[0].strip()
+        stdout.split("Upper Right")[1].replace("(", "").split(",")[0].strip(),
     )
     bbox_y1 = float(
-        stdout.split("Upper Left")[1].split(",")[1].split(")")[0].strip()
+        stdout.split("Upper Left")[1].split(",")[1].split(")")[0].strip(),
     )
     bbox_y2 = float(
-        stdout.split("Lower Left")[1].split(",")[1].split(")")[0].strip()
+        stdout.split("Lower Left")[1].split(",")[1].split(")")[0].strip(),
     )
     # check if shift is needed
     # The shift is only needed if the bbox does not contain the pixel centers
@@ -279,9 +285,12 @@ def get_xyz_file_infos(xyz_file, separator="space"):


 def import_single_local_xyz_file(
-    xyz_file, output, use_cur_reg=False, separator="space"
+    xyz_file,
+    output,
+    use_cur_reg=False,
+    separator="space",
 ):
-    """Import single XYZ file
+    """Import single XYZ file.

     Args:
         xyz_file (str): XYZ file path to import
@@ -292,9 +301,11 @@ def import_single_local_xyz_file(
         separator (str): Separator of XYZ file; default is "space"
     Returns:
         output (str): If the output is imported, otherwise return None
+
     """
     res, xyz_reg, shift_needed = get_xyz_file_infos(
-        xyz_file, separator=separator
+        xyz_file,
+        separator=separator,
     )
     # check if aoi overlaps
     if use_cur_reg:
@@ -369,6 +380,7 @@ def import_local_xyz_files(
             will be appended
     Returns:
         imported_local_data (bool): True if local data imported, otherwise False
+
     """
     grass.message(_("Importing local XYZ data..."))
     imported_local_data = False
@@ -404,7 +416,7 @@ def import_local_xyz_files(
         if name:
             all_raster.append(name)
             grass.message(
-                _(f"XYZ file <{os.path.basename(xyz_file)}> imported.")
+                _(f"XYZ file <{os.path.basename(xyz_file)}> imported."),
             )
     # check if raster were imported
     if len(all_raster) > 0:
@@ -413,7 +425,7 @@ def import_local_xyz_files(


 def import_local_vector_data(aoi_map, local_data_dir, rm_vectors, output):
-    """Import vector data from local file path
+    """Import vector data from local file path.

     Args:
         aoi_map (str): Name of vector map defining AOI
@@ -423,6 +435,7 @@ def import_local_vector_data(aoi_map, local_data_dir, rm_vectors, output):
     Returns:
         imported_local_data (bool): True if local data imported, otherwise
             False
+
     """
     imported_local_data = False

@@ -432,7 +445,8 @@ def import_local_vector_data(aoi_map, local_data_dir, rm_vectors, output):
         recursive=True,
     )
     shp_files = glob.glob(
-        os.path.join(local_data_dir, "**", "*.shp"), recursive=True
+        os.path.join(local_data_dir, "**", "*.shp"),
+        recursive=True,
     )
     files.extend(shp_files)

diff --git a/src/grass_gis_helpers/general.py b/src/grass_gis_helpers/general.py
index 7cb099e..c141251 100644
--- a/src/grass_gis_helpers/general.py
+++ b/src/grass_gis_helpers/general.py
@@ -24,23 +24,22 @@


 def set_nprocs(nprocs):
-    """Set nprocs to value if it is -2, otherwise check value"""
+    """Set nprocs to value if it is -2, otherwise check value."""
     if isinstance(nprocs, str):
         nprocs = int(nprocs)
     if nprocs == -2:
         return mp.cpu_count() - 1 if mp.cpu_count() > 1 else 1
-    else:
-        nprocs_real = mp.cpu_count()
-        if nprocs > nprocs_real:
-            grass.warning(
-                f"Using {nprocs} parallel processes but only "
-                f"{nprocs_real} CPUs available."
-            )
-        return nprocs
+    nprocs_real = mp.cpu_count()
+    if nprocs > nprocs_real:
+        grass.warning(
+            f"Using {nprocs} parallel processes but only "
+            f"{nprocs_real} CPUs available.",
+        )
+    return nprocs


 def communicate_grass_command(*args, **kwargs):
-    """Return stdout and stderr from executed GRASS command"""
+    """Return stdout and stderr from executed GRASS command."""
     kwargs["stdout"] = grass.PIPE
     kwargs["stderr"] = grass.PIPE
     grass_ps = grass.start_command(*args, **kwargs)
@@ -48,22 +47,24 @@ def communicate_grass_command(*args, **kwargs):


 def check_grass_version(comp_version=(8, 0, 0)):
-    """Returns boolean, if current GRASS version is >= some compare version"""
+    """Returns boolean, if current GRASS version is >= some compare version."""
     cur_version = tuple(
         [
             int(x.replace("dev", "")) if x != "dev" else 0
             for x in grass.version()["version"].split(".")
-        ]
+        ],
     )
     return cur_version >= comp_version


 def log_memory(grassenv=None):
-    """Log memory usage"""
+    """Log memory usage."""
     if not grassenv:
         grassenv = grass.gisenv()
     cmd = grass.Popen(
-        f"df -h {grassenv['GISDBASE']}", shell=True, stdout=subprocess.PIPE
+        f"df -h {grassenv['GISDBASE']}",
+        shell=True,
+        stdout=subprocess.PIPE,
     )
     grass.message(
         _(
@@ -71,21 +72,22 @@ def log_memory(grassenv=None):
                 "\nDisk usage of GRASS GIS database:\n",
                 f"{cmd.communicate()[0].decode('utf-8').rstrip()}\n",
             ),
-        )
+        ),
     )
-    grass.message(_(f"\nmemory: \n{str(psutil.virtual_memory())}"))
-    grass.message(_(f"\nswap memory: \n{str(psutil.swap_memory())}"))
+    grass.message(_(f"\nmemory: \n{psutil.virtual_memory()!s}"))
+    grass.message(_(f"\nswap memory: \n{psutil.swap_memory()!s}"))

     # ulimit -a
     cmd = grass.Popen("ulimit -a", shell=True, stdout=subprocess.PIPE)
     grass.message(
-        _(f"\nulimit -a: \n{cmd.communicate()[0].decode('utf-8').rstrip()}")
+        _(f"\nulimit -a: \n{cmd.communicate()[0].decode('utf-8').rstrip()}"),
     )


 def get_free_ram(unit, percent=100):
     """The function gives the amount of the percentages of the available
     RAM memory and free swap space.
+
     Args:
         unit(string): 'GB' or 'MB'
         percent(int): number of percent which should be used of the available
@@ -94,7 +96,7 @@ def get_free_ram(unit, percent=100):
     Returns:
         memory_MB_percent/memory_GB_percent(int): percent of the
             the available memory and free swap in MB or
-            GB
""" # use psutil cause of alpine busybox free version for RAM/SWAP usage @@ -105,41 +107,44 @@ def get_free_ram(unit, percent=100): if unit == "MB": memory_mb_percent = memory_mb * percent / 100.0 - return int(round(memory_mb_percent)) - elif unit == "GB": + return round(memory_mb_percent) + if unit == "GB": memory_gb_percent = memory_gb * percent / 100.0 - return int(round(memory_gb_percent)) - else: - grass.fatal(f"Memory unit {unit} not supported") + return round(memory_gb_percent) + grass.fatal(f"Memory unit {unit} not supported") + return None def test_memory(memory_string): """Test if desired memory is available. In case RAM is smaller than desired memory, use free RAM instead of desired memory value. + Args: memory_string(string): string from standard memory input option Returns: free_ram(int): free RAM to use - memory(int): available memory to use + memory(int): available memory to use. + """ # check memory memory = int(memory_string) free_ram = get_free_ram("MB", 100) if free_ram < memory: grass.warning( - _(f"Using {memory} MB but only {free_ram} MB RAM available.") + _(f"Using {memory} MB but only {free_ram} MB RAM available."), ) grass.warning(_(f"Set used memory to {free_ram} MB.")) return free_ram - else: - return memory + return memory def check_installed_addon(addon, url="..."): """Check if addon is already installed and raise error if not. + Args: addon(string): Addon to check if it is installed - url(string): Path to addon + url(string): Path to addon. + """ if not grass.find_program(addon, "--help"): msg = ( diff --git a/src/grass_gis_helpers/location.py b/src/grass_gis_helpers/location.py index 494f8af..7f44c61 100644 --- a/src/grass_gis_helpers/location.py +++ b/src/grass_gis_helpers/location.py @@ -23,10 +23,12 @@ def get_location_size(): - """Log size of current location""" + """Log size of current location.""" current_gisdbase = grass.gisenv()["GISDBASE"] cmd = grass.Popen( - f"df -h {current_gisdbase}", shell=True, stdout=subprocess.PIPE + f"df -h {current_gisdbase}", + shell=True, + stdout=subprocess.PIPE, ) grass.message( _( @@ -34,12 +36,12 @@ def get_location_size(): "\nDisk usage of GRASS GIS database:\n", f"{cmd.communicate()[0].decode('utf-8').rstrip()}\n", ), - ) + ), ) def create_tmp_location(epsg=4326): - """Creation of a new temporary location + """Creation of a new temporary location. Args: epsg (int): The number of the EPSG code @@ -47,11 +49,12 @@ def create_tmp_location(epsg=4326): Returns: tmp_loc (str): The name of the temporary location tmp_gisrc (str): The path to the original GISRC file + """ current_gisdbase = grass.gisenv()["GISDBASE"] srcgisrc = grass.tempfile() tmp_loc = f"temp_epsg{epsg}_location_{os.getpid()}" - gisrc_file = open(srcgisrc, "w") + gisrc_file = open(srcgisrc, "w", encoding="utf-8") gisrc_file.write("MAPSET: PERMANENT\n") gisrc_file.write(f"GISDBASE: {current_gisdbase}\n") gisrc_file.write(f"LOCATION_NAME: {tmp_loc}\n") @@ -66,7 +69,11 @@ def create_tmp_location(epsg=4326): # create temp location from input without import grass.verbose(_(f"Creating temporary location with EPSG:{epsg}...")) grass.run_command( - "g.proj", flags="c", location=tmp_loc, quiet=True, **epsg_arg + "g.proj", + flags="c", + location=tmp_loc, + quiet=True, + **epsg_arg, ) # switch to temp location @@ -83,13 +90,14 @@ def create_tmp_location(epsg=4326): def get_current_location(): - """Get infos to current location + """Get infos to current location. 
+    """Get infos to current location.

     Returns:
         loc (str): The name of the current location
         mapset (str): The name of the current mapset
         gisdbase (str): The current GISDBASE info
         gisrc (str): The path to the current GISRC file
+
     """
     # get current location, mapset, ...
     grassenv = grass.gisenv()
@@ -102,10 +110,11 @@ def get_current_location():

 def switch_back_original_location(original_gisrc):
     """Switching back to original location after the computation in tmp
-    location
+    location.

     Args:
         original_gisrc (str): The path to the original GISRC file
+
     """
     # switch to target location
     os.environ["GISRC"] = str(original_gisrc)
diff --git a/src/grass_gis_helpers/mapset.py b/src/grass_gis_helpers/mapset.py
index 1874e75..68bbcb6 100644
--- a/src/grass_gis_helpers/mapset.py
+++ b/src/grass_gis_helpers/mapset.py
@@ -22,16 +22,19 @@
 import grass.script as grass


-def switch_to_new_mapset(new_mapset):
+def switch_to_new_mapset(new_mapset, new=True):
     """The function switches to a new mapset and changes the GISRC file for
     parallel processing.

     Args:
         new_mapset (string): Unique name of the new mapset
+        new (boolean): Boolean if existing mapset should be used
+            or a new one created
     Returns:
         gisrc (string): The path of the old GISRC file
         newgisrc (string): The path of the new GISRC file
         old_mapset (string): The name of the old mapset
+
     """
     # current gisdbase, location
     env = grass.gisenv()
@@ -39,8 +42,14 @@ def switch_to_new_mapset(new_mapset):
     location = env["LOCATION_NAME"]
     old_mapset = env["MAPSET"]

-    grass.message(_(f"New mapset {new_mapset}"))
-    grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))
+    if new:
+        grass.message(_(f"New mapset {new_mapset}"))
+        grass.utils.try_rmdir(os.path.join(gisdbase, location, new_mapset))
+    else:
+        grass.message(_(f"Using, not deleting mapset {new_mapset}"))
+        grass.try_remove(
+            os.path.join(gisdbase, location, new_mapset, ".gislock"),
+        )

     gisrc = os.environ["GISRC"]
     newgisrc = f"{gisrc}_{os.getpid()}"
@@ -65,6 +74,7 @@ def verify_mapsets(start_cur_mapset):
         start_cur_mapset (string): Name of the mapset which is to verify
     Returns:
         location_path (string): The path of the location
+
     """
     env = grass.gisenv()
     gisdbase = env["GISDBASE"]
@@ -72,7 +82,6 @@ def verify_mapsets(start_cur_mapset):
     cur_mapset = env["MAPSET"]
     if cur_mapset != start_cur_mapset:
         grass.fatal(
-            f"new mapset is {cur_mapset}, but should be {start_cur_mapset}"
+            f"new mapset is {cur_mapset}, but should be {start_cur_mapset}",
         )
-    location_path = os.path.join(gisdbase, location)
-    return location_path
+    return os.path.join(gisdbase, location)
diff --git a/src/grass_gis_helpers/open_geodata_germany/download_data.py b/src/grass_gis_helpers/open_geodata_germany/download_data.py
index 346136f..91daaa4 100644
--- a/src/grass_gis_helpers/open_geodata_germany/download_data.py
+++ b/src/grass_gis_helpers/open_geodata_germany/download_data.py
@@ -39,36 +39,37 @@ def check_download_dir(download_dir):
         download_dir (str): Download directory module parameter
     Returns:
         (str): Path to download directory
+
     """
     if not download_dir:
         download_dir = grass.tempdir()
-    else:
-        if not os.path.isdir(download_dir):
-            grass.message(
-                _(
-                    f"Download folder {download_dir} does not exist and will "
-                    "be created."
-                )
-            )
-            os.makedirs(download_dir)
-        elif os.path.exists(download_dir) and os.listdir(download_dir):
-            grass.warning(
-                _(
-                    f"Download folder {download_dir} exists and is not empty. "
-                    "Folder will NOT be deleted."
-                )
-            )
+    elif not os.path.isdir(download_dir):
+        grass.message(
+            _(
+                f"Download folder {download_dir} does not exist and will "
+                "be created.",
+            ),
+        )
+        os.makedirs(download_dir)
+    elif os.path.exists(download_dir) and os.listdir(download_dir):
+        grass.warning(
+            _(
+                f"Download folder {download_dir} exists and is not empty. "
+                "Folder will NOT be deleted.",
+            ),
+        )
     grass.message(f"Download directory: {download_dir}")
     return download_dir


 def url_response(url):
-    """URL response function which is used by download_data_using_threadpool
+    """URL response function which is used by download_data_using_threadpool.

     Args:
         url (str): Data download url
     Return:
         url (str): Return the url for printing
+
     """
     filename = os.path.basename(url)
     response = requests.get(url, stream=True)
@@ -79,7 +80,7 @@ def url_response(url):


 def download_data_using_threadpool(urls, download_dir, nprocs):
-    """Download data from urls via ThreadPool
+    """Download data from urls via ThreadPool.

     Args:
         urls (list): List with data download urls
@@ -87,6 +88,7 @@ def download_data_using_threadpool(urls, download_dir, nprocs):
             downloaded to
         nprocs (int): The number of worker threads to use; If processes is
             None then the number returned by os.cpu_count() is used.
+
     """
     cur_dir = os.getcwd()
     try:
@@ -110,6 +112,7 @@ def extract_compressed_files(file_names, download_dir):
             downloaded
     Returns:
         extracted_files (list): List with extracted files
+
     """
     extracted_files = []
     for file_name in file_names:
@@ -133,6 +136,7 @@ def extract_compressed_files_deflate64(file_names, download_dir):
             downloaded
     Returns:
         extracted_files (list): List with extracted files
+
     """
     extracted_files = []
     for file_name in file_names:
@@ -146,15 +150,18 @@


 def fix_corrupted_data(file):
-    """Fix corrupted XYZ/TXT data file e.g. for Berlin DOMs
+    """Fix corrupted XYZ/TXT data file e.g. for Berlin DOMs.

     Args:
         file (str): XYZ or TXT data file with corrupted data
+
     """
     # remove corrupted data from TXT DOM files
     if not os.path.exists(f"{file}.bak"):
         with fileinput.FileInput(
-            file, inplace=True, backup=".bak"
+            file,
+            inplace=True,
+            backup=".bak",
         ) as file_object:
             for line in file_object:
                 # two times replace of white spaces, since some lines contain
diff --git a/src/grass_gis_helpers/open_geodata_germany/federal_state.py b/src/grass_gis_helpers/open_geodata_germany/federal_state.py
index 935c417..4ac1861 100644
--- a/src/grass_gis_helpers/open_geodata_germany/federal_state.py
+++ b/src/grass_gis_helpers/open_geodata_germany/federal_state.py
@@ -62,7 +62,7 @@


 def import_administrative_boundaries(output, aoi=None, level="KRS"):
-    """Import administrative boundaries for AOI/region
+    """Import administrative boundaries for AOI/region.

     Args:
         output (str): The name for the output vector map with the imported
@@ -78,6 +78,7 @@ def import_administrative_boundaries(output, aoi=None, level="KRS"):
             KRS - Kreise
             VWG - Verwaltungsgemeinschaften
             GEM - Gemeinden
+
     """
     # save current region and set region to AOI
     if aoi:
@@ -99,12 +100,10 @@ def import_administrative_boundaries(output, aoi=None, level="KRS"):
     try:
         # check if URL is reachable
         response = requests.get(url)
-        if not response.status_code == 200:
+        if response.status_code != 200:
             grass.fatal(
-                (
-                    "The data import of the administrative boundaries are "
-                    "currently not available."
-                )
+                "The data import of the administrative boundaries are "
+                "currently not available.",
             )

         # download and import administrative boundaries
@@ -123,7 +122,7 @@ def import_administrative_boundaries(output, aoi=None, level="KRS"):

 def get_federal_states(federal_state, federal_state_file):
     """Get federal state and federal state file module parameters and return
-    list with federal state abbreviations
+    list with federal state abbreviations.

     Args:
         federal_state (str): Federal state module parameter
@@ -137,14 +136,17 @@ def get_federal_states(federal_state, federal_state_file):
         grass.fatal(
             _(
                 "Federal state file is given, but file "
-                f"<{federal_state_file}> does not exist."
-            )
+                f"<{federal_state_file}> does not exist.",
+            ),
         )
-        with open(federal_state_file) as fs_file:
+        with open(federal_state_file, encoding="utf-8") as fs_file:
            fs_list_str = fs_file.read().strip()
        if fs_list_str == "":
            grass.fatal(
-                _("Federal state in <federal_state_file> is empty string!")
+                _(
+                    "Federal state in <federal_state_file> is empty "
+                    "string!",
+                ),
            )
    elif federal_state:
        fs_list_str = federal_state.strip()
@@ -152,8 +154,8 @@ def get_federal_states(federal_state, federal_state_file):
         grass.fatal(
             _(
                 "Neither <federal_state> nor <federal_state_file> are given. "
-                "Please set one of the two."
-            )
+                "Please set one of the two.",
+            ),
         )
     fs_list = []
     for fs in fs_list_str.split(","):
diff --git a/src/grass_gis_helpers/parallel.py b/src/grass_gis_helpers/parallel.py
index 7e86852..b9725b2 100644
--- a/src/grass_gis_helpers/parallel.py
+++ b/src/grass_gis_helpers/parallel.py
@@ -23,10 +23,14 @@


 def run_module_parallel(
-    module, module_kwargs, tile_list, nprocs, uid, parallel=True
+    module,
+    module_kwargs,
+    tile_list,
+    nprocs,
+    uid,
+    parallel=True,
 ):
-    """Running a module in parallel on a grid"""
-
+    """Running a module in parallel on a grid."""
     # save current mapset
     start_cur_mapset = grass.gisenv()["MAPSET"]

@@ -62,7 +66,7 @@ def run_module_parallel(
             # exception
             errmsg = proc.outputs["stderr"].value.strip()
             grass.fatal(
-                _(f"\nERROR by processing <{proc.get_bash()}>: {errmsg}")
+                _(f"\nERROR by processing <{proc.get_bash()}>: {errmsg}"),
             )
     # print all logs of successfully run modules ordered by module as GRASS
     # message
@@ -79,7 +83,7 @@ def run_module_parallel(


 def patching_vector_results(mapsets, output):
-    """Patching vector results of different mapsets into one together"""
+    """Patching vector results of different mapsets into one together."""
     grass.message(_(f"Patching vector {output} subsets ..."))
     if len(mapsets) > 1:
         subset_mapset = [f"{output}@{m}" for m in mapsets]
@@ -100,7 +104,7 @@ def patching_vector_results(mapsets, output):


 def patching_raster_results(mapsets, output):
-    """Patching raster results of different mapsets into one together"""
+    """Patching raster results of different mapsets into one together."""
     grass.message(_(f"Patching raster {output} subsets ..."))
     if len(mapsets) > 1:
         subset_mapset = [f"{output}@{m}" for m in mapsets]
diff --git a/src/grass_gis_helpers/raster.py b/src/grass_gis_helpers/raster.py
index e1b1a05..46e683a 100644
--- a/src/grass_gis_helpers/raster.py
+++ b/src/grass_gis_helpers/raster.py
@@ -27,16 +27,17 @@


 def adjust_raster_resolution(raster_name, output, res):
     """Resample or inpolate raster to given resolution. It is important that
-    the region already has the right resolution
+    the region already has the right resolution.

     Args:
         raster_name (str): The name of the raster map which should be
             resampled/interpolated
         output (str): The name for the resampled/interpolated raster map
         res (float): The resolution to which the raster should be resampled.
+
     """
     res_rast = float(
-        grass.parse_command("r.info", map=raster_name, flags="g")["nsres"]
+        grass.parse_command("r.info", map=raster_name, flags="g")["nsres"],
     )
     if res_rast > res:
         grass.run_command(
@@ -68,6 +69,7 @@ def adjust_raster_resolution(raster_name, output, res):
     Args:
         input_raster_list (list): List with input raster maps
         output (str): Name of the output (vrt) raster map
+
     """
     # copy raster maps to current mapset
     for rast in input_raster_list:
@@ -98,11 +100,12 @@ def create_vrt(input_raster_list, output):


 def rename_raster(band_name_old, band_name_new):
-    """Rename raster map
+    """Rename raster map.

     Args:
         band_name_old (str): Raster map name to rename
         band_name_new (str): The new name for the raster map
+
     """
     grass.run_command(
         "g.rename",
diff --git a/src/grass_gis_helpers/tests.py b/src/grass_gis_helpers/tests.py
index 365ac25..5ab001a 100644
--- a/src/grass_gis_helpers/tests.py
+++ b/src/grass_gis_helpers/tests.py
@@ -20,7 +20,7 @@


 def get_number_of_grass_elements():
-    """Get the number of grass elements like raster, vector and regions
+    """Get the number of grass elements like raster, vector and regions.

     Returns:
         num_rast (int): The number of raster maps in current mapset
@@ -28,6 +28,7 @@ def get_number_of_grass_elements():
         num_gr (int): The number of groups in current mapset
         num_reg (int): The number of regions in current mapset
         num_mapsets (int): The number of mapsets in current location
+
     """
     rast_list = grass.parse_command("g.list", type="raster")
     vect_list = grass.parse_command("g.list", type="vector")
@@ -43,9 +44,13 @@ def get_number_of_grass_elements():


 def check_number_of_grass_elements(
-    ref_num_rast, ref_num_vect, ref_num_gr, ref_num_reg, ref_num_mapsets
+    ref_num_rast,
+    ref_num_vect,
+    ref_num_gr,
+    ref_num_reg,
+    ref_num_mapsets,
 ):
-    """Check the number of grass elements
+    """Check the number of grass elements.

     Args:
         ref_num_rast (int): The reference number of raster maps
diff --git a/src/grass_gis_helpers/tiling.py b/src/grass_gis_helpers/tiling.py
index 3249066..021fdcd 100644
--- a/src/grass_gis_helpers/tiling.py
+++ b/src/grass_gis_helpers/tiling.py
@@ -30,7 +30,7 @@ def create_grid(tile_size, grid_prefix, sid, area=None):
         sid (str): unique identifier
     Return:
         grid_prefix (list): list with the names of the created vector map tiles
-        number_tiles (int): Number of created tiles
+        number_tiles (int): Number of created tiles.
     """
     # save region
     orig_region = f"grid_region_{sid}"
@@ -50,7 +50,10 @@ def create_grid(tile_size, grid_prefix, sid, area=None):
     if dist_ns <= float(tile_size) and dist_ew <= float(tile_size):
         grass.run_command("v.in.region", output=grid, quiet=True)
         grass.run_command(
-            "v.db.addtable", map=grid, columns="cat int", quiet=True
+            "v.db.addtable",
+            map=grid,
+            columns="cat int",
+            quiet=True,
         )
     else:
         # set region with tile_size
@@ -58,7 +61,10 @@ def create_grid(tile_size, grid_prefix, sid, area=None):

         # create grid
         grass.run_command(
-            "v.mkgrid", map=grid, box=f"{tile_size},{tile_size}", quiet=True
+            "v.mkgrid",
+            map=grid,
+            box=f"{tile_size},{tile_size}",
+            quiet=True,
         )
     # reset region
     reset_region(orig_region)
@@ -77,8 +83,8 @@ def create_grid(tile_size, grid_prefix, sid, area=None):
             grass.fatal(
                 _(
                     f"The set region is not overlapping with {area}. "
-                    f"Please define another region."
-                )
+                    f"Please define another region.",
+                ),
             )
         else:
             grid_name = grid
@@ -86,8 +92,12 @@ def create_grid(tile_size, grid_prefix, sid, area=None):
     # create list of tiles
     tiles_num_list = list(
         grass.parse_command(
-            "v.db.select", map=grid_name, columns="cat", flags="c", quiet=True
-        ).keys()
+            "v.db.select",
+            map=grid_name,
+            columns="cat",
+            flags="c",
+            quiet=True,
+        ).keys(),
     )

     number_tiles = len(tiles_num_list)
diff --git a/src/grass_gis_helpers/validation.py b/src/grass_gis_helpers/validation.py
index 8532d37..9f8614c 100644
--- a/src/grass_gis_helpers/validation.py
+++ b/src/grass_gis_helpers/validation.py
@@ -22,10 +22,12 @@


 def get_gdalinfo_returncodes(input):
-    """Return return codes from reading input file with gdalinfo"""
+    """Return return codes from reading input file with gdalinfo."""
     gdalinfo_cmd = ["gdalinfo", "-mm", input]
     p_gdalinfo = subprocess.Popen(
-        gdalinfo_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        gdalinfo_cmd,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
     )
     gdalinfo_err = p_gdalinfo.communicate()[1].decode("utf-8")
     gdalinfo_returncode = p_gdalinfo.returncode
@@ -33,7 +35,7 @@ def get_gdalinfo_returncodes(input):


 def check_valid_rasterdata(input, strict=True):
-    """Check if input is broken and returns grass.fatal() in this case"""
+    """Check if input is broken and returns grass.fatal() in this case."""
     gdalinfo_err, gdalinfo_returncode = get_gdalinfo_returncodes(input)
     if strict:
         # strict check: checks if gdalinfo contains any error
@@ -44,12 +46,9 @@ def check_valid_rasterdata(input, strict=True):
                     "NOTE: Might be harmless error messages. "
                     "Data might be still readable. "
                     "For a less strict check use: "
-                    "check_valid_rasterdata(<input>,strict=False)."
-                )
+                    "check_valid_rasterdata(<input>,strict=False).",
+                ),
             )
-    else:
-        # less strict check: fails only if bands can't be read
-        if gdalinfo_returncode != 0 or (
-            "TIFFReadEncodedStrip" in gdalinfo_err
-        ):
-            grass.fatal(_(f"<{input}> is a broken file"))
+    # less strict check: fails only if bands can't be read
+    elif gdalinfo_returncode != 0 or ("TIFFReadEncodedStrip" in gdalinfo_err):
+        grass.fatal(_(f"<{input}> is a broken file"))
diff --git a/src/grass_gis_helpers/vector.py b/src/grass_gis_helpers/vector.py
index 74a7453..1f8d5f6 100644
--- a/src/grass_gis_helpers/vector.py
+++ b/src/grass_gis_helpers/vector.py
@@ -25,12 +25,13 @@


 def patch_vectors(vector_list, output, rm_vectors=None):
-    """Patch vector data from a list
+    """Patch vector data from a list.

     Args:
         vector_list (list): List with vectors to patch
         output (str): Output map
         rm_vectors (list): List with vectors that should be removed
+
     """
     # patch several vectors (e.g. from parallel imports)
     if len(vector_list) > 1: