diff --git a/.github/workflows/cicd_full.yml b/.github/workflows/cicd_full.yml
index 9cba47a..d51bfdb 100644
--- a/.github/workflows/cicd_full.yml
+++ b/.github/workflows/cicd_full.yml
@@ -1,5 +1,5 @@
-name: cicd_deploy
+name: cicd_full
 
 on:
   # Run tests for pull-requests on master
diff --git a/.gitignore b/.gitignore
index 976f782..b5f6a44 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 tmp
+.vscode
 __pycache__
 ign_pdal_tools.egg-info
 dist
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ec26db4..a50402a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,15 @@
 # dev
 
+# 1.3.0
+- color: support colorization for <0.2m clouds (including height=0/width=0)
+- color: ceil width/height to have a bbox that contains all points
+
+# 1.2.1
+- fix cicd_full github action: deployment was triggered on push to dev instead of on push to master only
+
+# 1.2.0
+- color: keep downloaded orthoimages by returning them, so that they stay in scope during execution
+
 # 1.1.1
 - unlock: fix main
 - tests:
@@ -8,7 +18,7 @@
 
 # 1.1.0
 - standardization: handle malformed laz input ("Global encoding WKT flag not set for point format 6 - 10")
-color: extract unlock module from colorization and rename colorization function
+- color: extract unlock module from colorization and rename colorization function
 
 # 1.0.0
 - first public version
diff --git a/pdaltools/_version.py b/pdaltools/_version.py
index ecb2274..692e5cc 100644
--- a/pdaltools/_version.py
+++ b/pdaltools/_version.py
@@ -1,4 +1,4 @@
-__version__ = "1.1.1"
+__version__ = "1.3.0"
 
 
 if __name__ == "__main__":
diff --git a/pdaltools/color.py b/pdaltools/color.py
index 9a96778..2f33516 100644
--- a/pdaltools/color.py
+++ b/pdaltools/color.py
@@ -1,4 +1,5 @@
 import json
+from math import ceil
 import subprocess as sp
 import tempfile
 import pdal
@@ -11,19 +12,19 @@
 
 
 def pretty_time_delta(seconds):
-    sign_string = '-' if seconds < 0 else ''
+    sign_string = "-" if seconds < 0 else ""
     seconds = abs(int(seconds))
     days, seconds = divmod(seconds, 86400)
     hours, seconds = divmod(seconds, 3600)
     minutes, seconds = divmod(seconds, 60)
     if days > 0:
-        return '%s%dd%dh%dm%ds' % (sign_string, days, hours, minutes, seconds)
+        return "%s%dd%dh%dm%ds" % (sign_string, days, hours, minutes, seconds)
     elif hours > 0:
-        return '%s%dh%dm%ds' % (sign_string, hours, minutes, seconds)
+        return "%s%dh%dm%ds" % (sign_string, hours, minutes, seconds)
     elif minutes > 0:
-        return '%s%dm%ds' % (sign_string, minutes, seconds)
+        return "%s%dm%ds" % (sign_string, minutes, seconds)
     else:
-        return '%s%ds' % (sign_string, seconds)
+        return "%s%ds" % (sign_string, seconds)
 
 
 def retry(times, delay, factor=2, debug=False):
@@ -36,11 +37,11 @@ def newfn(*args, **kwargs):
                 try:
                     return func(*args, **kwargs)
                 except requests.exceptions.ConnectionError as err:
-                    print ("Connection Error:", err)
+                    print("Connection Error:", err)
                     need_retry = True
                 except requests.exceptions.HTTPError as err:
                     if "Server Error" in str(err):
-                        print ("HTTP Error:", err)
+                        print("HTTP Error:", err)
                         need_retry = True
                     else:
                         raise err
@@ -52,25 +53,29 @@ def newfn(*args, **kwargs):
                     attempt += 1
             return func(*args, **kwargs)
+
         return newfn
+
     return decorator
 
 
-def download_image_from_geoportail(
-    proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout
-):
+def download_image_from_geoportail(proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout):
+    # Give single-point clouds a width/height of at least one pixel to have valid BBOX and SIZE
+    if minx == maxx:
+        maxx = minx + 1 / pixel_per_meter
+    if miny == maxy:
+        maxy = miny + 1 / pixel_per_meter
+
     # for layer in layers:
     URL_GPP = "https://wxs.ign.fr/ortho/geoportail/r/wms?"
     URL_FORMAT = "&EXCEPTIONS=text/xml&FORMAT=image/geotiff&SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&STYLES="
     URL_EPSG = "&CRS=EPSG:" + str(proj)
-    URL_BBOX = (
-        "&BBOX=" + str(minx) + "," + str(miny) + "," + str(maxx) + "," + str(maxy)
-    )
+    URL_BBOX = "&BBOX=" + str(minx) + "," + str(miny) + "," + str(maxx) + "," + str(maxy)
     URL_SIZE = (
         "&WIDTH="
-        + str(int((maxx - minx) * pixel_per_meter))
+        + str(ceil((maxx - minx) * pixel_per_meter))
         + "&HEIGHT="
-        + str(int((maxy - miny) * pixel_per_meter))
+        + str(ceil((maxy - miny) * pixel_per_meter))
     )
 
     URL = URL_GPP + "LAYERS=" + layer + URL_FORMAT + URL_EPSG + URL_BBOX + URL_SIZE
@@ -115,11 +120,16 @@ def pdal_info_json(input_file: str):
 
 
 @copy_and_hack_decorator
-def color(input_file: str, output_file :str,
-          proj="", pixel_per_meter=5, timeout_second=300,
-          color_rvb_enabled=True, color_ir_enabled=True, veget_index_file=""
-          ):
-
+def color(
+    input_file: str,
+    output_file: str,
+    proj="",
+    pixel_per_meter=5,
+    timeout_second=300,
+    color_rvb_enabled=True,
+    color_ir_enabled=True,
+    veget_index_file="",
+):
     json_info = pdal_info_json(input_file)
     metadata = json_info["metadata"]
     minx, maxx, miny, maxy = metadata["minx"], metadata["maxx"], metadata["miny"], metadata["maxy"]
@@ -139,62 +149,60 @@ def color(input_file: str, output_file :str,
         pipeline |= pdal.Filter.colorization(raster=veget_index_file, dimensions="Deviation:1:256.0")
         writer_extra_dims = ["Deviation=ushort"]
 
+    tmp_ortho = None
     if color_rvb_enabled:
         tmp_ortho = tempfile.NamedTemporaryFile().name
-        download_image_from_geoportail_retrying(proj, "ORTHOIMAGERY.ORTHOPHOTOS", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho, timeout_second)
-        pipeline|= pdal.Filter.colorization(raster=tmp_ortho, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0")
+        download_image_from_geoportail_retrying(
+            proj, "ORTHOIMAGERY.ORTHOPHOTOS", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho, timeout_second
+        )
+        pipeline |= pdal.Filter.colorization(raster=tmp_ortho, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0")
 
+    tmp_ortho_irc = None
     if color_ir_enabled:
         tmp_ortho_irc = tempfile.NamedTemporaryFile().name
-        download_image_from_geoportail_retrying(proj, "ORTHOIMAGERY.ORTHOPHOTOS.IRC", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho_irc, timeout_second)
+        download_image_from_geoportail_retrying(
+            proj,
+            "ORTHOIMAGERY.ORTHOPHOTOS.IRC",
+            minx,
+            miny,
+            maxx,
+            maxy,
+            pixel_per_meter,
+            tmp_ortho_irc,
+            timeout_second,
+        )
         pipeline |= pdal.Filter.colorization(raster=tmp_ortho_irc, dimensions="Infrared:1:256.0")
 
-    pipeline |= pdal.Writer.las(filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8")
+    pipeline |= pdal.Writer.las(
+        filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8"
+    )
 
     print("Traitement du nuage de point")
     pipeline.execute()
 
-    # os.remove(tmp_ortho)
-    # os.remove(tmp_ortho_irc)
+    # The orthoimage files will be deleted only when their references are lost.
+    # To keep them, make a copy (with e.g. shutil.copy(...))
+    # See: https://docs.python.org/2/library/tempfile.html#tempfile.TemporaryFile
+    return tmp_ortho, tmp_ortho_irc
 
 
 def parse_args():
     parser = argparse.ArgumentParser("Colorize tool")
+    parser.add_argument("--input", "-i", type=str, required=True, help="Input file")
+    parser.add_argument("--output", "-o", type=str, default="", help="Output file")
     parser.add_argument(
-        "--input", "-i",
-        type=str,
-        required=True,
-        help="Input file")
-    parser.add_argument(
-        "--output", "-o",
-        type=str,
-        default="",
-        help="Output file")
-    parser.add_argument(
-        "--proj", "-p",
-        type=str,
-        default = "",
-        help="Projection, default will use projection from metadata input")
-    parser.add_argument(
-        "--resolution", "-r",
-        type=float,
-        default = 5,
-        help="Resolution, in pixel per meter")
-    parser.add_argument(
-        "--timeout", "-t",
-        type=int,
-        default = 300,
-        help="Timeout, in seconds")
-    parser.add_argument('--rvb', action='store_true', help="Colorize RVB")
-    parser.add_argument('--ir', action='store_true', help="Colorize IR")
+        "--proj", "-p", type=str, default="", help="Projection, default will use projection from metadata input"
+    )
+    parser.add_argument("--resolution", "-r", type=float, default=5, help="Resolution, in pixel per meter")
+    parser.add_argument("--timeout", "-t", type=int, default=300, help="Timeout, in seconds")
+    parser.add_argument("--rvb", action="store_true", help="Colorize RVB")
+    parser.add_argument("--ir", action="store_true", help="Colorize IR")
     parser.add_argument(
-        "--vegetation",
-        type=str,
-        default = "",
-        help="Vegetation file, value will be stored in Deviation field")
-    return parser.parse_args()
+        "--vegetation", type=str, default="", help="Vegetation file, value will be stored in Deviation field"
+    )
+    return parser.parse_args()
 
 
 if __name__ == "__main__":
     args = parse_args()
-    color(args.input, args.output, args.proj, args.resolution, args.timeout, args.rvb, args.ir, args.vegetation)
\ No newline at end of file
+    color(args.input, args.output, args.proj, args.resolution, args.timeout, args.rvb, args.ir, args.vegetation)
diff --git a/pdaltools/count_occurences/count_occurences_for_attribute.py b/pdaltools/count_occurences/count_occurences_for_attribute.py
index 19f7120..5ddedae 100644
--- a/pdaltools/count_occurences/count_occurences_for_attribute.py
+++ b/pdaltools/count_occurences/count_occurences_for_attribute.py
@@ -13,23 +13,19 @@
 
 def parse_args():
     parser = argparse.ArgumentParser("Count points with each value of an attribute.")
-    parser.add_argument("--input_files",
-                        nargs="+",
-                        type=str,
-                        help="List of laz input files separated by spaces, or directory " +
-                        "containing las/laz files")
-    parser.add_argument("--attribute",
-                        type=str,
-                        default="Classification",
-                        help="Attribute on which to count values")
-    parser.add_argument("--output_file",
-                        type=str,
-                        help="Output json file containing the counts")
+    parser.add_argument(
+        "--input_files",
+        nargs="+",
+        type=str,
+        help="List of laz input files separated by spaces, or directory " + "containing las/laz files",
+    )
+    parser.add_argument("--attribute", type=str, default="Classification", help="Attribute on which to count values")
+    parser.add_argument("--output_file", type=str, help="Output json file containing the counts")
     return parser.parse_args()
 
 
-def compute_count_one_file(filepath: str, attribute: str="Classification") -> Counter:
+def compute_count_one_file(filepath: str, attribute: str = "Classification") -> Counter:
     pipeline = pdal.Reader.las(filepath)
     pipeline |= pdal.Filter.stats(dimensions=attribute, count=attribute)
     pipeline.execute()
@@ -48,10 +44,10 @@ def compute_count_one_file(filepath: str, attribute: str="Classification") -> Co
     return counts
 
 
-def compute_count(input_files: List[str], attribute: str="Classification", output_file=""):
+def compute_count(input_files: List[str], attribute: str = "Classification", output_file=""):
     all_counts = Counter()
     # refresh status bar at most every 1/100 iter cf. https://github.com/tqdm/tqdm/issues/1429
-    for f in tqdm(input_files, miniters=int(len(input_files)/100), maxinterval=float('inf')):
+    for f in tqdm(input_files, miniters=int(len(input_files) / 100), maxinterval=float("inf")):
         logging.debug(f"Counting values of {attribute} for {os.path.basename(f)}")
         all_counts += compute_count_one_file(f, attribute)
 
@@ -69,8 +65,7 @@ def main():
     args = parse_args()
     if len(args.input_files) == 1 and os.path.isdir(args.input_files[0]):
         input_dir = args.input_files[0]
-        input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir)
-                       if f.lower().endswith(("las", "laz"))]
+        input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir) if f.lower().endswith(("las", "laz"))]
     else:
         input_files = args.input_files
 
diff --git a/pdaltools/count_occurences/merge_occurences_counts.py b/pdaltools/count_occurences/merge_occurences_counts.py
index f119305..77723d1 100644
--- a/pdaltools/count_occurences/merge_occurences_counts.py
+++ b/pdaltools/count_occurences/merge_occurences_counts.py
@@ -13,14 +13,13 @@
 
 def parse_args():
     parser = argparse.ArgumentParser("Count points with each value of an attribute.")
-    parser.add_argument("--input_files",
-                        nargs="+",
-                        type=str,
-                        help="List of json input files separated by spaces, or directory " +
-                        "containing json files")
-    parser.add_argument("--output_file",
-                        type=str,
-                        help="Output json file containing the counts")
+    parser.add_argument(
+        "--input_files",
+        nargs="+",
+        type=str,
+        help="List of json input files separated by spaces, or directory " + "containing json files",
+    )
+    parser.add_argument("--output_file", type=str, help="Output json file containing the counts")
     return parser.parse_args()
 
 
@@ -28,7 +27,7 @@ def parse_args():
 
 def merge_counts(input_files: List[str], output_file=""):
     all_counts = Counter()
     # refresh status bar at most every 1/100 iter cf. https://github.com/tqdm/tqdm/issues/1429
-    for input_f in tqdm(input_files, miniters=int(len(input_files)/100), maxinterval=float('inf')):
+    for input_f in tqdm(input_files, miniters=int(len(input_files) / 100), maxinterval=float("inf")):
         with open(input_f, "r") as f:
             count = Counter(json.load(f))
 
@@ -43,12 +42,12 @@ def merge_counts(input_files: List[str], output_file=""):
 
     return all_counts
 
+
 def main():
     args = parse_args()
     if len(args.input_files) == 1 and os.path.isdir(args.input_files[0]):
         input_dir = args.input_files[0]
-        input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir)
-                       if f.lower().endswith("json")]
+        input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir) if f.lower().endswith("json")]
         logging.info(f"Input_files is a directory. Run on {len(input_files)} from this directory.")
     else:
         logging.info(f"Input_files is a list of files. Run on {len(args.input_files)} files.")
diff --git a/pdaltools/las_add_buffer.py b/pdaltools/las_add_buffer.py
index 4cb8c4b..6d200a8 100644
--- a/pdaltools/las_add_buffer.py
+++ b/pdaltools/las_add_buffer.py
@@ -8,12 +8,15 @@
 from typing import List
 
 
-def create_las_with_buffer(input_dir: str, tile_filename: str,
-                           output_filename: str,
-                           buffer_width: int=100,
-                           spatial_ref: str="EPSG:2154",
-                           tile_width: int=1000,
-                           tile_coord_scale: int=1000):
+def create_las_with_buffer(
+    input_dir: str,
+    tile_filename: str,
+    output_filename: str,
+    buffer_width: int = 100,
+    spatial_ref: str = "EPSG:2154",
+    tile_width: int = 1000,
+    tile_coord_scale: int = 1000,
+):
     """Merge lidar tiles around the queried tile and crop them in order to add a buffer
     to the tile (usually 100m).
     Args:
@@ -26,19 +29,32 @@ def create_las_with_buffer(input_dir: str, tile_filename: str,
         tile_coord_scale (int) : scale used in the filename to describe coordinates in meters
         (usually 1000m)
     """
-    bounds = get_buffered_bounds_from_filename(tile_filename, buffer_width=buffer_width,
-                                               tile_width=tile_width,
-                                               tile_coord_scale=tile_coord_scale)
+    bounds = get_buffered_bounds_from_filename(
+        tile_filename, buffer_width=buffer_width, tile_width=tile_width, tile_coord_scale=tile_coord_scale
+    )
 
     logging.debug(f"Add buffer of size {buffer_width} to tile.")
-    las_merge_and_crop(input_dir, tile_filename, bounds, output_filename, spatial_ref,
-                       tile_width=tile_width, tile_coord_scale=tile_coord_scale)
+    las_merge_and_crop(
+        input_dir,
+        tile_filename,
+        bounds,
+        output_filename,
+        spatial_ref,
+        tile_width=tile_width,
+        tile_coord_scale=tile_coord_scale,
+    )
 
 
-def las_merge_and_crop(input_dir: str, tile_filename: str, bounds: List,
-                       output_filename: str, spatial_ref: str="EPSG:2154",
-                       tile_width=1000, tile_coord_scale=1000):
-    """ Merge and crop las in a single pipeline (for buffer addition)
+def las_merge_and_crop(
+    input_dir: str,
+    tile_filename: str,
+    bounds: List,
+    output_filename: str,
+    spatial_ref: str = "EPSG:2154",
+    tile_width=1000,
+    tile_coord_scale=1000,
+):
+    """Merge and crop las in a single pipeline (for buffer addition)
 
     For performance reasons, instead of using a pipeline that reads all files, merge them
     and then crop to the desired bbox, what is done is:
@@ -86,40 +102,35 @@ def las_merge_and_crop(input_dir: str, tile_filename: str, bounds: List,
             logging.info(pipeline.toJSON())
             pipeline.execute()
     else:
-        raise ValueError('List of valid tiles is empty : stop processing')
+        raise ValueError("List of valid tiles is empty : stop processing")
     pass
 
 
 def parse_args():
     parser = argparse.ArgumentParser("Add a buffer to a las tile by stitching with its neighbors")
     parser.add_argument(
-        "--input_dir", "-i",
-        type=str,
-        required=True,
-        help="Path to the the folder containing the tile to which you want to add buffer"+
-        "as well as its neighbors tiles")
-    parser.add_argument(
-        "--tile_filename", "-f",
+        "--input_dir",
+        "-i",
         type=str,
         required=True,
-        help="Filename of the input tile (basename only)")
+        help="Path to the folder containing the tile to which you want to add a buffer, "
+        + "as well as its neighbor tiles",
+    )
     parser.add_argument(
-        "--output_dir", "-o",
-        type=str,
-        required=True,
-        help="Directory folder for saving the outputs")
+        "--tile_filename", "-f", type=str, required=True, help="Filename of the input tile (basename only)"
+    )
+    parser.add_argument("--output_dir", "-o", type=str, required=True, help="Directory folder for saving the outputs")
     parser.add_argument(
-        "--buffer_width", "-b",
"--buffer_width", + "-b", default=100, type=int, - help="Width (in meter) for the buffer that is added to the tile before interpolation " + - "(to prevent artefacts)" + help="Width (in meter) for the buffer that is added to the tile before interpolation " + + "(to prevent artefacts)", ) # Optional parameters parser.add_argument( - "--spatial_reference", - default="EPSG:2154", - help="Spatial reference to use to override the one from input las." + "--spatial_reference", default="EPSG:2154", help="Spatial reference to use to override the one from input las." ) return parser.parse_args() @@ -127,8 +138,10 @@ def parse_args(): if __name__ == "__main__": args = parse_args() - create_las_with_buffer(input_dir=args.input_dir, - tile_filename=os.path.join(args.input_dir, args.tile_filename), - output_filename=os.path.join(args.output_dir, args.tile_filename), - buffer_width=args.buffer_width, - spatial_ref=args.spatial_reference) + create_las_with_buffer( + input_dir=args.input_dir, + tile_filename=os.path.join(args.input_dir, args.tile_filename), + output_filename=os.path.join(args.output_dir, args.tile_filename), + buffer_width=args.buffer_width, + spatial_ref=args.spatial_reference, + ) diff --git a/pdaltools/las_clip.py b/pdaltools/las_clip.py index ecefc9e..5d642a3 100755 --- a/pdaltools/las_clip.py +++ b/pdaltools/las_clip.py @@ -7,8 +7,8 @@ import json -def las_crop(input_file: str, output_file: str, bounds, spatial_ref:str="EPSG:2154"): - """ Crop filter removes points that fall inside a cropping bounding box (2D) +def las_crop(input_file: str, output_file: str, bounds, spatial_ref: str = "EPSG:2154"): + """Crop filter removes points that fall inside a cropping bounding box (2D) Args: input_dir (str): input point cloud file output_dir (str): output point cloud file @@ -16,22 +16,10 @@ def las_crop(input_file: str, output_file: str, bounds, spatial_ref:str="EPSG:21 """ # Parameters information = { - "pipeline": [ - { - "type": "readers.las", - "filename": input_file, - "override_srs": spatial_ref, - "nosrs": True - }, - { - "type":"filters.crop", - "bounds": str(bounds) - }, - { - "type": "writers.las", - "a_srs": spatial_ref, - "filename": output_file - } + "pipeline": [ + {"type": "readers.las", "filename": input_file, "override_srs": spatial_ref, "nosrs": True}, + {"type": "filters.crop", "bounds": str(bounds)}, + {"type": "writers.las", "a_srs": spatial_ref, "filename": output_file}, ] } # Create json diff --git a/pdaltools/las_info.py b/pdaltools/las_info.py index 46a5d62..229d4ee 100644 --- a/pdaltools/las_info.py +++ b/pdaltools/las_info.py @@ -5,6 +5,7 @@ import subprocess as sp from typing import Tuple + def las_info_metadata(filename: str): """Get las info from pdal info --metadata""" ret = sp.run(["pdal", "info", filename, "--metadata"], capture_output=True) @@ -12,29 +13,22 @@ def las_info_metadata(filename: str): infos = ret.stdout.decode() infos = json.loads(infos) - return infos['metadata'] + return infos["metadata"] else: raise RuntimeError(f"pdal info failed with error: \n {ret.stderr}") -def las_info_pipeline(filename:str, spatial_ref:str="EPSG:2154"): +def las_info_pipeline(filename: str, spatial_ref: str = "EPSG:2154"): """Get las info from pdal pipeline with filter.info Args: filename: input las spatial_ref: spatial reference to pass as 'override_srs' argument in las reader """ information = { - "pipeline": [ - { - "type": "readers.las", - "filename": filename, - "override_srs": spatial_ref, - "nosrs": True - }, - { - "type": "filters.info" - } + "pipeline": [ + 
{"type": "readers.las", "filename": filename, "override_srs": spatial_ref, "nosrs": True}, + {"type": "filters.info"}, ] } @@ -50,11 +44,11 @@ def las_info_pipeline(filename:str, spatial_ref:str="EPSG:2154"): if type(metadata) == str: metadata = json.loads(metadata) - return metadata['metadata']['filters.info'] + return metadata["metadata"]["filters.info"] -def las_get_xy_bounds(filename: str, buffer_width: int=0, spatial_ref:str="EPSG:2154"): - """ Get tile bounds (xy only) from las metadata. +def las_get_xy_bounds(filename: str, buffer_width: int = 0, spatial_ref: str = "EPSG:2154"): + """Get tile bounds (xy only) from las metadata. Try getting bounds using las_info_metadata As command "pdal_info --metadata" does not seem to work properly on some data (TerraSolid output for ex), fallback to las_info_pipeline @@ -71,7 +65,7 @@ def las_get_xy_bounds(filename: str, buffer_width: int=0, spatial_ref:str="EPSG: # Parameters _x = [] _y = [] - bounds= [] + bounds = [] try: metadata = las_info_metadata(filename) bounds_dict = metadata @@ -82,12 +76,12 @@ def las_get_xy_bounds(filename: str, buffer_width: int=0, spatial_ref:str="EPSG: if type(metadata) == str: metadata = json.loads(metadata) # Export bound (maxy, maxy, minx and miny), then creating a buffer with 100 m - _x.append(float((bounds_dict['minx']) - buffer_width)) # coordinate minX - _x.append(float((bounds_dict['maxx']) + buffer_width)) # coordinate maxX - _y.append(float((bounds_dict['miny']) - buffer_width)) # coordinate minY - _y.append(float((bounds_dict['maxy']) + buffer_width)) # coordinate maxY - bounds.append(_x) # [xmin, xmax] - bounds.append(_y) # insert [ymin, ymax] + _x.append(float((bounds_dict["minx"]) - buffer_width)) # coordinate minX + _x.append(float((bounds_dict["maxx"]) + buffer_width)) # coordinate maxX + _y.append(float((bounds_dict["miny"]) - buffer_width)) # coordinate minY + _y.append(float((bounds_dict["maxy"]) + buffer_width)) # coordinate maxY + bounds.append(_x) # [xmin, xmax] + bounds.append(_y) # insert [ymin, ymax] return tuple(i for i in bounds) @@ -104,9 +98,10 @@ def parse_filename(file: str): return prefix, int(coordx), int(coordy), suffix -def get_buffered_bounds_from_filename(filename: str, buffer_width: int=0, - tile_width: int=1000, tile_coord_scale: int=1000) -> Tuple: - """ Get tile bounds (xy only) from las metadata. +def get_buffered_bounds_from_filename( + filename: str, buffer_width: int = 0, tile_width: int = 1000, tile_coord_scale: int = 1000 +) -> Tuple: + """Get tile bounds (xy only) from las metadata. 
Try getting bounds using las_info_metadata As command "pdal_info --metadata" does not seem to work properly on some data (TerraSolid output for ex), fallback to las_info_pipeline @@ -131,4 +126,4 @@ def get_buffered_bounds_from_filename(filename: str, buffer_width: int=0, xs = [minX - buffer_width, maxX + buffer_width] ys = [minY - buffer_width, maxY + buffer_width] - return (xs,ys) \ No newline at end of file + return (xs, ys) diff --git a/pdaltools/las_merge.py b/pdaltools/las_merge.py index e12b400..e38ace0 100644 --- a/pdaltools/las_merge.py +++ b/pdaltools/las_merge.py @@ -5,7 +5,7 @@ from pdaltools.las_info import parse_filename -def create_filenames(file: str, tile_width: int=1000, tile_coord_scale: int=1000): +def create_filenames(file: str, tile_width: int = 1000, tile_coord_scale: int = 1000): """Generate the name of the tiles around the input LIDAR tile It supposes that the file names are formatted as {prefix1}_{prefix2}_{coordx}_{coordy}_{suffix} with coordx and coordy having at least 4 digits @@ -42,7 +42,7 @@ def create_filenames(file: str, tile_width: int=1000, tile_coord_scale: int=1000 def check_tiles_exist(list_las: list): - """ Check if pointclouds exist + """Check if pointclouds exist Args: list_las (list): Filenames of the tiles around the LIDAR tile @@ -52,7 +52,7 @@ def check_tiles_exist(list_las: list): li = [] for i in list_las: if not os.path.exists(i): - logging.info(f'NOK : {i}') + logging.info(f"NOK : {i}") pass else: li.append(i) @@ -99,13 +99,10 @@ def las_merge(las_dir, input_file, merge_file, tile_width=1000, tile_coord_scale if len(Listfiles) > 0: # Merge information = {} - information = { - "pipeline": - Listfiles + [merge_file] - } + information = {"pipeline": Listfiles + [merge_file]} merge = json.dumps(information, sort_keys=True, indent=4) logging.info(merge) pipeline = pdal.Pipeline(merge) pipeline.execute() else: - raise ValueError('List of valid tiles is empty : stop processing') + raise ValueError("List of valid tiles is empty : stop processing") diff --git a/pdaltools/replace_attribute_in_las.py b/pdaltools/replace_attribute_in_las.py index d4d75ab..4d791c5 100644 --- a/pdaltools/replace_attribute_in_las.py +++ b/pdaltools/replace_attribute_in_las.py @@ -11,36 +11,28 @@ from typing import List, Dict - def parse_args(): parser = argparse.ArgumentParser("Replace values of a given attribute in a las/laz file.") - parser.add_argument("--input_file", - type=str, - help="Laz input file") - parser.add_argument("--output_file", - type=str, - help="Laz output file.") - parser.add_argument("--attribute", - type=str, - default="Classification", - help="Attribute on which to count values") - parser.add_argument("--replacement_map", - type=str, - help="Path to a json file that contains the values that we want to " + - "replace, or string that contains the content of such a file." + - "It should contain a dict like " + - "{new_value1: [value_to_replace1, value_to_replace2], " + - "new_value2: [value_to_replace3, ...]}") - parser.add_argument("--record_format", - choices=[6, 8], - type=int, - required=True, - help="Record format: 6 (no color) or 8 (4 color channels)") - parser.add_argument("--projection", - default="EPSG:2154", - type=str, - help="Projection, eg. 
EPSG:2154") - + parser.add_argument("--input_file", type=str, help="Laz input file") + parser.add_argument("--output_file", type=str, help="Laz output file.") + parser.add_argument("--attribute", type=str, default="Classification", help="Attribute on which to count values") + parser.add_argument( + "--replacement_map", + type=str, + help="Path to a json file that contains the values that we want to " + + "replace, or string that contains the content of such a file." + + "It should contain a dict like " + + "{new_value1: [value_to_replace1, value_to_replace2], " + + "new_value2: [value_to_replace3, ...]}", + ) + parser.add_argument( + "--record_format", + choices=[6, 8], + type=int, + required=True, + help="Record format: 6 (no color) or 8 (4 color channels)", + ) + parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154") return parser.parse_args() @@ -56,9 +48,7 @@ def check_duplicate_values(d: Dict) -> None: raise ValueError(f"Duplicate value {val} provided more than once (count={count})") -def dict_to_pdal_assign_list(d: Dict, - output_attribute: str="Classification", - input_attribute: str="tmp") -> List: +def dict_to_pdal_assign_list(d: Dict, output_attribute: str = "Classification", input_attribute: str = "tmp") -> List: """Create an assignment list (to be passed to pdal) from a dictionary of type d = { output_val1: [input_val1, input_val2], @@ -70,18 +60,18 @@ def dict_to_pdal_assign_list(d: Dict, assignment_list = [] for output_val, input_values in d.items(): for input_val in input_values: - assignment_list.append( - f"{output_attribute} = {output_val} WHERE {input_attribute} == {input_val}" - ) + assignment_list.append(f"{output_attribute} = {output_val} WHERE {input_attribute} == {input_val}") return assignment_list -def replace_values(input_file: str, - output_file: str, - replacement_map: Dict, - attribute: str="Classification", - writer_parameters: Dict={}) -> None: +def replace_values( + input_file: str, + output_file: str, + replacement_map: Dict, + attribute: str = "Classification", + writer_parameters: Dict = {}, +) -> None: """ Replace values of attribute {attribute} using a replacement map """ @@ -99,7 +89,7 @@ def replace_values(input_file: str, def parse_replacement_map_from_path_or_json_string(replacement_map): if os.path.isfile(replacement_map): - with open(replacement_map, 'r') as f: + with open(replacement_map, "r") as f: parsed_map = json.load(f) else: try: @@ -112,12 +102,13 @@ def parse_replacement_map_from_path_or_json_string(replacement_map): return parsed_map -def replace_values_clean(input_file: str, - output_file: str, - replacement_map: Dict, - attribute: str="Classification", - writer_parameters: Dict={}): - +def replace_values_clean( + input_file: str, + output_file: str, + replacement_map: Dict, + attribute: str = "Classification", + writer_parameters: Dict = {}, +): _, extension = os.path.splitext(output_file) with tempfile.NamedTemporaryFile(suffix=extension) as tmp: tmp.close() @@ -131,8 +122,7 @@ def main(): writer_parameters = get_writer_parameters(writer_params_from_parser) replacement_map = parse_replacement_map_from_path_or_json_string(args.replacement_map) - replace_values_clean(args.input_file, args.output_file, - replacement_map, args.attribute, writer_parameters) + replace_values_clean(args.input_file, args.output_file, replacement_map, args.attribute, writer_parameters) if __name__ == "__main__": diff --git a/pdaltools/standardize_format.py b/pdaltools/standardize_format.py index 3bdcd27..b0d45f9 
100644 --- a/pdaltools/standardize_format.py +++ b/pdaltools/standardize_format.py @@ -20,34 +20,26 @@ minor_version="4", # Laz format version (pdal always write in 1.x format) global_encoding=17, # store WKT projection in file compression="true", # Save to compressed laz format - extra_dims= [], # Save no extra_dims - scale_x=0.01, # Precision of the stored data + extra_dims=[], # Save no extra_dims + scale_x=0.01, # Precision of the stored data scale_y=0.01, scale_z=0.01, - offset_x=0, # No offset + offset_x=0, # No offset offset_y=0, offset_z=0, dataformat_id=6, # No color by default - a_srs="EPSG:2154" + a_srs="EPSG:2154", ) def parse_args(): parser = argparse.ArgumentParser("Rewrite laz file with standard format.") - parser.add_argument("--input_file", - type=str, - help="Laz input file.") - parser.add_argument("--output_file", - type=str, - help="Laz output file") - parser.add_argument("--record_format", - choices=[6, 8], - type=int, - help="Record format: 6 (no color) or 8 (4 color channels)") - parser.add_argument("--projection", - default="EPSG:2154", - type=str, - help="Projection, eg. EPSG:2154") + parser.add_argument("--input_file", type=str, help="Laz input file.") + parser.add_argument("--output_file", type=str, help="Laz output file") + parser.add_argument( + "--record_format", choices=[6, 8], type=int, help="Record format: 6 (no color) or 8 (4 color channels)" + ) + parser.add_argument("--projection", default="EPSG:2154", type=str, help="Projection, eg. EPSG:2154") return parser.parse_args() @@ -93,8 +85,5 @@ def standardize(input_file: str, output_file: str, params_from_parser: Dict) -> if __name__ == "__main__": args = parse_args() - params_from_parser = dict( - dataformat_id=args.record_format, - a_srs=args.projection) + params_from_parser = dict(dataformat_id=args.record_format, a_srs=args.projection) standardize(args.input_file, args.output_file, params_from_parser) - diff --git a/pdaltools/unlock_file.py b/pdaltools/unlock_file.py index d2f0625..ac9c1d1 100644 --- a/pdaltools/unlock_file.py +++ b/pdaltools/unlock_file.py @@ -17,7 +17,7 @@ def unlock_file(finename: str): f.close() -def copy_and_hack_input_file(in_file: str, out_file:str): +def copy_and_hack_input_file(in_file: str, out_file: str): print(f"Gestion de l'erreur en créeant un nouveau LAZ que l'on modifiera : {in_file}") shutil.copy(in_file, out_file) unlock_file(out_file) @@ -31,12 +31,13 @@ def copy_and_hack_decorator(func): CAUTION: The decorated function must have the path to a las/laz input file as its first argument """ + def newfn(*args, **kwargs): try: return func(*args, **kwargs) except RuntimeError as e: logging.debug(f"Caught RuntimeError: {e}") - if ("readers.las: Global encoding WKT flag not set for point format 6 - 10." in str(e)): + if "readers.las: Global encoding WKT flag not set for point format 6 - 10." 
                 args = list(args)
                 in_file = args[0]
                 with tempfile.NamedTemporaryFile(suffix=os.path.splitext(in_file)[-1]) as tmp:
@@ -46,20 +47,17 @@ def newfn(*args, **kwargs):
         else:
             raise e
+
     return newfn
 
 
 def parse_args():
     parser = argparse.ArgumentParser("Unlock las/laz files generated by TerraSolid (it overwrites the input)")
-    parser.add_argument(
-        "--input", "-i",
-        type=str,
-        required=True,
-        help="Input file")
+    parser.add_argument("--input", "-i", type=str, required=True, help="Input file")
     return parser.parse_args()
 
 
 if __name__ == "__main__":
     args = parse_args()
-    unlock_file(args.input)
\ No newline at end of file
+    unlock_file(args.input)
diff --git a/pyproject.toml b/pyproject.toml
index 428bd7b..9e7645b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,3 +16,17 @@ packages = [ "pdaltools" ]
 markers = [
     "geoportail: marks tests that request the (sometimes unreliable) geoportail.gouv.fr",
 ]
+
+[tool.black]
+line-length = 119
+include = '\.pyi?$'
+exclude = '''
+/(
+    \.toml
+    |\.sh
+    |\.git
+    |\.ini
+    |\.bat
+    | data
+)/
+'''
diff --git a/test/count_occurences/test_count_occurences_for_attribute.py b/test/count_occurences/test_count_occurences_for_attribute.py
index 9e854a1..383fc9c 100644
--- a/test/count_occurences/test_count_occurences_for_attribute.py
+++ b/test/count_occurences/test_count_occurences_for_attribute.py
@@ -12,25 +12,27 @@
 input_dir = os.path.join(test_path, "data/classified_laz")
 input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir) if f.endswith(("las", "laz"))]
 output_file = os.path.join(tmp_path, "count.json")
-single_input_file = os.path.join(input_dir, 'test_data_77050_627755_LA93_IGN69.laz')
-counts_single_json = os.path.join(test_path, "data", "counts", 'count_test_data_77050_627755_LA93_IGN69.json')
+single_input_file = os.path.join(input_dir, "test_data_77050_627755_LA93_IGN69.laz")
+counts_single_json = os.path.join(test_path, "data", "counts", "count_test_data_77050_627755_LA93_IGN69.json")
 attribute = "Classification"
-expected_counts = Counter({
-    '1': 6830,
-    '2': 54740,
-    '3': 605,
-    '4': 2160,
-    '5': 42546,
-    '6': 33595,
-    '64': 83,
-})
+expected_counts = Counter(
+    {
+        "1": 6830,
+        "2": 54740,
+        "3": 605,
+        "4": 2160,
+        "5": 42546,
+        "6": 33595,
+        "64": 83,
+    }
+)
 
 
 def setup_module(module):
     try:
         shutil.rmtree(tmp_path)
-    except (FileNotFoundError):
+    except FileNotFoundError:
         pass
     os.mkdir(tmp_path)
 
@@ -58,4 +60,4 @@ def test_count_by_attribute_values_one_file():
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
     test_count_by_attribute_values()
-    test_count_by_attribute_values_with_json()
\ No newline at end of file
+    test_count_by_attribute_values_with_json()
diff --git a/test/count_occurences/test_merge_occurences_counts.py b/test/count_occurences/test_merge_occurences_counts.py
index 6723411..6d2847d 100644
--- a/test/count_occurences/test_merge_occurences_counts.py
+++ b/test/count_occurences/test_merge_occurences_counts.py
@@ -13,21 +13,23 @@
 output_file = os.path.join(tmp_path, "merged_counts.json")
 attribute = "Classification"
 
-expected_counts = Counter({
-    '1': 6830,
-    '2': 54740,
-    '3': 605,
-    '4': 2160,
-    '5': 42546,
-    '6': 33595,
-    '64': 83,
-})
+expected_counts = Counter(
+    {
+        "1": 6830,
+        "2": 54740,
+        "3": 605,
+        "4": 2160,
+        "5": 42546,
+        "6": 33595,
+        "64": 83,
+    }
+)
 
 
 def setup_module(module):
     try:
         shutil.rmtree(tmp_path)
-    except (FileNotFoundError):
+    except FileNotFoundError:
         pass
     os.mkdir(tmp_path)
 
@@ -46,4 +48,4 @@ def test_merge_counts_with_json():
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
     test_merge_counts()
-    test_merge_counts_with_json()
\ No newline at end of file
+    test_merge_counts_with_json()
diff --git a/test/data/test_data_0436_6384_LA93_IGN69_single_point.laz b/test/data/test_data_0436_6384_LA93_IGN69_single_point.laz
new file mode 100644
index 0000000..069d123
Binary files /dev/null and b/test/data/test_data_0436_6384_LA93_IGN69_single_point.laz differ
diff --git a/test/test_color.py b/test/test_color.py
index 361478a..fa02db8 100644
--- a/test/test_color.py
+++ b/test/test_color.py
@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 import shutil
 
 import pytest
@@ -11,32 +12,47 @@ TMPDIR = cwd + "/tmp/"
 
+
 def setup_module(module):
     try:
         shutil.rmtree(TMPDIR)
-    except (FileNotFoundError):
+    except FileNotFoundError:
         pass
     os.mkdir(TMPDIR)
 
+
 TEST_PATH = os.path.dirname(os.path.abspath(__file__))
-INPUT_PATH = os.path.join(TEST_PATH, 'data/test_noepsg_043500_629205_IGN69.laz')
+INPUT_PATH = os.path.join(TEST_PATH, "data/test_noepsg_043500_629205_IGN69.laz")
+INPUT_PATH_SINGLE_POINT_CLOUD = os.path.join(TEST_PATH, "data/test_data_0436_6384_LA93_IGN69_single_point.laz")
 
 OUTPUT_FILE = TMPDIR + "Semis_2021_0435_6292_LA93_IGN69.las"
+OUTPUT_FILE_SINGLE_POINT_CLOUD = TMPDIR + "test_data_0436_6384_LA93_IGN69_single_point.colorized.laz"
 
 
 @pytest.mark.geoportail
 def test_epsg_fail():
-    with pytest.raises(requests.exceptions.HTTPError, match="400 Client Error: BadRequest for url") :
+    with pytest.raises(requests.exceptions.HTTPError, match="400 Client Error: BadRequest for url"):
         color.color(INPUT_PATH, OUTPUT_FILE, "", 0.1, 15)
 
 
 epsg = "2154"
-layer= "ORTHOIMAGERY.ORTHOPHOTOS"
-minx=435000
-miny=6291000
-maxx=436000
-maxy=6292000
-pixel_per_meter=0.1
+layer = "ORTHOIMAGERY.ORTHOPHOTOS"
+minx = 435000
+miny = 6291000
+maxx = 436000
+maxy = 6292000
+pixel_per_meter = 0.1
+
+
+def test_color_and_keeping_orthoimages():
+    tmp_ortho, tmp_ortho_irc = color.color(INPUT_PATH, OUTPUT_FILE, epsg)
+    assert Path(tmp_ortho).exists()
+    assert Path(tmp_ortho_irc).exists()
+
+
+def test_color_narrow_cloud():
+    # Test that clouds smaller than 20 cm in width or height are still colorized without an error.
+    color.color(INPUT_PATH_SINGLE_POINT_CLOUD, OUTPUT_FILE_SINGLE_POINT_CLOUD, epsg)
 
 
 @pytest.mark.geoportail
@@ -69,7 +85,7 @@ def test_retry_on_server_error():
 
 
 def test_retry_on_connection_error():
-    with requests_mock.Mocker() as mock: 
+    with requests_mock.Mocker() as mock:
         mock.get(requests_mock.ANY, exc=requests.exceptions.ConnectionError)
         with pytest.raises(requests.exceptions.ConnectionError):
             retry_download = color.retry(2, 1)(color.download_image_from_geoportail)
@@ -80,7 +96,6 @@ def test_retry_on_connection_error():
 
 
 def test_retry_param():
-
     # Here you can change retry params
     @color.retry(7, 15, 2, True)
     def raise_server_error():
diff --git a/test/test_las_add_buffer.py b/test/test_las_add_buffer.py
index 4921c37..3d9e20f 100644
--- a/test/test_las_add_buffer.py
+++ b/test/test_las_add_buffer.py
@@ -21,16 +21,15 @@
 input_nb_points = 22343
 expected_output_nb_points = 40177
 
-expected_out_mins = [770540.01, 6277540.]
-expected_out_maxs = [770610., 6277600.]
-
+expected_out_mins = [770540.01, 6277540.0]
+expected_out_maxs = [770610.0, 6277600.0]
 
 
 def setup_module(module):
     try:
         shutil.rmtree(tmp_path)
-    except (FileNotFoundError):
+    except FileNotFoundError:
         pass
     os.mkdir(tmp_path)
 
@@ -57,8 +56,13 @@ def get_2d_bounding_box(path):
 def test_create_las_with_buffer():
     buffer_width = 10
     create_las_with_buffer(
-        input_dir, input_file, output_file, buffer_width=buffer_width,
-        tile_width=tile_width,tile_coord_scale=tile_coord_scale)
+        input_dir,
+        input_file,
+        output_file,
+        buffer_width=buffer_width,
+        tile_width=tile_width,
+        tile_coord_scale=tile_coord_scale,
+    )
     logging.info(get_nb_points(input_file))
     # check file exists
     assert os.path.isfile(output_file)
@@ -69,7 +73,7 @@ def test_create_las_with_buffer():
 
     # The following test does not work on the current test case as there is no tile on the left
     # and the top of the tile
-    assert np.all(np.isclose(out_mins, in_mins - buffer_width)) 
+    assert np.all(np.isclose(out_mins, in_mins - buffer_width))
     assert np.all(np.isclose(out_maxs, in_maxs + buffer_width))
 
     # check number of points
@@ -82,4 +86,4 @@ def test_create_las_with_buffer():
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
-    test_create_las_with_buffer()
\ No newline at end of file
+    test_create_las_with_buffer()
diff --git a/test/test_las_clip.py b/test/test_las_clip.py
index ca6b9ef..3fd8b8b 100755
--- a/test/test_las_clip.py
+++ b/test/test_las_clip.py
@@ -9,7 +9,7 @@
 test_path = os.path.dirname(os.path.abspath(__file__))
 tmp_path = os.path.join(test_path, "tmp")
-input_dir = os.path.join(test_path, "data") 
+input_dir = os.path.join(test_path, "data")
 output_file = os.path.join(tmp_path, "cropped.las")
 
 coord_x = 77055
@@ -18,17 +18,17 @@
 input_nb_points = 22343
 expected_output_nb_points = 6578
 
-input_mins = [ 770550., 6277550.]
-input_maxs = [ 770600., 6277600.]
-expected_out_mins = [770560., 6277560.]
-expected_out_maxs = [770590., 6277590.]
+input_mins = [770550.0, 6277550.0]
+input_maxs = [770600.0, 6277600.0]
+expected_out_mins = [770560.0, 6277560.0]
+expected_out_maxs = [770590.0, 6277590.0]
 
 
 def setup_module(module):
     try:
         shutil.rmtree(tmp_path)
-    except (FileNotFoundError):
+    except FileNotFoundError:
         pass
     os.mkdir(tmp_path)
 
@@ -53,8 +53,7 @@ def get_2d_bounding_box(path):
 
 ## Tests
 def test_las_crop():
-    bounds = ([expected_out_mins[0], expected_out_maxs[0]],
-              [expected_out_mins[1], expected_out_maxs[1]])
+    bounds = ([expected_out_mins[0], expected_out_maxs[0]], [expected_out_mins[1], expected_out_maxs[1]])
     las_crop(input_file, output_file, bounds)
 
     # check file exists
@@ -73,4 +72,4 @@ def test_las_crop():
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
-    test_las_crop()
\ No newline at end of file
+    test_las_crop()
diff --git a/test/test_las_info.py b/test/test_las_info.py
index 7262cca..b2e30b7 100644
--- a/test/test_las_info.py
+++ b/test/test_las_info.py
@@ -17,8 +17,8 @@
 tmp_path = os.path.join(test_path, "tmp")
 input_dir = os.path.join(test_path, "data")
 input_file = os.path.join(input_dir, f"test_data_{coord_x}_{coord_y}_LA93_IGN69_ground.las")
-input_mins = [ 770550., 6277550.]
-input_maxs = [ 770600., 6277600.]
+input_mins = [770550.0, 6277550.0]
+input_maxs = [770600.0, 6277600.0]
 
 
 def test_las_info_metadata():
@@ -57,16 +57,17 @@ def test_parse_filename():
 
 
 def test_get_buffered_bounds_from_filename_no_buffer():
-    xs, ys = las_info.get_buffered_bounds_from_filename(input_file, tile_width=tile_width,
-                                                        tile_coord_scale=tile_coord_scale)
+    xs, ys = las_info.get_buffered_bounds_from_filename(
+        input_file, tile_width=tile_width, tile_coord_scale=tile_coord_scale
+    )
     assert xs == [770550, 770600]
     assert ys == [6277550, 6277600]
 
 
 def test_get_buffered_bounds_from_filename_with_buffer():
     buffer_width = 10
-    xs, ys = las_info.get_buffered_bounds_from_filename(input_file, tile_width=tile_width,
-                                                        tile_coord_scale=tile_coord_scale,
-                                                        buffer_width=buffer_width)
+    xs, ys = las_info.get_buffered_bounds_from_filename(
+        input_file, tile_width=tile_width, tile_coord_scale=tile_coord_scale, buffer_width=buffer_width
+    )
     assert xs == [770550 - buffer_width, 770600 + buffer_width]
-    assert ys == [6277550 - buffer_width, 6277600 + buffer_width]
\ No newline at end of file
+    assert ys == [6277550 - buffer_width, 6277600 + buffer_width]
diff --git a/test/test_las_merge.py b/test/test_las_merge.py
index 1ab487d..19beb30 100644
--- a/test/test_las_merge.py
+++ b/test/test_las_merge.py
@@ -9,7 +9,7 @@
 test_path = os.path.dirname(os.path.abspath(__file__))
 tmp_path = os.path.join(test_path, "tmp")
-input_dir = os.path.join(test_path, "data") 
+input_dir = os.path.join(test_path, "data")
 output_file = os.path.join(tmp_path, "merged.las")
 
 coord_x = 77055
@@ -20,8 +20,8 @@
 input_nb_points = 22343
 expected_output_nb_points = 154134
 
-expected_out_mins = [ 770500., 6277500.]
-expected_out_maxs = [ 770650., 6277600.]
+expected_out_mins = [770500.0, 6277500.0]
+expected_out_maxs = [770650.0, 6277600.0]
 
 
 # def setup_module(module):
@@ -53,8 +53,7 @@ def get_2d_bounding_box(path):
 
 ## Tests
 def test_las_merge():
-    las_merge(input_dir, input_file, output_file,
-              tile_width=tile_width, tile_coord_scale=tile_coord_scale)
+    las_merge(input_dir, input_file, output_file, tile_width=tile_width, tile_coord_scale=tile_coord_scale)
 
     # check file exists
     assert os.path.isfile(output_file)
@@ -73,4 +72,4 @@ def test_las_merge():
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
-    test_las_merge()
\ No newline at end of file
+    test_las_merge()
diff --git a/test/test_replace_attribute_in_las.py b/test/test_replace_attribute_in_las.py
index af546c7..bda4afa 100644
--- a/test/test_replace_attribute_in_las.py
+++ b/test/test_replace_attribute_in_las.py
@@ -1,7 +1,11 @@
 from collections import Counter
 import json
 import os
-from pdaltools.replace_attribute_in_las import replace_values, replace_values_clean, parse_replacement_map_from_path_or_json_string
+from pdaltools.replace_attribute_in_las import (
+    replace_values,
+    replace_values_clean,
+    parse_replacement_map_from_path_or_json_string,
+)
 from pdaltools.count_occurences.count_occurences_for_attribute import compute_count_one_file
 from pdaltools.standardize_format import get_writer_parameters
 import pytest
@@ -16,32 +20,27 @@
 input_file = os.path.join(input_dir, "test_data_77050_627755_LA93_IGN69.laz")
 output_file = os.path.join(tmp_path, "replaced.las")
 attribute = "Classification"
-input_counts = Counter({
-    '1': 2047,
-    '2': 21172,
-    '3': 226,
-    '4': 1227,
-    '5': 30392,
-    '6': 29447,
-    '64': 13,
-})
-
-expected_counts = Counter({
-    '2': 21172,
-    '3': 226,
-    '4': 1227,
-    '5': 30392,
-    '64': 29447,
-    '201': 2047 + 13
-})
+input_counts = Counter(
+    {
+        "1": 2047,
+        "2": 21172,
"3": 226, + "4": 1227, + "5": 30392, + "6": 29447, + "64": 13, + } +) + +expected_counts = Counter({"2": 21172, "3": 226, "4": 1227, "5": 30392, "64": 29447, "201": 2047 + 13}) replacement_map_fail = { - "201" : ["1", "64"], + "201": ["1", "64"], "6": ["64"], } # has duplicatevalue to replace replacement_map_success = { - "201" : ["1", "64"], + "201": ["1", "64"], "64": ["6"], } @@ -55,7 +54,7 @@ def setup_module(module): try: shutil.rmtree(tmp_path) - except (FileNotFoundError): + except FileNotFoundError: pass os.mkdir(tmp_path) diff --git a/test/test_standardize_format.py b/test/test_standardize_format.py index b795a9a..a5fe48f 100644 --- a/test/test_standardize_format.py +++ b/test/test_standardize_format.py @@ -22,13 +22,48 @@ ] expected_dims = { - 6: set(["X", "Y", "Z", "Intensity", "ReturnNumber", "NumberOfReturns", "ClassFlags", - "ScanChannel", "ScanDirectionFlag", "EdgeOfFlightLine", "Classification", - "UserData", "ScanAngleRank", "PointSourceId", "GpsTime"]), - 8: set(["X", "Y", "Z", "Intensity", "ReturnNumber", "NumberOfReturns", "ClassFlags", - "ScanChannel", "ScanDirectionFlag", "EdgeOfFlightLine", "Classification", - "UserData", "ScanAngleRank", "PointSourceId", "GpsTime", - "Red", "Green", "Blue", "Infrared"]), + 6: set( + [ + "X", + "Y", + "Z", + "Intensity", + "ReturnNumber", + "NumberOfReturns", + "ClassFlags", + "ScanChannel", + "ScanDirectionFlag", + "EdgeOfFlightLine", + "Classification", + "UserData", + "ScanAngleRank", + "PointSourceId", + "GpsTime", + ] + ), + 8: set( + [ + "X", + "Y", + "Z", + "Intensity", + "ReturnNumber", + "NumberOfReturns", + "ClassFlags", + "ScanChannel", + "ScanDirectionFlag", + "EdgeOfFlightLine", + "Classification", + "UserData", + "ScanAngleRank", + "PointSourceId", + "GpsTime", + "Red", + "Green", + "Blue", + "Infrared", + ] + ), } @@ -36,14 +71,13 @@ def setup_module(module): try: shutil.rmtree(tmp_path) - except (FileNotFoundError): + except FileNotFoundError: pass os.mkdir(tmp_path) def _test_standardize_format_one_params_set(params): - rewrite_with_pdal( - input_file, output_file, params) + rewrite_with_pdal(input_file, output_file, params) # check file exists assert os.path.isfile(output_file) # check values from metadata @@ -52,7 +86,7 @@ def _test_standardize_format_one_params_set(params): raise NotImplementedError("This test is not implemented for pdal < 2.5") elif pdal.info.version <= "2.5.2": metadata = json_info["summary"]["metadata"][1] - else : + else: metadata = json_info["summary"]["metadata"] assert metadata["compressed"] == True assert metadata["minor_version"] == 4 @@ -83,7 +117,7 @@ def exec_lasinfo(input_file: str): def assert_lasinfo_no_warning(input_file: str): - errors = [ line for line in exec_lasinfo(input_file).splitlines() if 'WARNING' in line] + errors = [line for line in exec_lasinfo(input_file).splitlines() if "WARNING" in line] for line in errors: print(line) @@ -111,12 +145,12 @@ def test_standardize_does_NOT_produce_any_warning_with_Lasinfo(): def test_standardize_malformed_laz(): - input_file = os.path.join(test_path, 'data/test_pdalfail_0643_6319_LA93_IGN69.laz') - output_file = os.path.join(tmp_path, 'standardize_pdalfail_0643_6319_LA93_IGN69.laz') + input_file = os.path.join(test_path, "data/test_pdalfail_0643_6319_LA93_IGN69.laz") + output_file = os.path.join(tmp_path, "standardize_pdalfail_0643_6319_LA93_IGN69.laz") standardize(input_file, output_file, multiple_params[0]) assert os.path.isfile(output_file) if __name__ == "__main__": logging.basicConfig(level=logging.INFO) - 
test_standardize_format() \ No newline at end of file + test_standardize_format() diff --git a/test/test_unlock.py b/test/test_unlock.py index 5b48c62..5fb53c2 100644 --- a/test/test_unlock.py +++ b/test/test_unlock.py @@ -9,13 +9,13 @@ TEST_PATH = os.path.dirname(os.path.abspath(__file__)) TMPDIR = os.path.join(TEST_PATH, "tmp") -LAZ_FILE = os.path.join(TEST_PATH, 'data/test_pdalfail_0643_6319_LA93_IGN69.laz') +LAZ_FILE = os.path.join(TEST_PATH, "data/test_pdalfail_0643_6319_LA93_IGN69.laz") def setup_module(module): try: shutil.rmtree(TMPDIR) - except (FileNotFoundError): + except FileNotFoundError: pass os.mkdir(TMPDIR) @@ -53,4 +53,4 @@ def test_unlock_file(): TMP_FILE = os.path.join(TMPDIR, "unlock_file.laz") shutil.copy(LAZ_FILE, TMP_FILE) unlock_file(TMP_FILE) - pdal_info_json(TMP_FILE) \ No newline at end of file + pdal_info_json(TMP_FILE) diff --git a/test/utils.py b/test/utils.py index 47349fd..2a5e3ed 100644 --- a/test/utils.py +++ b/test/utils.py @@ -6,6 +6,6 @@ def get_pdal_infos_summary(f: str): - r = (sp.run(['pdal', 'info', '--summary', f], stderr=sp.PIPE, stdout=sp.PIPE)) + r = sp.run(["pdal", "info", "--summary", f], stderr=sp.PIPE, stdout=sp.PIPE) json_info = json.loads(r.stdout.decode()) - return json_info \ No newline at end of file + return json_info
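
Usage note (a sketch, not part of the patch): the behavioral change in pdaltools/color.py is the bbox padding plus the switch from int() to ceil() when computing the WMS WIDTH/HEIGHT. The snippet below is a minimal standalone illustration of that logic; the coordinates are made up, and pixel_per_meter=4 is chosen here (rather than the default of 5) only so that 1 / pixel_per_meter is exactly representable in floating point:

    from math import ceil

    pixel_per_meter = 4

    # A single-point cloud has a degenerate bbox (minx == maxx, miny == maxy),
    # which previously produced an invalid WMS request (WIDTH=0, HEIGHT=0).
    minx = maxx = 436000.0
    miny = maxy = 6384000.0

    # Pad the bbox to at least one pixel, as download_image_from_geoportail now does:
    if minx == maxx:
        maxx = minx + 1 / pixel_per_meter
    if miny == maxy:
        maxy = miny + 1 / pixel_per_meter

    width = ceil((maxx - minx) * pixel_per_meter)   # 1
    height = ceil((maxy - miny) * pixel_per_meter)  # 1

    # ceil() also covers sub-pixel extents that int() used to truncate to 0,
    # so the requested orthoimage always contains every point:
    assert ceil(0.1 * pixel_per_meter) == 1  # int(0.4) would have given 0

Since 1.2.0, color() also returns the paths of the downloaded orthoimages (tmp_ortho, tmp_ortho_irc), so a caller that needs them after colorization can hold on to them or copy them before they are cleaned up, as test_color_and_keeping_orthoimages exercises above.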