diff --git a/.github/workflows/cicd_deploy.yml b/.github/workflows/cicd_deploy.yml
index 52d4ec3..b913dce 100644
--- a/.github/workflows/cicd_deploy.yml
+++ b/.github/workflows/cicd_deploy.yml
@@ -5,73 +5,81 @@ on:
   # Also run when the pull request merges (which generates a push)
   # So that we can tag the docker image appropriately.
   push:
-    branches:
-      - master
+    branches: [ "master" ]
+    tags: [ 'v*.*.*' ]

 env:
-  DOCKER_REPO: ignimagelidar/ign-pdal-tools
+  IMAGE_NAME: ${{ github.repository }}
+  REGISTRY: ghcr.io
+  TEST_TAG: ${{ github.repository }}:test

 jobs:
   deploy_docker:
     runs-on: ubuntu-latest
+    permissions:
+      packages: write
+
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
+      - name: Build the Docker image
+        id: build
+        uses: docker/build-push-action@v5
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-
-      - name: Build and export to Docker
-        uses: docker/build-push-action@v4
-        with:
-          context: .
           load: true
-          tags: ${{ env.DOCKER_REPO }}:test
+          tags: ${{ env.TEST_TAG }}

-      - name: Set version number
-        run: |
-          echo "VERSION=$(docker run ${{ env.DOCKER_REPO }}:test python -m pdaltools._version)" >> $GITHUB_ENV
-
-      - name: Run pytest
+      # run the tests on the docker image
+      - name: Run tests in docker image
         run: >
           docker run
           --ipc=host
-          ${{ env.DOCKER_REPO }}:test
+          ${{ env.TEST_TAG }}
           python -m pytest ./test -s --log-cli-level DEBUG

-      - name: Build and push
-        uses: docker/build-push-action@v4
+      # Extract metadata (tags, labels) for Docker
+      # https://github.com/docker/metadata-action
+      - name: Extract Docker metadata
+        if: github.event_name != 'pull_request'
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+
+      # Build a Docker image with Buildx (don't push on PR)
+      # https://github.com/docker/build-push-action
+      - name: Build and push Docker image
+        id: build-and-push
+        uses: docker/build-push-action@v5
         with:
           context: .
           push: true
-          tags: ${{ env.DOCKER_REPO }}:latest,${{ env.DOCKER_REPO }}:${{ env.VERSION }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}

   deploy-pypi:
     runs-on: ubuntu-latest
+
     environment:
       name: pypi
       url: https://pypi.org/p/ign-pdal-tools
+
     permissions:
       contents: read
       packages: write
       id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
-
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # See https://github.com/marketplace/actions/setup-micromamba
       - name: setup-micromamba
-        uses: mamba-org/setup-micromamba@v1.4.3
+        uses: mamba-org/setup-micromamba@v1.9.0
         with:
+          micromamba-version: '1.5.8-0'  # temporary fix as setup-micromamba hangs with later mamba versions
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
           cache-environment: true
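Since ghcr.io image tags are now derived from `v*.*.*` git tags by `docker/metadata-action`, while PyPI publishing still reads the version from `pdaltools._version`, the two sources can silently drift apart. If that becomes a concern, a consistency check along these lines could be added to the release job (a sketch, not part of this diff; it assumes releases are tagged `v<version>` and that the tag is checked out):

```python
# Sketch: fail fast if the checked-out git tag does not match the package
# version, since docker tags (from v*.*.* git tags) and PyPI releases
# (from pdaltools._version) are now derived from two different sources.
import subprocess

from pdaltools._version import __version__

tag = subprocess.run(
    ["git", "describe", "--tags", "--exact-match"],
    capture_output=True, text=True, check=True,
).stdout.strip()
assert tag == f"v{__version__}", f"git tag {tag} != package version v{__version__}"
```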
diff --git a/.github/workflows/cicd_full.yml b/.github/workflows/cicd_full.yml
index d51bfdb..6952358 100644
--- a/.github/workflows/cicd_full.yml
+++ b/.github/workflows/cicd_full.yml
@@ -13,44 +13,36 @@ on:
       - dev

 env:
-  DOCKER_REPO: ignimagelidar/ign-pdal-tools
+  IMAGE_NAME: ${{ github.repository }}
+  REGISTRY: ghcr.io
+  TEST_TAG: ${{ github.repository }}:test

 jobs:
-  test_docker:
+  deploy_docker:
     runs-on: ubuntu-latest
+    permissions:
+      packages: write
+
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
+      - name: Build the Docker image
+        id: build
+        uses: docker/build-push-action@v5
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-
-      - name: Build and export to Docker
-        uses: docker/build-push-action@v4
-        with:
-          context: .
           load: true
-          tags: ${{ env.DOCKER_REPO }}:test
-
-      - name: Set version number
-        run: |
-          echo "VERSION=$(docker run ${{ env.DOCKER_REPO }}:test python -m pdaltools._version)" >> $GITHUB_ENV
+          tags: ${{ env.TEST_TAG }}

-      - name: Run pytest
+      # run the tests on the docker image
+      - name: Run tests in docker image
         run: >
           docker run
           --ipc=host
-          ${{ env.DOCKER_REPO }}:test
+          ${{ env.TEST_TAG }}
           python -m pytest ./test -s --log-cli-level DEBUG

-
   test_local:
     runs-on: ubuntu-latest
     environment:
@@ -59,8 +51,6 @@ jobs:
     permissions:
       contents: read
       packages: write
-      id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
-
     steps:
       - name: Checkout branch
@@ -68,8 +58,9 @@ jobs:

       # See https://github.com/marketplace/actions/setup-micromamba
       - name: setup-micromamba
-        uses: mamba-org/setup-micromamba@v1.4.3
+        uses: mamba-org/setup-micromamba@v1.9.0
         with:
+          micromamba-version: '1.5.8-0'  # temporary fix as setup-micromamba hangs with later mamba versions
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
           cache-environment: true
diff --git a/.github/workflows/cicd_light.yml b/.github/workflows/cicd_light.yml
index a77f55d..c632b6c 100644
--- a/.github/workflows/cicd_light.yml
+++ b/.github/workflows/cicd_light.yml
@@ -16,12 +16,13 @@ jobs:
     steps:
       - name: Checkout branch
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # See https://github.com/marketplace/actions/setup-micromamba
       - name: setup-micromamba
-        uses: mamba-org/setup-micromamba@v1.4.3
+        uses: mamba-org/setup-micromamba@v1.9.0
         with:
+          micromamba-version: '1.5.8-0'  # temporary fix as setup-micromamba hangs with later mamba versions
           environment-file: environment.yml
           environment-name: pdaltools  # activate the environment
           cache-environment: true
diff --git a/.gitignore b/.gitignore
index b5f6a44..b0e7f80 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,4 @@ tmp
 __pycache__
 ign_pdal_tools.egg-info
 dist
+*.idea*
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04278b2..47de613 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+# 1.7.0
+- las_remove_dimensions: new tool to remove one or more dimensions
+- deploy on ghcr.io instead of dockerhub
+- Add tools to run functions on buffered las files:
+  - update create_las_with_buffer to optionally record, in a new dimension, which points come from the central las
+  - add remove_points_from_buffer to remove the points that do not have this new dimension set to 1
+  - add a decorator to run a function on a buffered las and return an output las containing only the points from the original input
+
 # 1.6.0
 - color: choose streams for RGB colorization, and IRC colorization (doc https://geoservices.ign.fr/services-web-experts-ortho)
 - color: detect white images.
diff --git a/Makefile b/Makefile
index 04bcec9..edc090a 100644
--- a/Makefile
+++ b/Makefile
@@ -53,17 +53,21 @@ clean:
 # Build/deploy Docker image
 ##############################
-PROJECT_NAME=ignimagelidar/ign-pdal-tools
+REGISTRY=ghcr.io
+NAMESPACE=ignf
+IMAGE_NAME=ign-pdal-tools
 VERSION=`python -m pdaltools._version`
+FULL_IMAGE_NAME=${REGISTRY}/${NAMESPACE}/${IMAGE_NAME}:${VERSION}

 docker-build: clean
-	docker build --no-cache -t ${PROJECT_NAME}:${VERSION} -f Dockerfile .
+	docker build --no-cache -t ${IMAGE_NAME}:${VERSION} -f Dockerfile .

 docker-test:
-	docker run --rm -it ${PROJECT_NAME}:${VERSION} python -m pytest -s
+	docker run --rm -it ${IMAGE_NAME}:${VERSION} python -m pytest -s

 docker-remove:
-	docker rmi -f `docker images | grep ${PROJECT_NAME} | tr -s ' ' | cut -d ' ' -f 3`
+	docker rmi -f `docker images -q ${IMAGE_NAME}:${VERSION}`

 docker-deploy:
-	docker push ${PROJECT_NAME}:${VERSION}
+	docker tag ${IMAGE_NAME}:${VERSION} ${FULL_IMAGE_NAME}
+	docker push ${FULL_IMAGE_NAME}
diff --git a/pdaltools/_version.py b/pdaltools/_version.py
index 6a32fb0..121c53d 100644
--- a/pdaltools/_version.py
+++ b/pdaltools/_version.py
@@ -1,4 +1,4 @@
-__version__ = "1.6.0"
+__version__ = "1.7.0"

 if __name__ == "__main__":
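For context on `VERSION=`python -m pdaltools._version`` in the Makefile: this only works because running the module prints the version. The hunk above truncates the file after the `if __name__ == "__main__":` line; its body is presumably a bare print, along these lines (a sketch of the assumed remainder):

```python
# Presumed shape of pdaltools/_version.py: printing under __main__ is what lets
# the Makefile capture the version with VERSION=`python -m pdaltools._version`.
__version__ = "1.7.0"

if __name__ == "__main__":
    print(__version__)
```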
""" + bounds = get_buffered_bounds_from_filename( tile_filename, buffer_width=buffer_width, tile_width=tile_width, tile_coord_scale=tile_coord_scale ) @@ -46,6 +60,7 @@ def create_las_with_buffer( spatial_ref, tile_width=tile_width, tile_coord_scale=tile_coord_scale, + tag_original_tile=tag_original_tile, ) @@ -57,6 +72,7 @@ def las_merge_and_crop( spatial_ref: str = "EPSG:2154", tile_width=1000, tile_coord_scale=1000, + tag_original_tile: bool = False, ): """Merge and crop las in a single pipeline (for buffer addition) @@ -65,29 +81,40 @@ def las_merge_and_crop( - For each file: - read it - crop it according to the bounds + - optionally add a dimension to differentiate points from the central pointscloud + from those added as a buffer - keep the crop in memory - delete the pipeline object to release the memory taken by the las reader - Merge the already cropped data Args: - input_dir (str): directory of pointclouds (where you look for neigbors) + input_dir (str): directory of pointclouds (where you look for neighbors) tile_filename (str): full path to the queried LIDAR tile - bounds : 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax]) - output_filename (str) : full path to the saved cropped tile - spatial_ref (str): spatial reference for the writer - tile width (int): width of tiles in meters (usually 1000m) - tile_coord_scale (int) : scale used in the filename to describe coordinates in meters - (usually 1000m) + bounds (List): 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax]) + output_filename (str): full path to the saved cropped tile + spatial_ref (str, optional): spatial reference for the writer. Defaults to "EPSG:2154". + tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000. + tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters. + Defaults to 1000. + tag_original_tile (bool, optional): if true, add a new "is_in_original" dimension + to the output las, equal to 1 on points that belong to the original tile, 0 on points + that belong to the added buffer. Defaults to False. 
@@ -57,6 +72,7 @@ def las_merge_and_crop(
     spatial_ref: str = "EPSG:2154",
     tile_width=1000,
     tile_coord_scale=1000,
+    tag_original_tile: bool = False,
 ):
     """Merge and crop las in a single pipeline (for buffer addition)

     The pipeline:
     - creates the list of files to merge
     - For each file:
         - read it
         - crop it according to the bounds
+        - optionally add a dimension to differentiate points from the central pointcloud
+          from those added as a buffer
         - keep the crop in memory
         - delete the pipeline object to release the memory taken by the las reader
     - Merge the already cropped data

     Args:
-    input_dir (str): directory of pointclouds (where you look for neigbors)
+        input_dir (str): directory of pointclouds (where you look for neighbors)
         tile_filename (str): full path to the queried LIDAR tile
-    bounds : 2D bounding box to crop to : provided as ([xmin, xmax], [ymin, ymax])
-    output_filename (str) : full path to the saved cropped tile
-    spatial_ref (str): spatial reference for the writer
-    tile width (int): width of tiles in meters (usually 1000m)
-    tile_coord_scale (int) : scale used in the filename to describe coordinates in meters
-    (usually 1000m)
+        bounds (List): 2D bounding box to crop to, provided as ([xmin, xmax], [ymin, ymax])
+        output_filename (str): full path to the saved cropped tile
+        spatial_ref (str, optional): spatial reference for the writer. Defaults to "EPSG:2154".
+        tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000.
+        tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters.
+            Defaults to 1000.
+        tag_original_tile (bool, optional): if true, add a new "is_in_original" dimension
+            to the output las, equal to 1 on points that belong to the original tile, 0 on points
+            that belong to the added buffer. Defaults to False.
+
+    Raises:
+        ValueError: if the list of tiles to merge is empty
     """
+
     # List files to merge
     files_to_merge = create_list(input_dir, tile_filename, tile_width, tile_coord_scale)
-
     if len(files_to_merge) > 0:
+        central_file = files_to_merge[-1]
         # Read and crop each file
         crops = []
         for f in files_to_merge:
             pipeline = pdal.Pipeline()
             pipeline |= pdal.Reader.las(filename=f, override_srs=spatial_ref)
+            if tag_original_tile:
+                pipeline |= pdal.Filter.ferry(dimensions=f"=>{ORIGINAL_TILE_TAG}")
+                pipeline |= pdal.Filter.assign(value=f"{ORIGINAL_TILE_TAG}={int(f == central_file)}")
             pipeline |= pdal.Filter.crop(bounds=str(bounds))
             pipeline.execute()
             if len(pipeline.arrays[0]) == 0:
@@ -95,10 +122,9 @@ def las_merge_and_crop(
             else:
                 crops.append(pipeline.arrays[0])

-            # Retrieve metadata before the pipeline is deleted
-            # As the last file of files_to_merge is the central one, metadata will contain the info
-            # from the central file after the last iteration of the for loop
-            metadata = pipeline.metadata
+            if f == central_file:
+                # Retrieve metadata before the pipeline is deleted
+                metadata = pipeline.metadata
             del pipeline

         params = get_writer_parameters_from_reader_metadata(metadata, a_srs=spatial_ref)
@@ -116,6 +142,106 @@ def las_merge_and_crop(
         pass


+def remove_points_from_buffer(input_file: str, output_file: str):
+    """Remove the points that were added as a buffer to a las file, using the "is_in_original"
+    dimension that has been added by create_las_with_buffer.
+
+    Limitation: if any point has been added to the point cloud after adding the buffer, it
+    won't be preserved by this operation (only points from the original file are kept)
+
+    Args:
+        input_file (str): path to the input file containing the "is_in_original" dimension
+        output_file (str): path to the output file
+    """
+    with tempfile.NamedTemporaryFile(suffix="_with_additional_dim.las") as tmp_las:
+        pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
+        pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]")
+        pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all")
+        pipeline.execute()
+
+        remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file)
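Taken together, `create_las_with_buffer` (with `tag_original_tile=True`) and `remove_points_from_buffer` form a round trip: buffer in, process, strip back out. A minimal usage sketch (paths and the tile name are placeholders; the filename must encode the tile coordinates as expected by `get_buffered_bounds_from_filename`):

```python
from pdaltools.las_add_buffer import create_las_with_buffer, remove_points_from_buffer

# Add a 100 m buffer from the neighboring tiles found in input_dir, flagging
# points of the central tile in the "is_in_original" dimension...
create_las_with_buffer(
    input_dir="tiles/",
    tile_filename="tiles/Semis_2021_0770_6278_LA93_IGN69.laz",  # placeholder name
    output_filename="tmp/buffered.laz",
    buffer_width=100,
    tag_original_tile=True,
)
# ...then drop the buffer points again; the helper dimension is removed as well.
remove_points_from_buffer("tmp/buffered.laz", "tmp/stripped.laz")
```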
+
+
+def run_on_buffered_las(
+    buffer_width: int, spatial_ref: str, tile_width: int = 1000, tile_coord_scale: int = 1000
+) -> Callable:
+    """Decorator to apply a function that takes a las/laz as input and returns a las/laz output
+    on an input with an additional buffer, then remove the buffer points from the output.
+
+    The first argument of the decorated function must be an input path.
+    The second argument of the decorated function must be an output path.
+
+    The buffer is added by merging the lidar tiles around the queried tile and cropping them
+    based on their filenames.
+
+    Limitation: if any point has been added to the point cloud by the decorated function, it
+    won't be preserved by this operation (only points from the original file are kept)
+
+    Args:
+        buffer_width (int): width of the border to add to the tile (in meters)
+        spatial_ref (str): spatial reference for the writer. Example: "EPSG:2154".
+        tile_width (int, optional): width of tiles in meters (usually 1000m). Defaults to 1000.
+        tile_coord_scale (int, optional): scale used in the filename to describe coordinates in meters.
+            Defaults to 1000.
+
+    Raises:
+        FileNotFoundError: when the first argument of the decorated function is not an existing
+            file
+        FileNotFoundError: when the second argument of the decorated function is not a path
+            with an existing parent folder
+
+    Returns:
+        Callable: decorated function
+    """
+
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            input_file = args[0]
+            output_file = args[1]
+            if not Path(input_file).is_file():
+                raise FileNotFoundError(
+                    f"File {args[0]} not found. The first argument of a function decorated by "
+                    "'run_on_buffered_las' is expected to be the path to an existing input file."
+                )
+
+            if not Path(output_file).parent.is_dir():
+                raise FileNotFoundError(
+                    f"Parent folder for file {args[1]} not found. The second argument of a function "
+                    "decorated by 'run_on_buffered_las' is expected to be the path to an output "
+                    "file in an existing folder."
+                )
+
+            with (
+                tempfile.NamedTemporaryFile(suffix="_buffered_input.laz", dir=".") as buf_in,
+                tempfile.NamedTemporaryFile(suffix="_buffered_output.laz", dir=".") as buf_out,
+            ):
+                create_las_with_buffer(
+                    Path(input_file).parent,
+                    input_file,
+                    buf_in.name,
+                    buffer_width=buffer_width,
+                    spatial_ref=spatial_ref,
+                    tile_width=tile_width,
+                    tile_coord_scale=tile_coord_scale,
+                    tag_original_tile=True,
+                )
+                func(buf_in.name, buf_out.name, *args[2:], **kwargs)
+
+                remove_points_from_buffer(buf_out.name, output_file)
+
+        return wrapper
+
+    return decorator
+
+
 def parse_args():
     parser = argparse.ArgumentParser("Add a buffer to a las tile by stitching with its neighbors")
     parser.add_argument(
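`run_on_buffered_las` turns any path-in/path-out las function into one that runs with stitched-in context from neighboring tiles (looked up in the input file's own folder). A usage sketch; `process_tile` and the paths are placeholders:

```python
import shutil

from pdaltools.las_add_buffer import run_on_buffered_las

@run_on_buffered_las(buffer_width=100, spatial_ref="EPSG:2154")
def process_tile(input_las: str, output_las: str):
    # Stand-in for a real las -> las processing step: the function receives the
    # buffered input and must write the (still buffered) output; the decorator
    # then trims the result back to the original tile extent.
    shutil.copy(input_las, output_las)

process_tile("tiles/Semis_2021_0770_6278_LA93_IGN69.laz", "out/processed.laz")
```

The test suite below exercises exactly this pattern, decorating `shutil.copy` directly.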
diff --git a/pdaltools/las_remove_dimensions.py b/pdaltools/las_remove_dimensions.py
new file mode 100644
index 0000000..0063d0a
--- /dev/null
+++ b/pdaltools/las_remove_dimensions.py
@@ -0,0 +1,58 @@
+import argparse
+from typing import List
+
+import pdal
+
+from pdaltools.las_info import get_writer_parameters_from_reader_metadata
+
+
+def remove_dimensions_from_las(input_las: str, dimensions: List[str], output_las: str):
+    """Export a new las, without the given dimensions."""
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline.execute()
+    points = pipeline.arrays[0]
+    input_dimensions = list(points.dtype.fields.keys())
+    output_dimensions = [dim for dim in input_dimensions if dim not in dimensions]
+    points_pruned = points[output_dimensions]
+    params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
+    pipeline_end = pdal.Pipeline(arrays=[points_pruned])
+    pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
+    pipeline_end.execute()
+
+
+def parse_args():
+    parser = argparse.ArgumentParser("Remove dimensions from las")
+    parser.add_argument(
+        "--input_las",
+        "-i",
+        type=str,
+        required=True,
+        help="Path to the las for which the dimensions will be removed",
+    )
+    parser.add_argument(
+        "--output_las",
+        "-o",
+        type=str,
+        required=False,
+        help="Path to the output las; if none, the input las is replaced",
+    )
+    parser.add_argument(
+        "--dimensions",
+        "-d",
+        type=str,
+        required=True,
+        nargs="+",
+        help="Dimensions to remove from the point cloud file; be careful not to remove "
+        "mandatory las dimensions",
+    )
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    remove_dimensions_from_las(
+        input_las=args.input_las,
+        dimensions=args.dimensions,
+        output_las=args.input_las if args.output_las is None else args.output_las,
+    )
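The new module can be used as a script (`python -m pdaltools.las_remove_dimensions -i in.laz -o out.laz -d DIM_1 DIM_2`) or through its API. A minimal API sketch, with placeholder paths and dimension names:

```python
from pdaltools.las_remove_dimensions import remove_dimensions_from_las

# Drop two extra dimensions into a pruned copy of the file; mandatory las
# dimensions (X, Y, Z, ...) must not be listed, since the writer needs them.
remove_dimensions_from_las(
    input_las="cloud.laz",
    dimensions=["DIM_1", "DIM_2"],
    output_las="cloud_pruned.laz",
)
```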
diff --git a/test/test_las_add_buffer.py b/test/test_las_add_buffer.py
index 25c9564..160aaf8 100644
--- a/test/test_las_add_buffer.py
+++ b/test/test_las_add_buffer.py
@@ -10,7 +10,11 @@
 from pdaltools.count_occurences.count_occurences_for_attribute import (
     compute_count_one_file,
 )
-from pdaltools.las_add_buffer import create_las_with_buffer
+from pdaltools.las_add_buffer import (
+    create_las_with_buffer,
+    remove_points_from_buffer,
+    run_on_buffered_las,
+)

 TEST_PATH = os.path.dirname(os.path.abspath(__file__))
 TMP_PATH = os.path.join(TEST_PATH, "tmp")
@@ -44,7 +48,6 @@ def get_2d_bounding_box(path):
     return mins[:2], maxs[:2]


-# Tests
 def test_create_las_with_buffer():
     output_file = os.path.join(TMP_PATH, "buffer.las")
     # Note: this tile does not have a tile at its bottom
@@ -88,6 +91,11 @@ def test_create_las_with_buffer():
     # Check the input header infos are preserved in the output
     assert_header_info_are_similar(output_file, input_file)

+    # Check output dimensions are the same as input dimensions
+    output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"]
+    input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"]
+    assert output_dimensions == input_dimensions
+
     # Check that classes are preserved (in particular classes over 31)
     # Warning: classification values > 31 exist only for las 1.4 with dataformat_id >= 6
     classes_counts = compute_count_one_file(output_file)
@@ -95,6 +103,112 @@ def test_create_las_with_buffer():
     assert set(classes_counts.keys()) == {"1", "2", "3", "4", "5", "6", "64"}


-if __name__ == "__main__":
-    logging.basicConfig(level=logging.INFO)
-    test_create_las_with_buffer()
+def test_create_las_with_buffer_with_tag():
+    output_file = os.path.join(TMP_PATH, "buffer_with_tag.las")
+    # Note: this tile does not have a tile at its bottom.
+    # And its left-side tile has been cropped to have no data in the buffer area.
+    # This case must not generate any error.
+    input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz")
+    tile_width = 50
+    tile_coord_scale = 10
+    input_nb_points = 72770
+    expected_out_mins = [770545.0, 6277500.0]
+    expected_out_maxs = [770605.0, 6277555.0]
+
+    buffer_width = 5
+    create_las_with_buffer(
+        INPUT_DIR,
+        input_file,
+        output_file,
+        buffer_width=buffer_width,
+        tile_width=tile_width,
+        tile_coord_scale=tile_coord_scale,
+        tag_original_tile=True,
+    )
+    logging.info(get_nb_points(input_file))
+    # check file exists
+    assert os.path.isfile(output_file)
+
+    # check number of points with the additional tag
+    assert get_nb_points(output_file) > input_nb_points
+    count_points_from_original = compute_count_one_file(output_file, attribute="is_in_original")
+    assert count_points_from_original["1"] == input_nb_points
+
+    # Check boundaries
+    out_mins, out_maxs = get_2d_bounding_box(output_file)
+    assert np.all(out_mins == expected_out_mins)
+    assert np.all(out_maxs == expected_out_maxs)
+
+    # Check the input header infos are preserved in the output
+    assert_header_info_are_similar(output_file, input_file)
+
+    # Check output dimensions are the same as input dimensions, plus one additional dimension
+    output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"]
+    input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"]
+    assert output_dimensions == input_dimensions + ", is_in_original"
+
+    # Check that classes are preserved (in particular classes over 31)
+    # Warning: classification values > 31 exist only for las 1.4 with dataformat_id >= 6
+    classes_counts = compute_count_one_file(output_file)
+
+    assert set(classes_counts.keys()) == {"1", "2", "3", "4", "5", "6", "64"}
+
+
+def test_remove_points_from_buffer():
+    input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz")
+    buffered_file = os.path.join(TMP_PATH, "remove_points_from_buffer_intermediate.las")
+    output_file = os.path.join(TMP_PATH, "remove_points_from_buffer_output.las")
+    tile_width = 50
+    tile_coord_scale = 10
+
+    buffer_width = 5
+    create_las_with_buffer(
+        INPUT_DIR,
+        input_file,
+        buffered_file,
+        buffer_width=buffer_width,
+        tile_width=tile_width,
+        tile_coord_scale=tile_coord_scale,
+        tag_original_tile=True,
+    )
+
+    remove_points_from_buffer(buffered_file, output_file)
+    assert os.path.isfile(output_file)
+    assert get_nb_points(buffered_file) > get_nb_points(input_file)
+    assert get_nb_points(output_file) == get_nb_points(input_file)
+
+    # Check output dimensions are the same as input dimensions
+    output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"]
+    buffered_dimensions = tu.get_pdal_infos_summary(buffered_file)["summary"]["dimensions"]
+    input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"]
+    assert buffered_dimensions == input_dimensions + ", is_in_original"
+    assert output_dimensions == input_dimensions
+
+    # Check the input header infos are preserved in the output
+    assert_header_info_are_similar(output_file, input_file)
+
+
+def test_run_on_buffered_las():
+    # Dummy example with copy only
+    buffer_width = 5
+    tile_width = 50
+    tile_coord_scale = 10
+    spatial_ref = "EPSG:2154"
+
+    input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz")
+    output_dir = os.path.join(TMP_PATH, "run_with_buffer")
+    os.makedirs(output_dir, exist_ok=True)
+    output_file = os.path.join(output_dir, "copied.laz")
+    decorated_copy = run_on_buffered_las(
+        buffer_width, spatial_ref=spatial_ref, tile_width=tile_width, tile_coord_scale=tile_coord_scale
+    )(shutil.copy)
+
+    decorated_copy(input_file, output_file)
+
+    # Check output dimensions are the same as input dimensions
+    output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"]
+    input_dimensions = tu.get_pdal_infos_summary(input_file)["summary"]["dimensions"]
+    assert output_dimensions == input_dimensions
+
+    # Check las content
+    assert get_nb_points(output_file) == get_nb_points(input_file)
+    assert compute_count_one_file(output_file) == compute_count_one_file(input_file)
diff --git a/test/test_las_remove_dimensions.py b/test/test_las_remove_dimensions.py
new file mode 100644
index 0000000..00eb282
--- /dev/null
+++ b/test/test_las_remove_dimensions.py
@@ -0,0 +1,67 @@
+import logging
+import os
+import tempfile
+
+import numpy
+import pdal
+import pytest
+
+from pdaltools import las_remove_dimensions
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+INPUT_DIR = os.path.join(TEST_PATH, "data")
+
+ini_las = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
+added_dimensions = ["DIM_1", "DIM_2"]
+
+
+def get_points(input_las: str):
+    pipeline_read_ini = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline_read_ini.execute()
+    return pipeline_read_ini.arrays[0]
+
+
+def append_dimensions(input_las: str, output_las: str):
+    pipeline = pdal.Pipeline()
+    pipeline |= pdal.Reader.las(input_las)
+    pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
+    pipeline |= pdal.Writer.las(output_las, extra_dims="all", forward="all")
+    pipeline.execute()
+
+
+def test_remove_all_dimensions():
+    # get initial data
+    points_ini = get_points(ini_las)
+
+    with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las:
+        append_dimensions(ini_las, tmp_las.name)
+        with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm:
+            # remove all added dimensions
+            las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, added_dimensions, tmp_las_rm.name)
+            points_end = get_points(tmp_las_rm.name)
+            assert numpy.array_equal(points_ini, points_end)  # output data should be the same
+
+
+def test_remove_one_dimension():
+    # get initial data
+    points_ini = get_points(ini_las)
+
+    with tempfile.NamedTemporaryFile(suffix="_add.las") as tmp_las:
+        append_dimensions(ini_las, tmp_las.name)
+        with tempfile.NamedTemporaryFile(suffix="_rm.las") as tmp_las_rm:
+            # remove one dimension
+            las_remove_dimensions.remove_dimensions_from_las(tmp_las.name, ["DIM_1"], tmp_las_rm.name)
+            points_end = get_points(tmp_las_rm.name)
+
+            assert list(points_end.dtype.fields.keys()).index("DIM_2") >= 0  # should still contain DIM_2
+
+            with pytest.raises(ValueError):
+                list(points_end.dtype.fields.keys()).index("DIM_1")  # should not have DIM_1
+
+            with pytest.raises(TypeError):
+                numpy.array_equal(points_ini, points_end)  # output data should not be the same
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+    test_remove_all_dimensions()
+    test_remove_one_dimension()
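For readers reproducing these tests, the dimension set of any output file can be inspected with the same pdal-python pattern the tests use in `get_points` (the path below is a placeholder):

```python
import pdal

# List the dimensions of a las/laz file, e.g. to confirm which extra dimensions
# remove_dimensions_from_las actually dropped.
pipeline = pdal.Pipeline() | pdal.Reader.las("cloud_pruned.laz")
pipeline.execute()
print(list(pipeline.arrays[0].dtype.fields.keys()))
```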