Skip to content

Commit

Permalink
Merge pull request #38 from IGNF/fix-add-buffer-version
Browse files Browse the repository at this point in the history
Fix pdal las writer parameters
  • Loading branch information
leavauchier authored Jan 24, 2024
2 parents 5eff6ce + b8b2532 commit a74ebe1
Show file tree
Hide file tree
Showing 25 changed files with 163 additions and 99 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# dev
- fix add_buffer: propagate header info from the input to the output
- update pdal.Writer params to make sure input format is forwarded except for the specified parameters

# 1.5.0
- switch colorisation source from Geoportail to Geoplateforme
- use absolute value comparison in tests
Expand Down
2 changes: 1 addition & 1 deletion pdaltools/color.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def color(
pipeline |= pdal.Filter.colorization(raster=tmp_ortho_irc.name, dimensions="Infrared:1:256.0")

pipeline |= pdal.Writer.las(
filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8"
filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8", forward="all"
)

print("Traitement du nuage de point")
Expand Down
50 changes: 45 additions & 5 deletions pdaltools/las_add_buffer.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import argparse
import logging
import os
from typing import List
from typing import Dict, List

import pdal

Expand Down Expand Up @@ -46,6 +46,40 @@ def create_las_with_buffer(
)


def get_writer_parameters_from_reader_metadata(metadata: Dict, a_srs=None) -> Dict:
    """Build pdal las writer parameters from the metadata of a previously executed pipeline.

    As pdal las writers do not make it easy to pass metadata from one file as
    parameters for a writer, use a trick to generate writer parameters from the
    reader metadata of a previous pipeline:
    this function uses the metadata from the reader of one pipeline to provide
    parameters to pass to the writer of another pipeline.
    To be removed once https://github.com/PDAL/python/issues/147 is solved.

    Args:
        metadata (Dict): metadata of an executed pipeline (that can be accessed using pipeline.metadata)
        a_srs: spatial reference to assign to the output. If falsy, the compound
            spatial reference found in the reader metadata is used instead.

    Returns:
        Dict: parameters to pass to a pdal writer
    """

    reader_metadata = metadata["metadata"]["readers.las"]

    params = {
        # Preserve the las version and global encoding of the input file
        "major_version": reader_metadata["major_version"],
        "minor_version": reader_metadata["minor_version"],
        "global_encoding": reader_metadata["global_encoding"],
        # Keep every extra dimension present in the input point cloud
        "extra_dims": "all",
        # Preserve the coordinate quantization (scales/offsets) of the input file
        "scale_x": reader_metadata["scale_x"],
        "scale_y": reader_metadata["scale_y"],
        "scale_z": reader_metadata["scale_z"],
        "offset_x": reader_metadata["offset_x"],
        "offset_y": reader_metadata["offset_y"],
        "offset_z": reader_metadata["offset_z"],
        # Preserve the point data record format of the input file
        "dataformat_id": reader_metadata["dataformat_id"],
        "a_srs": a_srs if a_srs else reader_metadata["comp_spatialreference"],
    }
    return params


def las_merge_and_crop(
input_dir: str,
tile_filename: str,
Expand Down Expand Up @@ -77,12 +111,12 @@ def las_merge_and_crop(
(usually 1000m)
"""
# List files to merge
Listfiles = create_list(input_dir, tile_filename, tile_width, tile_coord_scale)
files_to_merge = create_list(input_dir, tile_filename, tile_width, tile_coord_scale)

if len(Listfiles) > 0:
if len(files_to_merge) > 0:
# Read and crop each file
crops = []
for f in Listfiles:
for f in files_to_merge:
pipeline = pdal.Pipeline()
pipeline |= pdal.Reader.las(filename=f, override_srs=spatial_ref)
pipeline |= pdal.Filter.crop(bounds=str(bounds))
Expand All @@ -92,13 +126,19 @@ def las_merge_and_crop(
else:
crops.append(pipeline.arrays[0])

# Retrieve metadata before the pipeline is deleted
# As the last file of files_to_merge is the central one, metadata will contain the info
# from the central file after the last iteration of the for loop
metadata = pipeline.metadata
del pipeline

params = get_writer_parameters_from_reader_metadata(metadata, a_srs=spatial_ref)

# Merge
pipeline = pdal.Filter.merge().pipeline(*crops)

# Write
pipeline |= pdal.Writer(filename=output_filename, a_srs=spatial_ref)
pipeline |= pdal.Writer(filename=output_filename, forward="all", **params)

logging.info(pipeline.toJSON())
pipeline.execute()
Expand Down
2 changes: 1 addition & 1 deletion pdaltools/las_clip.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def las_crop(input_file: str, output_file: str, bounds, spatial_ref: str = "EPSG
"pipeline": [
{"type": "readers.las", "filename": input_file, "override_srs": spatial_ref, "nosrs": True},
{"type": "filters.crop", "bounds": str(bounds)},
{"type": "writers.las", "a_srs": spatial_ref, "filename": output_file},
{"type": "writers.las", "a_srs": spatial_ref, "filename": output_file, "forward": "all"},
]
}
# Create json
Expand Down
15 changes: 7 additions & 8 deletions pdaltools/las_merge.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import json
import logging
import os

Expand Down Expand Up @@ -97,14 +96,14 @@ def las_merge(las_dir, input_file, merge_file, tile_width=1000, tile_coord_scale
1000 * 1m (with 1m being the reference)
"""
# List files to merge
Listfiles = create_list(las_dir, input_file, tile_width, tile_coord_scale)
if len(Listfiles) > 0:
files = create_list(las_dir, input_file, tile_width, tile_coord_scale)
if len(files) > 0:
# Merge
information = {}
information = {"pipeline": Listfiles + [merge_file]}
merge = json.dumps(information, sort_keys=True, indent=4)
logging.info(merge)
pipeline = pdal.Pipeline(merge)
pipeline = pdal.Pipeline()
for f in files:
pipeline |= pdal.Reader.las(filename=f)
pipeline |= pdal.Filter.merge()
pipeline |= pdal.Writer.las(filename=merge_file, forward="all")
pipeline.execute()
else:
raise ValueError("List of valid tiles is empty : stop processing")
2 changes: 1 addition & 1 deletion pdaltools/replace_attribute_in_las.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def replace_values(
pipeline |= pdal.Filter.assign(value=assignment_list)
# the temp_attribute dimension should not be written as long as the writer has no "extra_dims"
# parameter
pipeline |= pdal.Writer(filename=output_file, **writer_parameters)
pipeline |= pdal.Writer(filename=output_file, forward="all", **writer_parameters)

pipeline.execute()

Expand Down
2 changes: 1 addition & 1 deletion pdaltools/standardize_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def rewrite_with_pdal(input_file: str, output_file: str, params_from_parser: Dic
# Update parameters with command line values
params = get_writer_parameters(params_from_parser)
pipeline = pdal.Reader.las(input_file)
pipeline |= pdal.Writer(filename=output_file, **params)
pipeline |= pdal.Writer(filename=output_file, forward="all", **params)
pipeline.execute()


Expand Down
Binary file added test/data/test_data_77050_627755_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
Binary file added test/data/test_data_77050_627760_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
Binary file added test/data/test_data_77055_627755_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
Binary file added test/data/test_data_77055_627760_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
Binary file added test/data/test_data_77060_627755_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
Binary file added test/data/test_data_77060_627760_LA93_IGN69.laz
Binary file not shown.
Binary file not shown.
59 changes: 35 additions & 24 deletions test/test_las_add_buffer.py
Original file line number Diff line number Diff line change
@@ -1,37 +1,29 @@
import logging
import os
import shutil
import test.utils as tu
from test.utils import assert_header_info_are_similar

import laspy
import numpy as np

from pdaltools.count_occurences.count_occurences_for_attribute import (
compute_count_one_file,
)
from pdaltools.las_add_buffer import create_las_with_buffer

test_path = os.path.dirname(os.path.abspath(__file__))
tmp_path = os.path.join(test_path, "tmp")
input_dir = os.path.join(test_path, "data")
output_file = os.path.join(tmp_path, "cropped.las")

coord_x = 77055
coord_y = 627760
# Note: neighbor tile 77050_627760 is cropped to simulate missing data in neighbors during merge
input_file = os.path.join(input_dir, f"test_data_{coord_x}_{coord_y}_LA93_IGN69_ground.las")
tile_width = 50
tile_coord_scale = 10

input_nb_points = 22343
expected_output_nb_points = 40177
expected_out_mins = [770540.01, 6277540.0]
expected_out_maxs = [770610.0, 6277600.0]
TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMP_PATH = os.path.join(TEST_PATH, "tmp")
INPUT_DIR = os.path.join(TEST_PATH, "data")


def setup_module(module):
try:
shutil.rmtree(tmp_path)
shutil.rmtree(TMP_PATH)

except FileNotFoundError:
pass
os.mkdir(tmp_path)
os.mkdir(TMP_PATH)


# Utils functions
Expand All @@ -54,9 +46,19 @@ def get_2d_bounding_box(path):

# Tests
def test_create_las_with_buffer():
buffer_width = 10
output_file = os.path.join(TMP_PATH, "buffer.las")
# Note: this tile does not have a tile at its bottom
    # And its left-side tile has been cropped to have no data in the buffer area. This case must not generate any error
input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69.laz")
tile_width = 50
tile_coord_scale = 10
input_nb_points = 72770
expected_out_mins = [770545.0, 6277500.0]
expected_out_maxs = [770605.0, 6277555.0]

buffer_width = 5
create_las_with_buffer(
input_dir,
INPUT_DIR,
input_file,
output_file,
buffer_width=buffer_width,
Expand All @@ -73,16 +75,25 @@ def test_create_las_with_buffer():

# The following test does not work on the current test case as there is no tile on the left
# and the top of the tile
assert np.all(np.isclose(out_mins, in_mins - buffer_width))
assert np.all(np.isclose(out_maxs, in_maxs + buffer_width))
tu.allclose_absolute(out_mins, in_mins - buffer_width, 1e-3)
tu.allclose_absolute(out_maxs, in_maxs + buffer_width, 1e-3)

# check number of points
assert get_nb_points(output_file) == expected_output_nb_points
assert get_nb_points(output_file) > input_nb_points

# Check contre valeur attendue
# Check boundaries
assert np.all(out_mins == expected_out_mins)
assert np.all(out_maxs == expected_out_maxs)

# Check the input header infos are preserved in the output
assert_header_info_are_similar(output_file, input_file)

# Check that classes are preserved (in particular classes over 31)
# Warning: classification values > 31 exist only for las 1.4 with dataformat_id >= 6
classes_counts = compute_count_one_file(output_file)

assert set(classes_counts.keys()) == {"1", "2", "3", "4", "5", "6", "64"}


if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
Expand Down
36 changes: 18 additions & 18 deletions test/test_las_clip.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,25 @@
import logging
import os
import shutil
from test.utils import assert_header_info_are_similar

import laspy
import numpy as np

from pdaltools.las_clip import las_crop

test_path = os.path.dirname(os.path.abspath(__file__))
tmp_path = os.path.join(test_path, "tmp")
input_dir = os.path.join(test_path, "data")
output_file = os.path.join(tmp_path, "cropped.las")

coord_x = 77055
coord_y = 627760
input_file = os.path.join(input_dir, f"test_data_{coord_x}_{coord_y}_LA93_IGN69_ground.las")

input_nb_points = 22343
expected_output_nb_points = 6578
input_mins = [770550.0, 6277550.0]
input_maxs = [770600.0, 6277600.0]
expected_out_mins = [770560.0, 6277560.0]
expected_out_maxs = [770590.0, 6277590.0]
TEST_PATH = os.path.dirname(os.path.abspath(__file__))
TMP_PATH = os.path.join(TEST_PATH, "tmp")
INPUT_DIR = os.path.join(TEST_PATH, "data")


def setup_module(module):
try:
shutil.rmtree(tmp_path)
shutil.rmtree(TMP_PATH)

except FileNotFoundError:
pass
os.mkdir(tmp_path)
os.mkdir(TMP_PATH)


# Utils functions
Expand All @@ -53,14 +42,22 @@ def get_2d_bounding_box(path):

# Tests
def test_las_crop():
output_file = os.path.join(TMP_PATH, "cropped.las")

input_file = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
# FYI input min/max are: input_mins = [770550.0, 6277550.0] and input_maxs = [770600.0, 6277600.0]

expected_output_nb_points = 20862
expected_out_mins = [770560.0, 6277560.0]
expected_out_maxs = [770590.0, 6277590.0]
bounds = ([expected_out_mins[0], expected_out_maxs[0]], [expected_out_mins[1], expected_out_maxs[1]])

las_crop(input_file, output_file, bounds)

# check file exists
assert os.path.isfile(output_file)

# check difference in bbox
in_mins, in_maxs = get_2d_bounding_box(input_file)
out_mins, out_maxs = get_2d_bounding_box(output_file)

assert np.all(out_mins == expected_out_mins)
Expand All @@ -69,6 +66,9 @@ def test_las_crop():
# check number of points
assert get_nb_points(output_file) == expected_output_nb_points

# check that the las format is preserved
assert_header_info_are_similar(output_file, input_file)


if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
Expand Down
4 changes: 2 additions & 2 deletions test/test_las_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
COORD_X = 77055
COORD_Y = 627760

INPUT_FILE = os.path.join(DATA_PATH, f"test_data_{COORD_X}_{COORD_Y}_LA93_IGN69_ground.las")
INPUT_FILE = os.path.join(DATA_PATH, f"test_data_{COORD_X}_{COORD_Y}_LA93_IGN69.laz")

TILE_WIDTH = 50
TILE_COORD_SCALE = 10
Expand Down Expand Up @@ -70,7 +70,7 @@ def test_las_get_xy_bounds_with_buffer():
def test_parse_filename():
prefix, parsed_coord_x, parsed_coord_y, suffix = las_info.parse_filename(INPUT_FILE)
assert prefix == "test_data"
assert suffix == "LA93_IGN69_ground.las"
assert suffix == "LA93_IGN69.laz"
assert parsed_coord_x == COORD_X
assert parsed_coord_y == COORD_Y

Expand Down
Loading

0 comments on commit a74ebe1

Please sign in to comment.