Skip to content

Commit

Permalink
Merge pull request #20 from IGNF/dev
Browse files Browse the repository at this point in the history
Version 1.3.0
  • Loading branch information
leavauchier authored Aug 17, 2023
2 parents 226c378 + 998987d commit 462efee
Show file tree
Hide file tree
Showing 27 changed files with 418 additions and 366 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/cicd_full.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@

name: cicd_full

on:
# Run tests for pull-requests on master
Expand Down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
tmp
.vscode
__pycache__
ign_pdal_tools.egg-info
dist
12 changes: 11 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
# dev

# 1.3.0
- color: support colorization for <0.2m clouds (including height=0/width=0)
- color: ceil width/height to have a bbox that contains all points

# 1.2.1
- fix cicd_full github action: deployment was triggered on pushing to dev instead of master only

# 1.2.0
- color: keep downloaded orthoimages by returning them to make them stay in execution scope

# 1.1.1
- unlock: fix main
- tests:
Expand All @@ -8,7 +18,7 @@

# 1.1.0
- standardization: handle malformed laz input ("Global encoding WKT flag not set for point format 6 - 10")
- color: extract unlock module from colorization and rename colorization function

# 1.0.0
- first public version
Expand Down
2 changes: 1 addition & 1 deletion pdaltools/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "1.1.1"
__version__ = "1.3.0"


if __name__ == "__main__":
Expand Down
124 changes: 66 additions & 58 deletions pdaltools/color.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
from math import ceil
import subprocess as sp
import tempfile
import pdal
Expand All @@ -11,19 +12,19 @@


def pretty_time_delta(seconds):
    """Format a duration in seconds as a compact human-readable string.

    Examples: 90061 -> "1d1h1m1s", 125 -> "2m5s", -5 -> "-5s".

    Args:
        seconds: duration in seconds; may be negative. Fractions are
            truncated via int().

    Returns:
        str: signed "[Nd][Nh][Nm]Ns" string; leading zero-valued units
        are omitted (only the largest non-zero unit downward is shown).
    """
    sign_string = "-" if seconds < 0 else ""
    seconds = abs(int(seconds))
    # Peel off whole days, then hours, then minutes; remainder is seconds.
    days, seconds = divmod(seconds, 86400)
    hours, seconds = divmod(seconds, 3600)
    minutes, seconds = divmod(seconds, 60)
    if days > 0:
        return "%s%dd%dh%dm%ds" % (sign_string, days, hours, minutes, seconds)
    elif hours > 0:
        return "%s%dh%dm%ds" % (sign_string, hours, minutes, seconds)
    elif minutes > 0:
        return "%s%dm%ds" % (sign_string, minutes, seconds)
    else:
        return "%s%ds" % (sign_string, seconds)


def retry(times, delay, factor=2, debug=False):
Expand All @@ -36,11 +37,11 @@ def newfn(*args, **kwargs):
try:
return func(*args, **kwargs)
except requests.exceptions.ConnectionError as err:
print ("Connection Error:", err)
print("Connection Error:", err)
need_retry = True
except requests.exceptions.HTTPError as err:
if "Server Error" in str(err):
print ("HTTP Error:", err)
print("HTTP Error:", err)
need_retry = True
else:
raise err
Expand All @@ -52,25 +53,29 @@ def newfn(*args, **kwargs):
attempt += 1

return func(*args, **kwargs)

return newfn

return decorator


def download_image_from_geoportail(
proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout
):
def download_image_from_geoportail(proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout):
# Give single-point clouds a width/height of at least one pixel to have valid BBOX and SIZE
if minx == maxx:
maxx = minx + 1 / pixel_per_meter
if miny == maxy:
maxy = miny + 1 / pixel_per_meter

# for layer in layers:
URL_GPP = "https://wxs.ign.fr/ortho/geoportail/r/wms?"
URL_FORMAT = "&EXCEPTIONS=text/xml&FORMAT=image/geotiff&SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&STYLES="
URL_EPSG = "&CRS=EPSG:" + str(proj)
URL_BBOX = (
"&BBOX=" + str(minx) + "," + str(miny) + "," + str(maxx) + "," + str(maxy)
)
URL_BBOX = "&BBOX=" + str(minx) + "," + str(miny) + "," + str(maxx) + "," + str(maxy)
URL_SIZE = (
"&WIDTH="
+ str(int((maxx - minx) * pixel_per_meter))
+ str(ceil((maxx - minx) * pixel_per_meter))
+ "&HEIGHT="
+ str(int((maxy - miny) * pixel_per_meter))
+ str(ceil((maxy - miny) * pixel_per_meter))
)

URL = URL_GPP + "LAYERS=" + layer + URL_FORMAT + URL_EPSG + URL_BBOX + URL_SIZE
Expand Down Expand Up @@ -115,11 +120,16 @@ def pdal_info_json(input_file: str):


@copy_and_hack_decorator
def color(input_file: str, output_file :str,
proj="", pixel_per_meter=5, timeout_second=300,
color_rvb_enabled=True, color_ir_enabled=True, veget_index_file=""
):

def color(
input_file: str,
output_file: str,
proj="",
pixel_per_meter=5,
timeout_second=300,
color_rvb_enabled=True,
color_ir_enabled=True,
veget_index_file="",
):
json_info = pdal_info_json(input_file)
metadata = json_info["metadata"]
minx, maxx, miny, maxy = metadata["minx"], metadata["maxx"], metadata["miny"], metadata["maxy"]
Expand All @@ -139,62 +149,60 @@ def color(input_file: str, output_file :str,
pipeline |= pdal.Filter.colorization(raster=veget_index_file, dimensions="Deviation:1:256.0")
writer_extra_dims = ["Deviation=ushort"]

tmp_ortho = None
if color_rvb_enabled:
tmp_ortho = tempfile.NamedTemporaryFile().name
download_image_from_geoportail_retrying(proj, "ORTHOIMAGERY.ORTHOPHOTOS", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho, timeout_second)
pipeline|= pdal.Filter.colorization(raster=tmp_ortho, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0")
download_image_from_geoportail_retrying(
proj, "ORTHOIMAGERY.ORTHOPHOTOS", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho, timeout_second
)
pipeline |= pdal.Filter.colorization(raster=tmp_ortho, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0")

tmp_ortho_irc = None
if color_ir_enabled:
tmp_ortho_irc = tempfile.NamedTemporaryFile().name
download_image_from_geoportail_retrying(proj, "ORTHOIMAGERY.ORTHOPHOTOS.IRC", minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho_irc, timeout_second)
download_image_from_geoportail_retrying(
proj,
"ORTHOIMAGERY.ORTHOPHOTOS.IRC",
minx,
miny,
maxx,
maxy,
pixel_per_meter,
tmp_ortho_irc,
timeout_second,
)
pipeline |= pdal.Filter.colorization(raster=tmp_ortho_irc, dimensions="Infrared:1:256.0")

pipeline |= pdal.Writer.las(filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8")
pipeline |= pdal.Writer.las(
filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8"
)

print("Traitement du nuage de point")
pipeline.execute()

# os.remove(tmp_ortho)
# os.remove(tmp_ortho_irc)
# The orthoimages files will be deleted only when their reference are lost.
# To keep them, make a copy (with e.g. shutil.copy(...))
# See: https://docs.python.org/2/library/tempfile.html#tempfile.TemporaryFile
return tmp_ortho, tmp_ortho_irc


def parse_args():
    """Build and parse the command-line arguments of the colorize tool.

    Returns:
        argparse.Namespace with fields: input (required), output (""),
        proj (""), resolution (5.0 pixel/m), timeout (300 s),
        rvb (bool), ir (bool), vegetation ("").
    """
    parser = argparse.ArgumentParser("Colorize tool")
    parser.add_argument("--input", "-i", type=str, required=True, help="Input file")
    parser.add_argument("--output", "-o", type=str, default="", help="Output file")
    parser.add_argument(
        "--proj", "-p", type=str, default="", help="Projection, default will use projection from metadata input"
    )
    parser.add_argument("--resolution", "-r", type=float, default=5, help="Resolution, in pixel per meter")
    parser.add_argument("--timeout", "-t", type=int, default=300, help="Timeout, in seconds")
    parser.add_argument("--rvb", action="store_true", help="Colorize RVB")
    parser.add_argument("--ir", action="store_true", help="Colorize IR")
    parser.add_argument(
        "--vegetation", type=str, default="", help="Vegetation file, value will be stored in Deviation field"
    )
    return parser.parse_args()


if __name__ == "__main__":
    # CLI entry point: parse the options and colorize the input point cloud once.
    args = parse_args()
    color(args.input, args.output, args.proj, args.resolution, args.timeout, args.rvb, args.ir, args.vegetation)
29 changes: 12 additions & 17 deletions pdaltools/count_occurences/count_occurences_for_attribute.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,23 +13,19 @@

def parse_args():
    """Parse command-line arguments of the attribute-occurrence counting tool.

    Returns:
        argparse.Namespace with fields: input_files (list of paths, or a
        single directory), attribute (default "Classification"),
        output_file (None when not provided).
    """
    parser = argparse.ArgumentParser("Count points with each value of an attribute.")
    parser.add_argument(
        "--input_files",
        nargs="+",
        type=str,
        help="List of laz input files separated by spaces, or directory containing las/laz files",
    )
    parser.add_argument("--attribute", type=str, default="Classification", help="Attribute on which to count values")
    parser.add_argument("--output_file", type=str, help="Output json file containing the counts")

    return parser.parse_args()


def compute_count_one_file(filepath: str, attribute: str="Classification") -> Counter:
def compute_count_one_file(filepath: str, attribute: str = "Classification") -> Counter:
pipeline = pdal.Reader.las(filepath)
pipeline |= pdal.Filter.stats(dimensions=attribute, count=attribute)
pipeline.execute()
Expand All @@ -48,10 +44,10 @@ def compute_count_one_file(filepath: str, attribute: str="Classification") -> Co
return counts


def compute_count(input_files: List[str], attribute: str="Classification", output_file=""):
def compute_count(input_files: List[str], attribute: str = "Classification", output_file=""):
all_counts = Counter()
# refresh status bar at most every 1/100 iter cf. https://github.com/tqdm/tqdm/issues/1429
for f in tqdm(input_files, miniters=int(len(input_files)/100), maxinterval=float('inf')):
for f in tqdm(input_files, miniters=int(len(input_files) / 100), maxinterval=float("inf")):
logging.debug(f"Counting values of {attribute} for {os.path.basename(f)}")
all_counts += compute_count_one_file(f, attribute)

Expand All @@ -69,8 +65,7 @@ def main():
args = parse_args()
if len(args.input_files) == 1 and os.path.isdir(args.input_files[0]):
input_dir = args.input_files[0]
input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir)
if f.lower().endswith(("las", "laz"))]
input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir) if f.lower().endswith(("las", "laz"))]
else:
input_files = args.input_files

Expand Down
21 changes: 10 additions & 11 deletions pdaltools/count_occurences/merge_occurences_counts.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,22 +13,21 @@

def parse_args():
    """Parse command-line arguments of the counts-merging tool.

    Returns:
        argparse.Namespace with fields: input_files (list of json paths,
        or a single directory), output_file (None when not provided).
    """
    parser = argparse.ArgumentParser("Count points with each value of an attribute.")
    parser.add_argument(
        "--input_files",
        nargs="+",
        type=str,
        help="List of json input files separated by spaces, or directory containing json files",
    )
    parser.add_argument("--output_file", type=str, help="Output json file containing the counts")

    return parser.parse_args()


def merge_counts(input_files: List[str], output_file=""):
all_counts = Counter()
# refresh status bar at most every 1/100 iter cf. https://github.com/tqdm/tqdm/issues/1429
for input_f in tqdm(input_files, miniters=int(len(input_files)/100), maxinterval=float('inf')):
for input_f in tqdm(input_files, miniters=int(len(input_files) / 100), maxinterval=float("inf")):
with open(input_f, "r") as f:
count = Counter(json.load(f))

Expand All @@ -43,12 +42,12 @@ def merge_counts(input_files: List[str], output_file=""):

return all_counts


def main():
args = parse_args()
if len(args.input_files) == 1 and os.path.isdir(args.input_files[0]):
input_dir = args.input_files[0]
input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir)
if f.lower().endswith("json")]
input_files = [os.path.join(input_dir, f) for f in os.listdir(input_dir) if f.lower().endswith("json")]
logging.info(f"Input_files is a directory. Run on {len(input_files)} from this directory.")
else:
logging.info(f"Input_files is a list of files. Run on {len(args.input_files)} files.")
Expand Down
Loading

0 comments on commit 462efee

Please sign in to comment.