
Commit

fixes
anikaweinmann committed Dec 17, 2024
1 parent 2d4b285 commit e843f09
Showing 5 changed files with 1,242 additions and 28 deletions.
m.neural_network.preparedata.worker_export/m.neural_network.preparedata.worker_export.py
@@ -93,10 +93,12 @@
# %end

import os

import shutil
import grass.script as grass

from grass.script.vector import vector_info_topo
from grass_gis_helpers.mapset import switch_to_new_mapset
from grass.pygrass.utils import get_lib_path

EXPORT_PARAM = {

Check failure on line 103, GitHub Actions / lint / ruff: m.neural_network.preparedata.worker_export/m.neural_network.preparedata.worker_export.py:95:1: Ruff (I001) Import block is un-sorted or un-formatted
"format": "GTiff",
@@ -117,6 +119,11 @@ def main():
output_dir = options["output_dir"]
tr_flag = flags["t"]

# get addon etc path
etc_path = get_lib_path(modname="m.neural_network.preparedata")
if etc_path is None:
grass.fatal("Unable to find qml files!")

# make new output directory
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
@@ -149,13 +156,13 @@ def main():
output=os.path.join(output_dir, f"ndsm_{tile_name}.tif"),
**EXPORT_PARAM,
)

# nDSM scaled + export (cut to [0, 30] and rescale to [1, 255])
ndsm_sc_file = os.path.join(output_dir, f"ndsm_1_255_{tile_name}.tif")
ex_cut = f"ndsm_cut = if( {ndsm} >= 30, 30, if( {ndsm} < 0, 0, {ndsm} ) )"
grass.run_command("r.mapcalc", expression=ex_cut)
ex_scale = "ndsm_scaled = int((ndsm_cut / 30. * 254.) + 1)"
grass.run_command("r.mapcalc", expression=ex_scale)

grass.run_command(
"r.out.gdal",
input="ndsm_scaled",
@@ -167,6 +174,7 @@
# segmentation or clip reference data
if tr_flag:
label_file = os.path.join(output_dir, f"label_{tile_name}.gpkg")
create_seg = False
if reference:
grass.run_command(
"v.clip",
@@ -175,36 +183,46 @@
flags="r",
quiet=True,
)
grass.run_command(
"v.db.addcolumn",
map="segments",
columns="class_number INTEGER",
quiet=True,
)
grass.run_command(
"v.db.update",
map="segments",
column="class_number",
value=0,
quiet=True,
)
grass.run_command(
"v.out.ogr",
input="reference_clipped",
output=label_file,
flags="s",
quiet=True,
)
if vector_info_topo("reference_clipped")["centroids"] == 0:
create_seg = True
else:
grass.run_command(
"v.db.addcolumn",
map="reference_clipped",
columns="class_number INTEGER",
quiet=True,
)
grass.run_command(
"v.db.update",
map="reference_clipped",
column="class_number",
value=0,
quiet=True,
)
grass.run_command(
"v.out.ogr",
input="reference_clipped",
output=label_file,
flags="s",
quiet=True,
)
else:
grass.run_command(
"i.group", group="image_bands", input="ndsm_scaled"
create_seg = True
if create_seg:
ndsm_range = grass.parse_command(
"r.info", map="ndsm_scaled", flags="r"
)
if ndsm_range["min"] != ndsm_range["max"]:
grass.run_command(
"i.group", group="image_bands", input="ndsm_scaled"
)
grass.run_command(
"i.segment",
group="image_bands",
output="segments",
threshold=segmentation_threshold,
minsize=segmentation_minsize,
memory=1000,
quiet=True,
)
grass.run_command(
@@ -230,7 +248,10 @@ def main():
flags="s",
quiet=True,
)
# TODO QML file? Label style file
# copy qml file
qml_src_file = os.path.join(etc_path, "qml", "label.qml")
qml_dest_file = os.path.join(output_dir, f"label_{tile_name}.qml")
shutil.copyfile(qml_src_file, qml_dest_file)

# switch back to original mapset
grass.utils.try_remove(newgisrc)
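For reference, a minimal standalone sketch of the nDSM rescaling that the worker applies above via r.mapcalc, written with plain Python floats instead of raster cells and assuming heights in metres: values are clamped to the [0, 30] window and mapped linearly onto the integer range [1, 255].

def rescale_ndsm(value):
    # clamp the nDSM height to the [0, 30] window
    cut = min(max(value, 0.0), 30.0)
    # map [0, 30] linearly onto the integer range [1, 255]
    return int((cut / 30.0 * 254.0) + 1)

# example values (hypothetical heights)
print(rescale_ndsm(-2.5))  # 1, below the window
print(rescale_ndsm(15.0))  # 128, mid range
print(rescale_ndsm(45.0))  # 255, above the window
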
9 changes: 8 additions & 1 deletion m.neural_network.preparedata/Makefile
@@ -4,4 +4,11 @@ PGM = m.neural_network.preparedata

include $(MODULE_TOPDIR)/include/Make/Script.make

default: script
QML_ETC = $(patsubst %,$(ETC)/$(PGM)/%,$(wildcard qml/*.qml))
mkdir:
$(MKDIR) $(ETC)/$(PGM)/qml

default: script mkdir $(QML_ETC)

$(ETC)/$(PGM)/%: % | $(ETC)/$(PGM)
$(INSTALL_DATA) $< $@
25 changes: 23 additions & 2 deletions m.neural_network.preparedata/m.neural_network.preparedata.py
@@ -108,13 +108,15 @@
import atexit
import json
import os
import shutil
import random

import grass.script as grass
from grass_gis_helpers.cleanup import general_cleanup
from grass_gis_helpers.general import set_nprocs
from grass_gis_helpers.mapset import verify_mapsets
from grass.pygrass.modules import Module, ParallelModuleQueue
from grass.pygrass.utils import get_lib_path


# initialize global vars
@@ -142,7 +144,7 @@ def cleanup():
)


def export_tindex(output_dir, geojson_dict):
def export_tindex(output_dir, geojson_dict, etc_path):
"""Export tile index from geojson_dict.
Export of tile index and verification of correct gpkg file.
@@ -167,6 +169,11 @@ def export_tindex(output_dir, geojson_dict):
tindex_verification = stream.read()
print(tindex_verification)

# copy qml file
qml_src_file = os.path.join(etc_path, "qml", "tindex.qml")
qml_dest_file = os.path.join(output_dir, "tindex.qml")
shutil.copyfile(qml_src_file, qml_dest_file)


def main():
global orig_region, rm_files
Expand All @@ -182,6 +189,11 @@ def main():
output_dir = options["output_dir"]
nprocs = set_nprocs(int(options["nprocs"]))

# get addon etc path
etc_path = get_lib_path(modname="m.neural_network.preparedata")
if etc_path is None:
grass.fatal("Unable to find qml files!")

# get location infos
gisenv = grass.gisenv()
cur_mapset = gisenv["MAPSET"]
@@ -292,6 +304,7 @@ def main():

possible_tr_data = []
tiles_with_data = []
tiles_wo_data = []
for proc in queue.get_finished_modules():
stdout_strs = proc.outputs["stdout"].value.strip().split(":")
null_cells = int(stdout_strs[1].strip())
@@ -300,6 +313,8 @@
possible_tr_data.append(num)
if null_cells != tile_size * tile_size:
tiles_with_data.append(num)
else:
tiles_wo_data.append(num)

# random split into train and apply data tiles
num_tr_tiles = round(train_percentage / 100.0 * len(possible_tr_data))
@@ -368,8 +383,14 @@ def main():
check_parallel_errors(queue_export_ap)
verify_mapsets(cur_mapset)

# remove tiles without data
tiles_wo_data.reverse()
for num in tiles_wo_data:
del geojson_dict["features"][num]

# export tindex
export_tindex(output_dir, geojson_dict)
export_tindex(output_dir, geojson_dict, etc_path)

grass.message(_("Prepare data done"))


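A toy illustration of the reverse-order deletion added above for tiles without data, using hypothetical feature names and indices: removing entries from the highest index first keeps the remaining indices valid, which is why tiles_wo_data is reversed before the del loop.

features = ["tile_0", "tile_1", "tile_2", "tile_3", "tile_4"]
tiles_wo_data = [1, 3]  # hypothetical indices of empty tiles, collected in ascending order

# delete from the back so the earlier indices still point at the intended features
tiles_wo_data.reverse()
for num in tiles_wo_data:
    del features[num]

print(features)  # ['tile_0', 'tile_2', 'tile_4']
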