From b1fcc40a593ba63e17cfe40cea0c27ff3452c591 Mon Sep 17 00:00:00 2001 From: John Lindsay Date: Sun, 10 Feb 2019 19:38:07 -0500 Subject: [PATCH] Bug fixes bug fixes in prep for 0.14.1 --- readme.txt | 10 +- src/lidar/las.rs | 44 +- src/raster/arcascii_raster.rs | 6 + src/raster/arcbinary_raster.rs | 2 + src/raster/geotiff/mod.rs | 1 + src/raster/grass_raster.rs | 6 + src/raster/idrisi_raster.rs | 2 + src/raster/mod.rs | 15 +- src/raster/saga_raster.rs | 2 + src/raster/surfer7_raster.rs | 2 + src/raster/surfer_ascii_raster.rs | 1 + src/raster/whitebox_raster.rs | 117 ++--- src/tools/gis_analysis/clump.rs | 24 +- src/tools/gis_analysis/mod.rs | 2 + src/tools/gis_analysis/polygon_area.rs | 5 + src/tools/gis_analysis/raster_area.rs | 467 ++++++++++++++++++ src/tools/lidar_analysis/ascii_to_las.rs | 421 ++++++++++++++++ src/tools/lidar_analysis/las_to_ascii.rs | 46 +- src/tools/lidar_analysis/mod.rs | 2 + .../math_stat_analysis/extract_statistics.rs | 46 +- .../math_stat_analysis/raster_histogram.rs | 2 - src/tools/mod.rs | 4 + .../terrain_analysis/surface_area_ratio.rs | 4 +- src/utils/byte_order_reader.rs | 29 +- whitebox_tools.py | 25 +- 25 files changed, 1149 insertions(+), 136 deletions(-) create mode 100644 src/tools/gis_analysis/raster_area.rs create mode 100644 src/tools/lidar_analysis/ascii_to_las.rs diff --git a/readme.txt b/readme.txt index 1eb23ad58..30e73c352 100644 --- a/readme.txt +++ b/readme.txt @@ -56,13 +56,17 @@ for more details. * Release Notes: * ****************** -Version 0.15.0 (XX-XX-2019) - +Version 0.14.1 (10-02-2019) +- This release largely focuses on bug-fixes rather than feature additions. However, the + following tools were added to the library: + RasterArea + - Fixed a bug with the MultiscaleTopographicPositionImage tool that prevented proper output for files in GeoTIFF format. +- Several other tool-specific bug fixes. Version 0.14.0 (27-01-2019) -- The release largely focusses on bug-fixes rather than adding new features. 
The +- The release largely focuses on bug-fixes rather than adding new features. The following tools were added to the project: CircularVarianceOfAspect EdgeDensity diff --git a/src/lidar/las.rs b/src/lidar/las.rs index e5a86dba3..0ee8c66dc 100644 --- a/src/lidar/las.rs +++ b/src/lidar/las.rs @@ -44,7 +44,7 @@ pub struct LasFile { colour_data: Vec, waveform_data: Vec, pub geokeys: GeoKeys, - wkt: String, + pub wkt: String, // starting_point: usize, header_is_set: bool, pub use_point_intensity: bool, @@ -100,6 +100,7 @@ impl LasFile { output.file_mode = "w".to_string(); output.use_point_intensity = true; output.use_point_userdata = true; + output.wkt = input.wkt.clone(); output.add_header(input.header.clone()); @@ -134,9 +135,9 @@ impl LasFile { self.header.generating_software = "WhiteboxTools ".to_string(); self.header.number_of_points_by_return_old = [0, 0, 0, 0, 0]; - self.header.x_scale_factor = 0.0001; - self.header.y_scale_factor = 0.0001; - self.header.z_scale_factor = 0.0001; + self.header.x_scale_factor = 0.001; + self.header.y_scale_factor = 0.001; + self.header.z_scale_factor = 0.001; self.header_is_set = true; } @@ -1067,7 +1068,7 @@ impl LasFile { let mut mantissa: usize = (format!("{}", (self.header.max_x - self.header.min_x).floor())) .to_string() .len(); - let mut dec: f64 = 1.0 / 10_f64.powi(8 - mantissa as i32); + let mut dec: f64 = 1.0 / 10_f64.powi(7 - mantissa as i32); if self.header.x_scale_factor == 0_f64 { self.header.x_scale_factor = dec; } @@ -1184,7 +1185,8 @@ impl LasFile { for i in 0..(self.header.number_of_vlrs as usize) { total_vlr_size += self.vlr_data[i].record_length_after_header as u32; } - self.header.offset_to_points = 235 + total_vlr_size; // THIS NEEDS TO BE FIXED WHEN LAS 1.4 SUPPORT IS ADDED FOR WRITING + // let alignment_bytes = self.header.header_size as u32 + total_vlr_size % 4u32; + self.header.offset_to_points = self.header.header_size as u32 + total_vlr_size; // + alignment_bytes; // THIS NEEDS TO BE FIXED WHEN LAS 1.4 
SUPPORT IS ADDED FOR WRITING u32_bytes = unsafe { mem::transmute(self.header.offset_to_points) }; writer.write_all(&u32_bytes)?; @@ -1340,6 +1342,18 @@ impl LasFile { writer.write_all(&vlr.binary_data)?; } + // //////////////////// + // // Alignment bytes / + // //////////////////// + // if alignment_bytes > 0 { + // println!("alignment bytes: {}", alignment_bytes); + // for a in 0..alignment_bytes { + // writer.write_all(&[0u8]); + // } + // } + + // println!("header: {:?}", self.header); + //////////////////////////////// // Write the point to the file / //////////////////////////////// @@ -1352,6 +1366,18 @@ impl LasFile { u32_bytes = unsafe { mem::transmute(val) }; writer.write_all(&u32_bytes)?; + // if i == 0 { + // println!("first point: {:?}", self.point_data[i]); + // } + + // if i == 1 { + // println!("second point: {:?}", self.point_data[i]); + // } + + // if i == 349527 { + // println!("last point: {:?}", self.point_data[i]); + // } + val = ((self.point_data[i].y - self.header.y_offset) / self.header.y_scale_factor) as i32; u32_bytes = unsafe { mem::transmute(val) }; @@ -1369,13 +1395,13 @@ impl LasFile { u8_bytes = unsafe { mem::transmute(self.point_data[i].point_bit_field) }; writer.write_all(&u8_bytes)?; - + u8_bytes = unsafe { mem::transmute(self.point_data[i].class_bit_field) }; writer.write_all(&u8_bytes)?; - + u8_bytes = unsafe { mem::transmute(self.point_data[i].scan_angle as i8) }; writer.write_all(&u8_bytes)?; - + if self.use_point_userdata { u8_bytes = unsafe { mem::transmute(self.point_data[i].user_data) }; writer.write_all(&u8_bytes)?; diff --git a/src/raster/arcascii_raster.rs b/src/raster/arcascii_raster.rs index 3f3d72f47..129ea9a67 100644 --- a/src/raster/arcascii_raster.rs +++ b/src/raster/arcascii_raster.rs @@ -30,8 +30,14 @@ pub fn read_arcascii( } if vec[0].to_lowercase().contains("nrows") { configs.rows = vec[vec.len() - 1].trim().parse::().unwrap() as usize; + if configs.columns > 0 { + data.reserve(configs.rows * configs.columns); 
+ } } else if vec[0].to_lowercase().contains("ncols") { configs.columns = vec[vec.len() - 1].trim().parse::().unwrap() as usize; + if configs.rows > 0 { + data.reserve(configs.rows * configs.columns); + } } else if vec[0].to_lowercase().contains("xllcorner") { xllcenter = vec[vec.len() - 1] .trim() diff --git a/src/raster/arcbinary_raster.rs b/src/raster/arcbinary_raster.rs index eca2ab81d..c8e37a1c2 100644 --- a/src/raster/arcbinary_raster.rs +++ b/src/raster/arcbinary_raster.rs @@ -101,6 +101,8 @@ pub fn read_arcbinary( yllcenter - (0.5 * configs.resolution_y) + (configs.rows as f64) * configs.resolution_y; } + data.reserve(configs.rows * configs.columns); + // read the data file let data_file = file_name.replace(".hdr", ".flt"); let mut f = File::open(data_file.clone())?; diff --git a/src/raster/geotiff/mod.rs b/src/raster/geotiff/mod.rs index 2947426c0..eee09e469 100644 --- a/src/raster/geotiff/mod.rs +++ b/src/raster/geotiff/mod.rs @@ -296,6 +296,7 @@ pub fn read_geotiff<'a>( //data = vec![0.0f64; configs.rows * configs.columns]; data.clear(); + data.reserve(configs.rows * configs.columns); for _ in 0..configs.rows * configs.columns { data.push(0.0f64); } diff --git a/src/raster/grass_raster.rs b/src/raster/grass_raster.rs index 2e9cf6cf2..5532b4656 100644 --- a/src/raster/grass_raster.rs +++ b/src/raster/grass_raster.rs @@ -25,8 +25,14 @@ pub fn read_grass_raster( let vec = line_split.collect::>(); if vec[0].to_lowercase().contains("rows") { configs.rows = vec[1].trim().parse::().unwrap() as usize; + if configs.columns > 0 { + data.reserve(configs.rows * configs.columns); + } } else if vec[0].to_lowercase().contains("cols") { configs.columns = vec[1].trim().parse::().unwrap() as usize; + if configs.rows > 0 { + data.reserve(configs.rows * configs.columns); + } } else if vec[0].to_lowercase().contains("north") { configs.north = vec[1].trim().to_string().parse::().unwrap(); } else if vec[0].to_lowercase().contains("south") { diff --git 
a/src/raster/idrisi_raster.rs b/src/raster/idrisi_raster.rs index 56f377237..27e153791 100644 --- a/src/raster/idrisi_raster.rs +++ b/src/raster/idrisi_raster.rs @@ -122,6 +122,8 @@ pub fn read_idrisi( configs.resolution_x = (configs.east - configs.west) / configs.columns as f64; configs.resolution_y = (configs.north - configs.south) / configs.rows as f64; + data.reserve(configs.rows * configs.columns); + // read the data file let data_file = file_name.replace(".rdc", ".rst"); let mut f = File::open(data_file.clone())?; diff --git a/src/raster/mod.rs b/src/raster/mod.rs index 5be5c112e..705dfca91 100644 --- a/src/raster/mod.rs +++ b/src/raster/mod.rs @@ -1,8 +1,8 @@ /* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay -Created: June 2, 2017 -Last Modified: 07/12/2018 +Created: 02/06/2017 +Last Modified: 09/02/2019 License: MIT */ @@ -250,11 +250,12 @@ impl Raster { { output.configs.nodata = 1.71041e38; } + output.data.reserve(output.configs.rows * output.configs.columns); output.data = vec![output.configs.nodata; output.configs.rows * output.configs.columns]; output } - /// Returns the file name of the `Raster`, without the directory. + /// Returns the file name of the `Raster`, without the directory and file extension. pub fn get_short_filename(&self) -> String { let path = Path::new(&self.file_name); let file_name = path.file_stem().unwrap(); @@ -262,6 +263,14 @@ impl Raster { f.to_string() } + /// Returns the file extension. + pub fn get_file_extension(&self) -> String { + let path = Path::new(&self.file_name); + let extension = path.extension().unwrap(); + let e = extension.to_str().unwrap(); + e.to_string() + } + /// Returns the value contained within a grid cell specified /// by `row` and `column`. 
pub fn get_value(&self, row: isize, column: isize) -> f64 { diff --git a/src/raster/saga_raster.rs b/src/raster/saga_raster.rs index 31cd72049..c5151cb03 100644 --- a/src/raster/saga_raster.rs +++ b/src/raster/saga_raster.rs @@ -156,6 +156,8 @@ pub fn read_saga( row_start = configs.rows - 1; } + data.reserve(configs.rows * configs.columns); + // read the data file let data_file = file_name.replace(".sgrd", ".sdat"); let mut f = File::open(data_file.clone())?; diff --git a/src/raster/surfer7_raster.rs b/src/raster/surfer7_raster.rs index e1c24bb10..b266bc5d0 100644 --- a/src/raster/surfer7_raster.rs +++ b/src/raster/surfer7_raster.rs @@ -120,6 +120,8 @@ pub fn read_surfer7( } as usize; offset += 4; + data.reserve(configs.rows * configs.columns); + configs.west = unsafe { mem::transmute::<[u8; 8], f64>([ buffer[offset], diff --git a/src/raster/surfer_ascii_raster.rs b/src/raster/surfer_ascii_raster.rs index ea4a23cd4..e3332118f 100644 --- a/src/raster/surfer_ascii_raster.rs +++ b/src/raster/surfer_ascii_raster.rs @@ -48,6 +48,7 @@ pub fn read_surfer_ascii_raster( } configs.columns = vec[0].trim().parse::().unwrap() as usize; configs.rows = vec[1].trim().parse::().unwrap() as usize; + data.reserve(configs.rows * configs.columns); row = configs.rows - 1; // files are stored row major, bottom-to-top num_cells = configs.rows * configs.columns; data.clear(); diff --git a/src/raster/whitebox_raster.rs b/src/raster/whitebox_raster.rs index 551147dd3..e9946728c 100644 --- a/src/raster/whitebox_raster.rs +++ b/src/raster/whitebox_raster.rs @@ -133,101 +133,82 @@ pub fn read_whitebox( 1 }; + data.reserve(configs.rows * configs.columns); + let num_cells = configs.rows * configs.columns; - let buf_size = 1_000_000usize; - let mut j = 0; - while j < num_cells { - let mut buffer = vec![0; buf_size * data_size]; + let buf_size = if num_cells > 10_000_000usize { + 10_000_000usize + } else { + num_cells + }; + while data.len() < num_cells { + let mut buffer = vec![0u8; buf_size * 
data_size]; f.read(&mut buffer)?; + let num_values = if data.len() + buf_size <= num_cells { + buf_size + } else { + num_cells - data.len() + 1 + }; + // read the file's bytes into a buffer let mut bor = ByteOrderReader::new(buffer, configs.endian); match configs.data_type { DataType::F64 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_f64() as f64); - j += 1; - if j == num_cells { - break; - } + } + if data.len() == num_cells { + break; } } DataType::F32 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_f32() as f64); - j += 1; - if j == num_cells { - break; - } + } + if data.len() == num_cells { + break; } } DataType::I32 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_i32() as f64); - j += 1; - if j == num_cells { - break; - } + } + if data.len() == num_cells { + break; } } DataType::I16 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_i16() as f64); - j += 1; - if j == num_cells { - break; - } + } + if data.len() == num_cells { + break; } } DataType::U8 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_u8() as f64); - j += 1; - if j == num_cells { - break; - } + } + if data.len() == num_cells { + break; } } DataType::RGBA32 => { bor.pos = 0; - for _ in 0..buf_size { + for _ in 0..num_values { data.push(bor.read_f32() as i32 as u32 as f64); - j += 1; - if j == num_cells { - break; - } } - // let mut bor = ByteOrderReader::new(buffer, configs.endian); - // bor.pos = 0; - // let mut val: u32; - // let mut r: u32; - // let mut g: u32; - // let mut b: u32; - // let mut a: u32; - // for _ in 0..buf_size { - // val = bor.read_f32() as u32; - // r = (value as u32 & 0xFF) as f64 / 255f64; - // g = ((value as u32 >> 8) & 0xFF) as f64 / 255f64; - // b = ((value as u32 >> 16) & 0xFF) as f64 / 255f64; - - // val = in_val as u32; - // red = val & 0xFF; - // green = (val >> 8) & 
0xFF; - // blue = (val >> 16) & 0xFF; - - // data.push(bor.read_f32() as f64); - // j += 1; - // if j == num_cells { - // break; - // } - // } + if data.len() == num_cells { + break; + } } _ => { return Err(Error::new( @@ -311,7 +292,8 @@ pub fn write_whitebox<'a>(r: &'a mut Raster) -> Result<(), Error> { writer.write_all("Data Type:\tFLOAT\n".as_bytes())?; } DataType::I32 => { - writer.write_all("Data Type:\tI32\n".as_bytes())?; + // writer.write_all("Data Type:\tI32\n".as_bytes())?; + writer.write_all("Data Type:\tFLOAT\n".as_bytes())?; } DataType::I16 => { writer.write_all("Data Type:\tINTEGER\n".as_bytes())?; @@ -399,7 +381,7 @@ pub fn write_whitebox<'a>(r: &'a mut Raster) -> Result<(), Error> { let f = File::create(&data_file)?; let mut writer = BufWriter::new(f); - let mut u16_bytes: [u8; 2]; + // let mut u16_bytes: [u8; 2]; let mut u32_bytes: [u8; 4]; let mut u64_bytes: [u8; 8]; @@ -424,13 +406,19 @@ pub fn write_whitebox<'a>(r: &'a mut Raster) -> Result<(), Error> { } } DataType::I32 => { + // if verbose { + // println!("Warning: the I32 data type may not be supported on all versions of Whitebox GAT."); + // } + // for i in 0..num_cells { + // writer.write_i32::(r.data[i] as i32)?; + // } for i in 0..num_cells { - writer.write_f64::(r.data[i])?; + writer.write_f32::(r.data[i] as f32)?; } } DataType::U16 => { for i in 0..num_cells { - writer.write_f32::(r.data[i] as f32)?; + writer.write_u16::(r.data[i] as u16)?; } } DataType::RGBA32 => { @@ -452,8 +440,9 @@ pub fn write_whitebox<'a>(r: &'a mut Raster) -> Result<(), Error> { } DataType::I16 => { for i in 0..num_cells { - u16_bytes = unsafe { mem::transmute(r.data[i] as u16) }; - writer.write(&u16_bytes)?; + // u16_bytes = unsafe { mem::transmute(r.data[i] as u16) }; + // writer.write(&u16_bytes)?; + writer.write_i16::(r.data[i] as i16)?; } } DataType::U8 | DataType::I8 => { diff --git a/src/tools/gis_analysis/clump.rs b/src/tools/gis_analysis/clump.rs index 59b808c38..d020d5618 100644 --- 
a/src/tools/gis_analysis/clump.rs +++ b/src/tools/gis_analysis/clump.rs @@ -166,17 +166,17 @@ impl WhiteboxTool for Clump { } let flag_val = vec[0].to_lowercase().replace("--", "-"); if flag_val == "-i" || flag_val == "-input" { - if keyval { - input_file = vec[1].to_string(); + input_file = if keyval { + vec[1].to_string() } else { - input_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-o" || flag_val == "-output" { - if keyval { - output_file = vec[1].to_string(); + output_file = if keyval { + vec[1].to_string() } else { - output_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-diag" { diag = true; } else if flag_val == "-zero_back" { @@ -215,8 +215,12 @@ impl WhiteboxTool for Clump { let columns = input.configs.columns as isize; let mut output = Raster::initialize_using_file(&output_file, &input); + let out_nodata = -999f64; + output.reinitialize_values(out_nodata); + output.configs.nodata = out_nodata; output.configs.photometric_interp = PhotometricInterpretation::Categorical; output.configs.data_type = DataType::I32; + // output.configs.data_type = DataType::F32; let mut dx = [1, 1, 1, 0, -1, -1, -1, 0]; let mut dy = [-1, 0, 1, 1, 1, 0, -1, -1]; @@ -241,7 +245,7 @@ impl WhiteboxTool for Clump { for col in 0..columns { zin = input[(row, col)]; zout = output[(row, col)]; - if zin != nodata && zin != back_val && zout == nodata { + if zin != nodata && zin != back_val && zout == out_nodata { fid += 1f64; output[(row, col)] = fid; num_solved_cells += 1; @@ -267,7 +271,7 @@ impl WhiteboxTool for Clump { for i in 0..num_neighbours { zn = input[(r + dy[i], c + dx[i])]; zout = output[(r + dy[i], c + dx[i])]; - if zn == zin && zout == nodata { + if zn == zin && zout == out_nodata { output[(r + dy[i], c + dx[i])] = fid; num_solved_cells += 1; stack.push((r + dy[i], c + dx[i])); diff --git a/src/tools/gis_analysis/mod.rs b/src/tools/gis_analysis/mod.rs index 951333723..6c57bab14 100644 --- 
a/src/tools/gis_analysis/mod.rs +++ b/src/tools/gis_analysis/mod.rs @@ -62,6 +62,7 @@ mod polygon_perimeter; mod polygon_short_axis; mod polygonize; mod radius_of_gyration; +mod raster_area; mod raster_cell_assignment; mod reclass; mod reclass_equal_interval; @@ -145,6 +146,7 @@ pub use self::polygon_perimeter::PolygonPerimeter; pub use self::polygon_short_axis::PolygonShortAxis; pub use self::polygonize::Polygonize; pub use self::radius_of_gyration::RadiusOfGyration; +pub use self::raster_area::RasterArea; pub use self::raster_cell_assignment::RasterCellAssignment; pub use self::reclass::Reclass; pub use self::reclass_equal_interval::ReclassEqualInterval; diff --git a/src/tools/gis_analysis/polygon_area.rs b/src/tools/gis_analysis/polygon_area.rs index 12ffadcbc..29a44ea8e 100644 --- a/src/tools/gis_analysis/polygon_area.rs +++ b/src/tools/gis_analysis/polygon_area.rs @@ -18,6 +18,11 @@ use std::path; /// vector's attribute table (AREA field). The area calculation will account /// for any holes contained within polygons. The vector should be in a /// projected coordinate system. +/// +/// To calculate the area of raster polygons, use the `RasterArea` tool instead. +/// +/// # See Also +/// `RasterArea` pub struct PolygonArea { name: String, description: String, diff --git a/src/tools/gis_analysis/raster_area.rs b/src/tools/gis_analysis/raster_area.rs new file mode 100644 index 000000000..909b51f1c --- /dev/null +++ b/src/tools/gis_analysis/raster_area.rs @@ -0,0 +1,467 @@ +/* +This tool is part of the WhiteboxTools geospatial analysis library. +Authors: Dr. John Lindsay +Created: 10/02/2019 +Last Modified: 10/02/2019 +License: MIT +*/ + +use crate::raster::*; +use crate::tools::*; +use num_cpus; +use std::env; +use std::f64; +use std::io::{Error, ErrorKind}; +use std::path; +use std::sync::mpsc; +use std::sync::Arc; +use std::thread; + +/// This tools estimates the area of each category, polygon, or patch in an input raster. 
The input raster must be categorical +/// in data scale. Rasters with floating-point cell values are not good candidates for an area analysis. The user must specify +/// whether the output is given in `grid cells` or `map units` (`--units`). Map Units are physical units, e.g. if the rasters's +/// scale is in metres, areas will report in square-metres. Notice that square-metres can be converted into hectares by dividing +/// by 10,000 and into square-kilometres by dividing by 1,000,000. If the input raster is in geographic coordinates (i.e. +/// latitude and longitude) a warning will be issued and areas will be estimated based on per-row calculated degree lengths. +/// +/// The tool can be run with a raster output (`--output`), a text output (`--out_text`), or both. If niether outputs are specified, +/// the tool will automatically output a raster named `area.tif`. +/// +/// Zero values in the input raster may be excluded from the area analysis if the `--zero_back` flag is used. +/// +/// To calculate the area of vector polygons, use the `PolygonArea` tool instead. +/// +/// # See Also +/// `PolygonArea`, `RasterHistogram` +pub struct RasterArea { + name: String, + description: String, + toolbox: String, + parameters: Vec, + example_usage: String, +} + +impl RasterArea { + pub fn new() -> RasterArea { + // public constructor + let name = "RasterArea".to_string(); + let toolbox = "GIS Analysis".to_string(); + let description = + "Calculates the area of polygons or classes within a raster image." 
+ .to_string(); + + let mut parameters = vec![]; + parameters.push(ToolParameter { + name: "Input File".to_owned(), + flags: vec!["-i".to_owned(), "--input".to_owned()], + description: "Input raster file.".to_owned(), + parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster), + default_value: None, + optional: false, + }); + + parameters.push(ToolParameter { + name: "Output File".to_owned(), + flags: vec!["-o".to_owned(), "--output".to_owned()], + description: "Output raster file.".to_owned(), + parameter_type: ParameterType::NewFile(ParameterFileType::Raster), + default_value: None, + optional: true, + }); + + parameters.push(ToolParameter { + name: "Output text?".to_owned(), + flags: vec!["--out_text".to_owned()], + description: "Would you like to output polygon areas to text?" + .to_owned(), + parameter_type: ParameterType::Boolean, + default_value: None, + optional: false, + }); + + parameters.push(ToolParameter{ + name: "Units".to_owned(), + flags: vec!["--units".to_owned()], + description: "Area units; options include 'grid cells' and 'map units'.".to_owned(), + parameter_type: ParameterType::OptionList(vec!["grid cells".to_owned(), "map units".to_owned()]), + default_value: Some("grid cells".to_owned()), + optional: true + }); + + parameters.push(ToolParameter { + name: "Treat zero values as background?".to_owned(), + flags: vec!["--zero_back".to_owned()], + description: "Flag indicating whether zero values should be treated as a background." 
+ .to_owned(), + parameter_type: ParameterType::Boolean, + default_value: None, + optional: false, + }); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + let p = format!("{}", env::current_dir().unwrap().display()); + let e = format!("{}", env::current_exe().unwrap().display()); + let mut short_exe = e + .replace(&p, "") + .replace(".exe", "") + .replace(".", "") + .replace(&sep, ""); + if e.contains(".exe") { + short_exe += ".exe"; + } + let usage = format!( + ">>.*{} -r={} -v --wd=\"*path*to*data*\" -i=input.tif -o=output.tif --out_text --units='grid cells' --zero_back", + short_exe, name + ) + .replace("*", &sep); + + RasterArea { + name: name, + description: description, + toolbox: toolbox, + parameters: parameters, + example_usage: usage, + } + } +} + +impl WhiteboxTool for RasterArea { + fn get_source_file(&self) -> String { + String::from(file!()) + } + + fn get_tool_name(&self) -> String { + self.name.clone() + } + + fn get_tool_description(&self) -> String { + self.description.clone() + } + + fn get_tool_parameters(&self) -> String { + match serde_json::to_string(&self.parameters) { + Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str), + Err(err) => return format!("{:?}", err), + } + } + + fn get_example_usage(&self) -> String { + self.example_usage.clone() + } + + fn get_toolbox(&self) -> String { + self.toolbox.clone() + } + + fn run<'a>( + &self, + args: Vec, + working_directory: &'a str, + verbose: bool, + ) -> Result<(), Error> { + let mut input_file = String::new(); + let mut output_file = String::new(); + let mut output_raster = false; + let mut zero_back = false; + let mut is_grid_cell_units = false; + let mut output_text = false; + + if args.len() == 0 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with no paramters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = 
cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-input" { + input_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-o" || flag_val == "-output" { + output_file = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + output_raster = true; + } else if flag_val == "-units" { + is_grid_cell_units = if keyval { + vec[1].to_string().to_lowercase().contains("cells") + } else { + args[i + 1].to_string().to_lowercase().contains("cells") + }; + } else if flag_val == "-zero_back" { + zero_back = true; + } else if flag_val == "-out_text" { + output_text = true; + } + } + + if verbose { + println!("***************{}", "*".repeat(self.get_tool_name().len())); + println!("* Welcome to {} *", self.get_tool_name()); + println!("***************{}", "*".repeat(self.get_tool_name().len())); + } + + let sep: String = path::MAIN_SEPARATOR.to_string(); + + let mut progress: usize; + let mut old_progress: usize = 1; + + if !input_file.contains(&sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + + if !output_raster && !output_text { + println!("Warning: Niether a raster nor text outputs were selected. 
An area raster will be generated."); + output_file = String::from("area.tif"); + output_raster = true; + } + + if output_raster { + if !output_file.contains(&sep) && !output_file.contains("/") { + output_file = format!("{}{}", working_directory, output_file); + } + } + + if verbose { + println!("Reading data...") + }; + + let input = Arc::new(Raster::new(&input_file, "r")?); + + let start = Instant::now(); + + let nodata = input.configs.nodata; + let rows = input.configs.rows as isize; + let columns = input.configs.columns as isize; + let min_val = input.configs.display_min; + let max_val = input.configs.display_max; + let range = max_val - min_val + 0.00001f64; // otherwise the max value is outside the range + let num_bins = range.ceil() as usize; + let back_val = if zero_back { + 0f64 + } else { + nodata + }; + + if is_grid_cell_units { + let num_procs = num_cpus::get() as isize; + let (tx, rx) = mpsc::channel(); + for tid in 0..num_procs { + let input = input.clone(); + let tx = tx.clone(); + thread::spawn(move || { + let mut freq_data = vec![0usize; num_bins]; + let mut val: f64; + let mut bin: usize; + for row in (0..rows).filter(|r| r % num_procs == tid) { + for col in 0..columns { + val = input.get_value(row, col); + if val != nodata && val != back_val && val >= min_val && val <= max_val { + bin = (val - min_val).floor() as usize; + freq_data[bin] += 1; + } + } + } + tx.send(freq_data).unwrap(); + }); + } + + let mut freq_data = vec![0usize; num_bins]; + for tid in 0..num_procs { + let data = rx.recv().unwrap(); + for a in 0..num_bins { + freq_data[a] += data[a]; + } + + if verbose { + progress = (100.0_f64 * (tid + 1) as f64 / num_procs as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + } + + let mut val: f64; + let mut bin: usize; + if output_raster { + let mut output = Raster::initialize_using_file(&output_file, &input); + let out_nodata = -999f64; + 
output.reinitialize_values(out_nodata); + output.configs.nodata = out_nodata; + output.configs.photometric_interp = PhotometricInterpretation::Continuous; + output.configs.data_type = DataType::I32; + for row in 0..rows { + for col in 0..columns { + val = input.get_value(row, col); + if val != nodata && val != back_val && val >= min_val && val <= max_val { + bin = (val - min_val).floor() as usize; + output.set_value(row, col, freq_data[bin] as f64); + } + } + if verbose { + progress = (100.0_f64 * row as f64 / (rows - 1) as f64) as usize; + if progress != old_progress { + println!("Outputting raster: {}%", progress); + old_progress = progress; + } + } + } + + let elapsed_time = get_formatted_elapsed_time(start); + output.add_metadata_entry(format!( + "Created by whitebox_tools\' {} tool", + self.get_tool_name() + )); + output.add_metadata_entry(format!("Input file: {}", input_file)); + output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); + + if verbose { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if verbose { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + } + if output_text { + println!("Class,Cells"); + for a in 0..num_bins { + if freq_data[a] > 0 { + val = (a as f64 + min_val).floor(); + println!("{},{}", val, freq_data[a]); + } + } + } + } else { + let is_geographic = input.is_in_geographic_coordinates(); + if is_geographic && verbose { + println!("Warning: the input file does not appear to be in a projected coodinate system. 
Area values will only be estimates."); + } + + let num_procs = num_cpus::get() as isize; + let (tx, rx) = mpsc::channel(); + for tid in 0..num_procs { + let input = input.clone(); + let tx = tx.clone(); + thread::spawn(move || { + let mut resx = input.configs.resolution_x; + let mut resy = input.configs.resolution_y; + let mut area_data = vec![0f64; num_bins]; + let mut val: f64; + let mut bin: usize; + let mut mid_lat: f64; + for row in (0..rows).filter(|r| r % num_procs == tid) { + if is_geographic { + mid_lat = input.get_y_from_row(row).to_radians(); + resx = resx * 111_111.0 * mid_lat.cos(); + resy = resy * 111_111.0; + } + for col in 0..columns { + val = input.get_value(row, col); + if val != nodata && val != back_val && val >= min_val && val <= max_val { + bin = (val - min_val).floor() as usize; + area_data[bin] += resx * resy; + } + } + } + tx.send(area_data).unwrap(); + }); + } + + // we could just multiply the num cells by the cell area to get the area, + // but for the possibility of an input in geographic coordinates where the + // cell size is not constant for the data. 
+ let mut area_data = vec![0f64; num_bins]; + for tid in 0..num_procs { + let data = rx.recv().unwrap(); + for a in 0..num_bins { + area_data[a] += data[a]; + } + + if verbose { + progress = (100.0_f64 * (tid + 1) as f64 / num_procs as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + } + + let mut val: f64; + let mut bin: usize; + if output_raster { + let mut output = Raster::initialize_using_file(&output_file, &input); + let out_nodata = -999f64; + output.reinitialize_values(out_nodata); + output.configs.nodata = out_nodata; + output.configs.photometric_interp = PhotometricInterpretation::Continuous; + output.configs.data_type = DataType::I32; + for row in 0..rows { + for col in 0..columns { + val = input.get_value(row, col); + if val != nodata && val != back_val && val >= min_val && val <= max_val { + bin = (val - min_val).floor() as usize; + output.set_value(row, col, area_data[bin]); + } + } + if verbose { + progress = (100.0_f64 * row as f64 / (rows - 1) as f64) as usize; + if progress != old_progress { + println!("Outputting raster: {}%", progress); + old_progress = progress; + } + } + } + + let elapsed_time = get_formatted_elapsed_time(start); + output.add_metadata_entry(format!( + "Created by whitebox_tools\' {} tool", + self.get_tool_name() + )); + output.add_metadata_entry(format!("Input file: {}", input_file)); + output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); + + if verbose { + println!("Saving data...") + }; + let _ = match output.write() { + Ok(_) => { + if verbose { + println!("Output file written") + } + } + Err(e) => return Err(e), + }; + } + if output_text { + println!("Class,Area"); + for a in 0..num_bins { + if area_data[a] > 0f64 { + val = (a as f64 + min_val).floor(); + println!("{},{}", val, area_data[a]); + } + } + } + } + + Ok(()) + } +} diff --git a/src/tools/lidar_analysis/ascii_to_las.rs 
b/src/tools/lidar_analysis/ascii_to_las.rs new file mode 100644 index 000000000..d13f7fe7d --- /dev/null +++ b/src/tools/lidar_analysis/ascii_to_las.rs @@ -0,0 +1,421 @@ +/* +This tool is part of the WhiteboxTools geospatial analysis library. +Authors: Dr. John Lindsay +Created: 10/02/2019 +Last Modified: 10/02/2019 +License: MIT +*/ + +use crate::lidar::*; +use crate::tools::*; +use std; +use std::env; +use std::fs::File; +use std::io::prelude::*; +use std::io::BufReader; +use std::io::{Error, ErrorKind}; +use std::path; +extern crate byteorder; +use byteorder::{LittleEndian, WriteBytesExt}; + + +/// This tool can be used to convert one or more ASCII files, containing LiDAR point data, into LAS files. The user must +/// specify the name(s) of the input ASCII file(s) (`--inputs`). Each input file will have a correspondingly named +/// output file with a `.las` file extension. The output point data, each on a separate line, will take the format: +/// +/// ``` +/// x,y,z,intensity,class,return,num_returns +/// ``` +/// +/// | Value | Interpretation | +/// | :---- | :---------------- | +/// | x | x-coordinate | +/// | y | y-coordinate | +/// | z | elevation | +/// | i | intensity value | +/// | c | classification | +/// | rn | return number | +/// | nr | number of returns | +/// | time | GPS time | +/// | sa | scan angle | +/// +/// The `x`, `y`, and `z` patterns must always be specified. If the `rn` pattern is used, the `nr` pattern must +/// also be specified. Examples of valid pattern strings include: +/// +/// ``` +/// 'x,y,z,i' +/// 'x,y,z,i,rn,nr' +/// 'x,y,z,i,c,rn,nr,sa' +/// 'z,x,y,rn,nr' +/// ``` +/// +/// Use the `LasToAscii` tool to convert a LAS file into a text file containing LiDAR point data. 
+/// +/// # See Also +/// `LasToAscii` +pub struct AsciiToLas { + name: String, + description: String, + toolbox: String, + parameters: Vec, + example_usage: String, +} + +impl AsciiToLas { + pub fn new() -> AsciiToLas { + // public constructor + let name = "AsciiToLas".to_string(); + let toolbox = "LiDAR Tools".to_string(); + let description = "Converts one or more ASCII files containing LiDAR points into LAS files.".to_string(); + + let mut parameters = vec![]; + parameters.push(ToolParameter { + name: "Input LiDAR point ASCII Files (.csv)".to_owned(), + flags: vec!["-i".to_owned(), "--inputs".to_owned()], + description: "Input LiDAR ASCII files (.csv).".to_owned(), + parameter_type: ParameterType::FileList(ParameterFileType::Csv), + default_value: None, + optional: false, + }); + + parameters.push(ToolParameter { + name: "Pattern".to_owned(), + flags: vec!["--pattern".to_owned()], + description: "Input field pattern.".to_owned(), + parameter_type: ParameterType::String, + default_value: None, + optional: false, + }); + + parameters.push(ToolParameter { + name: "Well-known-text (WKT) string".to_owned(), + flags: vec!["--wkt".to_owned()], + description: "Well-known-text string.".to_owned(), + parameter_type: ParameterType::String, + default_value: None, + optional: true, + }); + + let sep: String = path::MAIN_SEPARATOR.to_string(); + let p = format!("{}", env::current_dir().unwrap().display()); + let e = format!("{}", env::current_exe().unwrap().display()); + let mut short_exe = e + .replace(&p, "") + .replace(".exe", "") + .replace(".", "") + .replace(&sep, ""); + if e.contains(".exe") { + short_exe += ".exe"; + } + let usage = format!(">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i=\"file1.las, file2.las, file3.las\" -o=outfile.las\"", short_exe, name).replace("*", &sep); + + AsciiToLas { + name: name, + description: description, + toolbox: toolbox, + parameters: parameters, + example_usage: usage, + } + } +} + +impl WhiteboxTool for AsciiToLas { + fn 
get_source_file(&self) -> String { + String::from(file!()) + } + + fn get_tool_name(&self) -> String { + self.name.clone() + } + + fn get_tool_description(&self) -> String { + self.description.clone() + } + + fn get_tool_parameters(&self) -> String { + let mut s = String::from("{\"parameters\": ["); + for i in 0..self.parameters.len() { + if i < self.parameters.len() - 1 { + s.push_str(&(self.parameters[i].to_string())); + s.push_str(","); + } else { + s.push_str(&(self.parameters[i].to_string())); + } + } + s.push_str("]}"); + s + } + + fn get_example_usage(&self) -> String { + self.example_usage.clone() + } + + fn get_toolbox(&self) -> String { + self.toolbox.clone() + } + + fn run<'a>( + &self, + args: Vec, + working_directory: &'a str, + verbose: bool, + ) -> Result<(), Error> { + let mut input_files: String = String::new(); + let mut pattern_string = String::new(); + let mut wkt_string = String::new(); + + // read the arguments + if args.len() == 0 { + return Err(Error::new( + ErrorKind::InvalidInput, + "Tool run with no paramters.", + )); + } + for i in 0..args.len() { + let mut arg = args[i].replace("\"", ""); + arg = arg.replace("\'", ""); + let cmd = arg.split("="); // in case an equals sign was used + let vec = cmd.collect::>(); + let mut keyval = false; + if vec.len() > 1 { + keyval = true; + } + let flag_val = vec[0].to_lowercase().replace("--", "-"); + if flag_val == "-i" || flag_val == "-inputs" || flag_val == "-input" { + input_files = if keyval { + vec[1].to_string() + } else { + args[i + 1].to_string() + }; + } else if flag_val == "-pattern" { + pattern_string = if keyval { + vec[1].to_string().to_lowercase() + } else { + args[i + 1].to_string().to_lowercase() + }; + } else if flag_val == "-wkt" { + wkt_string = if keyval { + vec[1].to_string().to_lowercase() + } else { + args[i + 1].to_string().to_lowercase() + }; + } + } + + if verbose { + println!("***************{}", "*".repeat(self.get_tool_name().len())); + println!("* Welcome to {} *", 
self.get_tool_name()); + println!("***************{}", "*".repeat(self.get_tool_name().len())); + } + + let sep = std::path::MAIN_SEPARATOR; + + let mut progress: usize; + let mut old_progress: usize = 1; + + let start = Instant::now(); + + if pattern_string.is_empty() { + return Err(Error::new( + ErrorKind::InvalidInput, + "Error: The interpretation pattern string (e.g. 'x,y,z,i,c,rn,nr,sa') was not specified.", + )); + } + + // Do some quality control on the pattern + if !pattern_string.contains("x") || + !pattern_string.contains("y") || + !pattern_string.contains("z") { + return Err(Error::new( + ErrorKind::InvalidInput, + "Error: The interpretation pattern string (e.g. 'x,y,z,i,c,rn,nr,sa') must contain x, y, and z fields.", + )); + } + + if (pattern_string.contains("rn") && !pattern_string.contains("nr")) || + (pattern_string.contains("nr") && !pattern_string.contains("rn")) { + return Err(Error::new( + ErrorKind::InvalidInput, + "Error: If the interpretation pattern string (e.g. 'x,y,z,i,rn,nr,time') contains a 'rn' field it must also contain a 'nr' field and vice versa.", + )); + } + + let pattern_vec = pattern_string.trim().split(",").collect::>(); + let num_fields = pattern_vec.len(); + + // convert the string pattern to numeric for easier look-up + let mut pattern_numeric = vec![0usize; num_fields]; + let mut pattern_has_time = false; + for a in 0..num_fields { + match pattern_vec[a] { + "x" => pattern_numeric[a] = 0usize, + "y" => pattern_numeric[a] = 1usize, + "z" => pattern_numeric[a] = 2usize, + "i" => pattern_numeric[a] = 3usize, + "c" => pattern_numeric[a] = 4usize, + "rn" => pattern_numeric[a] = 5usize, + "nr" => pattern_numeric[a] = 6usize, + "time" => { + pattern_has_time = true; + pattern_numeric[a] = 7usize + }, + "sa" => pattern_numeric[a] = 8usize, + _ => println!("Unrecognized pattern {}", pattern_vec[a]), + } + } + + let mut cmd = input_files.split(";"); + let mut files_vec = cmd.collect::>(); + if files_vec.len() == 1 { + cmd = 
input_files.split(","); + files_vec = cmd.collect::>(); + } + let mut i = 1; + let num_files = files_vec.len(); + for value in files_vec { + if !value.trim().is_empty() { + let mut input_file = value.trim().to_owned(); + if !input_file.contains(sep) && !input_file.contains("/") { + input_file = format!("{}{}", working_directory, input_file); + } + + // Initialize the output LAS file + let file_extension = get_file_extension(&input_file); + let output_file = input_file.replace(&format!(".{}", file_extension), ".las"); + let mut output = LasFile::new(&output_file, "w")?; + let mut header: LasHeader = Default::default(); + pattern_has_time = true; + if pattern_has_time { + header.point_format = 1; + } else { + header.point_format = 0; + } + output.add_header(header); + if !wkt_string.is_empty() { + output.wkt = wkt_string.clone(); + } + + if verbose { + println!("Parsing file {}...", output.get_short_filename()); + } + let f = File::open(input_file.clone())?; + let f = BufReader::new(f); + + let mut point_num = 0; + for line in f.lines() { + let line_unwrapped = line.unwrap(); + let line_data = line_unwrapped.split(",").collect::>(); + if line_data.len() >= num_fields { + // check to see if the first field contains a number; if not, it's likely a header row and should be ignored + let is_num = line_data[0].parse::().is_ok(); + if is_num { + let mut point_data: PointData = Default::default(); + let mut gps_time = 0f64; + // now convert each of the specified fields based on the input pattern + for a in 0..num_fields { + match pattern_numeric[a] { + 0usize => point_data.x = line_data[a].parse::().unwrap(), + 1usize => point_data.y = line_data[a].parse::().unwrap(), + 2usize => point_data.z = line_data[a].parse::().unwrap(), + 3usize => point_data.intensity = line_data[a].parse::().unwrap(), + 4usize => point_data.set_classification(line_data[a].parse::().unwrap()), + 5usize => point_data.set_return_number(line_data[a].parse::().unwrap()), + 6usize => 
point_data.set_number_of_returns(line_data[a].parse::().unwrap()), + 7usize => gps_time = line_data[a].parse::().unwrap(), + 8usize => point_data.scan_angle = line_data[a].parse::().unwrap(), + _ => println!("unrecognized pattern"), + } + } + + point_num += 1; + if point_num == 1 || point_num == 2 || point_num == 349528 { + println!("Point {}: {:?}", point_num, point_data); + } + if !pattern_has_time { + output.add_point_record(LidarPointRecord::PointRecord0 { + point_data: point_data, + }); + } else { + output.add_point_record(LidarPointRecord::PointRecord1 { + point_data: point_data, + gps_data: gps_time, + }); + } + } + } // else ignore the line. + } + + // let mut vlr1: Vlr = Default::default(); + // vlr1.reserved = 0u16; + // vlr1.user_id = String::from("LASF_Projection"); + // vlr1.record_id = 34735u16; + // vlr1.description = String::from("GeoTiff Projection Keys"); + // let raw_data = vec![1u16, 1, 0, 23, 1024, 0, 1, 1, 2048, 0, 1, 4269, 2049, 34737, 24, 64, 2050, 0, 1, 6269, 2051, 0, 1, 8901, 2054, 0, 1, 9102, 2055, 34736, 1, 9, 2056, 0, 1, 7019, 2057, 34736, 1, 6, 2059, 34736, 1, 7, 2061, 34736, 1, 8, 3072, 0, 1, 32145, 3073, 34737, 38, 0, 3075, 0, 1, 1, 3076, 0, 1, 9001, 3077, 34736, 1, 5, 3081, 34736, 1, 4, 3082, 34736, 1, 0, 3083, 34736, 1, 1, 3088, 34736, 1, 2, 3092, 34736, 1, 3, 4097, 34737, 26, 38, 4099, 0, 1, 9001]; + // let mut v8: Vec = Vec::new(); + // for n in raw_data { + // v8.write_u16::(n).unwrap(); + // } + // vlr1.binary_data = v8; + // vlr1.record_length_after_header = vlr1.binary_data.len() as u16; //192; + // println!("vlr1 (192): {} {}", vlr1.binary_data.len(), vlr1.record_length_after_header); + // output.add_vlr(vlr1); + + // let mut vlr2: Vlr = Default::default(); + // vlr2.reserved = 0u16; + // vlr2.user_id = String::from("LASF_Projection"); + // vlr2.record_id = 34736u16; + // vlr2.description = String::from("GeoTiff double parameters"); + // let raw_data = vec![500000f64, 0f64, -72.5f64, 0.9999642857142857f64, 42.5f64, 1f64, 
6378137f64, 298.257222101f64, 0f64, 0.017453292519943295f64]; + // let mut v8: Vec = Vec::new(); + // for n in raw_data { + // v8.write_f64::(n).unwrap(); + // } + // vlr2.binary_data = v8; + // vlr2.record_length_after_header = vlr2.binary_data.len() as u16; //80; + // println!("vlr2 (80): {} {}", vlr2.binary_data.len(), vlr2.record_length_after_header); + // output.add_vlr(vlr2); + + // let mut vlr3: Vlr = Default::default(); + // vlr3.reserved = 0u16; + // vlr3.user_id = String::from("LASF_Projection"); + // vlr3.record_id = 34737u16; + // vlr3.description = String::from("GeoTiff ASCII parameters"); + // vlr3.binary_data = "NAD_1983_StatePlane_Vermont_FIPS_4400|NAVD88 - Geoid09 (Meters)|GCS_North_American_1983|".as_bytes().to_vec(); + // vlr3.record_length_after_header = vlr3.binary_data.len() as u16; //89; + // println!("vlr3 (89): {} {}", vlr3.binary_data.len(), vlr3.record_length_after_header); + // output.add_vlr(vlr3); + + if verbose { + println!("Writing output LAS file {}...", output.get_short_filename()); + } + let _ = match output.write() { + Ok(_) => println!("Complete!"), + Err(e) => println!("error while writing: {:?}", e), + }; + } + if verbose { + progress = (100.0_f64 * (i + 1) as f64 / num_files as f64) as usize; + if progress != old_progress { + println!("Progress: {}%", progress); + old_progress = progress; + } + } + i += 1; + } + + if verbose { + let elapsed_time = get_formatted_elapsed_time(start); + println!("{}", &format!("Elapsed Time: {}", elapsed_time)); + } + + Ok(()) + } +} + +/// Returns the file extension. 
+pub fn get_file_extension(file_name: &str) -> String { + let file_path = std::path::Path::new(file_name); + let extension = file_path.extension().unwrap(); + let e = extension.to_str().unwrap(); + e.to_string() +} \ No newline at end of file diff --git a/src/tools/lidar_analysis/las_to_ascii.rs b/src/tools/lidar_analysis/las_to_ascii.rs index daeff0bbd..fdc69be8c 100644 --- a/src/tools/lidar_analysis/las_to_ascii.rs +++ b/src/tools/lidar_analysis/las_to_ascii.rs @@ -1,8 +1,8 @@ /* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay -Created: July 16, 2017 -Last Modified: 12/10/2018 +Created: 16/07/2017 +Last Modified: 10/02/2019 License: MIT */ @@ -16,6 +16,20 @@ use std::io::BufWriter; use std::io::{Error, ErrorKind}; use std::path; +/// This tool can be used to convert one or more LAS files, containing LiDAR data, into ASCII files. The user must +/// specify the name(s) of the input LAS file(s) (`--inputs`). Each input file will have a correspondingly named +/// output file with a `.csv` file extension. CSV files are comma separated value files and contain tabular data +/// with each column corresponding to a field in the table and each row a point value. Fields are separated by +/// commas in the ASCII formatted file. The output point data, each on a separate line, will take the format: +/// +/// ``` +/// x,y,z,intensity,class,return,num_returns,scan_angle +/// ``` +/// +/// Use the `AsciiToLas` tool to convert a text file containing LiDAR point data into a LAS file. 
+/// +/// # See Also +/// `AsciiToLas` pub struct LasToAscii { name: String, description: String, @@ -171,34 +185,26 @@ impl WhiteboxTool for LasToAscii { } }; - let output_file = if input_file.to_lowercase().ends_with(".las") { - input_file.replace(".las", ".txt") - } else if input_file.to_lowercase().ends_with(".zip") { - input_file.replace(".zip", ".txt") - } else { - return Err(Error::new( - ErrorKind::NotFound, - format!("No such file or directory ({})", input_file), - )); - }; - + let file_extension = get_file_extension(&input_file); + let output_file = input_file.replace(&format!(".{}", file_extension), ".csv"); let f = File::create(output_file)?; let mut writer = BufWriter::new(f); let n_points = input.header.number_of_points as usize; - writer.write_all("X Y Z Intensity Class Return Num_returns\n".as_bytes())?; + writer.write_all("X,Y,Z,Intensity,Class,Return,Num_returns,Scan_angle\n".as_bytes())?; for k in 0..n_points { let pd = input[k]; let s = format!( - "{} {} {} {} {} {} {}\n", + "{},{},{},{},{},{},{},{}\n", pd.x, pd.y, pd.z, pd.intensity, pd.classification(), pd.return_number(), - pd.number_of_returns() + pd.number_of_returns(), + pd.scan_angle ); writer.write_all(s.as_bytes())?; @@ -227,3 +233,11 @@ impl WhiteboxTool for LasToAscii { Ok(()) } } + +/// Returns the file extension. 
+pub fn get_file_extension(file_name: &str) -> String { + let file_path = std::path::Path::new(file_name); + let extension = file_path.extension().unwrap(); + let e = extension.to_str().unwrap(); + e.to_string() +} \ No newline at end of file diff --git a/src/tools/lidar_analysis/mod.rs b/src/tools/lidar_analysis/mod.rs index a33762c15..f640f1fda 100644 --- a/src/tools/lidar_analysis/mod.rs +++ b/src/tools/lidar_analysis/mod.rs @@ -1,4 +1,5 @@ // private sub-module defined in other files +// mod ascii_to_las; mod block_maximum; mod block_minimum; mod classify_overlap_points; @@ -39,6 +40,7 @@ mod remove_duplicates; mod select_tiles_by_polygon; // exports identifiers from private sub-modules in the current module namespace +// pub use self::ascii_to_las::AsciiToLas; pub use self::block_maximum::LidarBlockMaximum; pub use self::block_minimum::LidarBlockMinimum; pub use self::classify_overlap_points::ClassifyOverlapPoints; diff --git a/src/tools/math_stat_analysis/extract_statistics.rs b/src/tools/math_stat_analysis/extract_statistics.rs index 41a5f9c4f..ce9c5910f 100644 --- a/src/tools/math_stat_analysis/extract_statistics.rs +++ b/src/tools/math_stat_analysis/extract_statistics.rs @@ -32,6 +32,9 @@ use std::thread; /// for each of a group of watersheds (feature definitions). Although the data raster can contain any /// type of data, the feature definition raster must be categorical, i.e. it must define area entities /// using integer values. +/// +/// The `--stat` parameter can take the values, 'average', 'minimum', 'maximum', 'range', +/// 'standard deviation', or 'total'. /// /// If an output image name is specified, the tool will assign the descriptive statistic value to /// each of the spatial entities defined in the feature definition raster. 
If text output is selected, @@ -89,7 +92,7 @@ impl ExtractRasterStatistics { parameters.push(ToolParameter { name: "Statistic Type".to_owned(), flags: vec!["--stat".to_owned()], - description: "Statistic to extract.".to_owned(), + description: "Statistic to extract, including 'average', 'minimum', 'maximum', 'range', 'standard deviation', and 'total'.".to_owned(), parameter_type: ParameterType::OptionList(vec![ "average".to_owned(), "minimum".to_owned(), @@ -200,36 +203,36 @@ impl WhiteboxTool for ExtractRasterStatistics { } let flag_val = vec[0].to_lowercase().replace("--", "-"); if flag_val == "-i" || flag_val == "-input" { - if keyval { - input_file = vec[1].to_string(); + input_file = if keyval { + vec[1].to_string() } else { - input_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-features" { - if keyval { - features_file = vec[1].to_string(); + features_file = if keyval { + vec[1].to_string() } else { - features_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-o" || flag_val == "-output" { - if keyval { - output_file = vec[1].to_string(); + output_file = if keyval { + vec[1].to_string() } else { - output_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-out_table" { // out_table = true; - if keyval { - output_html_file = vec[1].to_string(); + output_html_file = if keyval { + vec[1].to_string() } else { - output_html_file = args[i + 1].to_string(); - } + args[i + 1].to_string() + }; } else if flag_val == "-stat" { - if keyval { - stat_type = vec[1].to_string().to_lowercase(); + stat_type = if keyval { + vec[1].to_string().to_lowercase() } else { - stat_type = args[i + 1].to_string().to_lowercase(); - } + args[i + 1].to_string().to_lowercase() + }; } } @@ -415,6 +418,7 @@ impl WhiteboxTool for ExtractRasterStatistics { if !output_file.is_empty() { let mut output = Raster::initialize_using_file(&output_file, &input); 
output.configs.data_type = DataType::F32; + output.configs.photometric_interp = PhotometricInterpretation::Continuous; let out_stat = if stat_type.contains("av") { features_average.clone() } else if stat_type.contains("min") { diff --git a/src/tools/math_stat_analysis/raster_histogram.rs b/src/tools/math_stat_analysis/raster_histogram.rs index a29abce36..743cdc480 100644 --- a/src/tools/math_stat_analysis/raster_histogram.rs +++ b/src/tools/math_stat_analysis/raster_histogram.rs @@ -265,8 +265,6 @@ impl WhiteboxTool for RasterHistogram { let _ = writer.flush(); - // println!("freq. data: {:?}", freq_data); - if verbose { if cfg!(target_os = "macos") || cfg!(target_os = "ios") { let output = Command::new("open") diff --git a/src/tools/mod.rs b/src/tools/mod.rs index 7f2ad22f4..5541f6ff8 100644 --- a/src/tools/mod.rs +++ b/src/tools/mod.rs @@ -114,6 +114,7 @@ impl ToolManager { tool_names.push("PolygonShortAxis".to_string()); tool_names.push("Polygonize".to_string()); tool_names.push("RadiusOfGyration".to_string()); + tool_names.push("RasterArea".to_string()); tool_names.push("RasterCellAssignment".to_string()); tool_names.push("Reclass".to_string()); tool_names.push("ReclassEqualInterval".to_string()); @@ -248,6 +249,7 @@ impl ToolManager { tool_names.push("WriteFunctionMemoryInsertion".to_string()); // lidar_analysis + // tool_names.push("AsciiToLas".to_string()); tool_names.push("LidarBlockMaximum".to_string()); tool_names.push("LidarBlockMinimum".to_string()); tool_names.push("ClassifyOverlapPoints".to_string()); @@ -564,6 +566,7 @@ impl ToolManager { "polygonshortaxis" => Some(Box::new(gis_analysis::PolygonShortAxis::new())), "polygonize" => Some(Box::new(gis_analysis::Polygonize::new())), "radiusofgyration" => Some(Box::new(gis_analysis::RadiusOfGyration::new())), + "rasterarea" => Some(Box::new(gis_analysis::RasterArea::new())), "rastercellassignment" => Some(Box::new(gis_analysis::RasterCellAssignment::new())), "reclass" => 
Some(Box::new(gis_analysis::Reclass::new())), "reclassequalinterval" => Some(Box::new(gis_analysis::ReclassEqualInterval::new())), @@ -754,6 +757,7 @@ impl ToolManager { } // lidar_analysis + // "asciitolas" => Some(Box::new(lidar_analysis::AsciiToLas::new())), "lidarblockmaximum" => Some(Box::new(lidar_analysis::LidarBlockMaximum::new())), "lidarblockminimum" => Some(Box::new(lidar_analysis::LidarBlockMinimum::new())), "classifyoverlappoints" => Some(Box::new(lidar_analysis::ClassifyOverlapPoints::new())), diff --git a/src/tools/terrain_analysis/surface_area_ratio.rs b/src/tools/terrain_analysis/surface_area_ratio.rs index a5a2233b2..63ee33e62 100644 --- a/src/tools/terrain_analysis/surface_area_ratio.rs +++ b/src/tools/terrain_analysis/surface_area_ratio.rs @@ -195,6 +195,8 @@ impl WhiteboxTool for SurfaceAreaRatio { output.configs.data_type = DataType::F32; let rows = input.configs.rows as isize; + let is_geographic = input.is_in_geographic_coordinates(); + let num_procs = num_cpus::get() as isize; let (tx, rx) = mpsc::channel(); for tid in 0..num_procs { @@ -252,7 +254,7 @@ impl WhiteboxTool for SurfaceAreaRatio { // let mut num_valid_facets: f64; let mut mid_lat: f64; for row in (0..rows).filter(|r| r % num_procs == tid) { - if input.is_in_geographic_coordinates() { + if is_geographic { mid_lat = input.get_y_from_row(row).to_radians(); resx = resx * 111_111.0 * mid_lat.cos(); resy = resy * 111_111.0; diff --git a/src/utils/byte_order_reader.rs b/src/utils/byte_order_reader.rs index 8feea8987..ae0e9e9c3 100644 --- a/src/utils/byte_order_reader.rs +++ b/src/utils/byte_order_reader.rs @@ -1,17 +1,19 @@ -// extern crate byteorder; use byteorder::{BigEndian, ByteOrder, LittleEndian}; pub struct ByteOrderReader { pub byte_order: Endianness, + pub is_le: bool, pub buffer: Vec, pub pos: usize, } impl ByteOrderReader { pub fn new(buffer: Vec, byte_order: Endianness) -> ByteOrderReader { + let is_le = byte_order == Endianness::LittleEndian; ByteOrderReader { buffer: 
buffer, byte_order: byte_order, + is_le: is_le, pos: 0usize, } } @@ -118,13 +120,30 @@ impl ByteOrderReader { } pub fn read_f32(&mut self) -> f32 { - let buf = &self.buffer[self.pos..self.pos + 4]; + // let buf = &self.buffer[self.pos..self.pos + 4]; self.pos += 4; - if self.byte_order == Endianness::LittleEndian { - LittleEndian::read_f32(buf) + if self.is_le { + // LittleEndian::read_f32(buf) + LittleEndian::read_f32(&self.buffer[self.pos - 4..self.pos]) + } else { + // BigEndian::read_f32(buf) + BigEndian::read_f32(&self.buffer[self.pos - 4..self.pos]) + } + } + + pub fn as_f32_vec(&mut self) -> Vec { + let num_values = self.buffer.len() / 4; + let mut ret: Vec = Vec::with_capacity(num_values); + if self.is_le { + for a in 0..num_values { + ret.push(LittleEndian::read_f32(&self.buffer[a*4..a*4+4])); + } } else { - BigEndian::read_f32(buf) + for a in 0..num_values { + ret.push(BigEndian::read_f32(&self.buffer[a*4..a*4+4])); + } } + ret } pub fn read_f64(&mut self) -> f64 { diff --git a/whitebox_tools.py b/whitebox_tools.py index f0a005d93..8e08c1a0a 100644 --- a/whitebox_tools.py +++ b/whitebox_tools.py @@ -368,6 +368,7 @@ def list_tools(self, keywords=[]): + ############## # Data Tools # ############## @@ -1198,6 +1199,26 @@ def polygon_short_axis(self, i, output, callback=None): args.append("--output='{}'".format(output)) return self.run_tool('polygon_short_axis', args, callback) # returns 1 if error + def raster_area(self, i, output=None, out_text=False, units="grid cells", zero_back=False, callback=None): + """Calculates the area of polygons or classes within a raster image. + + Keyword arguments: + + i -- Input raster file. + output -- Output raster file. + out_text -- Would you like to output polygon areas to text?. + units -- ; options are 'grid cells', 'map units'. + zero_back -- Flag indicating whether zero values should be treated as a background. + callback -- Custom function for handling tool text outputs. 
+ """ + args = [] + args.append("--input='{}'".format(i)) + if output is not None: args.append("--output='{}'".format(output)) + if out_text: args.append("--out_text") + args.append("--units={}".format(units)) + if zero_back: args.append("--zero_back") + return self.run_tool('raster_area', args, callback) # returns 1 if error + def raster_cell_assignment(self, i, output, assign="column", callback=None): """Assign row or column number to cells. @@ -4034,7 +4055,7 @@ def split_colour_composite(self, i, output, callback=None): Keyword arguments: i -- Input colour composite image file. - output -- Output raster file (suffixes of '_r', '_g', and '_b' will be appended). + output -- Output raster file (suffixes of _r, _g, and _b will be appended). callback -- Custom function for handling tool text outputs. """ args = [] @@ -6030,7 +6051,7 @@ def extract_raster_statistics(self, i, features, output=None, stat="average", ou i -- Input data raster file. features -- Input feature definition raster file. output -- Output raster file. - stat -- Statistic to extract. + stat -- Statistic to extract, including 'average', 'minimum', 'maximum', 'range', 'standard deviation', and 'total'. out_table -- Output HTML Table file. callback -- Custom function for handling tool text outputs. """