Skip to content

Commit

Permalink
Patch download and area calculation (#275)
Browse files Browse the repository at this point in the history
* Patch download and area calculation

* shapefile_util codacy tweaks

* extractproduct codacy fixes

* update NLCD to new LULC mask

* more conservative area calculation for dem chunk

* catch error when old files exists for different specified products

* Capture duplicates with all executables

* GACOS update new format

* print error when caught

very simple update so that the error of why multiprocessing fails is printed to the screen

* fix logging bug

Co-authored-by: bbuzz31 <[email protected]>
  • Loading branch information
sssangha and bbuzz31 authored Jul 29, 2021
1 parent a79e8a1 commit 04f1400
Show file tree
Hide file tree
Showing 9 changed files with 780 additions and 431 deletions.
300 changes: 193 additions & 107 deletions tools/ARIAtools/ARIAProduct.py

Large diffs are not rendered by default.

440 changes: 291 additions & 149 deletions tools/ARIAtools/extractProduct.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions tools/ARIAtools/mask_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,8 +261,8 @@ def __call__(self, proj, bounds, arrshape, outputFormat='ENVI', test=False):
outputFormat='ENVI'

## crop the raw nlcd in memory
path_nlcd = '/vsizip/vsicurl/https://s3-us-west-2.amazonaws.com/mrlc/NLCD_2016'\
'_Land_Cover_L48_20190424.zip/NLCD_2016_Land_Cover_L48_20190424.img'
path_nlcd = '/vsizip/vsicurl/https://s3-us-west-2.amazonaws.com/mrlc/nlcd_2019'\
'_land_cover_l48_20210604.zip/nlcd_2019_land_cover_l48_20210604.img'

log.info('Cropping raw NLCD...')
ds_crop = crop_ds(path_nlcd, self.path_bbox)
Expand Down
10 changes: 8 additions & 2 deletions tools/ARIAtools/productPlot.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,9 @@ def createParser():
"Default 0.0081 = 0.0081km\u00b2=area of single pixel at standard 90m resolution")
parser.add_argument('--figwidth', dest='figwidth', type=str, default='standard',
help='Width of lat extents figure in inches. Default is \"standard\", i.e., the 6.4-inch-wide standard figure size. Optionally, theuser may define the width manually, e.g,. 8 [inches] or set the parameter to \"wide\" format, i.e., the width of the figure automatically scales with the number of interferograms. Other options include')
parser.add_argument('--version', dest='version', default=None,
help='Specify version as str, e.g. 2_0_4 or all prods; default: '
'newest')
parser.add_argument('-verbose', '--verbose', action='store_true', dest='verbose',
help="Toggle verbose mode on.")
return parser
Expand Down Expand Up @@ -580,8 +583,11 @@ def main(inps=None):
# Outputs = arrays ['standardproduct_info.products'] containing grouped “radarmetadata info” and “data layer keys+paths” dictionaries for each standard product
# In addition, path to bbox file ['standardproduct_info.bbox_file'] (if bbox specified)
standardproduct_info = ARIA_standardproduct(inps.imgfile,
bbox=inps.bbox, workdir=inps.workdir,
num_threads=inps.num_threads, verbose=inps.verbose)
bbox=inps.bbox,
workdir=inps.workdir,
num_threads=inps.num_threads,
url_version=inps.version,
verbose=inps.verbose)

# If user requests to generate all plots.
if inps.plotall:
Expand Down
62 changes: 33 additions & 29 deletions tools/ARIAtools/shapefile_util.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Author: Simran Sangha & David Bekaert
# Copyright 2019, by the California Institute of Technology. ALL RIGHTS
Expand All @@ -18,11 +18,7 @@
gdal.PushErrorHandler('CPLQuietErrorHandler')

def open_shapefile(fname, lyrind, ftind):
'''
Open a existing shapefile and pass the coordinates back.
##SS => see commend on projection of the shapefile, expand the desciption of what the function would do based on this.
'''

"""Open a existing shapefile and pass the coordinates back."""
# import dependencies
from shapely.wkt import loads

Expand All @@ -37,15 +33,12 @@ def open_shapefile(fname, lyrind, ftind):
else:
file_bbox = file_bbox.GetLayerByIndex(lyrind).GetFeature(ftind)
geom = file_bbox.GetGeometryRef()
file_bbox = loads(geom.ExportToWkt()) ##SS => We should track the projection of the shapefile adn pass it up? e.g. what it the data has different projection than that of the shapefile?
file_bbox = loads(geom.ExportToWkt())

return file_bbox

def save_shapefile(fname, polygon, drivername):
'''
##SS => add descritpion and limitations. Is there not a way to add the projection pf the shape file as well?
'''

"""Save a polygon shapefile."""
# open file
ds = ogr.GetDriverByName(drivername).CreateDataSource(fname)
# create layer
Expand All @@ -65,11 +58,8 @@ def save_shapefile(fname, polygon, drivername):

return

def shapefile_area(file_bbox):
'''
Compute km\u00b2 area of shapefile.
'''

def shapefile_area(file_bbox, bounds = False):
"""Compute km\u00b2 area of shapefile."""
# import dependencies
from pyproj import Proj
from shapely.geometry import shape
Expand All @@ -80,32 +70,43 @@ def shapefile_area(file_bbox):
if file_bbox.type == 'Polygon': file_bbox = [file_bbox]
for polyobj in file_bbox:
# get coords
lon, lat=polyobj.exterior.coords.xy
if bounds:
# Pass coordinates of bounds as opposed to cutline
# Necessary for estimating DEM/mask footprints
WSEN = polyobj.bounds
lon = np.array([WSEN[0],WSEN[0],WSEN[2],WSEN[2],WSEN[0]])
lat = np.array([WSEN[1],WSEN[3],WSEN[3],WSEN[1],WSEN[1]])
else:
lon, lat = polyobj.exterior.coords.xy

# use equal area projection centered on/bracketing AOI
pa = Proj("+proj=aea +lat_1={} +lat_2={} +lat_0={} +lon_0={}".format(min(lat), max(lat), (max(lat)+min(lat))/2, (max(lon)+min(lon))/2))
pa = Proj("+proj=aea +lat_1={} +lat_2={} +lat_0={} +lon_0={}". \
format(min(lat), max(lat), (max(lat)+min(lat))/2, \
(max(lon)+min(lon))/2))
x, y = pa(lon, lat)
cop = {"type": "Polygon", "coordinates": [zip(x, y)]}
shape_area+=shape(cop).area/1e6 # area in km^2
shape_area += shape(cop).area/1e6 # area in km^2

return shape_area

def chunk_area(WSEN):
""" Chunk an area ~evenly pieces < 450000 km required by the SRTM server """
"""Chunk an area ~evenly pieces < 450000 km required by the
SRTM server."""
from shapely.geometry import Polygon
max_area = 400000 # need buffer for projections inconsistencies
W, S, E, N = WSEN
n = 2 # start with a 2 x 2 chunk
area = max_area+1
n = 2 # start with a 2 x 2 chunk
area = max_area + 1
while area > max_area:
cols = np.linspace(W, E, n+1)
rows = np.linspace(S, N, n+1)
cols = np.linspace(W, E, n + 1)
rows = np.linspace(S, N, n + 1)
Wi, Si, Ei, Ni = [cols[0], rows[0], cols[1], rows[1]]
poly = Polygon([(Wi,Ni), (Wi,Si), (Ei,Si), (Ei,Ni)])
area = shapefile_area(poly)
n+=1
if n>100:
log.error('There was a problem chunking the DEM; check input bounds')
log.error('There was a problem chunking the DEM; check input '
'bounds')
os.sys.exit()
return rows, cols

Expand Down Expand Up @@ -139,7 +140,7 @@ def plot_shapefile(fname):
if geom_name == 'MULTIPOLYGON':
r = geom.GetGeometryRef(i)
for j in range(r.GetGeometryCount()):
p = r.GetGeometryRef(0)
p = r.GetGeometryRef(j)
r = p
x = [r.GetX(j) for j in range(r.GetPointCount())]
y = [r.GetY(j) for j in range(r.GetPointCount())]
Expand All @@ -153,19 +154,22 @@ def plot_shapefile(fname):
paths.append(path)

with plt.style.context(('seaborn')):
fig, ax = plt.subplots(figsize=(12, 9))
fig = plt.figure(figsize=(12, 9))
ax = fig.add_subplot(111)
ax.set_xlim(ext[0]-xoff,ext[1]+xoff)
ax.set_ylim(ext[2]-yoff,ext[3]+yoff)

# Add paths as patches to axes
for path in paths:
patch = mpatches.PathPatch(path, fill=False, facecolor='blue', edgecolor='black', linewidth=1)
patch = mpatches.PathPatch(path, fill=False, facecolor='blue', \
edgecolor='black', linewidth=1)

ax.add_patch(patch)

ax.set_xlabel('longitude', labelpad=15, fontsize=15)
ax.set_ylabel('latitude', labelpad=15, fontsize=15)
ax.set_title(os.path.basename(os.path.splitext(fname)[0]), fontsize=15)
ax.set_title(os.path.basename(os.path.splitext(fname)[0]), \
fontsize=15)
ax.set_aspect(1.0)
ax.grid(False)
plt.show()
20 changes: 16 additions & 4 deletions tools/ARIAtools/tsSetup.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,9 @@ def create_parser():
'area of overlap of scenes wrt specified bounding box.'
' Default 0.0081 = 0.0081km\u00b2 = area of single'
' pixel at standard 90m resolution')
parser.add_argument('--version', dest='version', default=None,
help='Specify version as str, e.g. 2_0_4 or all prods;'
'default: newest')
parser.add_argument('-verbose', '--verbose', action='store_true',
dest='verbose', help="Toggle verbose mode on.")

Expand Down Expand Up @@ -313,7 +316,13 @@ def generate_stack(aria_prod, input_files, output_file_name,
data_set = None

metadata['wavelength'] = wavelength
metadata['utcTime'] = utc_time[dates]
try:
metadata['utcTime'] = utc_time[dates]
except:
log.debug('Skipping %s; it likely exists in the %s, '\
'but was not specified in the product list',
dates, os.path.dirname(data[1]))
continue
metadata['bPerp'] = b_perp[dates]
metadata['incAng'] = inc_angle[dates]
metadata['lookAng'] = look_ang[dates]
Expand Down Expand Up @@ -384,9 +393,12 @@ def main(inps=None):
# standard product
# In addition, path to bbox file ['standardproduct_info.bbox_file']
# (if bbox specified)
standardproduct_info = ARIA_standardproduct(
inps.imgfile, bbox=inps.bbox,
workdir=inps.workdir, num_threads=inps.num_threads, verbose=inps.verbose)
standardproduct_info = ARIA_standardproduct(inps.imgfile,
bbox=inps.bbox,
workdir=inps.workdir,
num_threads=inps.num_threads,
url_version=inps.version,
verbose=inps.verbose)

# pass number of threads for gdal multiprocessing computation
if inps.num_threads.lower() == 'all':
Expand Down
56 changes: 56 additions & 0 deletions tools/ARIAtools/url_manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
#!/usr/bin/env python3
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Author: Simran Sangha
# Copyright 2019, by the California Institute of Technology. ALL RIGHTS
# RESERVED. United States Government Sponsorship acknowledged.
#
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

import os, os.path as op

###Capture and segregate duplicate products (if applicable)
def url_versions(urls, user_version, wd):
    """Segregate duplicate products that differ only in version suffix.

    Parameters
    ----------
    urls : list of str
        Product URLs/filenames of the form '<base>-v<maj>_<min>_<patch>.nc'.
    user_version : str or None
        Version to keep, e.g. '2_0_4'; the string 'all' keeps every
        duplicate; None keeps the newest version of each product.
    wd : str
        Working directory; superseded local copies of duplicates (if they
        exist on disk) are moved into '<wd>/duplicated_products'.

    Returns
    -------
    list of str
        One URL per unique product base (or all of `urls` when
        user_version == 'all').
    """
    # user explicitly asked to keep every version
    if isinstance(user_version, str) and user_version.lower() == 'all':
        return urls

    # unique product bases (URL minus the trailing '-v*.nc' version field)
    url_bases = []
    for url in urls:
        url_base = '-'.join(url.split('-')[:-1])
        if url_base not in url_bases:
            url_bases.append(url_base)

    urls_final = []
    for url_base in url_bases:
        # exact base comparison (not substring containment), so a base that
        # happens to be a prefix of another base cannot collect the wrong
        # products as duplicates
        duplicates = [url for url in urls
                      if '-'.join(url.split('-')[:-1]) == url_base]
        if len(duplicates) == 1:
            urls_final.append(duplicates[0])
        else:
            # parse versions as integer tuples so multi-digit components
            # (e.g. v2_0_10) both compare and reconstruct correctly
            versions = []
            for dupe in duplicates:
                ver_str = dupe.split('-')[-1]  # e.g. 'v2_0_4.nc'
                # strip leading 'v' and trailing '.nc', split on '_'
                versions.append(
                    tuple(int(p) for p in ver_str[1:-3].split('_')))

            ## default: use latest version
            if user_version is None:
                version = 'v{}.nc'.format(
                    '_'.join(str(p) for p in max(versions)))
            else:
                version = f'v{user_version}.nc'

            urls_final.append(f'{url_base}-{version}')

            ## move superseded local duplicates to a different folder
            dupe_folder = op.join(wd, 'duplicated_products')
            os.makedirs(dupe_folder, exist_ok=True)
            for dupe in duplicates:
                dupe_path = os.path.join(dupe_folder, os.path.basename(dupe))
                wd_path = os.path.join(wd, os.path.basename(dupe))
                if os.path.basename(dupe) != \
                        os.path.basename(urls_final[-1]) and \
                        os.path.exists(wd_path):
                    os.rename(wd_path, dupe_path)
    return urls_final
Loading

0 comments on commit 04f1400

Please sign in to comment.