From 86e67d3cfc4e535a8243c6277d01abafbf661a28 Mon Sep 17 00:00:00 2001
From: Fabien Maussion
Date: Wed, 20 Nov 2024 22:14:04 +0100
Subject: [PATCH] Update velocity and bedmachine in shop

---
 oggm/shop/bedmachine.py    | 142 ++++++++++++++++++++++
 oggm/shop/bedtopo.py       |   3 +-
 oggm/shop/cook23.py        |   3 +-
 oggm/shop/hugonnet_maps.py |  12 +--
 oggm/shop/its_live.py      | 122 +++++++++++------------
 oggm/shop/millan22.py      | 186 +++++++++++++++++++++----------------
 oggm/utils/_downloads.py   |   2 -
 7 files changed, 311 insertions(+), 159 deletions(-)
 create mode 100644 oggm/shop/bedmachine.py

diff --git a/oggm/shop/bedmachine.py b/oggm/shop/bedmachine.py
new file mode 100644
index 000000000..ba8bdecc6
--- /dev/null
+++ b/oggm/shop/bedmachine.py
@@ -0,0 +1,142 @@
+import logging
+import warnings
+from packaging.version import Version
+
+import numpy as np
+import pandas as pd
+import xarray as xr
+import os
+
+try:
+    import rasterio
+    from rasterio.warp import reproject, Resampling, calculate_default_transform
+    from rasterio import MemoryFile
+    try:
+        # rasterio V > 1.0
+        from rasterio.merge import merge as merge_tool
+    except ImportError:
+        from rasterio.tools.merge import merge as merge_tool
+except ImportError:
+    pass
+
+
+from oggm import utils, cfg
+
+# Module logger
+log = logging.getLogger(__name__)
+
+aa_base_url = ('https://n5eil01u.ecs.nsidc.org/MEASURES/'
+               'NSIDC-0756.003/1970.01.01/BedMachineAntarctica-v3.nc')
+grl_base_url = ('https://n5eil01u.ecs.nsidc.org/ICEBRIDGE/IDBMG4.005/'
+                '1993.01.01/BedMachineGreenland-v5.nc')
+
+
+@utils.entity_task(log, writes=['gridded_data'])
+def bedmachine_to_gdir(gdir):
+    """Add the Bedmachine ice thickness maps to this glacier directory.
+
+    For Antarctica: BedMachineAntarctica-v3.nc
+    For Greenland: BedMachineGreenland-v5.nc
+
+    Parameters
+    ----------
+    gdir : :py:class:`oggm.GlacierDirectory`
+        the glacier directory to process
+    """
+
+    if gdir.rgi_region == '19':
+        file_url = aa_base_url
+    elif gdir.rgi_region == '05':
+        file_url = grl_base_url
+    else:
+        raise NotImplementedError('Bedmachine data not available '
+                                  f'for this region: {gdir.rgi_region}')
+
+    file_local = utils.download_with_authentication(file_url,
+                                                    'urs.earthdata.nasa.gov')
+
+    with xr.open_dataset(file_local) as ds:
+        ds.attrs['pyproj_srs'] = ds.proj4
+
+        x0, x1, y0, y1 = gdir.grid.extent_in_crs(ds.proj4)
+        dsroi = ds.salem.subset(corners=((x0, y0), (x1, y1)), crs=ds.proj4, margin=10)
+        thick = gdir.grid.map_gridded_data(dsroi['thickness'].data,
+                                           grid=dsroi.salem.grid,
+                                           interp='linear')
+        thick[thick <= 0] = np.nan
+
+    # Write
+    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
+
+        vn = 'bedmachine_thickness'
+        if vn in nc.variables:
+            v = nc.variables[vn]
+        else:
+            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
+        v.units = 'm'
+        ln = 'Ice thickness from BedMachine'
+        v.long_name = ln
+        v.data_source = file_url
+        v[:] = thick.data
+
+
+@utils.entity_task(log)
+def bedmachine_statistics(gdir):
+    """Gather statistics about the Bedmachine data interpolated to this glacier.
+    """
+
+    d = dict()
+
+    # Easy stats - this should always be possible
+    d['rgi_id'] = gdir.rgi_id
+    d['rgi_region'] = gdir.rgi_region
+    d['rgi_subregion'] = gdir.rgi_subregion
+    d['rgi_area_km2'] = gdir.rgi_area_km2
+    d['bedmachine_area_km2'] = 0
+    d['bedmachine_perc_cov'] = 0
+    d['bedmachine_vol_km3'] = np.nan
+
+    try:
+        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
+            thick = ds['bedmachine_thickness'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
+            d['bedmachine_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
+            d['bedmachine_perc_cov'] = float(d['bedmachine_area_km2'] / gridded_area)
+            d['bedmachine_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
+    except (FileNotFoundError, AttributeError, KeyError):
+        pass
+
+    return d
+
+
+@utils.global_task(log)
+def compile_bedmachine_statistics(gdirs, filesuffix='', path=True):
+    """Gather as many statistics as possible about a list of glaciers.
+
+    It can be used for result diagnostics and other purposes.
+
+    Parameters
+    ----------
+    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
+        the glacier directories to process
+    filesuffix : str
+        add suffix to output file
+    path : str, bool
+        Set to "True" in order to store the info in the working directory
+        Set to a path to store the file to your chosen location
+    """
+    from oggm.workflow import execute_entity_task
+
+    out_df = execute_entity_task(bedmachine_statistics, gdirs)
+
+    out = pd.DataFrame(out_df).set_index('rgi_id')
+
+    if path:
+        if path is True:
+            out.to_csv(os.path.join(cfg.PATHS['working_dir'],
+                                    ('bedmachine_statistics' +
+                                     filesuffix + '.csv')))
+        else:
+            out.to_csv(path)
+
+    return out

diff --git a/oggm/shop/bedtopo.py b/oggm/shop/bedtopo.py
index 8bc0bdead..a04067be6 100644
--- a/oggm/shop/bedtopo.py
+++ b/oggm/shop/bedtopo.py
@@ -97,9 +97,10 @@ def consensus_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             thick = ds['consensus_ice_thickness'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             d['consensus_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
             d['consensus_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
-            d['consensus_perc_cov'] = float(d['consensus_area_km2'] / gdir.rgi_area_km2)
+            d['consensus_perc_cov'] = float(d['consensus_area_km2'] / gridded_area)
     except (FileNotFoundError, AttributeError, KeyError):
         pass

diff --git a/oggm/shop/cook23.py b/oggm/shop/cook23.py
index 3da0695ea..78bc68972 100644
--- a/oggm/shop/cook23.py
+++ b/oggm/shop/cook23.py
@@ -133,12 +133,13 @@ def cook23_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             thick = ds['cook23_thk'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             with warnings.catch_warnings():
                 # For operational runs we ignore the warnings
                 warnings.filterwarnings('ignore', category=RuntimeWarning)
                 d['cook23_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
                 d['cook23_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
-                d['cook23_perc_cov'] = float(d['cook23_area_km2'] / gdir.rgi_area_km2)
+                d['cook23_perc_cov'] = float(d['cook23_area_km2'] / gridded_area)
     except (FileNotFoundError, AttributeError, KeyError):
         pass

diff --git a/oggm/shop/hugonnet_maps.py b/oggm/shop/hugonnet_maps.py
index 1fa5fa2ba..d271bb1f4 100644
--- a/oggm/shop/hugonnet_maps.py
+++ b/oggm/shop/hugonnet_maps.py
@@ -19,15 +19,6 @@
 except ImportError:
     pass
 
-try:
-    import salem
-except ImportError:
-    pass
-
-try:
-    import geopandas as gpd
-except ImportError:
-    pass
 
 from oggm import utils, cfg
 
@@ -224,8 +215,9 @@ def hugonnet_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             dhdt = ds['hugonnet_dhdt'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             d['hugonnet_area_km2'] = float((~dhdt.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
-            d['hugonnet_perc_cov'] = float(d['hugonnet_area_km2'] / gdir.rgi_area_km2)
+            d['hugonnet_perc_cov'] = float(d['hugonnet_area_km2'] / gridded_area)
             with warnings.catch_warnings():
                 # This can trigger an empty mean warning
                 warnings.filterwarnings("ignore", category=RuntimeWarning)

diff --git a/oggm/shop/its_live.py b/oggm/shop/its_live.py
index 6aafd6339..5cb473142 100644
--- a/oggm/shop/its_live.py
+++ b/oggm/shop/its_live.py
@@ -21,62 +21,41 @@
 # Module logger
 log = logging.getLogger(__name__)
 
-base_url = ('http://its-live-data.jpl.nasa.gov.s3.amazonaws.com/'
-            'velocity_mosaic/landsat/v00.0/static/cog/')
-regions = ['HMA', 'ANT', 'PAT', 'ALA', 'CAN', 'GRE', 'ICE', 'SRA']
+base_url = ('https://its-live-data.s3.amazonaws.com/'
+            'velocity_mosaic/v2/static/cog/ITS_LIVE_velocity_120m_')
+
+regions = [f'RGI{reg:02d}A' for reg in range(1, 20)]
 
 region_files = {}
 for reg in regions:
     d = {}
-    for var in ['vx', 'vy', 'vy_err', 'vx_err']:
-        d[var] = base_url + '{}_G0120_0000_{}.tif'.format(reg, var)
+    for var in ['v', 'vx', 'vy', 'v_error', 'vx_error', 'vy_error']:
+        d[var] = base_url + f'{reg}_0000_v02_{var}.tif'
     region_files[reg] = d
 
 region_grids = {}
 
-rgi_region_links = {'01': 'ALA', '02': 'ALA',
-                    '03': 'CAN', '04': 'CAN',
-                    '05': 'GRE',
-                    '06': 'ICE',
-                    '07': 'SRA', '09': 'SRA',
-                    '13': 'HMA', '14': 'HMA', '15': 'HMA',
-                    '17': 'PAT',
-                    '19': 'ANT',
+rgi_region_links = {'01': 'RGI01A',
+                    '02': 'RGI02A',
+                    '03': 'RGI03A',
+                    '04': 'RGI04A',
+                    '05': 'RGI05A',
+                    '06': 'RGI06A',
+                    '07': 'RGI07A',
+                    '08': 'RGI08A',
+                    '09': 'RGI09A',
+                    '10': 'RGI10A',
+                    '11': 'RGI11A',
+                    '12': 'RGI12A',
+                    '13': 'RGI14A', '14': 'RGI14A', '15': 'RGI14A',
+                    '17': 'RGI17A',
+                    '18': 'RGI18A',
+                    '19': 'RGI19A',
                     }
 
 
-def region_grid(reg):
-
-    global region_grids
-
-    if reg not in region_grids:
-        with utils.get_lock():
-            fp = utils.file_downloader(region_files[reg]['vx'])
-            ds = salem.GeoTiff(fp)
-            region_grids[reg] = ds.grid
-
-    return region_grids[reg]
-
-
-def _in_grid(grid, lon, lat):
-
-    i, j = grid.transform([lon], [lat], maskout=True)
-    return np.all(~ (i.mask | j.mask))
-
-
 def find_region(gdir):
-
-    reg = rgi_region_links.get(gdir.rgi_region, None)
-
-    if reg is None:
-        return None
-
-    grid = region_grid(reg)
-
-    if _in_grid(grid, gdir.cenlon, gdir.cenlat):
-        return reg
-    else:
-        return None
+    return rgi_region_links.get(gdir.rgi_region, None)
 
 
 def _reproject_and_scale(gdir, do_error=False):
@@ -87,17 +66,21 @@ def _reproject_and_scale(gdir, do_error=False):
         raise InvalidWorkflowError('There does not seem to be its_live data '
                                    'available for this glacier')
 
+    vn = 'v'
     vnx = 'vx'
     vny = 'vy'
     if do_error:
-        vnx += '_err'
-        vny += '_err'
+        vn += '_error'
+        vnx += '_error'
+        vny += '_error'
 
     with utils.get_lock():
+        fv = utils.file_downloader(region_files[reg][vn])
        fx = utils.file_downloader(region_files[reg][vnx])
         fy = utils.file_downloader(region_files[reg][vny])
 
     # Open the files
+    dsv = salem.GeoTiff(fv)
     dsx = salem.GeoTiff(fx)
     dsy = salem.GeoTiff(fy)
     # subset them to our map
@@ -108,6 +91,7 @@ def _reproject_and_scale(gdir, do_error=False):
         # This can trigger an out-of-bounds warning
         warnings.filterwarnings("ignore", category=RuntimeWarning,
                                 message='.*out of bounds.*')
+        dsv.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
         dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
         dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
     grid_vel = dsx.grid.center_grid
@@ -126,12 +110,13 @@ def _reproject_and_scale(gdir, do_error=False):
     xx0, yy0 = grid_vel.center_grid.xy_coordinates
 
     # Compute coords at t1
+    vel = dsv.get_vardata()
     xx1 = dsx.get_vardata()
     yy1 = dsy.get_vardata()
-    non_valid = (xx1 == nodata) | (yy1 == nodata)
+    non_valid = (xx1 == nodata) | (yy1 == nodata) | (vel == nodata)
+    vel[non_valid] = np.nan
     xx1[non_valid] = np.nan
     yy1[non_valid] = np.nan
-    orig_vel = np.sqrt(xx1**2 + yy1**2)
     xx1 += xx0
     yy1 += yy0
 
@@ -148,17 +133,33 @@ def _reproject_and_scale(gdir, do_error=False):
     vy = yy1 - yy0
 
     # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
-    new_vel = np.sqrt(vx**2 + vy**2)
-    p_ok = new_vel > 1  # avoid div by zero
-    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
-    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=RuntimeWarning)
+        new_vel = np.sqrt(vx**2 + vy**2)
+        p_ok = new_vel > 0  # avoid div by zero
+        vx[p_ok] = vx[p_ok] * vel[p_ok] / new_vel[p_ok]
+        vy[p_ok] = vy[p_ok] * vel[p_ok] / new_vel[p_ok]
 
     # And transform to local map
+    v = grid_gla.map_gridded_data(vel, grid=grid_vel, interp='linear')
     vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
     vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')
 
     # Write
     with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
+
+        vn = 'itslive_v' + ('_err' if do_error else '')
+        if vn in nc.variables:
+            ncv = nc.variables[vn]
+        else:
+            ncv = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
+        ncv.units = 'm yr-1'
+        ln = 'ITS LIVE velocity data'
+        if do_error:
+            ln = 'Uncertainty of ' + ln
+        ncv.long_name = ln
+        ncv[:] = v.filled(np.nan)
+
         vn = 'itslive_vx'
         if do_error:
             vn += '_err'
@@ -187,18 +188,6 @@ def _reproject_and_scale(gdir, do_error=False):
         v.long_name = ln
         v[:] = vy.filled(np.nan)
 
-        if not do_error:
-            vel = np.sqrt(vx ** 2 + vy ** 2)
-            vn = 'itslive_v'
-            if vn in nc.variables:
-                v = nc.variables[vn]
-            else:
-                v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
-            v.units = 'm yr-1'
-            ln = 'ITS LIVE velocity data'
-            v.long_name = ln
-            v[:] = vel.filled(np.nan)
-
 
 @utils.entity_task(log, writes=['gridded_data'])
 def velocity_to_gdir(gdir, add_error=False):
@@ -257,13 +246,14 @@ def itslive_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             v = ds['itslive_v'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             with warnings.catch_warnings():
                 # For operational runs we ignore the warnings
                 warnings.filterwarnings('ignore', category=RuntimeWarning)
                 d['itslive_avg_vel'] = np.nanmean(v)
                 d['itslive_max_vel'] = np.nanmax(v)
-                d['itslive_perc_cov'] = (float((~v.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6) /
-                                         gdir.rgi_area_km2)
+                d['itslive_perc_cov'] = float((~v.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6 /
+                                              gridded_area)
     except (FileNotFoundError, AttributeError, KeyError):
         pass

diff --git a/oggm/shop/millan22.py b/oggm/shop/millan22.py
index 3d5e7a9ed..1ff449bcc 100644
--- a/oggm/shop/millan22.py
+++ b/oggm/shop/millan22.py
@@ -17,6 +17,11 @@
 except ImportError:
     pass
 
+try:
+    import rasterio
+except ImportError:
+    pass
+
 from oggm import utils, cfg
 
 from oggm.exceptions import InvalidWorkflowError
 
@@ -48,17 +53,21 @@ def _get_lookup_velocity():
 def _filter(ds):
     # Read the data and prevent bad surprises
     data = ds.get_vardata().astype(np.float64)
-    # Nans with 0
-    data[~ np.isfinite(data)] = 0
-    nmax = np.nanmax(data)
-    if nmax == np.inf:
-        # Replace inf with 0
-        data[data == nmax] = 0
+    with rasterio.Env():
+        with rasterio.open(ds.file) as src:
+            try:
+                nodata = src.nodatavals[0]
+                data[data == nodata] = np.nan
+            except AttributeError:
+                pass
+
+    # NaNs
+    data[~ np.isfinite(data)] = np.nan
     return data
 
 
-def _filter_and_reproj(gdir, var, gdf, allow_neg=True):
+def _filter_and_reproj(gdir, var, gdf):
     """ Same code for thickness and error
 
     Parameters
@@ -68,7 +77,7 @@
     """
 
     # We may have more than one file
-    total_data = 0
+    total_data = None
     grids_used = []
     files_used = []
     for i, s in gdf.iterrows():
@@ -88,9 +97,6 @@
                            margin=5)
 
         data = _filter(dsb)
-        if not allow_neg:
-            # Replace negative values with 0
-            data[data < 0] = 0
 
         if np.nansum(data) == 0:
             # No need to continue
@@ -102,7 +108,14 @@
             warnings.filterwarnings("ignore", category=RuntimeWarning,
                                     message='.*out of bounds.*')
             r_data = gdir.grid.map_gridded_data(data, dsb.grid, interp='linear')
-        total_data += r_data.filled(0)
+
+        if total_data is None:
+            total_data = r_data.filled(np.nan)
+        else:
+            r_data = r_data.filled(np.nan)
+            pok = np.isfinite(r_data)
+            total_data[pok] = r_data[pok]
+
         grids_used.append(dsb)
         files_used.append(s.file_id)
 
@@ -129,17 +142,11 @@ def thickness_to_gdir(gdir, add_error=False):
         raise InvalidWorkflowError(f'There seems to be no Millan file for this '
                                    f'glacier: {gdir.rgi_id}')
 
-    total_thick, files_used, _ = _filter_and_reproj(gdir, 'thickness', sel,
-                                                    allow_neg=False)
-
-    # We mask zero ice as nodata
-    total_thick = np.where(total_thick == 0, np.nan, total_thick)
+    total_thick, files_used, _ = _filter_and_reproj(gdir, 'thickness', sel)
 
     if add_error:
-        total_err, _, _ = _filter_and_reproj(gdir, 'err', sel, allow_neg=False)
+        total_err, _, _ = _filter_and_reproj(gdir, 'err', sel)
         total_err[~ np.isfinite(total_thick)] = np.nan
-        # Error cannot be larger than ice thickness itself
-        total_err = utils.clip_max(total_err, total_thick)
 
     # Write
     with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
@@ -194,60 +201,80 @@ def velocity_to_gdir(gdir, add_error=False):
         raise InvalidWorkflowError(f'There seems to be no Millan file for this '
                                    f'glacier: {gdir.rgi_id}')
 
-    vel, files, grids = _filter_and_reproj(gdir, 'v', sel, allow_neg=False)
+    vel, files, grids = _filter_and_reproj(gdir, 'v', sel)
     if len(grids) == 0:
         raise RuntimeError('There is no velocity data for this glacier')
 
-    if len(grids) > 1:
-        raise RuntimeError('Multiple velocity grids - dont know what to do.')
-
-    sel = sel.loc[sel.file_id == files[0]]
-    vx, _, gridsx = _filter_and_reproj(gdir, 'vx', sel)
-    vy, _, gridsy = _filter_and_reproj(gdir, 'vy', sel)
-
-    dsx = gridsx[0]
-    dsy = gridsy[0]
-    grid_vel = dsx.grid
-    proj_vel = grid_vel.proj
-    grid_gla = gdir.grid
-
-    # Get the coords at t0
-    xx0, yy0 = grid_vel.center_grid.xy_coordinates
-
-    # Compute coords at t1
-    xx1 = _filter(dsx)
-    yy1 = _filter(dsy)
-    xx1 += xx0
-    yy1 += yy0
-
-    # Transform both to glacier proj
-    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
-    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)
-
-    # Compute velocities from there
-    vx = xx1 - xx0
-    vy = yy1 - yy0
-
-    # And transform to local map
-    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
-    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')
-
-    # Scale back to match velocity
-    with warnings.catch_warnings():
-        warnings.filterwarnings("ignore", category=RuntimeWarning)
-        new_vel = np.sqrt(vx**2 + vy**2)
-        p_ok = np.isfinite(new_vel) & (new_vel > 1)  # avoid div by zero
-        scaler = vel[p_ok] / new_vel[p_ok]
-        vx[p_ok] = vx[p_ok] * scaler
-        vy[p_ok] = vy[p_ok] * scaler
-
-    vx = vx.filled(np.nan)
-    vy = vy.filled(np.nan)
-
-    if add_error:
-        err_vx, _, _ = _filter_and_reproj(gdir, 'err_vx', sel, allow_neg=False)
-        err_vy, _, _ = _filter_and_reproj(gdir, 'err_vy', sel, allow_neg=False)
-        err_vx[p_ok] = err_vx[p_ok] * scaler
-        err_vy[p_ok] = err_vy[p_ok] * scaler
+    all_vx = None
+    all_vy = None
+    all_err_vx = None
+    all_err_vy = None
+    for i, _ in sel.iterrows():
+        subsel = sel.loc[[i]]
+        vx, _, gridsx = _filter_and_reproj(gdir, 'vx', subsel)
+        vy, _, gridsy = _filter_and_reproj(gdir, 'vy', subsel)
+
+        if len(gridsx) == 0:
+            continue
+
+        dsx = gridsx[0]
+        dsy = gridsy[0]
+        grid_vel = dsx.grid
+        proj_vel = grid_vel.proj
+        grid_gla = gdir.grid
+
+        # Get the coords at t0
+        xx0, yy0 = grid_vel.center_grid.xy_coordinates
+
+        # Compute coords at t1
+        xx1 = _filter(dsx)
+        yy1 = _filter(dsy)
+        xx1 += xx0
+        yy1 += yy0
+
+        # Transform both to glacier proj
+        xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
+        xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)
+
+        # Compute velocities from there
+        vx = xx1 - xx0
+        vy = yy1 - yy0
+
+        # And transform to local map
+        vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
+        vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')
+
+        # Scale back to match velocity
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=RuntimeWarning)
+            new_vel = np.sqrt(vx**2 + vy**2)
+            p_ok = np.isfinite(new_vel) & (new_vel > 0)  # avoid div by zero
+            scaler = vel[p_ok] / new_vel[p_ok]
+            vx[p_ok] = vx[p_ok] * scaler
+            vy[p_ok] = vy[p_ok] * scaler
+
+        if all_vx is None:
+            all_vx = vx.filled(np.nan)
+            all_vy = vy.filled(np.nan)
+        else:
+            vx = vx.filled(np.nan)
+            vy = vy.filled(np.nan)
+            loc_update = np.isfinite(vx)
+            all_vx[loc_update] = vx[loc_update]
+            all_vy[loc_update] = vy[loc_update]
+
+        if add_error:
+            err_vx, _, _ = _filter_and_reproj(gdir, 'err_vx', subsel)
+            err_vy, _, _ = _filter_and_reproj(gdir, 'err_vy', subsel)
+            # _filter_and_reproj already returns nan-filled ndarrays
+            err_vx[p_ok] = err_vx[p_ok] * scaler
+            err_vy[p_ok] = err_vy[p_ok] * scaler
+            if all_err_vx is None:
+                all_err_vx = err_vx
+                all_err_vy = err_vy
+            else:
+                all_err_vx[loc_update] = err_vx[loc_update]
+                all_err_vy[loc_update] = err_vy[loc_update]
 
     # Write
     with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
@@ -272,7 +299,7 @@
         ln = 'Ice velocity in map x direction from Millan et al. 2022'
         v.long_name = ln
         v.data_source = files[0]
-        v[:] = vx
+        v[:] = all_vx
 
         vn = 'millan_vy'
         if vn in nc.variables:
@@ -283,7 +310,7 @@
         ln = 'Ice velocity in map y direction from Millan et al. 2022'
         v.long_name = ln
         v.data_source = files[0]
-        v[:] = vy
+        v[:] = all_vy
 
         if add_error:
             vn = 'millan_err_vx'
@@ -295,7 +322,7 @@
             ln = 'Ice velocity error in map x direction from Millan et al. 2022'
             v.long_name = ln
             v.data_source = files[0]
-            v[:] = err_vx
+            v[:] = all_err_vx
 
             vn = 'millan_err_vy'
             if vn in nc.variables:
@@ -306,7 +333,7 @@
             ln = 'Ice velocity error in map y direction from Millan et al. 2022'
             v.long_name = ln
             v.data_source = files[0]
-            v[:] = err_vy
+            v[:] = all_err_vy
 
 
 @utils.entity_task(log)
@@ -328,12 +355,13 @@ def millan_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             thick = ds['millan_ice_thickness'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             with warnings.catch_warnings():
                 # For operational runs we ignore the warnings
                 warnings.filterwarnings('ignore', category=RuntimeWarning)
                 d['millan_vol_km3'] = float(thick.sum() * gdir.grid.dx ** 2 * 1e-9)
                 d['millan_area_km2'] = float((~thick.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6)
-                d['millan_perc_cov'] = float(d['millan_area_km2'] / gdir.rgi_area_km2)
+                d['millan_perc_cov'] = float(d['millan_area_km2'] / gridded_area)
 
             if 'millan_ice_thickness_err' in ds:
                 err = ds['millan_ice_thickness_err'].where(ds['glacier_mask'], np.nan).load()
@@ -344,14 +372,14 @@ def millan_statistics(gdir):
     try:
         with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
             v = ds['millan_v'].where(ds['glacier_mask'], np.nan).load()
+            gridded_area = ds['glacier_mask'].sum() * gdir.grid.dx ** 2 * 1e-6
             with warnings.catch_warnings():
                 # For operational runs we ignore the warnings
                 warnings.filterwarnings('ignore', category=RuntimeWarning)
                 d['millan_avg_vel'] = np.nanmean(v)
                 d['millan_max_vel'] = np.nanmax(v)
-                d['millan_vel_perc_cov'] = (float((~v.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6) /
-                                            gdir.rgi_area_km2)
-
+                d['millan_vel_perc_cov'] = float(((~v.isnull()).sum() * gdir.grid.dx ** 2 * 1e-6) /
+                                                 gridded_area)
             if 'millan_err_vx' in ds:
                 err_vx = ds['millan_err_vx'].where(ds['glacier_mask'], np.nan).load()
                 err_vy = ds['millan_err_vy'].where(ds['glacier_mask'], np.nan).load()

diff --git a/oggm/utils/_downloads.py b/oggm/utils/_downloads.py
index e3037c2c4..2e829c3b0 100644
--- a/oggm/utils/_downloads.py
+++ b/oggm/utils/_downloads.py
@@ -774,8 +774,6 @@ def _check_ext(f):
 def download_with_authentication(wwwfile, key):
     """ Uses credentials from a local .netrc file to download files
 
-    This is function is currently used for TanDEM-X and ASTER
-
     Parameters
     ----------
     wwwfile : str
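
---

Reviewer notes (appended after the diff, so `git am` will ignore them):

The sketch below is one way to exercise the new bedmachine module end to
end. It is illustrative only: the RGI id is a placeholder, and it assumes
valid urs.earthdata.nasa.gov credentials in ~/.netrc plus a working OGGM
install with pre-processed directories available.

    from oggm import cfg, utils, workflow
    from oggm.shop import bedmachine

    cfg.initialize()
    cfg.PATHS['working_dir'] = utils.gettempdir('bedmachine_demo')

    # Placeholder id: any RGI region 05 (Greenland) or 19 (Antarctica)
    # glacier should work; other regions raise NotImplementedError
    gdirs = workflow.init_glacier_directories(
        ['RGI60-05.00800'], from_prepro_level=3, prepro_border=10)

    workflow.execute_entity_task(bedmachine.bedmachine_to_gdir, gdirs)
    df = bedmachine.compile_bedmachine_statistics(gdirs)
    print(df[['bedmachine_area_km2', 'bedmachine_perc_cov',
              'bedmachine_vol_km3']])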
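Why the vx/vy rescaling step exists (see the linked issue
https://github.com/OGGM/oggm/issues/1014): reprojecting and interpolating
the two components separately changes their norm relative to the
interpolated speed mosaic, so both components are scaled back to match it.
A self-contained numpy illustration of the arithmetic used in both
its_live.py and millan22.py (toy numbers, not real data):

    import numpy as np

    vel = np.array([10.0, 50.0])      # interpolated speed mosaic |v|
    vx = np.array([5.0, 30.0])        # interpolated x component
    vy = np.array([4.0, 35.0])        # interpolated y component

    new_vel = np.sqrt(vx**2 + vy**2)  # norm after interpolation
    p_ok = new_vel > 0                # avoid division by zero
    vx[p_ok] *= vel[p_ok] / new_vel[p_ok]
    vy[p_ok] *= vel[p_ok] / new_vel[p_ok]

    # the component norm now matches the speed mosaic again
    assert np.allclose(np.sqrt(vx**2 + vy**2), vel)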
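The multi-tile handling in _filter_and_reproj replaces the old
`total_data += r_data.filled(0)` accumulation (which double-counted
overlapping Millan tiles and turned nodata into zeros) with a
fill-where-valid merge. Toy numpy sketch of the new pattern:

    import numpy as np

    tile_a = np.array([[1.0, np.nan], [np.nan, np.nan]])  # first tile
    tile_b = np.array([[9.0, 2.0], [np.nan, 3.0]])        # later tile

    total = tile_a.copy()          # first tile initialises the mosaic
    pok = np.isfinite(tile_b)      # later tiles overwrite where valid
    total[pok] = tile_b[pok]
    print(total)                   # [[ 9.  2.] [nan  3.]]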
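On the recurring perc_cov change (bedtopo, cook23, hugonnet_maps, its_live,
millan22): the covered area is now divided by the area of the rasterised
glacier mask instead of gdir.rgi_area_km2. The two differ slightly because
rasterising the RGI outlines onto the local grid rounds the area to whole
pixels, so full data coverage previously did not yield exactly 1.0. Toy
check of the new denominator (hypothetical numbers):

    import numpy as np

    dx = 100.0                             # local grid resolution (m)
    mask = np.zeros((60, 80), dtype=bool)  # rasterised glacier mask
    mask[10:50, 20:70] = True

    data = np.where(mask, 12.0, np.nan)    # toy field with full coverage

    gridded_area = mask.sum() * dx**2 * 1e-6          # km2, as in the patch
    data_area = np.isfinite(data[mask]).sum() * dx**2 * 1e-6
    print(data_area / gridded_area)        # exactly 1.0 by construction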