Commit
Merge branch 'master' of github.com:mortenwh/nansat
mortenwh committed Jun 7, 2024
2 parents 09c6079 + a7c92af commit cf52890
Showing 7 changed files with 247 additions and 45 deletions.
20 changes: 13 additions & 7 deletions nansat/exporter.py
@@ -15,6 +15,7 @@
from __future__ import print_function, absolute_import, division

import os
import pytz
import tempfile
import datetime
import warnings
@@ -46,7 +47,7 @@ class Exporter(object):
'_FillValue', 'type', 'scale', 'offset', 'NETCDF_VARNAME']

def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True,
driver='netCDF', options='FORMAT=NC4', hardcopy=False):
driver='netCDF', options='FORMAT=NC4', hardcopy=False, add_gcps=True):
"""Export Nansat object into netCDF or GTiff file
Parameters
@@ -112,10 +113,11 @@ def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True
if self.filename == filename or hardcopy:
export_vrt.hardcopy_bands()

if driver == 'GTiff':
add_gcps = export_vrt.prepare_export_gtiff()
else:
add_gcps = export_vrt.prepare_export_netcdf()
if add_gcps:
if driver == 'GTiff':
add_gcps = export_vrt.prepare_export_gtiff()
else:
add_gcps = export_vrt.prepare_export_netcdf()

# Create output file using GDAL
dataset = gdal.GetDriverByName(driver).CreateCopy(filename, export_vrt.dataset,
@@ -129,7 +131,11 @@ def export(self, filename='', bands=None, rm_metadata=None, add_geolocation=True
# Rename variable names to get rid of the band numbers
self.rename_variables(filename)
# Rename attributes to get rid of "GDAL_" added by gdal
self.correct_attributes(filename, history=self.vrt.dataset.GetMetadata()['history'])
try:
history = self.vrt.dataset.GetMetadata()['history']
except KeyError:
history = None
self.correct_attributes(filename, history=history)

self.logger.debug('Export - OK!')

@@ -367,7 +373,7 @@ def _create_dimensions(self, nc_inp, nc_out, time):
nc_out.createDimension(dim_name, dim_shapes[dim_name])

# create value for time variable
td = time - datetime.datetime(1900, 1, 1)
td = time - datetime.datetime(1900, 1, 1).replace(tzinfo=pytz.timezone("utc"))
days = td.days + (float(td.seconds) / 60.0 / 60.0 / 24.0)
# add time dimension
nc_out.createDimension('time', 1)
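A minimal usage sketch (not part of the commit) for the new add_gcps keyword of Exporter.export(); 'input.nc' and 'output.nc' are placeholder paths:

from nansat import Nansat

# Open any dataset nansat can map; 'input.nc' is a placeholder path.
n = Nansat('input.nc')
# Skip the GCP-preparation step entirely by passing the new flag.
n.export('output.nc', driver='netCDF', options='FORMAT=NC4', add_gcps=False)
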
48 changes: 32 additions & 16 deletions nansat/mappers/mapper_meps.py
@@ -1,4 +1,7 @@
import json
import pytz
import datetime

import pythesint as pti

from osgeo import gdal
@@ -13,7 +16,8 @@

class Mapper(NetcdfCF, Opendap):

def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *args, **kwargs):
def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *args,
**kwargs):

if not url.endswith(".nc"):
raise WrongMapperError
@@ -23,20 +27,23 @@ def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *ar
except OSError:
raise WrongMapperError

if "title" not in ds.ncattrs() or "meps" not in ds.getncattr("title").lower():
raise WrongMapperError

metadata = {}
for attr in ds.ncattrs():
metadata[attr] = ds.getncattr(attr)

if 'title' not in metadata.keys() or 'meps' not in metadata['title'].lower():
raise WrongMapperError
content = ds.getncattr(attr)
if isinstance(content, str):
content = content.replace("æ", "ae").replace("ø", "oe").replace("å", "aa")
metadata[attr] = content

self.input_filename = url

xsize = ds.dimensions['x'].size
ysize = ds.dimensions['y'].size
xsize = ds.dimensions["x"].size
ysize = ds.dimensions["y"].size

# Pick 10 meter height dimension only
height_dim = 'height6'
height_dim = "height6"
if height_dim not in ds.dimensions.keys():
raise WrongMapperError
if ds.dimensions[height_dim].size != 1:
@@ -47,7 +54,7 @@ def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *ar
varnames = []
for var in ds.variables:
var_dimensions = ds.variables[var].dimensions
if var_dimensions == ('time', height_dim, 'y', 'x'):
if var_dimensions == ("time", height_dim, "y", "x"):
varnames.append(var)

# Projection
@@ -63,8 +70,8 @@ def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *ar
nsr = NSR(crs.to_proj4())

# Geotransform
xx = ds.variables['x'][0:2]
yy = ds.variables['y'][0:2]
xx = ds.variables["x"][0:2]
yy = ds.variables["y"][0:2]
gtrans = xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]

self._init_from_dataset_params(xsize, ysize, gtrans, nsr.wkt)
@@ -79,7 +86,13 @@ def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *ar
self._pop_spatial_dimensions(dimension_names)
index = self._get_index_of_dimensions(dimension_names, {}, dim_sizes)
fn = self._get_sub_filename(url, band, dim_sizes, index)
meta_dict.append(self.get_band_metadata_dict(fn, ds.variables[band]))
band_metadata = self.get_band_metadata_dict(fn, ds.variables[band])
# Add time stamp to band metadata
tt = datetime.datetime.fromisoformat(str(self.times()[index["time"]["index"]]))
if tt.tzinfo is None:
tt = pytz.utc.localize(tt)
band_metadata["dst"]["time"] = tt.isoformat()
meta_dict.append(band_metadata)

self.create_bands(meta_dict)

@@ -89,8 +102,11 @@ def __init__(self, url, gdal_dataset, gdal_metadata, file_num=0, bands=None, *ar

# Get dictionary describing the instrument and platform according to
# the GCMD keywords
mm = pti.get_gcmd_instrument('computer')
ee = pti.get_gcmd_platform('models')
mm = pti.get_gcmd_instrument("computer")
ee = pti.get_gcmd_platform("models")

self.dataset.SetMetadataItem("instrument", json.dumps(mm))
self.dataset.SetMetadataItem("platform", json.dumps(ee))

self.dataset.SetMetadataItem('instrument', json.dumps(mm))
self.dataset.SetMetadataItem('platform', json.dumps(ee))
# Set input filename
self.dataset.SetMetadataItem("nc_file", self.input_filename)
18 changes: 15 additions & 3 deletions nansat/mappers/mapper_meps_ncml.py
@@ -1,17 +1,29 @@
import os
import pytz
import netCDF4
import datetime

import numpy as np

from nansat.mappers.mapper_meps import Mapper as NCMapper
from nansat.exceptions import WrongMapperError
from nansat.mappers.mapper_meps import Mapper as NCMapper

class Mapper(NCMapper):


def __init__(self, ncml_url, gdal_dataset, gdal_metadata, file_num=0, *args, **kwargs):
def __init__(self, ncml_url, gdal_dataset, gdal_metadata, netcdf_dim=None, *args, **kwargs):

if not ncml_url.endswith(".ncml"):
raise WrongMapperError

url = self._get_odap_url(ncml_url, file_num)
dt = datetime.timedelta(0)
if netcdf_dim is not None and "time" in netcdf_dim.keys():
ds = netCDF4.Dataset(ncml_url)
time = netcdf_dim["time"].astype(datetime.datetime).replace(
tzinfo=pytz.timezone("utc"))
dt = time - datetime.datetime.fromisoformat(ds.time_coverage_start.replace(
"Z", "+00:00"))
url = self._get_odap_url(ncml_url, np.round(dt.total_seconds()/3600))

super(Mapper, self).__init__(url, gdal_dataset, gdal_metadata, *args, **kwargs)

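A sketch (not part of the commit) of the hour-offset arithmetic above, with illustrative values; in the mapper the requested time comes from netcdf_dim["time"] and the start time from the ncml attribute time_coverage_start:

import datetime
import numpy as np
import pytz

requested = np.datetime64("2024-06-07T06:00:00").astype(datetime.datetime).replace(
    tzinfo=pytz.timezone("utc"))
start = datetime.datetime.fromisoformat("2024-06-07T00:00:00Z".replace("Z", "+00:00"))
# Whole hours between requested time and coverage start select the member file.
file_num = np.round((requested - start).total_seconds() / 3600)
print(file_num)  # 6.0
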
20 changes: 12 additions & 8 deletions nansat/mappers/mapper_opendap_arome.py
@@ -1,16 +1,14 @@
# Name: mapper_arome.py
# Purpose: Nansat mapping for AROME-Arctic and MEPS (MetCoOp Ensemble
# Prediction System) data provided by MET.NO
# Author: Artem Moiseev
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
import json
import os
import json
import pytz
import datetime

import numpy as np
import pythesint as pti

from datetime import datetime
from netCDF4 import Dataset
from osgeo import gdal
from pyproj import CRS
@@ -103,7 +101,13 @@ def __init__(self, filename, gdal_dataset, gdal_metadata, netcdf_dim=None,
self._pop_spatial_dimensions(dimension_names)
index = self._get_index_of_dimensions(dimension_names, netcdf_dim, dim_sizes)
fn = self._get_sub_filename(filename, band, dim_sizes, index)
meta_dict.append(self.get_band_metadata_dict(fn, ds.variables[band]))
band_metadata = self.get_band_metadata_dict(fn, ds.variables[band])
# Add time stamp to band metadata
tt = datetime.datetime.fromisoformat(str(self.times()[index["time"]["index"]]))
if tt.tzinfo is None:
tt = pytz.utc.localize(tt)
band_metadata["dst"]["time"] = tt.isoformat()
meta_dict.append(band_metadata)

self.create_bands(meta_dict)

@@ -122,7 +126,7 @@ def get_date(filename):
it as a datetime object.
"""
_, filename = os.path.split(filename)
return datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')
return datetime.datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')

def convert_dstime_datetimes(self, ds_time):
"""Convert time variable to np.datetime64"""
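A quick check (not part of the commit) of the get_date call after the switch from "from datetime import datetime" to "import datetime"; the filename below is only illustrative of the %Y%m%dT%HZ.nc pattern:

import datetime
import os

filename = "/some/path/arome_arctic_det_2_5km_20240607T06Z.nc"  # placeholder name
_, name = os.path.split(filename)
print(datetime.datetime.strptime(name.split('_')[-1], '%Y%m%dT%HZ.nc'))
# 2024-06-07 06:00:00
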
116 changes: 116 additions & 0 deletions nansat/mappers/mapper_opendap_met_nordic.py
@@ -0,0 +1,116 @@
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
import json
import os
import numpy as np
import pythesint as pti

from datetime import datetime
from netCDF4 import Dataset
from osgeo import gdal
from pyproj import CRS

from nansat.exceptions import WrongMapperError
from nansat.nsr import NSR
from nansat.vrt import VRT

from nansat.mappers.mapper_netcdf_cf import Mapper as MapperNCCF
#from nansat.mappers.mapper_arome import Mapper as MapperArome
from nansat.mappers.opendap import Opendap


class Mapper(MapperNCCF, Opendap):

baseURLs = ['https://thredds.met.no/thredds/dodsC/metpparchivev3',]
timeVarName = 'time'

def __init__(self, filename, gdal_dataset, gdal_metadata, netcdf_dim=None,
ds=None, bands=None, cachedir=None, *args, **kwargs):

if netcdf_dim is None:
netcdf_dim = {}

self.input_filename = filename

if gdal_dataset is None:
gdal_dataset = gdal.Open(filename)

try:
ds = Dataset(filename)
except OSError:
raise WrongMapperError

metadata = {}
for attr in ds.ncattrs():
metadata[attr] = self._fix_encoding(ds.getncattr(attr))

if 'title' not in metadata.keys() or 'met nordic' not in metadata['title'].lower():
raise WrongMapperError

xsize = ds.dimensions['x'].size
ysize = ds.dimensions['y'].size

varnames = []
for var in ds.variables:
var_dimensions = ds.variables[var].dimensions
if var_dimensions == ('time', 'y', 'x'):
varnames.append(var)

# Projection
try:
grid_mapping = ds.variables[ds.variables[varnames[0]].grid_mapping]
except KeyError:
raise WrongMapperError

grid_mapping_dict = {}
for index in grid_mapping.ncattrs():
grid_mapping_dict[index] = grid_mapping.getncattr(index)
crs = CRS.from_cf(grid_mapping_dict)
nsr = NSR(crs.to_proj4())

# Geotransform
xx = ds.variables['x'][0:2]
yy = ds.variables['y'][0:2]
gtrans = xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]

self._init_from_dataset_params(xsize, ysize, gtrans, nsr.wkt)

meta_dict = []
if bands is None:
bands = varnames
for band in bands:
if band not in ds.variables.keys():
continue
dimension_names, dim_sizes = self._get_dimension_info(band)
self._pop_spatial_dimensions(dimension_names)
index = self._get_index_of_dimensions(dimension_names, netcdf_dim, dim_sizes)
fn = self._get_sub_filename(filename, band, dim_sizes, index)
meta_dict.append(self.get_band_metadata_dict(fn, ds.variables[band]))

self.create_bands(meta_dict)

# Copy metadata
for key in metadata.keys():
self.dataset.SetMetadataItem(str(key), str(metadata[key]))

mm = pti.get_gcmd_instrument('Computer')
ee = pti.get_gcmd_platform('ecmwfifs')
self.dataset.SetMetadataItem('instrument', json.dumps(mm))
self.dataset.SetMetadataItem('platform', json.dumps(ee))

@staticmethod
def get_date(filename):
"""Extract date and time parameters from filename and return
it as a datetime object.
"""
_, filename = os.path.split(filename)
return datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')

def convert_dstime_datetimes(self, ds_time):
"""Convert time variable to np.datetime64"""
ds_datetimes = np.array(
[(np.datetime64(self.epoch).astype('M8[s]')
+ np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
return ds_datetimes
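
A possible way (not part of the commit) to open a MET Nordic file through the new mapper; the URL only reuses the metpparchivev3 base path from baseURLs, while the rest of the path, the filename and the explicit mapper name are assumptions:

from nansat import Nansat

url = ("https://thredds.met.no/thredds/dodsC/metpparchivev3/"
       "2024/06/07/met_analysis_1_0km_nordic_20240607T06Z.nc")  # hypothetical file
n = Nansat(url, mapper="opendap_met_nordic")  # mapper name assumed from the module name
print(n.bands())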

2 changes: 0 additions & 2 deletions nansat/mappers/opendap.py
@@ -109,9 +109,7 @@ def get_dataset_time(self):
if os.path.exists(cachefile):
ds_time = np.load(cachefile)[self.timeVarName]
else:
warnings.warn('Time consuming loading time from OpenDAP...')
ds_time = self.ds.variables[self.timeVarName][:]
warnings.warn('Loading time - OK!')

if os.path.exists(cachefile):
np.savez(cachefile, **{self.timeVarName: ds_time})
