diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 483c69f..35c56e1 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -7,7 +7,7 @@ jobs:
     strategy:
       matrix:
         os: ["ubuntu-latest"]
-        python-version: ["3.9", "3.10"]
+        python-version: ["3.9", "3.10", "3.11", "3.12"]
     steps:
       - uses: actions/checkout@v2
       - name: mamba setup enviroment
diff --git a/environment.yml b/environment.yml
index 4c73e6a..31db1a8 100644
--- a/environment.yml
+++ b/environment.yml
@@ -9,9 +9,9 @@ dependencies:
   - dask
   - netcdf4
   - gsw
+  - polars>=1.1
   - scipy
   - bitstring
   - pooch
   - pip:
     - dbdreader
-    - polars
diff --git a/pyglider/ncprocess.py b/pyglider/ncprocess.py
index 55331fc..347212b 100644
--- a/pyglider/ncprocess.py
+++ b/pyglider/ncprocess.py
@@ -70,14 +70,14 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
             dss['v'] = dss.water_velocity_northward.mean()
             dss['v'].attrs = profile_meta['v']
         elif 'u' in profile_meta:
-            dss['u'] = profile_meta['u'].get('_FillValue', np.NaN)
+            dss['u'] = profile_meta['u'].get('_FillValue', np.nan)
             dss['u'].attrs = profile_meta['u']
-            dss['v'] = profile_meta['v'].get('_FillValue', np.NaN)
+            dss['v'] = profile_meta['v'].get('_FillValue', np.nan)
             dss['v'].attrs = profile_meta['v']
         else:
-            dss['u'] = np.NaN
-            dss['v'] = np.NaN
+            dss['u'] = np.nan
+            dss['v'] = np.nan
 
         dss['profile_id'] = np.int32(p)
@@ -117,11 +117,11 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
         dss['platform'].attrs['_FillValue'] = -1
 
-        dss['lat_uv'] = np.NaN
+        dss['lat_uv'] = np.nan
         dss['lat_uv'].attrs = profile_meta['lat_uv']
-        dss['lon_uv'] = np.NaN
+        dss['lon_uv'] = np.nan
         dss['lon_uv'].attrs = profile_meta['lon_uv']
-        dss['time_uv'] = np.NaN
+        dss['time_uv'] = np.nan
         dss['time_uv'].attrs = profile_meta['time_uv']
 
         dss['instrument_ctd'] = np.int32(1.0)
@@ -328,7 +328,7 @@ def make_gridfiles(inname, outdir, deploymentyaml, *, fnamesuffix='', dz=1, star
     dsout.to_netcdf(
         outname,
         encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
-                           '_FillValue': np.NaN,
+                           '_FillValue': np.nan,
                            'calendar': 'gregorian',
                            'dtype': 'float64'}})
     _log.info('Done gridding')
diff --git a/pyglider/slocum.py b/pyglider/slocum.py
index a037288..f8fb947 100644
--- a/pyglider/slocum.py
+++ b/pyglider/slocum.py
@@ -345,8 +345,8 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
         _log.debug('Diagnostic check passed.  Endian is %s', endian)
 
     nsensors = int(meta['sensors_per_cycle'])
-    currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.NaN
-    data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.NaN
+    currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.nan
+    data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.nan
     # Then there's a data cycle with every sensor marked as updated, giving
     # us our initial values.
     # 01 means updated with 'same value', 10 means updated with a new value,
@@ -370,7 +370,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
         binaryData.bytealign()
         for i, code in enumerate(updatedCode):
             if code == '00':  # No new value
-                currentValues[i] = np.NaN
+                currentValues[i] = np.nan
             elif code == '01':  # Same value as before.
                 continue
             elif code == '10':  # New value.
@@ -404,7 +404,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
             if ndata % DINKUMCHUNKSIZE == 0:
                 # need to allocate more data!
                 data = np.concatenate(
-                    (data, np.NaN + np.zeros((DINKUMCHUNKSIZE, nsensors))),
+                    (data, np.nan + np.zeros((DINKUMCHUNKSIZE, nsensors))),
                     axis=0)
         elif d == 'X':
             # End of file cycle tag.  We made it through.
@@ -496,7 +496,7 @@ def add_times_flight_sci(fdata, sdata=None):
        sdata['m_present_time_sci'] = np.interp(
            sdata['sci_m_present_time'], tf, pt, np.nan, np.nan)
    else:
-        sdata['m_present_time_sci'] = np.NaN * sdata['sci_m_present_time']
+        sdata['m_present_time_sci'] = np.nan * sdata['sci_m_present_time']
 
    return fdata, sdata
@@ -731,7 +731,7 @@ def raw_to_timeseries(indir, outdir, deploymentyaml, *,
                 _log.debug('EBD sensorname %s', sensorname)
                 val = ebd[sensorname]
                 val = utils._zero_screen(val)
-                # val[val==0] = np.NaN
+                # val[val==0] = np.nan
                 val = convert(val)
             else:
                 _log.debug('DBD sensorname %s', sensorname)
@@ -922,7 +922,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
    ds = utils.get_derived_eos_raw(ds)
 
    # screen out-of-range times; these won't convert:
-    ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.NaN)
+    ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.nan)
    # convert time to datetime64:
    ds['time'] = (ds.time*1e9).astype('datetime64[ns]')
    ds['time'].attrs = attr
@@ -954,7 +954,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
    # as a unit:
    ds.to_netcdf(outname, 'w',
                 encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
-                                    '_FillValue': np.NaN,
+                                    '_FillValue': np.nan,
                                    'dtype': 'float64'}})
 
    return outname
@@ -1076,9 +1076,9 @@ def parse_logfiles(files):
    # now parse them
    out = xr.Dataset(
        coords={'time': ('surfacing', np.zeros(ntimes, dtype='datetime64[ns]'))})
-    out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-    out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-    out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
+    out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
+    out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
+    out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)
 
    for i in range(ntimes):
        timestring = times[i][11:-13]
@@ -1151,10 +1151,10 @@ def parse_logfiles_maybe(files):
    # now parse them
    out = xr.Dataset(coords={'time': ('surfacing',
                                      np.zeros(ntimes, dtype='datetime64[ns]'))})
-    out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-    out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-    out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-    out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.NaN)
+    out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
+    out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
+    out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)
+    out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.nan)
    out.attrs['surfacereason'] = surfacereason
    # ABORT HISTORY: last abort segment: hal_1002-2024-183-0-0 (0171.0000)
    out.attrs['abortsegment'] = float(abortsegment[-11:-2])
diff --git a/pyglider/utils.py b/pyglider/utils.py
index aa64efe..f838efb 100644
--- a/pyglider/utils.py
+++ b/pyglider/utils.py
@@ -92,8 +92,8 @@ def get_profiles(ds, min_dp=10.0, inversion=3., filt_length=7,
    make two variables: profile_direction and profile_index; this version
    is good for lots of data.  Less good for sparse data
    """
-    profile = ds.pressure.values * np.NaN
-    direction = ds.pressure.values * np.NaN
+    profile = ds.pressure.values * np.nan
+    direction = ds.pressure.values * np.nan
 
    pronum = 1
    lastpronum = 0
@@ -500,7 +500,7 @@ def fill_metadata(ds, metadata, sensor_data):
 
 
 def _zero_screen(val):
-    val[val == 0] = np.NaN
+    val[val == 0] = np.nan
     return val
 
 
diff --git a/requirements.txt b/requirements.txt
index a15c091..a46e8ae 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,4 +7,4 @@ numpy
 pooch
 scipy
 xarray
-polars>0.16
+polars>=1.1
diff --git a/tests/environment.yml b/tests/environment.yml
index c80a469..c3e117b 100644
--- a/tests/environment.yml
+++ b/tests/environment.yml
@@ -11,6 +11,7 @@ dependencies:
   - gsw
   - scipy
   - bitstring
+  - polars>=1.1
   - pytest
   - pytest-cov
   - pooch
@@ -19,4 +20,3 @@ dependencies:
   - cc-plugin-glider
   - pip:
     - dbdreader
-    - polars
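
Note on the np.NaN -> np.nan rename applied throughout this patch: NumPy 2.0 removed the np.NaN alias, so only the lowercase np.nan spelling works on both NumPy 1.x and 2.x. A minimal sketch of the behaviour (illustration only, not part of the patch; assumes NumPy is installed):

    import numpy as np

    # np.nan is the spelling that works on every NumPy release.
    arr = np.full(3, np.nan)
    print(np.isnan(arr).all())   # True

    # The old alias is gone in NumPy >= 2.0 and raises AttributeError there,
    # while still evaluating to nan on 1.x.
    try:
        np.NaN
    except AttributeError as err:
        print(err)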