move to stable polars, pin numpy below 2 #4

Merged · 3 commits · Aug 7, 2024
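The diff below does two things: it replaces the `np.NaN` alias, which NumPy 2.0 removed, with the canonical `np.nan` (valid on both NumPy 1.x and 2.x), and it moves polars from an unpinned pip install to a conda dependency pinned at `polars>=1.1`. A minimal sketch of the NumPy rename this guards against (behavior as documented for NumPy 2.0):

```python
# np.NaN was an alias of np.nan; NumPy 2.0 removed the alias.
import numpy as np

x = np.nan          # works on NumPy 1.x and 2.x
print(np.isnan(x))  # True
# x = np.NaN        # raises AttributeError under NumPy >= 2.0
```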
Changes from all commits
6 changes: 3 additions & 3 deletions .github/workflows/tests.yml
@@ -7,7 +7,7 @@ jobs:
strategy:
matrix:
os: ["ubuntu-latest"]
python-version: ["3.9", "3.10"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v2
- name: Cache conda
@@ -25,7 +25,7 @@ jobs:
environment-file: tests/environment.yml
python-version: ${{ matrix.python-version }}
channel-priority: strict
-use-only-tar-bz2: true # IMPORTANT: This needs to be set for caching to work properly!
+use-only-tar-bz2: false # IMPORTANT: This needs to be set for caching to work properly!
- name: Conda info
shell: bash -l {0}
run: conda info; conda list
@@ -45,4 +45,4 @@ jobs:
shell: bash -l {0}
run: which python; pytest --cov --cov-report xml
- name: Upload coverage to Codecov
-uses: codecov/codecov-action@v3
+uses: codecov/codecov-action@v3
2 changes: 1 addition & 1 deletion environment.yml
@@ -9,9 +9,9 @@ dependencies:
- dask
- netcdf4
- gsw
+- polars>=1.1
- scipy
- bitstring
- pooch
- pip:
- dbdreader
-- polars
16 changes: 8 additions & 8 deletions pyglider/ncprocess.py
@@ -70,14 +70,14 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
dss['v'] = dss.water_velocity_northward.mean()
dss['v'].attrs = profile_meta['v']
elif 'u' in profile_meta:
-dss['u'] = profile_meta['u'].get('_FillValue', np.NaN)
+dss['u'] = profile_meta['u'].get('_FillValue', np.nan)
dss['u'].attrs = profile_meta['u']

-dss['v'] = profile_meta['v'].get('_FillValue', np.NaN)
+dss['v'] = profile_meta['v'].get('_FillValue', np.nan)
dss['v'].attrs = profile_meta['v']
else:
-dss['u'] = np.NaN
-dss['v'] = np.NaN
+dss['u'] = np.nan
+dss['v'] = np.nan


dss['profile_id'] = np.int32(p)
@@ -117,11 +117,11 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
dss['platform'].attrs['_FillValue'] = -1


-dss['lat_uv'] = np.NaN
+dss['lat_uv'] = np.nan
dss['lat_uv'].attrs = profile_meta['lat_uv']
-dss['lon_uv'] = np.NaN
+dss['lon_uv'] = np.nan
dss['lon_uv'].attrs = profile_meta['lon_uv']
-dss['time_uv'] = np.NaN
+dss['time_uv'] = np.nan
dss['time_uv'].attrs = profile_meta['time_uv']

dss['instrument_ctd'] = np.int32(1.0)
@@ -328,7 +328,7 @@ def make_gridfiles(inname, outdir, deploymentyaml, *, fnamesuffix='', dz=1, star
dsout.to_netcdf(
outname,
encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
-'_FillValue': np.NaN,
+'_FillValue': np.nan,
'calendar': 'gregorian',
'dtype': 'float64'}})
_log.info('Done gridding')
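The `_FillValue: np.nan` encodings above follow xarray's pattern for writing datetime variables as float seconds since an epoch. A hedged, self-contained sketch of that pattern (dataset and filename invented for illustration):

```python
# Sketch of the to_netcdf time encoding used in ncprocess.py above.
import numpy as np
import xarray as xr

ds = xr.Dataset({'time': ('obs', np.array(
    ['2024-08-07T00:00', 'NaT'], dtype='datetime64[ns]'))})
ds.to_netcdf('example.nc', encoding={
    'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
             '_FillValue': np.nan,   # NaT encodes as NaN
             'dtype': 'float64'}})
```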
30 changes: 15 additions & 15 deletions pyglider/slocum.py
@@ -345,8 +345,8 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
_log.debug('Diagnostic check passed. Endian is %s', endian)

nsensors = int(meta['sensors_per_cycle'])
-currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.NaN
-data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.NaN
+currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.nan
+data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.nan
# Then there's a data cycle with every sensor marked as updated, giving
# us our initial values.
# 01 means updated with 'same value', 10 means updated with a new value,
@@ -370,7 +370,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
binaryData.bytealign()
for i, code in enumerate(updatedCode):
if code == '00': # No new value
-currentValues[i] = np.NaN
+currentValues[i] = np.nan
elif code == '01': # Same value as before.
continue
elif code == '10': # New value.
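The two-bit update codes dispatched above drive the dinkum decoding loop: `00` means no value for that sensor this cycle, `01` repeats the previous value, and `10` carries a new reading. An illustrative re-implementation of that dispatch (function and variable names invented, not pyglider's API):

```python
import numpy as np

def apply_update_codes(codes, new_values, current):
    """codes: one '00'/'01'/'10' per sensor; new_values: fresh readings in order."""
    fresh = iter(new_values)
    for i, code in enumerate(codes):
        if code == '00':      # no new value this cycle
            current[i] = np.nan
        elif code == '01':    # same value as before: keep current[i]
            continue
        elif code == '10':    # new value transmitted
            current[i] = next(fresh)
    return current

current = np.array([1.0, 2.0, 3.0])
print(apply_update_codes(['00', '01', '10'], [9.5], current))  # [nan 2.  9.5]
```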
@@ -404,7 +404,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
if ndata % DINKUMCHUNKSIZE == 0:
# need to allocate more data!
data = np.concatenate(
-(data, np.NaN + np.zeros((DINKUMCHUNKSIZE, nsensors))),
+(data, np.nan + np.zeros((DINKUMCHUNKSIZE, nsensors))),
axis=0)
elif d == 'X':
# End of file cycle tag. We made it through.
@@ -496,7 +496,7 @@ def add_times_flight_sci(fdata, sdata=None):
sdata['m_present_time_sci'] = np.interp(
sdata['sci_m_present_time'], tf, pt, np.nan, np.nan)
else:
-sdata['m_present_time_sci'] = np.NaN * sdata['sci_m_present_time']
+sdata['m_present_time_sci'] = np.nan * sdata['sci_m_present_time']

return fdata, sdata

@@ -731,7 +731,7 @@ def raw_to_timeseries(indir, outdir, deploymentyaml, *,
_log.debug('EBD sensorname %s', sensorname)
val = ebd[sensorname]
val = utils._zero_screen(val)
-# val[val==0] = np.NaN
+# val[val==0] = np.nan
val = convert(val)
else:
_log.debug('DBD sensorname %s', sensorname)
@@ -922,7 +922,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
ds = utils.get_derived_eos_raw(ds)

# screen out-of-range times; these won't convert:
-ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.NaN)
+ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.nan)
# convert time to datetime64:
ds['time'] = (ds.time*1e9).astype('datetime64[ns]')
ds['time'].attrs = attr
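The screen above nulls epoch timestamps outside (0, 6.4e9) seconds, values that cannot be real deployment times, before the cast to `datetime64[ns]`; the NaNs then become NaT. A self-contained sketch with invented values:

```python
import numpy as np
import xarray as xr

time = xr.DataArray([1.7e9, -5.0, 9.9e9])      # epoch seconds; two invalid
time = time.where((time > 0) & (time < 6.4e9), np.nan)
time = (time * 1e9).astype('datetime64[ns]')   # NaN floats cast to NaT
print(time.values)  # ['2023-11-14T22:13:20.000000000' 'NaT' 'NaT']
```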
@@ -954,7 +954,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
# as a unit:
ds.to_netcdf(outname, 'w',
encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
-'_FillValue': np.NaN,
+'_FillValue': np.nan,
'dtype': 'float64'}})

return outname
@@ -1076,9 +1076,9 @@ def parse_logfiles(files):
# now parse them
out = xr.Dataset(
coords={'time': ('surfacing', np.zeros(ntimes, dtype='datetime64[ns]'))})
-out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
+out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
+out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
+out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)

for i in range(ntimes):
timestring = times[i][11:-13]
@@ -1151,10 +1151,10 @@ def parse_logfiles_maybe(files):

# now parse them
out = xr.Dataset(coords={'time': ('surfacing', np.zeros(ntimes, dtype='datetime64[ns]'))})
-out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
-out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.NaN)
+out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
+out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
+out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)
+out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.nan)
out.attrs['surfacereason'] = surfacereason
# ABORT HISTORY: last abort segment: hal_1002-2024-183-0-0 (0171.0000)
out.attrs['abortsegment'] = float(abortsegment[-11:-2])
6 changes: 3 additions & 3 deletions pyglider/utils.py
@@ -92,8 +92,8 @@ def get_profiles(ds, min_dp=10.0, inversion=3., filt_length=7,
make two variables: profile_direction and profile_index; this version
is good for lots of data. Less good for sparse data
"""
-profile = ds.pressure.values * np.NaN
-direction = ds.pressure.values * np.NaN
+profile = ds.pressure.values * np.nan
+direction = ds.pressure.values * np.nan
pronum = 1
lastpronum = 0

@@ -500,7 +500,7 @@ def fill_metadata(ds, metadata, sensor_data):


def _zero_screen(val):
-val[val == 0] = np.NaN
+val[val == 0] = np.nan
return val
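`_zero_screen` masks exact zeros, which the raw sensor streams appear to use as a no-data marker, so they do not bias later statistics. A short usage sketch with invented data:

```python
import numpy as np

val = np.array([0.0, 12.4, 0.0, 13.2])
val[val == 0] = np.nan   # the _zero_screen operation
print(np.nanmean(val))   # -> 12.8, zeros excluded
```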


2 changes: 1 addition & 1 deletion requirements.txt
@@ -7,4 +7,4 @@ numpy
pooch
scipy
xarray
-polars>0.16
+polars>=1.1
2 changes: 1 addition & 1 deletion tests/environment.yml
@@ -11,6 +11,7 @@ dependencies:
- gsw
- scipy
- bitstring
+- polars>=1.1
- pytest
- pytest-cov
- pooch
@@ -19,4 +20,3 @@ dependencies:
- cc-plugin-glider
- pip:
- dbdreader
-- polars