Skip to content

Commit

Permalink
REF: replace np.NaN with np.nan; unpin numpy
Browse files Browse the repository at this point in the history
  • Loading branch information
callumrollo committed Aug 6, 2024
1 parent 26e2b1f commit 085b54f
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 29 deletions.
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ channels:
- conda-forge
dependencies:
- python>=3.10
- numpy<2
- numpy
- pip
- xarray
- dask
Expand Down
16 changes: 8 additions & 8 deletions pyglider/ncprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,14 +70,14 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
dss['v'] = dss.water_velocity_northward.mean()
dss['v'].attrs = profile_meta['v']
elif 'u' in profile_meta:
dss['u'] = profile_meta['u'].get('_FillValue', np.NaN)
dss['u'] = profile_meta['u'].get('_FillValue', np.nan)
dss['u'].attrs = profile_meta['u']

dss['v'] = profile_meta['v'].get('_FillValue', np.NaN)
dss['v'] = profile_meta['v'].get('_FillValue', np.nan)
dss['v'].attrs = profile_meta['v']
else:
dss['u'] = np.NaN
dss['v'] = np.NaN
dss['u'] = np.nan
dss['v'] = np.nan


dss['profile_id'] = np.int32(p)
Expand Down Expand Up @@ -117,11 +117,11 @@ def extract_timeseries_profiles(inname, outdir, deploymentyaml, force=False):
dss['platform'].attrs['_FillValue'] = -1


dss['lat_uv'] = np.NaN
dss['lat_uv'] = np.nan
dss['lat_uv'].attrs = profile_meta['lat_uv']
dss['lon_uv'] = np.NaN
dss['lon_uv'] = np.nan
dss['lon_uv'].attrs = profile_meta['lon_uv']
dss['time_uv'] = np.NaN
dss['time_uv'] = np.nan
dss['time_uv'].attrs = profile_meta['time_uv']

dss['instrument_ctd'] = np.int32(1.0)
Expand Down Expand Up @@ -328,7 +328,7 @@ def make_gridfiles(inname, outdir, deploymentyaml, *, fnamesuffix='', dz=1, star
dsout.to_netcdf(
outname,
encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
'_FillValue': np.NaN,
'_FillValue': np.nan,
'calendar': 'gregorian',
'dtype': 'float64'}})
_log.info('Done gridding')
Expand Down
30 changes: 15 additions & 15 deletions pyglider/slocum.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,8 +345,8 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
_log.debug('Diagnostic check passed. Endian is %s', endian)

nsensors = int(meta['sensors_per_cycle'])
currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.NaN
data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.NaN
currentValues = np.zeros(int(meta['sensors_per_cycle'])) + np.nan
data = np.zeros((DINKUMCHUNKSIZE, nsensors)) + np.nan
# Then there's a data cycle with every sensor marked as updated, giving
# us our initial values.
# 01 means updated with 'same value', 10 means updated with a new value,
Expand All @@ -370,7 +370,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
binaryData.bytealign()
for i, code in enumerate(updatedCode):
if code == '00': # No new value
currentValues[i] = np.NaN
currentValues[i] = np.nan
elif code == '01': # Same value as before.
continue
elif code == '10': # New value.
Expand Down Expand Up @@ -404,7 +404,7 @@ def dbd_to_dict(dinkum_file, cachedir, keys=None):
if ndata % DINKUMCHUNKSIZE == 0:
# need to allocate more data!
data = np.concatenate(
(data, np.NaN + np.zeros((DINKUMCHUNKSIZE, nsensors))),
(data, np.nan + np.zeros((DINKUMCHUNKSIZE, nsensors))),
axis=0)
elif d == 'X':
# End of file cycle tag. We made it through.
Expand Down Expand Up @@ -496,7 +496,7 @@ def add_times_flight_sci(fdata, sdata=None):
sdata['m_present_time_sci'] = np.interp(
sdata['sci_m_present_time'], tf, pt, np.nan, np.nan)
else:
sdata['m_present_time_sci'] = np.NaN * sdata['sci_m_present_time']
sdata['m_present_time_sci'] = np.nan * sdata['sci_m_present_time']

return fdata, sdata

Expand Down Expand Up @@ -731,7 +731,7 @@ def raw_to_timeseries(indir, outdir, deploymentyaml, *,
_log.debug('EBD sensorname %s', sensorname)
val = ebd[sensorname]
val = utils._zero_screen(val)
# val[val==0] = np.NaN
# val[val==0] = np.nan
val = convert(val)
else:
_log.debug('DBD sensorname %s', sensorname)
Expand Down Expand Up @@ -922,7 +922,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
ds = utils.get_derived_eos_raw(ds)

# screen out-of-range times; these won't convert:
ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.NaN)
ds['time'] = ds.time.where((ds.time>0) & (ds.time<6.4e9), np.nan)
# convert time to datetime64:
ds['time'] = (ds.time*1e9).astype('datetime64[ns]')
ds['time'].attrs = attr
Expand Down Expand Up @@ -954,7 +954,7 @@ def binary_to_timeseries(indir, cachedir, outdir, deploymentyaml, *,
# as a unit:
ds.to_netcdf(outname, 'w',
encoding={'time': {'units': 'seconds since 1970-01-01T00:00:00Z',
'_FillValue': np.NaN,
'_FillValue': np.nan,
'dtype': 'float64'}})

return outname
Expand Down Expand Up @@ -1076,9 +1076,9 @@ def parse_logfiles(files):
# now parse them
out = xr.Dataset(
coords={'time': ('surfacing', np.zeros(ntimes, dtype='datetime64[ns]'))})
out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)

for i in range(ntimes):
timestring = times[i][11:-13]
Expand Down Expand Up @@ -1151,10 +1151,10 @@ def parse_logfiles_maybe(files):

# now parse them
out = xr.Dataset(coords={'time': ('surfacing', np.zeros(ntimes, dtype='datetime64[ns]'))})
out['ampH'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['lon'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['lat'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.NaN)
out['ampH'] = ('surfacing', np.zeros(ntimes) * np.nan)
out['lon'] = ('surfacing', np.zeros(ntimes) * np.nan)
out['lat'] = ('surfacing', np.zeros(ntimes) * np.nan)
out['missionnum'] = ('surfacing', np.zeros(ntimes) * np.nan)
out.attrs['surfacereason'] = surfacereason
# ABORT HISTORY: last abort segment: hal_1002-2024-183-0-0 (0171.0000)
out.attrs['abortsegment'] = float(abortsegment[-11:-2])
Expand Down
6 changes: 3 additions & 3 deletions pyglider/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,8 +92,8 @@ def get_profiles(ds, min_dp=10.0, inversion=3., filt_length=7,
make two variables: profile_direction and profile_index; this version
is good for lots of data. Less good for sparse data
"""
profile = ds.pressure.values * np.NaN
direction = ds.pressure.values * np.NaN
profile = ds.pressure.values * np.nan
direction = ds.pressure.values * np.nan
pronum = 1
lastpronum = 0

Expand Down Expand Up @@ -500,7 +500,7 @@ def fill_metadata(ds, metadata, sensor_data):


def _zero_screen(val):
val[val == 0] = np.NaN
val[val == 0] = np.nan
return val


Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ dbdreader
dask
gsw
netcdf4
numpy<2
numpy
pooch
scipy
xarray
Expand Down
2 changes: 1 addition & 1 deletion tests/environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ channels:
- conda-forge
dependencies:
- python>=3.9
- numpy<2
- numpy
- pip
- xarray
- dask
Expand Down

0 comments on commit 085b54f

Please sign in to comment.