Avoid deprecated use of dataset.dims
dschwoerer committed Dec 9, 2024
1 parent d8b79ee · commit a91056f
Showing 3 changed files with 7 additions and 7 deletions.
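
For context: recent xarray releases deprecate indexing Dataset.dims by name, since Dataset.dims is slated to return only the set of dimension names (matching DataArray.dims); Dataset.sizes is the stable name-to-length mapping. A minimal sketch of the migration this commit applies, using a throwaway dataset:

    import numpy as np
    import xarray as xr

    ds = xr.Dataset({"n": (("x", "z"), np.zeros((4, 8)))})

    # Deprecated: treating Dataset.dims as a name -> length mapping
    # nz = ds.dims["z"]   # emits a FutureWarning in recent xarray

    # Preferred: Dataset.sizes is the stable name -> length mapping
    nz = ds.sizes["z"]    # 8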
examples/tutorial/blob2d.ipynb (2 changes: 1 addition & 1 deletion)
@@ -198,7 +198,7 @@
 "outputs": [],
 "source": [
 "# dz is a scalar, so gets stored in 'metadata'\n",
-"dz = xr.DataArray(ds.metadata['dz']).expand_dims({'z': ds.dims['z']})\n",
+"dz = xr.DataArray(ds.metadata['dz']).expand_dims({'z': ds.sizes['z']})\n",
 "z = dz.cumsum(dim='z')\n",
 "ds = ds.assign_coords({'z': z})\n",
 "\n",
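A standalone sketch of what this notebook cell computes, with a hypothetical spacing of 0.1 and 4 grid points standing in for the values taken from ds.metadata and ds.sizes: expand_dims broadcasts the scalar dz to one value per grid point, and the cumulative sum turns the spacings into coordinate values dz, 2*dz, ...

    import xarray as xr

    dz = xr.DataArray(0.1).expand_dims({"z": 4})  # [0.1, 0.1, 0.1, 0.1]
    z = dz.cumsum(dim="z")                        # [0.1, 0.2, 0.3, 0.4]
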
xbout/geometries.py (4 changes: 2 additions & 2 deletions)
@@ -144,7 +144,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None):
     # 'dx' may not be consistent between different regions (e.g. core and PFR).
     # For some geometries xcoord may have already been created by
     # add_geometry_coords, in which case we do not need this.
-    nx = updated_ds.dims[xcoord]
+    nx = updated_ds.sizes[xcoord]

     # can't use commented out version, uncommented one works around xarray bug
     # removing attrs
@@ -181,7 +181,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None):
     if zcoord in updated_ds.dims and zcoord not in updated_ds.coords:
         # Generates a coordinate whose value is 0 on the first grid point, not dz/2, to
         # match how BOUT++ generates fields from input file expressions.
-        nz = updated_ds.dims[zcoord]
+        nz = updated_ds.sizes[zcoord]

         # In BOUT++ v5, dz is either a Field2D or Field3D.
         # We can use it as a 1D coordinate if it's a Field3D, _or_ if nz == 1
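
In the uniformly spaced case the comment describes, a coordinate whose first value is 0 rather than dz/2 is just dz times an integer range; a minimal sketch with hypothetical nz and dz (the code folded out of this hunk handles the Field2D/Field3D distinction):

    import numpy as np

    nz, dz = 8, 0.25
    z = dz * np.arange(nz)   # [0.0, 0.25, 0.5, ...]: first point at 0, not dz/2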
xbout/load.py (8 changes: 4 additions & 4 deletions)
@@ -810,10 +810,10 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):
     mxg = get_nonnegative_scalar(ds, "MXG", default=2, info=info)
     myg = get_nonnegative_scalar(ds, "MYG", default=0, info=info)
     mxsub = get_nonnegative_scalar(
-        ds, "MXSUB", default=ds.dims["x"] - 2 * mxg, info=info
+        ds, "MXSUB", default=ds.sizes["x"] - 2 * mxg, info=info
     )
     mysub = get_nonnegative_scalar(
-        ds, "MYSUB", default=ds.dims["y"] - 2 * myg, info=info
+        ds, "MYSUB", default=ds.sizes["y"] - 2 * myg, info=info
     )

     # Check whether this is a single file squashed from the multiple output files of a
@@ -828,8 +828,8 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):
     else:
         # Workaround for older data files
         ny = ds["MYSUB"].values * ds["NYPE"].values
-    nx_file = ds.dims["x"]
-    ny_file = ds.dims["y"]
+    nx_file = ds.sizes["x"]
+    ny_file = ds.sizes["y"]
     is_squashed_doublenull = False
     if nxpe > 1 or nype > 1:
         # if nxpe = nype = 1, was only one process anyway, so no need to check for
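For context, the MXSUB/MYSUB defaults above follow from the guard-cell arithmetic visible in the diff: assuming the file's x extent includes MXG guard cells on each side (and likewise MYG in y), the per-processor interior size is what remains after stripping both. A sketch with hypothetical numbers:

    # hypothetical single-file case
    nx_file, mxg = 68, 2
    mxsub = nx_file - 2 * mxg   # 64 interior x points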
