Skip to content

Commit

Permalink
NpyFilesDatastore -> NpyFilesDatastoreMEPS
Browse files Browse the repository at this point in the history
  • Loading branch information
leifdenby committed Sep 27, 2024
1 parent 67998b8 commit ac7e46a
Show file tree
Hide file tree
Showing 8 changed files with 10 additions and 9 deletions.
4 changes: 2 additions & 2 deletions neural_lam/datastore/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
# Local
from .mdp import MDPDatastore # noqa
from .npyfiles import NpyFilesDatastore # noqa
from .npyfilesmeps import NpyFilesDatastoreMEPS # noqa

DATASTORES = dict(
mdp=MDPDatastore,
npyfiles=NpyFilesDatastore,
npyfilesmeps=NpyFilesDatastoreMEPS,
)


Expand Down
2 changes: 0 additions & 2 deletions neural_lam/datastore/npyfiles/__init__.py

This file was deleted.

2 changes: 2 additions & 0 deletions neural_lam/datastore/npyfilesmeps/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Local
from .store import NpyFilesDatastoreMEPS # noqa
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def main():
rank = get_rank()
world_size = get_world_size()
datastore = init_datastore(
datastore_kind="npyfiles", config_path=args.datastore_config
datastore_kind="npyfilesmeps", config_path=args.datastore_config
)

if distributed:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def _load_np(fp, add_feature_dim):
return arr


class NpyFilesDatastore(BaseCartesianDatastore):
class NpyFilesDatastoreMEPS(BaseCartesianDatastore):
__doc__ = f"""
Represents a dataset stored as numpy files on disk. The dataset is assumed
to be stored in a directory structure where each sample is stored in a
Expand Down
5 changes: 3 additions & 2 deletions neural_lam/weather_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,8 @@ def _sample_time(self, da, idx, n_steps: int, n_timesteps_offset: int = 0):
n_steps : int
The number of time steps to include in the sample.
n_timesteps_offset : int
A number of timesteps to use as offset from the start time of the slice
A number of timesteps to use as offset from the start time of the
slice
"""
# selecting the time slice
if self.datastore.is_forecast:
Expand Down Expand Up @@ -455,7 +456,7 @@ def __init__(
self.test_dataset = None
if num_workers > 0:
# default to spawn for now, as the default on linux "fork" hangs
# when using dask (which the npyfiles datastore uses)
# when using dask (which the npyfilesmeps datastore uses)
self.multiprocessing_context = "spawn"
else:
self.multiprocessing_context = None
Expand Down
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def download_meps_example_reduced_dataset():

DATASTORES_EXAMPLES = dict(
mdp=(DATASTORE_EXAMPLES_ROOT_PATH / "mdp" / "danra.example.yaml"),
npyfiles=download_meps_example_reduced_dataset(),
npyfilesmeps=download_meps_example_reduced_dataset(),
)


Expand Down

0 comments on commit ac7e46a

Please sign in to comment.