
Add convenience method to get default dimension packers.
TallJimbo committed Jun 15, 2023
1 parent 5f77edf commit 2a0d73d
Showing 3 changed files with 57 additions and 4 deletions.
1 change: 1 addition & 0 deletions doc/changes/DM-39453.feature.md
@@ -0,0 +1 @@
Add `Instrument.make_default_dimension_packer` to restore simple access to the default data ID packer for an instrument.
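As an illustrative sketch of the new API (not part of the commit): the repository path and the ``HSC`` visit/detector values below are hypothetical, and the data ID is expanded through `Registry.expandDataId` so that the dimension records the method requires are attached.

from lsst.daf.butler import Butler
from lsst.pipe.base import Instrument

# Hypothetical repository path and data ID values; substitute your own.
butler = Butler("/path/to/repo")

# The method requires a data ID with dimension records attached, so expand
# the plain data ID through the registry first.
data_id = butler.registry.expandDataId(instrument="HSC", visit=1228, detector=40)

# Build the default visit+detector packer and pack the data ID into an integer.
packer = Instrument.make_default_dimension_packer(data_id, is_exposure=False)
packed_id = packer.pack(data_id)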
50 changes: 48 additions & 2 deletions python/lsst/pipe/base/_instrument.py
@@ -31,14 +31,13 @@

from lsst.daf.butler import DataCoordinate, DataId, DimensionPacker, DimensionRecord, Formatter
from lsst.daf.butler.registry import DataIdError
from lsst.pex.config import RegistryField
from lsst.pex.config import Config, RegistryField
from lsst.utils import doImportType

from ._observation_dimension_packer import observation_packer_registry

if TYPE_CHECKING:
from lsst.daf.butler import Registry
from lsst.pex.config import Config


class Instrument(metaclass=ABCMeta):
@@ -624,6 +623,53 @@ class to determine the behavior for each data ID encountered. When the
doc, default=None, optional=True, on_none=Instrument._make_default_dimension_packer_dispatch
)

@staticmethod
@final
def make_default_dimension_packer(
data_id: DataCoordinate, is_exposure: bool | None = None
) -> DimensionPacker:
"""Return the default dimension packer for the given data ID.
Parameters
----------
data_id : `lsst.daf.butler.DataCoordinate`
Data ID that identifies at least the ``instrument`` dimension. Must
have dimension records attached.
is_exposure : `bool`, optional
If `False`, construct a packer for visit+detector data IDs. If
`True`, construct a packer for exposure+detector data IDs. If
`None`, this is determined based on whether ``visit`` or
``exposure`` is present in ``data_id``, with ``visit`` checked
first and hence used if both are present.

Returns
-------
packer : `lsst.daf.butler.DimensionPacker`
Object that packs {visit, detector} or {exposure, detector} data
IDs into integers.

Notes
-----
In task code, using `make_dimension_packer_config_field` to make the
packing algorithm configurable is preferred over this method.

When obtaining a dimension packer to unpack IDs that were packed by
task code, it is similarly preferable to load the configuration for
that task and the existing packer configuration field there, to ensure
any config overrides are respected. That is sometimes quite difficult,
however, and since config overrides for dimension packers are expected
to be exceedingly rare, using this simpler method will almost always
work.
"""

class _DummyConfig(Config):
packer = Instrument.make_dimension_packer_config_field()

config = _DummyConfig()

return config.packer.apply(data_id, is_exposure=is_exposure)

@staticmethod
@final
def _make_default_dimension_packer_dispatch(
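For task code, the docstring Notes above prefer exposing the packer as a config field. A simplified sketch of that pattern, with `ExamplePackerConfig` as a hypothetical stand-in for a task's config class; the field construction and the ``apply`` call mirror the ``_DummyConfig`` used inside the new method and the test below.

from lsst.pex.config import Config
from lsst.pipe.base import Instrument

class ExamplePackerConfig(Config):
    # Same field the new method builds internally via its _DummyConfig.
    packer = Instrument.make_dimension_packer_config_field()

config = ExamplePackerConfig()
# Any config overrides would be applied to ``config`` here, which is the
# advantage of this pattern over the convenience method.
packer = config.packer.apply(data_id, is_exposure=False)  # data_id expanded as in the sketch above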
10 changes: 8 additions & 2 deletions tests/test_instrument.py
@@ -208,8 +208,14 @@ def test_dimension_packer_config_defaults(self):
# Note that we don't need to pass any more than the instrument in
# the data ID yet, because we're just constructing packers, not
# calling their pack method.
visit_packers=[config.packer.apply(instrument_data_id, is_exposure=False)],
exposure_packers=[config.packer.apply(instrument_data_id, is_exposure=True)],
visit_packers=[
config.packer.apply(instrument_data_id, is_exposure=False),
Instrument.make_default_dimension_packer(instrument_data_id, is_exposure=False),
],
exposure_packers=[
config.packer.apply(instrument_data_id, is_exposure=True),
Instrument.make_default_dimension_packer(instrument_data_id, is_exposure=True),
],
n_detectors=record.detector_max,
n_visits=record.visit_max,
n_exposures=record.exposure_max,
