Skip to content

Commit

Permalink
Add, use DataCoordinate.make_empty.
Browse files Browse the repository at this point in the history
The old DataCoordinate.makeEmpty is not being deprecated because that
wasn't included in RFC-834 and we don't have a good reason to get rid
of it, but we've now got a consistent suite of DataCoordinate factory
methods with from_full_values and from_required_values (whose camelCase
forms _did_ have a good reason to be retired, as this aided with the
transition from DimensionGraph to DimensionGroup).
  • Loading branch information
TallJimbo committed Nov 21, 2023
1 parent 0de720a commit 2d5f464
Show file tree
Hide file tree
Showing 9 changed files with 41 additions and 21 deletions.
32 changes: 26 additions & 6 deletions python/lsst/daf/butler/dimensions/_coordinate.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
`DataCoordinate` is an ABC, but it provides `staticmethod` factory
functions for private concrete implementations that should be sufficient
for most purposes. `standardize` is the most flexible and safe of these;
the others (`makeEmpty`, `from_required_values`, and `from_full_values`)
the others (`make_empty`, `from_required_values`, and `from_full_values`)
are more specialized and perform little or no checking of inputs.
Lookups for implied dimensions (those in ``self.dimensions.implied``) are
Expand Down Expand Up @@ -252,7 +252,7 @@ def standardize(
raise TypeError("universe must be provided if graph is not.")
dimensions = DimensionGroup(universe, new_mapping.keys())
if not dimensions:
return DataCoordinate.makeEmpty(universe)
return DataCoordinate.make_empty(universe)
# Some backends cannot handle numpy.int64 type which is a subclass of
# numbers.Integral; convert that to int.
for k, v in new_mapping.items():
Expand Down Expand Up @@ -328,6 +328,26 @@ def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
`hasRecords` are guaranteed to return `True`, because both `full`
and `records` are just empty mappings.
"""
return DataCoordinate.make_empty(universe)

@staticmethod
def make_empty(universe: DimensionUniverse) -> DataCoordinate:
    """Return an empty `DataCoordinate`.

    It identifies the null set of dimensions.

    Parameters
    ----------
    universe : `DimensionUniverse`
        Universe to which this null dimension set belongs.

    Returns
    -------
    data_id : `DataCoordinate`
        A data ID object that identifies no dimensions. `hasFull` and
        `hasRecords` are guaranteed to return `True`, because both `full`
        and `records` are just empty mappings.
    """
    # With no dimensions there are no values or records to attach, so the
    # "expanded" implementation with empty tuples/mappings is always valid.
    return _ExpandedTupleDataCoordinate(universe.empty.as_group(), (), {})

# TODO: remove on DM-41326.
Expand Down Expand Up @@ -383,7 +403,7 @@ def from_required_values(dimensions: DimensionGroup, values: tuple[DataIdValue,
Returns
-------
dataId : `DataCoordinate`
data_id : `DataCoordinate`
A data ID object that identifies the given dimensions.
``dataId.hasFull()`` will return `True` only if
``dimensions.implied`` is empty. ``dataId.hasRecords()`` will
Expand All @@ -393,7 +413,7 @@ def from_required_values(dimensions: DimensionGroup, values: tuple[DataIdValue,
values
), f"Inconsistency between dimensions {dimensions.required} and required values {values}."
if not dimensions:
return DataCoordinate.makeEmpty(dimensions.universe)
return DataCoordinate.make_empty(dimensions.universe)
if not dimensions.implied:
return _FullTupleDataCoordinate(dimensions, values)
return _RequiredTupleDataCoordinate(dimensions, values)
Expand Down Expand Up @@ -455,7 +475,7 @@ def from_full_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]
Returns
-------
dataId : `DataCoordinate`
data_id : `DataCoordinate`
A data ID object that identifies the given dimensions.
``dataId.hasFull()`` will always return `True`.
``dataId.hasRecords()`` will only return `True` if ``dimensions``
Expand All @@ -465,7 +485,7 @@ def from_full_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]
values
), f"Inconsistency between dimensions {dimensions.data_coordinate_keys} and full values {values}."
if not dimensions:
return DataCoordinate.makeEmpty(dimensions.universe)
return DataCoordinate.make_empty(dimensions.universe)
return _FullTupleDataCoordinate(dimensions, values)

def __bool__(self) -> bool:
Expand Down
4 changes: 2 additions & 2 deletions python/lsst/daf/butler/registry/queries/_structs.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,9 +108,9 @@ def combine(
An object representing the WHERE clause for a query.
"""
if data_id is None:
data_id = DataCoordinate.makeEmpty(dimensions.universe)
data_id = DataCoordinate.make_empty(dimensions.universe)
if defaults is None:
defaults = DataCoordinate.makeEmpty(dimensions.universe)
defaults = DataCoordinate.make_empty(dimensions.universe)
expression_predicate, governor_constraints = make_string_expression_predicate(
expression,
dimensions,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def make_string_expression_predicate(
"""
governor_constraints: dict[str, Set[str]] = {}
if data_id is None:
data_id = DataCoordinate.makeEmpty(dimensions.universe)
data_id = DataCoordinate.make_empty(dimensions.universe)
if not string:
for dimension in data_id.dimensions.governors:
governor_constraints[dimension] = {cast(str, data_id[dimension])}
Expand All @@ -146,7 +146,7 @@ def make_string_expression_predicate(
if column and table in dimensions.universe.elements.names:
raise RuntimeError(f"Bind parameter key {identifier!r} looks like a dimension column.")
if defaults is None:
defaults = DataCoordinate.makeEmpty(dimensions.universe)
defaults = DataCoordinate.make_empty(dimensions.universe)
# Convert the expression to disjunctive normal form (ORs of ANDs).
# That's potentially super expensive in the general case (where there's
# a ton of nesting of ANDs and ORs). That won't be the case for the
Expand Down
6 changes: 3 additions & 3 deletions python/lsst/daf/butler/registry/tests/_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -1685,7 +1685,7 @@ def testEmptyDimensionsQueries(self):
self.loadData(registry, "base.yaml")
schema = DatasetType("schema", dimensions=registry.dimensions.empty, storageClass="Catalog")
registry.registerDatasetType(schema)
dataId = DataCoordinate.makeEmpty(registry.dimensions)
dataId = DataCoordinate.make_empty(registry.dimensions)
run1 = "run1"
run2 = "run2"
registry.registerRun(run1)
Expand Down Expand Up @@ -3339,8 +3339,8 @@ def test_long_query_names(self) -> None:
registry.registerRun(run1)
run2 = "run2"
registry.registerRun(run2)
(ref1,) = registry.insertDatasets(name, [DataCoordinate.makeEmpty(registry.dimensions)], run1)
registry.insertDatasets(name, [DataCoordinate.makeEmpty(registry.dimensions)], run2)
(ref1,) = registry.insertDatasets(name, [DataCoordinate.make_empty(registry.dimensions)], run1)
registry.insertDatasets(name, [DataCoordinate.make_empty(registry.dimensions)], run2)
self.assertEqual(
set(registry.queryDatasets(name, collections=[run1, run2], findFirst=True)),
{ref1},
Expand Down
2 changes: 1 addition & 1 deletion tests/test_butler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2138,7 +2138,7 @@ def _absolute_transfer(self, transfer: str) -> None:

metrics = makeExampleMetrics()
with ResourcePath.temporary_uri(suffix=".json") as temp:
dataId = DataCoordinate.makeEmpty(self.source_butler.dimensions)
dataId = DataCoordinate.make_empty(self.source_butler.dimensions)
source_refs = [DatasetRef(datasetType, dataId, run=run)]
temp.write(json.dumps(metrics.exportAsDict()).encode())
dataset = FileDataset(path=temp, refs=source_refs)
Expand Down
4 changes: 2 additions & 2 deletions tests/test_datastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -945,7 +945,7 @@ def test_pydantic_dict_storage_class_conversions(self) -> None:
"store_as_model",
dimensions=self.universe.empty,
storageClass="DictConvertibleModel",
dataId=DataCoordinate.makeEmpty(self.universe),
dataId=DataCoordinate.make_empty(self.universe),
)
content = {"a": "one", "b": "two"}
model = DictConvertibleModel.from_dict(content, extra="original content")
Expand Down Expand Up @@ -988,7 +988,7 @@ def _assert_different_puts(self, datastore: Datastore, storageClass_root: str, d
f"stora_as_{x}",
dimensions=self.universe.empty,
storageClass=f"{storageClass_root}{x}",
dataId=DataCoordinate.makeEmpty(self.universe),
dataId=DataCoordinate.make_empty(self.universe),
)
for x in ["A", "B"]
}
Expand Down
4 changes: 2 additions & 2 deletions tests/test_dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -713,10 +713,10 @@ def testStandardize(self):
DataCoordinate.standardize(dataId.mapping, universe=dataId.universe),
DataCoordinate.standardize(dataId.mapping, dimensions=dataId.dimensions),
DataCoordinate.standardize(
DataCoordinate.makeEmpty(dataId.dimensions.universe), **dataId.mapping
DataCoordinate.make_empty(dataId.dimensions.universe), **dataId.mapping
),
DataCoordinate.standardize(
DataCoordinate.makeEmpty(dataId.dimensions.universe),
DataCoordinate.make_empty(dataId.dimensions.universe),
dimensions=dataId.dimensions,
**dataId.mapping,
),
Expand Down
4 changes: 2 additions & 2 deletions tests/test_expressions.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,8 +355,8 @@ def test_governor(self):

universe = DimensionUniverse()
dimensions = universe.conform(("instrument", "visit"))
dataId = DataCoordinate.makeEmpty(universe)
defaults = DataCoordinate.makeEmpty(universe)
dataId = DataCoordinate.make_empty(universe)
defaults = DataCoordinate.make_empty(universe)

# governor-only constraint
tree = parser.parse("instrument = 'LSST'")
Expand Down
2 changes: 1 addition & 1 deletion tests/test_formatter.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def setUp(self):
self.id = 0
self.factory = FormatterFactory()
self.universe = DimensionUniverse()
self.dataId = DataCoordinate.makeEmpty(self.universe)
self.dataId = DataCoordinate.make_empty(self.universe)

# Dummy FileDescriptor for testing getFormatter
self.fileDescriptor = FileDescriptor(
Expand Down

0 comments on commit 2d5f464

Please sign in to comment.