Skip to content

Commit

Permalink
Merge pull request #1043 from lsst/tickets/DM-45119
Browse files Browse the repository at this point in the history
DM-45119: Update logic for checking overlap dimensions in relations
  • Loading branch information
andy-slac committed Jul 31, 2024
2 parents c108883 + 2111e21 commit 28efc2a
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 12 deletions.
1 change: 1 addition & 0 deletions doc/changes/DM-45119.bugfix.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix for handling of dataset types that use `healpix11` dimensions; previously they caused an exception in many query operations.
15 changes: 11 additions & 4 deletions python/lsst/daf/butler/registry/dimensions/static.py
Original file line number Diff line number Diff line change
Expand Up @@ -384,11 +384,18 @@ def make_spatial_join_relation(
existing_relationships: Set[frozenset[str]] = frozenset(),
) -> tuple[Relation, bool]:
# Docstring inherited.
overlap_relationship = frozenset(
self.universe[element1].dimensions.names | self.universe[element2].dimensions.names
)
if overlap_relationship in existing_relationships:
group1 = self.universe[element1].minimal_group
group2 = self.universe[element2].minimal_group
overlap_relationships = {
frozenset(a | b)
for a, b in itertools.product(
[group1.names, group1.required],
[group2.names, group2.required],
)
}
if not overlap_relationships.isdisjoint(existing_relationships):
return context.preferred_engine.make_join_identity_relation(), False

overlaps: Relation | None = None
needs_refinement: bool = False
if element1 == self.universe.commonSkyPix.name:
Expand Down
23 changes: 15 additions & 8 deletions python/lsst/daf/butler/registry/sql_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -2126,17 +2126,24 @@ def queryDatasets(
# only if we need to findFirst. Note that if any of the
# collections are actually wildcard expressions, and
# findFirst=True, this will raise TypeError for us.
builder.joinDataset(
any_records = builder.joinDataset(
resolved_dataset_type, collection_wildcard, isResult=True, findFirst=findFirst
)
query = builder.finish()
parent_results.append(queries.DatabaseParentDatasetQueryResults(query, resolved_dataset_type))
if any_records:
query = builder.finish()
parent_results.append(queries.DatabaseParentDatasetQueryResults(query, resolved_dataset_type))
else:
doomed_by.append(
f"No datasets of type {resolved_dataset_type.name} "
f"in collections {collection_wildcard!r}."
)
if not parent_results:
doomed_by.extend(
f"No registered dataset type matching {t!r} found, so no matching datasets can "
"exist in any collection."
for t in ensure_iterable(datasetType)
)
if not doomed_by:
doomed_by.extend(
f"No registered dataset type matching {t!r} found, so no matching datasets can "
"exist in any collection."
for t in ensure_iterable(datasetType)
)
return queries.ChainedDatasetQueryResults([], doomed_by=doomed_by)
elif len(parent_results) == 1:
return parent_results[0]
Expand Down

0 comments on commit 28efc2a

Please sign in to comment.