Skip to content

Commit

Permalink
feat(visibility): Return meta with top N timeseries results (#83446)
Browse files Browse the repository at this point in the history
Right now, a timeseries request with a `groupBy` does not attach any
`"meta"` to the response! That's a bummer because for the new standard
widgets, we rely on that meta to figure out types and units.

This PR adds the meta to the responses, including support for the
`transformAliasToInputFormat` URL parameter. Note that a `groupBy`
splits the results by group but the _meta_ is the same everywhere. This
is why I attach `result["meta"]` to the response directly without
having to split anything up.

## Changes

- Attach meta to results
- Pass through alias input parameter

---------

Co-authored-by: Tony Xiao <[email protected]>
  • Loading branch information
gggritso and Zylphrex authored Jan 16, 2025
1 parent b9c558f commit 8d4ef7a
Show file tree
Hide file tree
Showing 12 changed files with 118 additions and 12 deletions.
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,7 @@ def _get_event_stats(
on_demand_metrics_type=on_demand_metrics_type,
include_other=include_other,
query_source=query_source,
transform_alias_to_input_format=transform_alias_to_input_format,
fallback_to_transactions=features.has(
"organizations:performance-discover-dataset-selector",
organization,
Expand Down
5 changes: 5 additions & 0 deletions src/sentry/snuba/discover.py
Original file line number Diff line number Diff line change
Expand Up @@ -440,6 +440,7 @@ def top_events_timeseries(
dataset: Dataset = Dataset.Discover,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
) -> dict[str, SnubaTSResult] | SnubaTSResult:
"""
High-level API for doing arbitrary user timeseries queries for a limited number of top events
Expand All @@ -462,6 +463,8 @@ def top_events_timeseries(
top_events - A dictionary with a 'data' key containing a list of dictionaries that
represent the top events matching the query. Useful when you have found
the top events earlier and want to save a query.
transform_alias_to_input_format - Whether aggregate columns should be returned in the originally
requested function format.
"""
assert dataset in [
Dataset.Discover,
Expand Down Expand Up @@ -500,6 +503,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -587,6 +591,7 @@ def top_events_timeseries(
if zerofill_results
else item["data"]
),
"meta": result["meta"],
"order": item["order"],
},
snuba_params.start_date,
Expand Down
6 changes: 6 additions & 0 deletions src/sentry/snuba/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,7 @@ def top_events_timeseries(
dataset: Dataset = Dataset.Discover,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
) -> dict[str, SnubaTSResult] | SnubaTSResult:
"""
High-level API for doing arbitrary user timeseries queries for a limited number of top events
Expand All @@ -237,6 +238,9 @@ def top_events_timeseries(
top_events - A dictionary with a 'data' key containing a list of dictionaries that
represent the top events matching the query. Useful when you have found
the top events earlier and want to save a query.
transform_alias_to_input_format - Whether aggregate columns should be returned in the originally
requested function format.
"""
if top_events is None:
with sentry_sdk.start_span(op="discover.errors", name="top_events.fetch_events"):
Expand Down Expand Up @@ -269,6 +273,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -356,6 +361,7 @@ def top_events_timeseries(
if zerofill_results
else item["data"]
),
"meta": result["meta"],
"order": item["order"],
},
snuba_params.start_date,
Expand Down
4 changes: 3 additions & 1 deletion src/sentry/snuba/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
):
assert not include_other, "Other is not supported" # TODO: support other

Expand Down Expand Up @@ -183,6 +184,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)

Expand Down Expand Up @@ -270,7 +272,7 @@ def format_top_events_timeseries_results(
else item["data"]
),
"order": item["order"],
"meta": result["meta"],
"meta": processed_result["meta"],
},
snuba_params.start_date,
snuba_params.end_date,
Expand Down
3 changes: 3 additions & 0 deletions src/sentry/snuba/metrics_enhanced_performance.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
) -> SnubaTSResult | dict[str, Any]:
metrics_compatible = False
equations, _ = categorize_columns(selected_columns)
Expand Down Expand Up @@ -262,6 +263,7 @@ def top_events_timeseries(
on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
query_source=query_source,
transform_alias_to_input_format=transform_alias_to_input_format,
)
# raise Invalid Queries since the same thing will happen with discover
except InvalidSearchQuery:
Expand Down Expand Up @@ -296,6 +298,7 @@ def top_events_timeseries(
include_other,
functions_acl,
query_source=query_source,
transform_alias_to_input_format=transform_alias_to_input_format,
)
return SnubaTSResult(
{
Expand Down
11 changes: 8 additions & 3 deletions src/sentry/snuba/metrics_performance.py
Original file line number Diff line number Diff line change
Expand Up @@ -413,6 +413,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
) -> SnubaTSResult | dict[str, Any]:

if top_events is None:
Expand Down Expand Up @@ -445,6 +446,7 @@ def top_events_timeseries(
functions_acl=functions_acl,
on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -527,9 +529,12 @@ def top_events_timeseries(
else item["data"]
),
"order": item["order"],
# One of the queries in the builder has required, thus, we mark all of them
# This could mislead downstream consumers of the meta data
"meta": {"isMetricsExtractedData": top_events_builder.use_on_demand},
"meta": {
# One of the queries in the builder has required, thus, we mark all of them
# This could mislead downstream consumers of the meta data
"isMetricsExtractedData": top_events_builder.use_on_demand,
**result["meta"],
},
},
snuba_params.start_date,
snuba_params.end_date,
Expand Down
3 changes: 3 additions & 0 deletions src/sentry/snuba/spans_eap.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ def top_events_timeseries(
dataset: Dataset = Dataset.Discover,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
):
"""
High-level API for doing arbitrary user timeseries queries for a limited number of top events
Expand Down Expand Up @@ -202,6 +203,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -286,6 +288,7 @@ def top_events_timeseries(
if zerofill_results
else result_item["data"]
),
"meta": result["meta"],
"order": result_item["order"],
},
snuba_params.start_date,
Expand Down
3 changes: 3 additions & 0 deletions src/sentry/snuba/spans_indexed.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
):
"""
High-level API for doing arbitrary user timeseries queries for a limited number of top events
Expand Down Expand Up @@ -197,6 +198,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -279,6 +281,7 @@ def top_events_timeseries(
if zerofill_results
else item["data"]
),
"meta": result["meta"],
"order": item["order"],
},
snuba_params.start_date,
Expand Down
3 changes: 3 additions & 0 deletions src/sentry/snuba/spans_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
):
"""
High-level API for doing arbitrary user timeseries queries for a limited number of top events
Expand Down Expand Up @@ -217,6 +218,7 @@ def top_events_timeseries(
config=QueryBuilderConfig(
functions_acl=functions_acl,
skip_tag_resolution=True,
transform_alias_to_input_format=transform_alias_to_input_format,
),
)
if len(top_events["data"]) == limit and include_other:
Expand Down Expand Up @@ -290,6 +292,7 @@ def top_events_timeseries(
if zerofill_results
else item["data"]
),
"meta": result["meta"],
"order": item["order"],
},
snuba_params.start_date,
Expand Down
2 changes: 2 additions & 0 deletions src/sentry/snuba/transactions.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@ def top_events_timeseries(
on_demand_metrics_type: MetricSpecType | None = None,
query_source: QuerySource | None = None,
fallback_to_transactions: bool = False,
transform_alias_to_input_format: bool = False,
) -> dict[str, SnubaTSResult] | SnubaTSResult:
return discover.top_events_timeseries(
timeseries_columns,
Expand All @@ -156,6 +157,7 @@ def top_events_timeseries(
on_demand_metrics_type=on_demand_metrics_type,
dataset=Dataset.Transactions,
query_source=query_source,
transform_alias_to_input_format=transform_alias_to_input_format,
)


Expand Down
65 changes: 65 additions & 0 deletions tests/snuba/api/endpoints/test_organization_events_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -1463,6 +1463,71 @@ def test_simple_top_events(self):
assert other["order"] == 5
assert [{"count": 3}] in [attrs for _, attrs in other["data"]]

def test_simple_top_events_meta(self):
with self.feature(self.enabled_features):
response = self.client.get(
self.url,
data={
"start": self.day_ago.isoformat(),
"end": (self.day_ago + timedelta(hours=2)).isoformat(),
"interval": "1h",
"yAxis": "sum(transaction.duration)",
"orderby": ["-sum(transaction.duration)"],
"field": ["transaction", "sum(transaction.duration)"],
"topEvents": "5",
},
format="json",
)

data = response.data
assert response.status_code == 200, response.content

for transaction, transaction_data in data.items():
assert transaction_data["meta"]["fields"] == {
"time": "date",
"transaction": "string",
"sum_transaction_duration": "duration",
}

assert transaction_data["meta"]["units"] == {
"time": None,
"transaction": None,
"sum_transaction_duration": "millisecond",
}

def test_simple_top_events_meta_no_alias(self):
with self.feature(self.enabled_features):
response = self.client.get(
self.url,
data={
"transformAliasToInputFormat": "1",
"start": self.day_ago.isoformat(),
"end": (self.day_ago + timedelta(hours=2)).isoformat(),
"interval": "1h",
"yAxis": "sum(transaction.duration)",
"orderby": ["-sum(transaction.duration)"],
"field": ["transaction", "sum(transaction.duration)"],
"topEvents": "5",
},
format="json",
)

data = response.data
assert response.status_code == 200, response.content

for transaction, transaction_data in data.items():
assert transaction_data["meta"]["fields"] == {
"time": "date",
"transaction": "string",
"sum(transaction.duration)": "duration",
}

assert transaction_data["meta"]["units"] == {
"time": None,
"transaction": None,
"sum(transaction.duration)": "millisecond",
}

def test_top_events_with_projects_other(self):
with self.feature(self.enabled_features):
response = self.client.get(
Expand Down
24 changes: 16 additions & 8 deletions tests/snuba/api/endpoints/test_organization_events_stats_mep.py
Original file line number Diff line number Diff line change
Expand Up @@ -1996,11 +1996,11 @@ def test_glob_http_referer_on_demand(self):
assert datum["meta"] == {
"dataset": "metricsEnhanced",
"datasetReason": "unchanged",
"fields": {},
"fields": {"count": "integer", "networkId": "string", "time": "date"},
"isMetricsData": False,
"isMetricsExtractedData": True,
"tips": {},
"units": {},
"units": {"count": None, "networkId": None, "time": None},
}

def _test_is_metrics_extracted_data(
Expand Down Expand Up @@ -2184,11 +2184,11 @@ def test_order_by_aggregate_top_events_desc(self):
assert datum["meta"] == {
"dataset": "metricsEnhanced",
"datasetReason": "unchanged",
"fields": {},
"fields": {"count": "integer", "networkId": "string", "time": "date"},
"isMetricsData": False,
"isMetricsExtractedData": True,
"tips": {},
"units": {},
"units": {"count": None, "networkId": None, "time": None},
}

def test_order_by_aggregate_top_events_asc(self):
Expand Down Expand Up @@ -2226,11 +2226,11 @@ def test_order_by_aggregate_top_events_asc(self):
assert datum["meta"] == {
"dataset": "metricsEnhanced",
"datasetReason": "unchanged",
"fields": {},
"fields": {"count": "integer", "networkId": "string", "time": "date"},
"isMetricsData": False,
"isMetricsExtractedData": True,
"tips": {},
"units": {},
"units": {"count": None, "networkId": None, "time": None},
}

def test_order_by_aggregate_top_events_graph_different_aggregate(self):
Expand Down Expand Up @@ -2268,11 +2268,19 @@ def test_order_by_aggregate_top_events_graph_different_aggregate(self):
assert datum["meta"] == {
"dataset": "metricsEnhanced",
"datasetReason": "unchanged",
"fields": {},
"fields": {
"networkId": "string",
"p95_transaction_duration": "duration",
"time": "date",
},
"isMetricsData": False,
"isMetricsExtractedData": True,
"tips": {},
"units": {},
"units": {
"networkId": None,
"p95_transaction_duration": "millisecond",
"time": None,
},
}

def test_cannot_order_by_tag(self):
Expand Down

0 comments on commit 8d4ef7a

Please sign in to comment.