Commit: not hacky

Rachel Chen authored and Rachel Chen committed Feb 14, 2025
1 parent b527547 commit dca9e59
Showing 5 changed files with 18 additions and 26 deletions.
2 changes: 0 additions & 2 deletions snuba/web/db_query.py
@@ -176,8 +176,6 @@ def execute_query(
# Apply clickhouse query setting overrides
clickhouse_query_settings.update(query_settings.get_clickhouse_settings())

print("formatted_queryyyy", formatted_query)

result = reader.execute(
formatted_query,
clickhouse_query_settings,
25 changes: 10 additions & 15 deletions snuba/web/rpc/v1/endpoint_trace_item_table.py
@@ -10,8 +10,6 @@
TraceItemTableResponse,
)
from sentry_protos.snuba.v1.request_common_pb2 import TraceItemType
from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey
from sentry_protos.snuba.v1.trace_item_filter_pb2 import ExistsFilter, TraceItemFilter

from snuba.web.rpc import RPCEndpoint, TraceItemDataResolver
from snuba.web.rpc.common.exceptions import BadSnubaRPCRequestException
@@ -88,7 +86,7 @@ def _transform_request(request: TraceItemTableRequest) -> TraceItemTableRequest:


def convert_to_conditional_aggregation(in_msg: TraceItemTableRequest) -> None:
for column in in_msg.columns:
def _convert(column: Column) -> None:
if column.HasField("aggregation"):
aggregation = column.aggregation
column.conditional_aggregation.CopyFrom(
@@ -97,17 +95,18 @@ def convert_to_conditional_aggregation(in_msg: TraceItemTableRequest) -> None:
key=aggregation.key,
label=aggregation.label,
extrapolation_mode=aggregation.extrapolation_mode,
filter=TraceItemFilter( # I needed a filter that will always evaluate to literal(true)
exists_filter=ExistsFilter(
key=AttributeKey(
type=AttributeKey.Type.TYPE_INT,
name="sentry.organization_id",
)
)
),
)
)

if column.HasField("formula"):
_convert(column.formula.left)
_convert(column.formula.right)

for column in in_msg.columns:
_convert(column)
for ob in in_msg.order_by:
_convert(ob.column)


class EndpointTraceItemTable(
RPCEndpoint[TraceItemTableRequest, TraceItemTableResponse]
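
For readability, here is a sketch of how the refactored convert_to_conditional_aggregation plausibly reads after this commit, assembled from the hunks above. The AttributeConditionalAggregation( and aggregate=aggregation.aggregate, lines fall in the collapsed part of the diff and are assumed, as are the exact sentry_protos import paths; this is a reconstruction, not the file verbatim.

    # Sketch assembled from the hunks above; the collapsed lines and the import
    # paths are assumptions, not copied from the file.
    from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import (
        Column,
        TraceItemTableRequest,
    )
    from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
        AttributeConditionalAggregation,
    )


    def convert_to_conditional_aggregation(in_msg: TraceItemTableRequest) -> None:
        def _convert(column: Column) -> None:
            # Legacy `aggregation` columns are copied into `conditional_aggregation`;
            # the placeholder always-true ExistsFilter is no longer attached.
            if column.HasField("aggregation"):
                aggregation = column.aggregation
                column.conditional_aggregation.CopyFrom(
                    AttributeConditionalAggregation(
                        aggregate=aggregation.aggregate,
                        key=aggregation.key,
                        label=aggregation.label,
                        extrapolation_mode=aggregation.extrapolation_mode,
                    )
                )

            # Formula columns are converted recursively on both operands.
            if column.HasField("formula"):
                _convert(column.formula.left)
                _convert(column.formula.right)

        # The conversion now walks order_by columns as well as select columns.
        for column in in_msg.columns:
            _convert(column)
        for ob in in_msg.order_by:
            _convert(ob.column)
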
@@ -135,9 +134,7 @@ def _execute(self, in_msg: TraceItemTableRequest) -> TraceItemTableResponse:
convert_to_conditional_aggregation(in_msg)
in_msg = _apply_labels_to_columns(in_msg)
_validate_select_and_groupby(in_msg)
# print("in_msg_after_validate_select_and_grop_by", in_msg)
_validate_order_by(in_msg)
# print("in_msg_after_validate_order_by", in_msg)

in_msg.meta.request_id = getattr(in_msg.meta, "request_id", None) or str(
uuid.uuid4()
@@ -149,7 +146,5 @@ def _execute(self, in_msg: TraceItemTableRequest) -> TraceItemTableResponse:

in_msg = _transform_request(in_msg)

print("in_msg_after_transform_request", in_msg)

resolver = self.get_resolver(in_msg.meta.trace_item_type)
return resolver.resolve(in_msg)
(third changed file; filename not captured)
@@ -124,11 +124,13 @@ def _convert_order_by(
expression=attribute_key_to_expression(x.column.key),
)
)
elif x.column.HasField("aggregation"):
elif x.column.HasField("conditional_aggregation"):
res.append(
OrderBy(
direction=direction,
expression=aggregation_to_expression(x.column.aggregation),
expression=aggregation_to_expression(
x.column.conditional_aggregation
),
)
)
elif x.column.HasField("formula"):
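
This branch only needs to match conditional_aggregation because the endpoint now rewrites order-by columns before resolution (the for ob in in_msg.order_by loop in the previous file); the formula branch above continues below the truncated portion of this hunk. A minimal round-trip sketch of that guarantee, assuming the usual sentry_protos import paths and the TraceItemTableRequest.OrderBy nested-message name, neither of which is shown in this diff:

    from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import (
        Column,
        TraceItemTableRequest,
    )
    from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
        AttributeAggregation,
        AttributeKey,
        Function,
    )

    from snuba.web.rpc.v1.endpoint_trace_item_table import (
        convert_to_conditional_aggregation,
    )

    # An order_by that still uses the legacy `aggregation` field...
    req = TraceItemTableRequest(
        order_by=[
            TraceItemTableRequest.OrderBy(  # nested-message name assumed
                column=Column(
                    aggregation=AttributeAggregation(
                        aggregate=Function.FUNCTION_AVG,
                        key=AttributeKey(
                            type=AttributeKey.TYPE_DOUBLE, name="custom_measurement"
                        ),
                        label="avg(custom_measurement)",
                    )
                )
            )
        ],
    )

    # ...is rewritten in place before the resolver runs, so _convert_order_by
    # only ever sees the conditional form.
    convert_to_conditional_aggregation(req)
    assert req.order_by[0].column.HasField("conditional_aggregation")
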
(fourth changed file; filename not captured)
@@ -728,7 +728,6 @@ def test_table_with_aggregates_backward_compat(self, setup_teardown: Any) -> None:
limit=5,
)
response = EndpointTraceItemTable().execute(message)
breakpoint()
assert response.column_values == [
TraceItemColumnValues(
attribute_name="location",
@@ -755,7 +754,6 @@ def test_table_with_aggregates_backward_compat(self, setup_teardown: Any) -> None:
],
),
]
assert False

def test_table_with_aggregates(self, setup_teardown: Any) -> None:
ts = Timestamp(seconds=int(BASE_TIME.timestamp()))
@@ -2687,7 +2685,7 @@ def test_apply_labels_to_columns_backward_compat(self) -> None:
message = TraceItemTableRequest(
columns=[
Column(
aggregation=AttributeAggregation(
conditional_aggregation=AttributeConditionalAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
type=AttributeKey.TYPE_FLOAT, name="custom_measurement"
@@ -2697,7 +2695,7 @@ def test_apply_labels_to_columns_backward_compat(self) -> None:
)
),
Column(
aggregation=AttributeAggregation(
conditional_aggregation=AttributeConditionalAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
type=AttributeKey.TYPE_FLOAT, name="custom_measurement"
Expand All @@ -2719,7 +2717,7 @@ def test_apply_labels_to_columns(self) -> None:
message = TraceItemTableRequest(
columns=[
Column(
aggregation=AttributeAggregation(
conditional_aggregation=AttributeConditionalAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
type=AttributeKey.TYPE_DOUBLE, name="custom_measurement"
@@ -2729,7 +2727,7 @@ def test_apply_labels_to_columns(self) -> None:
)
),
Column(
aggregation=AttributeAggregation(
conditional_aggregation=AttributeConditionalAggregation(
aggregate=Function.FUNCTION_AVG,
key=AttributeKey(
type=AttributeKey.TYPE_DOUBLE, name="custom_measurement"
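
The fixtures above now build conditional_aggregation directly; requests that still arrive with the legacy aggregation field, including operands nested inside a formula, are rewritten at the endpoint. A short sketch of that behaviour, under the same import-path assumptions as the earlier sketches (the formula.left / formula.right field names come from the endpoint diff):

    from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import (
        Column,
        TraceItemTableRequest,
    )
    from sentry_protos.snuba.v1.trace_item_attribute_pb2 import (
        AttributeAggregation,
        AttributeKey,
        Function,
    )

    from snuba.web.rpc.v1.endpoint_trace_item_table import (
        convert_to_conditional_aggregation,
    )

    # Build a formula column whose operands still use the legacy `aggregation`
    # field. Assigning through the nested fields avoids naming the formula
    # message type, which is not shown in this diff.
    legacy_avg = AttributeAggregation(
        aggregate=Function.FUNCTION_AVG,
        key=AttributeKey(type=AttributeKey.TYPE_DOUBLE, name="custom_measurement"),
        label="avg(custom_measurement)",
    )
    col = Column()
    col.formula.left.aggregation.CopyFrom(legacy_avg)
    col.formula.right.aggregation.CopyFrom(legacy_avg)

    req = TraceItemTableRequest(columns=[col])
    convert_to_conditional_aggregation(req)

    # Both operands are now expressed as conditional aggregations.
    assert req.columns[0].formula.left.HasField("conditional_aggregation")
    assert req.columns[0].formula.right.HasField("conditional_aggregation")
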
(fifth changed file; filename not captured)
@@ -332,7 +332,6 @@ def test_aggregation_on_attribute_column(self) -> None:
limit=5,
)
response = EndpointTraceItemTable().execute(message)
print("responseeeeee", response)
measurement_sum = [v.val_double for v in response.column_values[0].results][0]
measurement_avg = [v.val_double for v in response.column_values[1].results][0]
measurement_count_custom_measurement = [
