Skip to content

Commit

Permalink
chore(typing): Add typing to organization-events endpoint (#82656)
Browse files Browse the repository at this point in the history
This adds typing to the `organization-events` endpoint and, in particular,
ensures that the referrers set is typed.
  • Loading branch information
Zylphrex authored and andrewshie-sentry committed Jan 2, 2025
1 parent 1046cdd commit d2ace79
Show file tree
Hide file tree
Showing 9 changed files with 196 additions and 117 deletions.
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,6 @@ module = [
"sentry.api.endpoints.index",
"sentry.api.endpoints.internal.mail",
"sentry.api.endpoints.organization_details",
"sentry.api.endpoints.organization_events",
"sentry.api.endpoints.organization_events_facets_performance",
"sentry.api.endpoints.organization_events_meta",
"sentry.api.endpoints.organization_events_spans_performance",
Expand Down
112 changes: 69 additions & 43 deletions src/sentry/api/endpoints/organization_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
)
from sentry.snuba.metrics.extraction import MetricSpecType
from sentry.snuba.referrer import Referrer
from sentry.snuba.types import DatasetQuery
from sentry.snuba.utils import dataset_split_decision_inferred_from_query, get_dataset
from sentry.types.ratelimit import RateLimit, RateLimitCategory
from sentry.utils.snuba import SnubaError
Expand All @@ -55,7 +56,7 @@ class DiscoverDatasetSplitException(Exception):
pass


ALLOWED_EVENTS_REFERRERS = {
ALLOWED_EVENTS_REFERRERS: set[str] = {
Referrer.API_ORGANIZATION_EVENTS.value,
Referrer.API_ORGANIZATION_EVENTS_V2.value,
Referrer.API_DASHBOARDS_TABLEWIDGET.value,
Expand Down Expand Up @@ -173,7 +174,6 @@ class DiscoverDatasetSplitException(Exception):
Referrer.ISSUE_DETAILS_STREAMLINE_LIST.value,
}

API_TOKEN_REFERRER = Referrer.API_AUTH_TOKEN_EVENTS.value

LEGACY_RATE_LIMIT = dict(limit=30, window=1, concurrent_limit=15)
# reduced limit will be the future default for all organizations not explicitly on increased limit
Expand Down Expand Up @@ -276,8 +276,7 @@ class OrganizationEventsEndpoint(OrganizationEventsV2EndpointBase):

enforce_rate_limit = True

def rate_limits(*args, **kwargs) -> dict[str, dict[RateLimitCategory, RateLimit]]:
return rate_limit_events(*args, **kwargs)
rate_limits = rate_limit_events

def get_features(self, organization: Organization, request: Request) -> Mapping[str, bool]:
feature_names = [
Expand All @@ -298,11 +297,13 @@ def get_features(self, organization: Organization, request: Request) -> Mapping[
actor=request.user,
)

all_features = (
batch_features.get(f"organization:{organization.id}", {})
if batch_features is not None
else {}
)
all_features: dict[str, bool] = {}

if batch_features is not None:
for feature_name, result in batch_features.get(
f"organization:{organization.id}", {}
).items():
all_features[feature_name] = bool(result)

for feature_name in feature_names:
if feature_name not in all_features:
Expand Down Expand Up @@ -382,7 +383,7 @@ def get(self, request: Request, organization) -> Response:
}
)
except InvalidParams as err:
raise ParseError(err)
raise ParseError(detail=str(err))

batch_features = self.get_features(organization, request)

Expand Down Expand Up @@ -421,7 +422,9 @@ def get(self, request: Request, organization) -> Response:

# Force the referrer to "api.auth-token.events" for events requests authorized through a bearer token
if request.auth:
referrer = API_TOKEN_REFERRER
referrer = Referrer.API_AUTH_TOKEN_EVENTS.value
elif referrer is None:
referrer = Referrer.API_ORGANIZATION_EVENTS.value
elif referrer not in ALLOWED_EVENTS_REFERRERS:
if referrer:
with sentry_sdk.isolation_scope() as scope:
Expand All @@ -436,11 +439,16 @@ def get(self, request: Request, organization) -> Response:
use_rpc = request.GET.get("useRpc", "0") == "1"
sentry_sdk.set_tag("performance.use_rpc", use_rpc)

def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
def _data_fn(
dataset_query: DatasetQuery,
offset: int,
limit: int,
query: str | None,
):
if use_rpc and dataset == spans_eap:
return spans_rpc.run_table_query(
params=snuba_params,
query_string=query,
query_string=query or "",
selected_columns=self.get_field_list(organization, request),
orderby=self.get_orderby(request),
offset=offset,
Expand All @@ -452,9 +460,9 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
),
)
query_source = self.get_request_source(request)
return scoped_dataset.query(
return dataset_query(
selected_columns=self.get_field_list(organization, request),
query=query,
query=query or "",
snuba_params=snuba_params,
equations=self.get_equation_list(organization, request),
orderby=self.get_orderby(request),
Expand All @@ -463,24 +471,30 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
referrer=referrer,
auto_fields=True,
auto_aggregations=True,
use_aggregate_conditions=use_aggregate_conditions,
allow_metric_aggregates=allow_metric_aggregates,
use_aggregate_conditions=use_aggregate_conditions,
transform_alias_to_input_format=True,
# Whether the flag is enabled or not, regardless of the referrer
has_metrics=use_metrics,
use_metrics_layer=batch_features.get("organizations:use-metrics-layer", False),
on_demand_metrics_enabled=on_demand_metrics_enabled,
on_demand_metrics_type=on_demand_metrics_type,
query_source=query_source,
fallback_to_transactions=features.has(
"organizations:performance-discover-dataset-selector",
organization,
actor=request.user,
),
query_source=query_source,
)

@sentry_sdk.tracing.trace
def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_widget_id):
def _dashboards_data_fn(
scoped_dataset_query: DatasetQuery,
offset: int,
limit: int,
scoped_query: str | None,
dashboard_widget_id: str,
):
try:
widget = DashboardWidget.objects.get(id=dashboard_widget_id)
does_widget_have_split = widget.discover_widget_split is not None
Expand All @@ -491,27 +505,29 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
)

if does_widget_have_split and not has_override_feature:
dataset_query: DatasetQuery

# This is essentially cached behaviour and we skip the check
if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS:
split_dataset = errors
dataset_query = errors.query
elif widget.discover_widget_split == DashboardWidgetTypes.TRANSACTION_LIKE:
# We can't add event.type:transaction for now because of on-demand.
split_dataset = scoped_dataset
dataset_query = scoped_dataset_query
else:
split_dataset = discover
dataset_query = discover.query

return _data_fn(split_dataset, offset, limit, scoped_query)
return _data_fn(dataset_query, offset, limit, scoped_query)

with handle_query_errors():
try:
error_results = _data_fn(errors, offset, limit, scoped_query)
error_results = _data_fn(errors.query, offset, limit, scoped_query)
# Widget has not split the discover dataset yet, so we need to check if there are errors etc.
has_errors = len(error_results["data"]) > 0
except SnubaError:
has_errors = False
error_results = None

original_results = _data_fn(scoped_dataset, offset, limit, scoped_query)
original_results = _data_fn(scoped_dataset_query, offset, limit, scoped_query)
if original_results.get("data") is not None:
dataset_meta = original_results.get("meta", {})
else:
Expand All @@ -528,15 +544,17 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
if has_errors and has_other_data and not using_metrics:
# In the case that the original request was not using the metrics dataset, we cannot be certain that other data is solely transactions.
sentry_sdk.set_tag("third_split_query", True)
transaction_results = _data_fn(transactions, offset, limit, scoped_query)
transaction_results = _data_fn(
transactions.query, offset, limit, scoped_query
)
has_transactions = len(transaction_results["data"]) > 0

decision = self.save_split_decision(
widget, has_errors, has_transactions, organization, request.user
)

if decision == DashboardWidgetTypes.DISCOVER:
return _data_fn(discover, offset, limit, scoped_query)
return _data_fn(discover.query, offset, limit, scoped_query)
elif decision == DashboardWidgetTypes.TRANSACTION_LIKE:
original_results["meta"]["discoverSplitDecision"] = (
DashboardWidgetTypes.get_type_name(
Expand All @@ -554,13 +572,19 @@ def _dashboards_data_fn(scoped_dataset, offset, limit, scoped_query, dashboard_w
except Exception as e:
# Swallow the exception if it was due to the discover split, and try again one more time.
if isinstance(e, ParseError):
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset_query, offset, limit, scoped_query)

sentry_sdk.capture_exception(e)
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset_query, offset, limit, scoped_query)

@sentry_sdk.tracing.trace
def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_saved_query_id):
def _discover_data_fn(
scoped_dataset_query: DatasetQuery,
offset: int,
limit: int,
scoped_query: str | None,
discover_saved_query_id: str,
):
try:
discover_query = DiscoverSavedQuery.objects.get(
id=discover_saved_query_id, organization=organization
Expand All @@ -569,7 +593,7 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
discover_query.dataset is not DiscoverSavedQueryTypes.DISCOVER
)
if does_widget_have_split:
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset_query, offset, limit, scoped_query)

dataset_inferred_from_query = dataset_split_decision_inferred_from_query(
self.get_field_list(organization, request),
Expand All @@ -580,9 +604,11 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save

# See if we can infer which dataset based on selected columns and query string.
with handle_query_errors():
if dataset_inferred_from_query is not None:
if (
dataset := SAVED_QUERY_DATASET_MAP.get(dataset_inferred_from_query)
) is not None:
result = _data_fn(
SAVED_QUERY_DATASET_MAP[dataset_inferred_from_query],
dataset.query,
offset,
limit,
scoped_query,
Expand All @@ -606,11 +632,11 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
with ThreadPoolExecutor(max_workers=3) as exe:
futures = {
exe.submit(
_data_fn, get_dataset(dataset_), offset, limit, scoped_query
): dataset_
for dataset_ in [
"errors",
"transactions",
_data_fn, dataset_query, offset, limit, scoped_query
): dataset_name
for dataset_name, dataset_query in [
("errors", errors.query),
("transactions", transactions.query),
]
}

Expand Down Expand Up @@ -664,10 +690,10 @@ def _discover_data_fn(scoped_dataset, offset, limit, scoped_query, discover_save
except Exception as e:
# Swallow the exception if it was due to the discover split, and try again one more time.
if isinstance(e, ParseError):
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset_query, offset, limit, scoped_query)

sentry_sdk.capture_exception(e)
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset_query, offset, limit, scoped_query)

def data_fn_factory(scoped_dataset):
"""
Expand All @@ -681,17 +707,17 @@ def data_fn_factory(scoped_dataset):
dashboard_widget_id = request.GET.get("dashboardWidgetId", None)
discover_saved_query_id = request.GET.get("discoverSavedQueryId", None)

def fn(offset, limit) -> dict[str, Any]:
def fn(offset, limit):
if save_discover_dataset_decision and discover_saved_query_id:
return _discover_data_fn(
scoped_dataset, offset, limit, scoped_query, discover_saved_query_id
scoped_dataset.query, offset, limit, scoped_query, discover_saved_query_id
)

if not (metrics_enhanced and dashboard_widget_id):
return _data_fn(scoped_dataset, offset, limit, scoped_query)
return _data_fn(scoped_dataset.query, offset, limit, scoped_query)

return _dashboards_data_fn(
scoped_dataset, offset, limit, scoped_query, dashboard_widget_id
scoped_dataset.query, offset, limit, scoped_query, dashboard_widget_id
)

return fn
Expand Down
3 changes: 3 additions & 0 deletions src/sentry/search/events/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,9 +65,12 @@ class QueryFramework:


class EventsMeta(TypedDict):
datasetReason: NotRequired[str]
fields: dict[str, str]
tips: NotRequired[dict[str, str | None]]
isMetricsData: NotRequired[bool]
isMetricsExtractedData: NotRequired[bool]
discoverSplitDecision: NotRequired[str]


class EventsResponse(TypedDict):
Expand Down
47 changes: 25 additions & 22 deletions src/sentry/snuba/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from typing import cast

import sentry_sdk
from snuba_sdk import Column, Condition

from sentry.discover.arithmetic import categorize_columns
from sentry.exceptions import InvalidSearchQuery
Expand All @@ -30,29 +31,31 @@


def query(
selected_columns,
query,
snuba_params,
equations=None,
orderby=None,
offset=None,
limit=50,
referrer=None,
auto_fields=False,
auto_aggregations=False,
include_equation_fields=False,
allow_metric_aggregates=False,
use_aggregate_conditions=False,
conditions=None,
functions_acl=None,
transform_alias_to_input_format=False,
sample=None,
has_metrics=False,
use_metrics_layer=False,
skip_tag_resolution=False,
on_demand_metrics_enabled=False,
selected_columns: list[str],
query: str,
snuba_params: SnubaParams,
equations: list[str] | None = None,
orderby: list[str] | None = None,
offset: int | None = None,
limit: int = 50,
referrer: str | None = None,
auto_fields: bool = False,
auto_aggregations: bool = False,
include_equation_fields: bool = False,
allow_metric_aggregates: bool = False,
use_aggregate_conditions: bool = False,
conditions: list[Condition] | None = None,
functions_acl: list[str] | None = None,
transform_alias_to_input_format: bool = False,
sample: float | None = None,
has_metrics: bool = False,
use_metrics_layer: bool = False,
skip_tag_resolution: bool = False,
extra_columns: list[Column] | None = None,
on_demand_metrics_enabled: bool = False,
on_demand_metrics_type: MetricSpecType | None = None,
fallback_to_transactions=False,
dataset: Dataset = Dataset.Events,
fallback_to_transactions: bool = False,
query_source: QuerySource | None = None,
) -> EventsResponse:
if not selected_columns:
Expand Down
Loading

0 comments on commit d2ace79

Please sign in to comment.