Skip to content

Commit

Permalink
feat(ddm): Add default limit of 20 to the metrics data endpoint (#63014)
Browse files Browse the repository at this point in the history
  • Loading branch information
iambriccardo authored Jan 12, 2024
1 parent dc749c7 commit 718b1d1
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 8 deletions.
7 changes: 4 additions & 3 deletions src/sentry/api/endpoints/organization_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,10 @@ class OrganizationMetricsDataEndpoint(OrganizationEndpoint):
Based on `OrganizationSessionsEndpoint`.
"""

# Number of groups returned for each page (applies to old endpoint).
default_per_page = 50
# Number of groups returned (applies to new endpoint).
default_limit = 20

def _new_get(self, request: Request, organization) -> Response:
# We first parse the interval and date, since this is dependent on the query params.
Expand All @@ -169,8 +172,6 @@ def _new_get(self, request: Request, organization) -> Response:
start, end = get_date_range_from_params(request.GET)

try:
limit = request.GET.get("limit")

results = run_metrics_query(
fields=request.GET.getlist("field", []),
interval=interval,
Expand All @@ -184,7 +185,7 @@ def _new_get(self, request: Request, organization) -> Response:
query=request.GET.get("query"),
group_bys=request.GET.getlist("groupBy"),
order_by=request.GET.get("orderBy"),
limit=int(limit) if limit else None,
limit=int(request.GET.get("limit", self.default_limit)),
)
except InvalidMetricsQueryError as e:
return Response(status=400, data={"detail": str(e)})
Expand Down
28 changes: 23 additions & 5 deletions src/sentry/sentry_metrics/querying/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from sentry.snuba.dataset import Dataset
from sentry.snuba.metrics import to_intervals
from sentry.snuba.metrics_layer.query import run_query
from sentry.utils import metrics
from sentry.utils.snuba import SnubaError

# Snuba can return at most 10,000 rows.
Expand Down Expand Up @@ -551,6 +552,8 @@ def __init__(self, organization: Organization, referrer: str):
self._interval_choices = sorted(DEFAULT_QUERY_INTERVALS)
# List of queries scheduled for execution.
self._scheduled_queries: List[ExecutableQuery] = []
# Tracks the number of queries that have been executed (for measuring purposes).
self._number_of_executed_queries = 0

def _build_request(self, query: MetricsQuery) -> Request:
"""
Expand Down Expand Up @@ -609,6 +612,7 @@ def _execute(
order_by_direction
)

self._number_of_executed_queries += 1
totals_result = run_query(
request=self._build_request(
totals_executable_query.to_totals_query().metrics_query
Expand All @@ -629,6 +633,7 @@ def _execute(
_extract_groups_from_seq(totals_result["data"])
)

self._number_of_executed_queries += 1
series_result = run_query(
request=self._build_request(series_executable_query.metrics_query)
)
Expand Down Expand Up @@ -734,6 +739,10 @@ def _serial_execute(self) -> Sequence[QueryResult]:

results = [reference_query_result]
reference_groups = reference_query_result.groups
metrics.distribution(
key="ddm.metrics_api.groups_cardinality", value=len(reference_groups)
)

for query in self._scheduled_queries:
query_result = self._execute(
executable_query=query.add_group_filters(reference_groups),
Expand All @@ -759,7 +768,12 @@ def _serial_execute(self) -> Sequence[QueryResult]:
return self._serial_execute()

def execute(self) -> Sequence[QueryResult]:
return self._serial_execute()
results = self._serial_execute()
metrics.distribution(
key="ddm.metrics_api.queries_executed", value=self._number_of_executed_queries
)

return results

def schedule(
self,
Expand Down Expand Up @@ -1019,10 +1033,14 @@ def run_metrics_query(
f"The supplied orderBy {order_by} is not matching with any field of the query"
)

# Iterating over each result.
results = []
for result in executor.execute():
results.append(result)
with metrics.timer(
key="ddm.metrics_api.queries_execution_time",
tags={"with_order_by": (order_by is not None), "with_group_by": (group_bys is not None)},
):
# Iterating over each result.
results = []
for result in executor.execute():
results.append(result)

# We translate the result back to the pre-existing format.
return _translate_query_results(execution_results=results)

0 comments on commit 718b1d1

Please sign in to comment.