Skip to content

Commit

Permalink
fix(visibility): Unbound errors for visibility files (#29330)
Browse files Browse the repository at this point in the history
- Because of upcoming changes to CI, these unbound errors will become
  blockers; fixed everything under visibility's codeowners
  • Loading branch information
wmak authored Oct 14, 2021
1 parent 152dc16 commit 84d39ba
Show file tree
Hide file tree
Showing 10 changed files with 38 additions and 21 deletions.
5 changes: 4 additions & 1 deletion src/sentry/data_export/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,8 @@ def get_processor(data_export, environment_id):
discover_query=data_export.query_info,
organization_id=data_export.organization_id,
)
else:
raise ExportError(f"No processor found for this query type: {data_export.query_type}")
return processor
except ExportError as error:
error_str = str(error)
Expand All @@ -234,6 +236,8 @@ def process_rows(processor, data_export, batch_size, offset):
rows = process_issues_by_tag(processor, batch_size, offset)
elif data_export.query_type == ExportQueryType.DISCOVER:
rows = process_discover(processor, batch_size, offset)
else:
raise ExportError(f"No processor found for this query type: {data_export.query_type}")
return rows
except ExportError as error:
error_str = str(error)
Expand Down Expand Up @@ -313,7 +317,6 @@ def merge_export_blobs(data_export_id, **kwargs):
if data_export.user.email:
user["email"] = data_export.user.email
scope.user = user
scope.user = user
scope.set_tag("organization.slug", data_export.organization.slug)
scope.set_tag("export.type", ExportQueryType.as_str(data_export.query_type))
scope.set_extra("export.query", data_export.query_info)
Expand Down
3 changes: 2 additions & 1 deletion src/sentry/search/events/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -543,6 +543,7 @@ def resolve_field_list(
if "project.id" not in fields:
fields.append("project.id")

field = None
for field in fields:
if isinstance(field, str) and field.strip() == "":
continue
Expand All @@ -559,7 +560,7 @@ def resolve_field_list(
aggregate_fields[format_column_as_key(function.aggregate[1])].add(field)

# Only auto aggregate when there's one other so the group by is not unexpectedly changed
if auto_aggregations and snuba_filter.having and len(aggregations) > 0:
if auto_aggregations and snuba_filter.having and len(aggregations) > 0 and field is not None:
for agg in snuba_filter.condition_aggregates:
if agg not in snuba_filter.aliases:
function = resolve_field(agg, snuba_filter.params, functions_acl)
Expand Down
7 changes: 5 additions & 2 deletions src/sentry/search/events/filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -757,6 +757,8 @@ def convert_search_boolean_to_snuba_query(terms, params=None):
# start or end a query with an operator.
prev = None
new_terms = []
term = None

for term in terms:
if prev:
if SearchBoolean.is_operator(prev) and SearchBoolean.is_operator(term):
Expand All @@ -772,7 +774,7 @@ def convert_search_boolean_to_snuba_query(terms, params=None):
if term != SearchBoolean.BOOLEAN_AND:
new_terms.append(term)
prev = term
if SearchBoolean.is_operator(term):
if term is not None and SearchBoolean.is_operator(term):
raise InvalidSearchQuery(f"Condition is missing on the right side of '{term}' operator")
terms = new_terms

Expand Down Expand Up @@ -1107,6 +1109,7 @@ def resolve_boolean_conditions(
# start or end a query with an operator.
prev = None
new_terms = []
term = None
for term in terms:
if prev:
if SearchBoolean.is_operator(prev) and SearchBoolean.is_operator(term):
Expand All @@ -1124,7 +1127,7 @@ def resolve_boolean_conditions(

prev = term

if SearchBoolean.is_operator(term):
if term is not None and SearchBoolean.is_operator(term):
raise InvalidSearchQuery(f"Condition is missing on the right side of '{term}' operator")
terms = new_terms

Expand Down
34 changes: 18 additions & 16 deletions tests/sentry/snuba/test_discover.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ def test_using_project_and_project_name(self):

def test_missing_project(self):
project_ids = []
other_project = None
for project_name in ["a" * 32, "z" * 32, "m" * 32]:
other_project = self.create_project(organization=self.organization, slug=project_name)
project_ids.append(other_project.id)
Expand All @@ -174,7 +175,8 @@ def test_missing_project(self):
)

# delete the last project so it's missing
other_project.delete()
if other_project is not None:
other_project.delete()

for use_snql in [False, True]:
result = discover.query(
Expand Down Expand Up @@ -2171,22 +2173,22 @@ def test_array_fields(self):
assert len(data[0]["stack.filename"]) == len(expected_filenames), use_snql
assert sorted(data[0]["stack.filename"]) == expected_filenames, use_snql

result = discover.query(
selected_columns=["stack.filename"],
query="stack.filename:[raven.js]",
params={
"organization_id": self.organization.id,
"project_id": [self.project.id],
"start": before_now(minutes=12),
"end": before_now(minutes=8),
},
use_snql=use_snql,
)
result = discover.query(
selected_columns=["stack.filename"],
query="stack.filename:[raven.js]",
params={
"organization_id": self.organization.id,
"project_id": [self.project.id],
"start": before_now(minutes=12),
"end": before_now(minutes=8),
},
use_snql=use_snql,
)

data = result["data"]
assert len(data) == 1
assert len(data[0]["stack.filename"]) == len(expected_filenames)
assert sorted(data[0]["stack.filename"]) == expected_filenames
data = result["data"]
assert len(data) == 1
assert len(data[0]["stack.filename"]) == len(expected_filenames)
assert sorted(data[0]["stack.filename"]) == expected_filenames

def test_orderby_field_alias(self):
data = load_data("android-ndk", timestamp=before_now(minutes=10))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def test_key_transaction_without_feature(self):
},
format="json",
)
assert response.status_code == 404, response.content
assert response.status_code == 404, response.content

def test_get_key_transaction_multiple_projects(self):
project = self.create_project(name="qux", organization=self.org)
Expand Down
1 change: 1 addition & 0 deletions tests/snuba/api/endpoints/test_group_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ def test_with_first_last_release(self):
data={"release": "1.1", "timestamp": iso_format(before_now(minutes=2))},
project_id=self.project.id,
)
event = None
for timestamp in last_release.values():
event = self.store_event(
data={"release": "1.0a", "timestamp": iso_format(timestamp)},
Expand Down
4 changes: 4 additions & 0 deletions tests/snuba/api/endpoints/test_organization_events_meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,7 @@ def setUp(self):
)

def test_get_baseline_simple(self):
data = {}
for index, event_id in enumerate(["a" * 32, "b" * 32, "c" * 32]):
data = self.prototype.copy()
data["start_timestamp"] = iso_format(before_now(minutes=2 + index))
Expand All @@ -226,6 +227,7 @@ def test_get_baseline_simple(self):
assert data["project"] == self.project.slug

def test_get_baseline_duration_tie(self):
data = {}
for index, event_id in enumerate(
["b" * 32, "a" * 32]
): # b then a so we know its not id breaking the tie
Expand All @@ -249,6 +251,7 @@ def test_get_baseline_duration_tie(self):
assert data["p50"] == 60000

def test_get_baseline_duration_and_timestamp_tie(self):
data = {}
for event_id in ["b" * 32, "a" * 32]: # b then a so we know its not id breaking the tie
data = self.prototype.copy()
data["start_timestamp"] = iso_format(before_now(minutes=2))
Expand Down Expand Up @@ -293,6 +296,7 @@ def test_get_baseline_with_computed_value(self):
assert data["p50"] == "80000"

def test_get_baseline_with_different_function(self):
data = {}
for index, event_id in enumerate(["a" * 32, "b" * 32]):
data = self.prototype.copy()
data["start_timestamp"] = iso_format(before_now(minutes=2 + index))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1015,6 +1015,7 @@ def setUp(self):
self.events = []
for index, event_data in enumerate(self.event_data):
data = event_data["data"].copy()
event = {}
for i in range(event_data["count"]):
data["event_id"] = f"{index}{i}" * 16
event = self.store_event(data, project_id=event_data["project"].id)
Expand Down
1 change: 1 addition & 0 deletions tests/snuba/api/endpoints/test_organization_group_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -2654,6 +2654,7 @@ def test_snooze_count(self):
assert response.data["statusDetails"]["actor"]["id"] == str(self.user.id)

def test_snooze_user_count(self):
event = {}
for i in range(10):
event = self.store_event(
data={
Expand Down
1 change: 1 addition & 0 deletions tests/snuba/api/endpoints/test_project_group_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -987,6 +987,7 @@ def test_snooze_count(self):
assert response.data["statusDetails"]["actor"]["id"] == str(self.user.id)

def test_snooze_user_count(self):
event = {}
for i in range(10):
event = self.store_event(
data={
Expand Down

0 comments on commit 84d39ba

Please sign in to comment.