-
Notifications
You must be signed in to change notification settings - Fork 605
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
QP sidebar filters to active slice for group datasets #5177
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -9,7 +9,6 @@ | |
from bson import ObjectId | ||
from dataclasses import asdict, dataclass | ||
from datetime import date, datetime | ||
import math | ||
import typing as t | ||
|
||
import asyncio | ||
|
@@ -46,6 +45,7 @@ class LightningPathInput: | |
class LightningInput: | ||
dataset: str | ||
paths: t.List[LightningPathInput] | ||
slice: t.Optional[str] = None | ||
|
||
|
||
@gql.interface | ||
|
@@ -138,7 +138,13 @@ async def lightning_resolver( | |
for collection, sublist in zip(collections, queries) | ||
for item in sublist | ||
] | ||
result = await _do_async_pooled_queries(dataset, flattened) | ||
|
||
filter = ( | ||
{f"{dataset.group_field}.name": input.slice} | ||
if dataset.group_field and input.slice | ||
else None | ||
) | ||
result = await _do_async_pooled_queries(dataset, flattened, filter) | ||
|
||
results = [] | ||
offset = 0 | ||
|
@@ -293,10 +299,11 @@ async def _do_async_pooled_queries( | |
queries: t.List[ | ||
t.Tuple[AsyncIOMotorCollection, t.Union[DistinctQuery, t.List[t.Dict]]] | ||
], | ||
filter: t.Optional[t.Mapping[str, str]], | ||
): | ||
return await asyncio.gather( | ||
*[ | ||
_do_async_query(dataset, collection, query) | ||
_do_async_query(dataset, collection, query, filter) | ||
for collection, query in queries | ||
] | ||
) | ||
|
@@ -306,25 +313,31 @@ async def _do_async_query( | |
dataset: fo.Dataset, | ||
collection: AsyncIOMotorCollection, | ||
query: t.Union[DistinctQuery, t.List[t.Dict]], | ||
filter: t.Optional[t.Mapping[str, str]], | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
🛠️ Refactor suggestion: Add type checking before modifying the query list.
The current implementation assumes `query` is always a list when applying the filter. Add a type check to ensure safe operation:

    if filter:
        if not isinstance(query, list):
            raise TypeError("Expected query to be a list for filter application")
        query.insert(0, {"$match": filter})

Also applies to: lines 324-325. |
||
): | ||
if isinstance(query, DistinctQuery): | ||
if query.has_list and not query.filters: | ||
return await _do_distinct_query(collection, query) | ||
return await _do_distinct_query(collection, query, filter) | ||
|
||
return await _do_distinct_pipeline(dataset, collection, query, filter) | ||
|
||
return await _do_distinct_pipeline(dataset, collection, query) | ||
if filter: | ||
query.insert(0, {"$match": filter}) | ||
|
||
return [i async for i in collection.aggregate(query)] | ||
|
||
|
||
async def _do_distinct_query( | ||
collection: AsyncIOMotorCollection, query: DistinctQuery | ||
collection: AsyncIOMotorCollection, | ||
query: DistinctQuery, | ||
filter: t.Optional[t.Mapping[str, str]], | ||
): | ||
match = None | ||
if query.search: | ||
match = query.search | ||
|
||
try: | ||
result = await collection.distinct(query.path) | ||
result = await collection.distinct(query.path, filter) | ||
except: | ||
# too many results | ||
return None | ||
|
@@ -350,12 +363,16 @@ async def _do_distinct_pipeline( | |
dataset: fo.Dataset, | ||
collection: AsyncIOMotorCollection, | ||
query: DistinctQuery, | ||
filter: t.Optional[t.Mapping[str, str]], | ||
): | ||
pipeline = [] | ||
if filter: | ||
pipeline.append({"$match": filter}) | ||
|
||
if query.filters: | ||
pipeline += get_view(dataset, filters=query.filters)._pipeline() | ||
|
||
pipeline += [{"$sort": {query.path: 1}}] | ||
pipeline.append({"$sort": {query.path: 1}}) | ||
|
||
if query.search: | ||
if query.is_object_id_field: | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Remove debugging console.log statement
Debug logging should not be committed to production code.
Apply this diff to remove the debugging statement:
📝 Committable suggestion