Skip to content

Commit

Permalink
Expand dotted keys in mappings used as cursors. (#8568)
Browse files Browse the repository at this point in the history
  • Loading branch information
pchauhan-qlogic authored and tseaver committed Aug 7, 2019
1 parent 1d5ede5 commit 799c1e5
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 1 deletion.
7 changes: 6 additions & 1 deletion firestore/google/cloud/firestore_v1/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -666,7 +666,12 @@ def _normalize_cursor(self, cursor, orders):
data = document_fields
for order_key in order_keys:
try:
values.append(field_path_module.get_nested_value(order_key, data))
if order_key in data:
values.append(data[order_key])
else:
values.append(
field_path_module.get_nested_value(order_key, data)
)
except KeyError:
msg = _MISSING_ORDER_BY.format(order_key, data)
raise ValueError(msg)
Expand Down
37 changes: 37 additions & 0 deletions firestore/tests/system/test_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -611,6 +611,43 @@ def test_query_stream(client, cleanup):
assert value["b"] == 2


def test_query_with_order_dot_key(client, cleanup):
    """System test: order-by and cursors work with dotted field paths.

    Seeds 101 documents, orders by the nested field ``wordcount.page1``,
    then verifies that ``start_after`` accepts the cursor expressed both as
    a nested mapping and as a flat dict keyed by the dotted path, yielding
    the same page of results.
    """
    db = client
    collection = db.collection("collek" + unique_resource_id("-"))
    # Seed in descending order so the query's ascending sort is meaningful.
    for idx in reversed(range(101)):
        document = collection.document("test_{:09d}".format(idx))
        document.set({"count": 10 * idx, "wordcount": {"page1": idx * 10 + 100}})
        cleanup(document.delete)

    # First page: the three smallest wordcount.page1 values.
    first_page = collection.order_by("wordcount.page1").limit(3)
    page1_values = [
        snap.to_dict()["wordcount"]["page1"] for snap in first_page.stream()
    ]
    assert page1_values == [100, 110, 120]

    # Capture the last value of the first page to use as a cursor.
    for snap in collection.order_by("wordcount.page1").limit(3).stream():
        last_value = snap.get("wordcount.page1")

    expected_page = [
        {u"count": 30, u"wordcount": {u"page1": 130}},
        {u"count": 40, u"wordcount": {u"page1": 140}},
        {u"count": 50, u"wordcount": {u"page1": 150}},
    ]

    # Cursor expressed as a nested mapping.
    nested_cursor = {"wordcount": {"page1": last_value}}
    after_nested = list(
        collection.order_by("wordcount.page1")
        .start_after(nested_cursor)
        .limit(3)
        .stream()
    )
    assert [snap.to_dict() for snap in after_nested] == expected_page

    # Cursor expressed as a flat dict with a dotted key — must page identically.
    dotted_cursor = {"wordcount.page1": last_value}
    after_dotted = list(
        collection.order_by("wordcount.page1")
        .start_after(dotted_cursor)
        .limit(3)
        .stream()
    )
    assert [snap.to_dict() for snap in after_dotted] == expected_page


def test_query_unary(client, cleanup):
collection_name = "unary" + UNIQUE_RESOURCE_ID
collection = client.collection(collection_name)
Expand Down
10 changes: 10 additions & 0 deletions firestore/tests/unit/v1/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -808,6 +808,16 @@ def test__normalize_cursor_as_dict_hit(self):

self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))

def test__normalize_cursor_as_dict_with_dot_key_hit(self):
    """A dotted key in a cursor dict should match the dotted order-by path."""
    query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
    cursor = ({"b.a": 1}, True)

    self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))

def test__normalize_cursor_as_dict_with_inner_data_hit(self):
    """A nested mapping in a cursor dict should resolve via the field path."""
    query = self._make_one(mock.sentinel.parent).order_by("b.a", "ASCENDING")
    cursor = ({"b": {"a": 1}}, True)

    self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True))

def test__normalize_cursor_as_snapshot_hit(self):
values = {"b": 1}
docref = self._make_docref("here", "doc_id")
Expand Down

0 comments on commit 799c1e5

Please sign in to comment.