From 88ae78dde05fc724eb6b96fd6c8b92382b2df33c Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 7 Apr 2023 15:53:11 -0400 Subject: [PATCH 1/3] Support generic sorting PBENCH-1126 With pagination, a client (e.g., the dashboard) can't rely on client-side column sorting. Instead, add generalize sorting to `GET /datasets`, allowing the returned datasets to be sorted by any column or metadata value, either ascending (default) or descending. `GET /api/v1/datasets?sort=user.dashboard.favorite:desc,dataset.uploaded` will return all accessible datasets, sorted first by whether the authenticated user has marked the dataset "favorite" and second by the upload timestamp. (All "favorited" datasets will appear first, in upload order, followed by all "non- favorited" datasets in upload order.) --- docs/API/V1/list.md | 22 ++++- .../server/api/resources/datasets_list.py | 73 +++++++++++++-- lib/pbench/test/unit/server/conftest.py | 88 ++++++++++--------- .../test/unit/server/test_datasets_list.py | 82 ++++++++++++++++- 4 files changed, 213 insertions(+), 52 deletions(-) diff --git a/docs/API/V1/list.md b/docs/API/V1/list.md index 236fe3fdc5..7cb6872300 100644 --- a/docs/API/V1/list.md +++ b/docs/API/V1/list.md @@ -6,7 +6,9 @@ can only list datasets with access `public`.) The collection of datasets may be filtered using any combination of a number of query parameters, including `owner`, `access`, `name` substring, date range, -and arbitrary metadata filter expressions. +and arbitrary metadata filter expressions. The selected datasets may be sorted +by any metadata key value in either ascending or descending order. Multiple +sort parameters will be processed in order. Large collections can be paginated for efficiency using the `limit` and `offset` query parameters. @@ -113,6 +115,24 @@ with a paginated display or to limit data transfer requirements. Select only datasets owned by the specified username. Unless the username matches the authenticated user, only "public" datasets can be selected. +`sort` sort expression \ +Sort the returned datasets by one or more sort expressions. You can separate +multiple expressions using comma lists, or across separate `sort` query +parameters, which will be processed in order. Any Metadata namespace key can +be specified. + +Specify a sort order using the keywords `asc` (ascending) or `desc` +(descending), separated from the key name with a colon (`:`). For example, +`dataset.name:asc` or `dataset.metalog.pbench.script:desc`. The default is +"ascending" if no order is specified. If no sort expressions are specified, +datasets are returned sorted by `dataset.resource_id`. + +For example, `GET /api/v1/datasets?sort=global.dashboard.seen:desc,dataset.name` +will return selected datasets sorted first in descending order based on whether +the dataset has been marked "seen" by the dashboard, and secondly sorted by the +dataset name. The Pbench Dashboard stores `global.dashboard.seen` as a `boolean` +value, so in this case `true` values will appear before `false` values. + `start` date/time \ Select only datasets created on or after the specified time. Time should be specified in ISO standard format, as `YYYY-MM-DDThh:mm:ss.ffffff[+|-]HH:MM`. 
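As a usage illustration of the `sort` query parameter documented above, here is a minimal client-side sketch. The server URL, the bearer token, and the use of the Python `requests` library are assumptions made for this example only; the query parameters match the API described in this patch, and the fields read from the response follow the paginated listing format the endpoint returns.

    import requests

    # Hypothetical deployment details -- substitute real values.
    BASE_URL = "https://pbench.example.com/api/v1"
    TOKEN = "<api-token>"

    # Favorites first (descending on the boolean), then oldest upload first.
    # Multiple sort terms may be given as one comma-separated list or as
    # repeated "sort" query parameters; they are applied in order.
    response = requests.get(
        f"{BASE_URL}/datasets",
        headers={"Authorization": f"Bearer {TOKEN}"},
        params={
            "access": "public",
            "sort": "user.dashboard.favorite:desc,dataset.uploaded",
            "metadata": "dataset.uploaded",
        },
    )
    response.raise_for_status()
    for dataset in response.json().get("results", []):
        print(dataset["resource_id"], dataset["name"], dataset.get("metadata"))
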
diff --git a/lib/pbench/server/api/resources/datasets_list.py b/lib/pbench/server/api/resources/datasets_list.py index 410d9d0c0d..0ebaa87057 100644 --- a/lib/pbench/server/api/resources/datasets_list.py +++ b/lib/pbench/server/api/resources/datasets_list.py @@ -5,7 +5,7 @@ from flask import current_app from flask.json import jsonify from flask.wrappers import Request, Response -from sqlalchemy import and_, cast, func, or_, String +from sqlalchemy import and_, asc, cast, func, desc, or_, String from sqlalchemy.exc import ProgrammingError, StatementError from sqlalchemy.orm import aliased, Query from sqlalchemy.sql.expression import Alias @@ -80,6 +80,12 @@ def __init__(self, config: PbenchServerConfig): string_list=",", metalog_ok=True, ), + Parameter( + "sort", + ParamType.LIST, + element_type=ParamType.STRING, + string_list=",", + ), ), authorization=ApiAuthorizationType.USER_ACCESS, ), @@ -105,7 +111,7 @@ def get_paginated_obj( start to narrow down the result. """ paginated_result = {} - query = query.order_by(Dataset.resource_id).distinct() + query = query.distinct() total_count = query.count() # Shift the query search by user specified offset value, @@ -222,7 +228,7 @@ def filter_query( k, v = kw.split(":", maxsplit=1) except ValueError: raise APIAbort( - HTTPStatus.BAD_REQUEST, f"filter {kw!r} must have the form 'k=v'" + HTTPStatus.BAD_REQUEST, f"filter {kw!r} must have the form 'k:v'" ) if k.startswith("^"): combine_or = True @@ -372,7 +378,9 @@ def daterange(self, query: Query) -> JSONOBJECT: else: return {} - def datasets(self, request: Request, json: JSONOBJECT, query: Query) -> JSONOBJECT: + def datasets( + self, request: Request, aliases: dict[str, Any], json: JSONOBJECT, query: Query + ) -> JSONOBJECT: """Gather and paginate the selected datasets Run the query we've compiled, with pagination limits applied; collect @@ -380,12 +388,67 @@ def datasets(self, request: Request, json: JSONOBJECT, query: Query) -> JSONOBJE Args: request: The HTTP Request object + aliases: Map of join column aliases for each Metadata namespace json: The JSON query parameters query: The basic filtered SQLAlchemy query object Returns: The paginated dataset listing """ + + # Process a possible list of sort terms. By default, we sort by the + # dataset resource_id. + sorters = [] + for sort in json.get("sort", ["dataset.resource_id"]): + if ":" not in sort: + k = sort + order = asc + else: + try: + k, o = sort.split(":", maxsplit=1) + except ValueError: + raise APIAbort( + HTTPStatus.BAD_REQUEST, + f"sort {sort!r} must have the form 'k[:o]'", + ) + if o.lower() == "asc": + order = asc + elif o.lower() == "desc": + order = desc + else: + raise APIAbort( + HTTPStatus.BAD_REQUEST, + f"sort order in {sort!r} must be 'asc' or 'desc'", + ) + + if not Metadata.is_key_path(k, Metadata.METADATA_KEYS, metalog_key_ok=True): + raise APIAbort(HTTPStatus.BAD_REQUEST, str(MetadataBadKey(k))) + keys = k.split(".") + native_key = keys.pop(0).lower() + sorter = None + if native_key == Metadata.DATASET: + second = keys[0].lower() + # The dataset namespace requires special handling because + # "dataset.metalog" is really a special native key space + # named "metalog", while other "dataset" sub-keys are primary + # columns in the Dataset table. 
+ if second == Metadata.METALOG: + native_key = keys.pop(0).lower() + else: + try: + c = getattr(Dataset, second) + except AttributeError as e: + raise APIAbort( + HTTPStatus.BAD_REQUEST, str(MetadataBadKey(k)) + ) from e + sorter = order(c) + if sorter is None: + sorter = order(aliases[native_key].value[keys]) + sorters.append(sorter) + + # Apply our list of sort terms + query = query.order_by(*sorters) + try: datasets, paginated_result = self.get_paginated_obj( query=query, json=json, url=request.url @@ -534,5 +597,5 @@ def _get( result.update(self.daterange(query)) done = True if not done: - result = self.datasets(request, json, query) + result = self.datasets(request, aliases, json, query) return jsonify(result) diff --git a/lib/pbench/test/unit/server/conftest.py b/lib/pbench/test/unit/server/conftest.py index d12217f615..5b401b8226 100644 --- a/lib/pbench/test/unit/server/conftest.py +++ b/lib/pbench/test/unit/server/conftest.py @@ -386,10 +386,10 @@ def more_datasets( test 20 private 1970-01-01:00:42 fio_1 3 public 1978-06-26:08:00 fio_2 20 public 2022-01-01:00:00 - uperf_1 20 private 1978-06-26:08:00 - uperf_2 20 private 1978-06-26:08:00 - uperf_3 20 private 1978-06-26:08:00 - uperf_4 20 private 1978-06-26:08:00 + uperf_1 20 private 1978-06-26:08:10 + uperf_2 20 private 1978-06-26:09:01 + uperf_3 20 private 1978-06-26:09:30 + uperf_4 20 private 1978-06-26:10:00 Args: client: Provide a Flask API client @@ -399,44 +399,48 @@ def more_datasets( attach_dataset: Provide some datasets create_user: Create the "test" user """ - with freeze_time("1978-06-26 08:00:00"): - Dataset( - owner=create_drb_user, - name="fio_1", - access="public", - resource_id="random_md5_string3", - ).add() - Dataset( - owner=create_user, - uploaded=datetime.datetime(2022, 1, 1), - name="fio_2", - access="public", - resource_id="random_md5_string4", - ).add() - Dataset( - owner=create_user, - name="uperf_1", - access="private", - resource_id="random_md5_string5", - ).add() - Dataset( - owner=create_user, - name="uperf_2", - access="private", - resource_id="random_md5_string6", - ).add() - Dataset( - owner=create_user, - name="uperf_3", - access="private", - resource_id="random_md5_string7", - ).add() - Dataset( - owner=create_user, - name="uperf_4", - access="private", - resource_id="random_md5_string8", - ).add() + Dataset( + owner=create_drb_user, + uploaded=datetime.datetime(1978, 6, 26, 8, 0, 0, 0), + name="fio_1", + access="public", + resource_id="random_md5_string3", + ).add() + Dataset( + owner=create_user, + uploaded=datetime.datetime(2022, 1, 1), + name="fio_2", + access="public", + resource_id="random_md5_string4", + ).add() + Dataset( + owner=create_user, + uploaded=datetime.datetime(1978, 6, 26, 8, 1, 0, 0), + name="uperf_1", + access="private", + resource_id="random_md5_string5", + ).add() + Dataset( + owner=create_user, + uploaded=datetime.datetime(1978, 6, 26, 9, 0, 0, 0), + name="uperf_2", + access="private", + resource_id="random_md5_string6", + ).add() + Dataset( + owner=create_user, + uploaded=datetime.datetime(1978, 6, 26, 9, 30, 0, 0), + name="uperf_3", + access="private", + resource_id="random_md5_string7", + ).add() + Dataset( + owner=create_user, + uploaded=datetime.datetime(1978, 6, 26, 10, 0, 0, 0), + name="uperf_4", + access="private", + resource_id="random_md5_string8", + ).add() @pytest.fixture() diff --git a/lib/pbench/test/unit/server/test_datasets_list.py b/lib/pbench/test/unit/server/test_datasets_list.py index 5bd460c70e..042477aaae 100644 --- 
a/lib/pbench/test/unit/server/test_datasets_list.py +++ b/lib/pbench/test/unit/server/test_datasets_list.py @@ -5,7 +5,7 @@ import pytest import requests -from sqlalchemy import and_ +from sqlalchemy import and_, desc from sqlalchemy.exc import ProgrammingError from sqlalchemy.orm import aliased, Query @@ -14,6 +14,7 @@ from pbench.server.api.resources.datasets_list import DatasetsList, urlencode_json from pbench.server.database.database import Database from pbench.server.database.models.datasets import Dataset, Metadata +from pbench.server.database.models.users import User from pbench.test.unit.server import DRB_USER_ID FLATTEN = re.compile(r"[\n\s]+") @@ -147,7 +148,7 @@ def get_results(self, name_list: list[str], query: JSON, server_config) -> JSON: paginated_name_list = name_list[offset:] next_url = "" - for name in sorted(paginated_name_list): + for name in paginated_name_list: dataset = Dataset.query(name=name) results.append( { @@ -175,8 +176,8 @@ def compare_results( expected = self.get_results(name_list, query, server_config) for k, v in result.items(): if k == "results": - assert sorted(v, key=lambda d: d["resource_id"]) == sorted( - expected[k], key=lambda d: d["resource_id"] + assert ( + v == expected[k] ), f"Actual {k}={v} doesn't match expected {expected[k]}" else: assert ( @@ -810,3 +811,76 @@ def test_key_and_dates(self, query_as): }, }, } + + @pytest.mark.parametrize( + "sort,results", + [ + ( + "dataset.name", + ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], + ), + ( + "dataset.name:asc", + ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], + ), + ( + "dataset.name:desc", + ["uperf_4", "uperf_3", "uperf_2", "uperf_1", "test", "fio_2", "fio_1"], + ), + ( + "dataset.uploaded", + ["test", "fio_1", "uperf_1", "uperf_2", "uperf_3", "uperf_4", "fio_2"], + ), + ( + "dataset.uploaded:desc", + ["fio_2", "uperf_4", "uperf_3", "uperf_2", "uperf_1", "fio_1", "test"], + ), + ( + "dataset.metalog.run.controller", + ["test", "fio_1", "fio_2", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], + ), + ( + "global.test.sequence:desc", + ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], + ), + ( + "global.test.sequence", + ["uperf_4", "uperf_3", "uperf_2", "uperf_1", "test", "fio_2", "fio_1"], + ), + ( + "user.test.odd,global.test.sequence:desc", + ["fio_1", "test", "uperf_2", "uperf_4", "fio_2", "uperf_1", "uperf_3"], + ), + ( + "user.test.odd:desc,dataset.name:desc", + ["uperf_3", "uperf_1", "fio_2", "uperf_4", "uperf_2", "test", "fio_1"], + ), + ], + ) + def test_dataset_sort(self, server_config, query_as, sort, results): + """Test `datasets/list?sort` + + We want a couple of consistent values sequences to play with. We can + use the dataset.name and dataset.resource_id fields, but we want to + cross Metadata namespaces, so add "global" and "user" keys we can + order. 
+ + Args: + server_config: The PbenchServerConfig object + query_as: A fixture to provide a helper that executes the API call + login: The username as which to perform a query + query: A JSON representation of the query parameters (these will be + automatically supplemented with a metadata request term) + results: A list of the dataset names we expect to be returned + """ + + # Assign "sequence numbers" in the inverse order of name + test = User.query(username="test") + all = Database.db_session.query(Dataset).order_by(desc(Dataset.name)).all() + for i, d in enumerate(all): + odd = i & 1 + Metadata.setvalue(d, "global.test.sequence", i) + Metadata.setvalue(d, "user.test.odd", odd, user=test) + query = {"sort": sort, "metadata": ["dataset.uploaded"]} + result = query_as(query, "test", HTTPStatus.OK) + self.compare_results(result.json, results, query, server_config) From 4c183797c3a54cd718f588a5fbd5c6531b87a948 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 10 Apr 2023 11:14:28 -0400 Subject: [PATCH 2/3] Test cases and cleanup --- .../server/api/resources/datasets_list.py | 12 +--- lib/pbench/test/unit/server/conftest.py | 4 +- .../test/unit/server/test_datasets_list.py | 71 ++++++++++++++++++- 3 files changed, 73 insertions(+), 14 deletions(-) diff --git a/lib/pbench/server/api/resources/datasets_list.py b/lib/pbench/server/api/resources/datasets_list.py index 0ebaa87057..21b106e207 100644 --- a/lib/pbench/server/api/resources/datasets_list.py +++ b/lib/pbench/server/api/resources/datasets_list.py @@ -5,7 +5,7 @@ from flask import current_app from flask.json import jsonify from flask.wrappers import Request, Response -from sqlalchemy import and_, asc, cast, func, desc, or_, String +from sqlalchemy import and_, asc, cast, desc, func, or_, String from sqlalchemy.exc import ProgrammingError, StatementError from sqlalchemy.orm import aliased, Query from sqlalchemy.sql.expression import Alias @@ -404,13 +404,7 @@ def datasets( k = sort order = asc else: - try: - k, o = sort.split(":", maxsplit=1) - except ValueError: - raise APIAbort( - HTTPStatus.BAD_REQUEST, - f"sort {sort!r} must have the form 'k[:o]'", - ) + k, o = sort.split(":", maxsplit=1) if o.lower() == "asc": order = asc elif o.lower() == "desc": @@ -418,7 +412,7 @@ def datasets( else: raise APIAbort( HTTPStatus.BAD_REQUEST, - f"sort order in {sort!r} must be 'asc' or 'desc'", + f"The sort order in {sort!r} must be 'asc' or 'desc'", ) if not Metadata.is_key_path(k, Metadata.METADATA_KEYS, metalog_key_ok=True): diff --git a/lib/pbench/test/unit/server/conftest.py b/lib/pbench/test/unit/server/conftest.py index 5b401b8226..a1494c3e65 100644 --- a/lib/pbench/test/unit/server/conftest.py +++ b/lib/pbench/test/unit/server/conftest.py @@ -386,8 +386,8 @@ def more_datasets( test 20 private 1970-01-01:00:42 fio_1 3 public 1978-06-26:08:00 fio_2 20 public 2022-01-01:00:00 - uperf_1 20 private 1978-06-26:08:10 - uperf_2 20 private 1978-06-26:09:01 + uperf_1 20 private 1978-06-26:08:01 + uperf_2 20 private 1978-06-26:09:00 uperf_3 20 private 1978-06-26:09:30 uperf_4 20 private 1978-06-26:10:00 diff --git a/lib/pbench/test/unit/server/test_datasets_list.py b/lib/pbench/test/unit/server/test_datasets_list.py index 042477aaae..42114e8141 100644 --- a/lib/pbench/test/unit/server/test_datasets_list.py +++ b/lib/pbench/test/unit/server/test_datasets_list.py @@ -816,45 +816,65 @@ def test_key_and_dates(self, query_as): "sort,results", [ ( + # Simple sort by name "dataset.name", ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", 
"uperf_4"], ), ( + # Simple sort by name with explicit "ascending" order "dataset.name:asc", ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], ), ( + # Simple sort by name with "descending" order "dataset.name:desc", ["uperf_4", "uperf_3", "uperf_2", "uperf_1", "test", "fio_2", "fio_1"], ), ( + # Sort by date timestamp "dataset.uploaded", ["test", "fio_1", "uperf_1", "uperf_2", "uperf_3", "uperf_4", "fio_2"], ), ( + # Sort by date timestamp with descending order "dataset.uploaded:desc", ["fio_2", "uperf_4", "uperf_3", "uperf_2", "uperf_1", "fio_1", "test"], ), ( + # Sort by a "dataset.metalog" value "dataset.metalog.run.controller", ["test", "fio_1", "fio_2", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], ), ( + # Sort by a general global metadata value in descending order "global.test.sequence:desc", ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], ), ( + # Sprt by a general global metadata value in ascending order "global.test.sequence", ["uperf_4", "uperf_3", "uperf_2", "uperf_1", "test", "fio_2", "fio_1"], ), ( + # Sort two keys across distinct metadata namespaces asc/desc "user.test.odd,global.test.sequence:desc", ["fio_1", "test", "uperf_2", "uperf_4", "fio_2", "uperf_1", "uperf_3"], ), ( + # Sort two keys across distinct metadata namespaces desc/desc "user.test.odd:desc,dataset.name:desc", ["uperf_3", "uperf_1", "fio_2", "uperf_4", "uperf_2", "test", "fio_1"], ), + ( + # Sort by a JSON sub-object containing two keys ascending + "global.test", + ["uperf_4", "uperf_3", "uperf_2", "uperf_1", "test", "fio_2", "fio_1"], + ), + ( + # Sort by a JSON sub-object containing two keys descending + "global.test:desc", + ["fio_1", "fio_2", "test", "uperf_1", "uperf_2", "uperf_3", "uperf_4"], + ), ], ) def test_dataset_sort(self, server_config, query_as, sort, results): @@ -868,9 +888,7 @@ def test_dataset_sort(self, server_config, query_as, sort, results): Args: server_config: The PbenchServerConfig object query_as: A fixture to provide a helper that executes the API call - login: The username as which to perform a query - query: A JSON representation of the query parameters (these will be - automatically supplemented with a metadata request term) + sort: A JSON representation of the sort query parameter value results: A list of the dataset names we expect to be returned """ @@ -880,7 +898,54 @@ def test_dataset_sort(self, server_config, query_as, sort, results): for i, d in enumerate(all): odd = i & 1 Metadata.setvalue(d, "global.test.sequence", i) + Metadata.setvalue(d, "global.test.mcguffin", 100 - i) Metadata.setvalue(d, "user.test.odd", odd, user=test) query = {"sort": sort, "metadata": ["dataset.uploaded"]} result = query_as(query, "test", HTTPStatus.OK) self.compare_results(result.json, results, query, server_config) + + @pytest.mark.parametrize( + "sort,message", + [ + ( + # Specify a sort by a Dataset table column that doesn't exist + "dataset.noname", + "Metadata key 'dataset.noname' is not supported", + ), + ( + # Specify a sort using an undefined order keyword + "dataset.name:backwards", + "The sort order in 'dataset.name:backwards' must be 'asc' or 'desc'", + ), + ( + # Specify a sort using bad sort order syntax + "dataset.name:desc:", + "The sort order in 'dataset.name:desc:' must be 'asc' or 'desc'", + ), + ( + # Specify a sort using a bad metadata namespace + "xyzzy.uploaded", + "Metadata key 'xyzzy.uploaded' is not supported", + ), + ], + ) + def test_dataset_sort_errors(self, server_config, query_as, sort, message): + """Test 
`datasets/list?sort` error cases + + Args: + server_config: The PbenchServerConfig object + query_as: A fixture to provide a helper that executes the API call + sort: A JSON representation of the sort query parameter value + message: The expected error message + """ + + # Assign "sequence numbers" in the inverse order of name + test = User.query(username="test") + all = Database.db_session.query(Dataset).order_by(desc(Dataset.name)).all() + for i, d in enumerate(all): + odd = i & 1 + Metadata.setvalue(d, "global.test.sequence", i) + Metadata.setvalue(d, "user.test.odd", odd, user=test) + query = {"sort": sort} + result = query_as(query, "test", HTTPStatus.BAD_REQUEST) + assert result.json["message"] == message From da63dd0220b121244d360ce6d911346921cb85a1 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 10 Apr 2023 14:33:04 -0400 Subject: [PATCH 3/3] Minor cleanup --- lib/pbench/server/api/resources/datasets_list.py | 2 +- lib/pbench/test/unit/server/test_datasets_list.py | 12 ++---------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/lib/pbench/server/api/resources/datasets_list.py b/lib/pbench/server/api/resources/datasets_list.py index 21b106e207..58e1dae0ec 100644 --- a/lib/pbench/server/api/resources/datasets_list.py +++ b/lib/pbench/server/api/resources/datasets_list.py @@ -412,7 +412,7 @@ def datasets( else: raise APIAbort( HTTPStatus.BAD_REQUEST, - f"The sort order in {sort!r} must be 'asc' or 'desc'", + f"The sort order {o!r} for key {k!r} must be 'asc' or 'desc'", ) if not Metadata.is_key_path(k, Metadata.METADATA_KEYS, metalog_key_ok=True): diff --git a/lib/pbench/test/unit/server/test_datasets_list.py b/lib/pbench/test/unit/server/test_datasets_list.py index 42114e8141..2583d584f7 100644 --- a/lib/pbench/test/unit/server/test_datasets_list.py +++ b/lib/pbench/test/unit/server/test_datasets_list.py @@ -915,12 +915,12 @@ def test_dataset_sort(self, server_config, query_as, sort, results): ( # Specify a sort using an undefined order keyword "dataset.name:backwards", - "The sort order in 'dataset.name:backwards' must be 'asc' or 'desc'", + "The sort order 'backwards' for key 'dataset.name' must be 'asc' or 'desc'", ), ( # Specify a sort using bad sort order syntax "dataset.name:desc:", - "The sort order in 'dataset.name:desc:' must be 'asc' or 'desc'", + "The sort order 'desc:' for key 'dataset.name' must be 'asc' or 'desc'", ), ( # Specify a sort using a bad metadata namespace @@ -938,14 +938,6 @@ def test_dataset_sort_errors(self, server_config, query_as, sort, message): sort: A JSON representation of the sort query parameter value message: The expected error message """ - - # Assign "sequence numbers" in the inverse order of name - test = User.query(username="test") - all = Database.db_session.query(Dataset).order_by(desc(Dataset.name)).all() - for i, d in enumerate(all): - odd = i & 1 - Metadata.setvalue(d, "global.test.sequence", i) - Metadata.setvalue(d, "user.test.odd", odd, user=test) query = {"sort": sort} result = query_as(query, "test", HTTPStatus.BAD_REQUEST) assert result.json["message"] == message
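
For readers who want to see the ordering technique in isolation: the patch builds each ORDER BY term either from a native `Dataset` column or from a JSON path index into the appropriate Metadata namespace column, and wraps it in SQLAlchemy's `asc`/`desc`. Below is a simplified, self-contained sketch of that idea; the `Record` model, its column names, and the in-memory SQLite backend are invented for the example and do not mirror the Pbench models.

    from sqlalchemy import JSON, Column, Integer, String, asc, create_engine, desc
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Record(Base):
        # Toy stand-in for a dataset row carrying a JSON metadata document.
        __tablename__ = "records"
        id = Column(Integer, primary_key=True)
        name = Column(String)
        meta = Column(JSON)


    def order_clause(term: str):
        """Turn 'key.path[:asc|desc]' into an ORDER BY expression."""
        key, _, how = term.partition(":")
        order = desc if how.lower() == "desc" else asc
        path = key.split(".")
        if path == ["name"]:
            # A "native" table column, analogous to dataset.name in the patch.
            return order(Record.name)
        # Otherwise index into the JSON document, analogous to the patch's
        # aliases[native_key].value[keys] expression.
        expr = Record.meta[path[0]] if len(path) == 1 else Record.meta[tuple(path)]
        return order(expr)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all(
            [
                Record(name="fio_1", meta={"test": {"sequence": 2}}),
                Record(name="fio_2", meta={"test": {"sequence": 1}}),
                Record(name="uperf_1", meta={"test": {"sequence": 0}}),
            ]
        )
        session.commit()
        query = session.query(Record).order_by(
            order_clause("test.sequence:desc"), order_clause("name")
        )
        print([r.name for r in query])  # ['fio_1', 'fio_2', 'uperf_1']

The real implementation additionally validates each key against `Metadata.METADATA_KEYS`, special-cases the `dataset.metalog` namespace (which lives in its own Metadata key space rather than as a column of the Dataset table), and rejects a bad key or sort order with HTTP 400, as exercised by `test_dataset_sort_errors` above.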