Configurable batch size for JIRA._fetch_pages() and dependent methods (#1394)

* Add `default_batch_sizes` argument to the `JIRA` class to control batch sizes for `_fetch_pages()` and dependent methods (see the usage sketch below)
* Fix possible crash in 'search_assignable_users_for_issues'
* Add docs, fix tests, and fix a shallow-copy bug
* Fix typos in JIRA's `default_batch_sizes` docstring
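
For orientation, a minimal usage sketch of the new argument (the server URL, credentials, and JQL are placeholders; the batch sizes mirror the docstring example added in this commit):

```python
from jira import JIRA
from jira.resources import Issue, Resource

client = JIRA(
    server="https://jira.example.com",      # placeholder server
    basic_auth=("some.user", "api-token"),  # placeholder credentials
    # Fetch issues in batches of 500; mapping Resource to None lets the
    # Jira backend pick the batch size for every other resource type.
    default_batch_sizes={Issue: 500, Resource: None},
)

# With maxResults=False the client pages through all matching issues,
# requesting 500 per page instead of the library default of 100.
issues = client.search_issues("project = ABC ORDER BY created DESC", maxResults=False)
```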

___
Jannik Meinecke (<jannik.meinecke@mercedes-benz.com>) on behalf of MBition GmbH.
[Provider Information](https://github.com/mercedes-benz/foss/blob/master/PROVIDER_INFORMATION.md)
rynkk authored Jun 28, 2022
1 parent c6d59a1 commit 60b503a
Showing 6 changed files with 204 additions and 37 deletions.
2 changes: 2 additions & 0 deletions constraints.txt
@@ -83,6 +83,8 @@ packaging==21.3
# pytest
# pytest-sugar
# sphinx
parameterized==0.8.1
# via jira (setup.cfg)
parso==0.8.3
# via jedi
pickleshare==0.7.5
79 changes: 69 additions & 10 deletions jira/client.py
@@ -360,6 +360,9 @@ class JIRA:
# 'Expires': 'Thu, 01 Jan 1970 00:00:00 GMT'
"X-Atlassian-Token": "no-check",
},
"default_batch_size": {
Resource: 100,
},
}

checked_version = False
@@ -387,6 +390,7 @@ def __init__(
proxies: Any = None,
timeout: Optional[Union[Union[float, int], Tuple[float, float]]] = None,
auth: Tuple[str, str] = None,
default_batch_sizes: Optional[Dict[Type[Resource], Optional[int]]] = None,
):
"""Construct a Jira client instance.
@@ -462,11 +466,18 @@ def __init__(
proxies (Optional[Any]): Sets the proxies for the HTTP session.
auth (Optional[Tuple[str,str]]): Set a cookie auth token if this is required.
logging (bool): Determine whether or not logging should be enabled. (Default: True)
default_batch_sizes (Optional[Dict[Type[Resource], Optional[int]]]): Manually specify the batch sizes for
the paginated retrieval of different item types. `Resource` is used as a fallback for every item type not
specified. If an item type is mapped to `None`, no fallback occurs; instead, the JIRA backend will use its
default batch size. By default, all resources are queried in batches of 100. E.g., setting this to
``{Issue: 500, Resource: None}`` will make :py:meth:`search_issues` query issues in batches of 500, while
every other item type's batch size is controlled by the backend. (Default: None)
"""
# force a copy of the tuple to be used in __del__() because
# sys.version_info could have already been deleted in __del__()
self.sys_version_info = tuple(sys.version_info)

if options is None:
options = {}
if server and isinstance(server, dict):
@@ -486,7 +497,10 @@ def __init__(
LOG.setLevel(_logging.INFO if logging else _logging.CRITICAL)
self.log = LOG

self._options: Dict[str, Any] = copy.copy(JIRA.DEFAULT_OPTIONS)
self._options: Dict[str, Any] = copy.deepcopy(JIRA.DEFAULT_OPTIONS)

if default_batch_sizes:
self._options["default_batch_size"].update(default_batch_sizes)

if "headers" in options:
headers = copy.copy(options["headers"])
@@ -710,6 +724,8 @@ def _fetch_pages(
page_params["startAt"] = startAt
if maxResults:
page_params["maxResults"] = maxResults
elif batch_size := self._get_batch_size(item_type):
page_params["maxResults"] = batch_size

resource = self._get_json(request_path, params=page_params, base=base)
next_items_page = self._get_items_from_page(item_type, items_key, resource)
@@ -734,6 +750,13 @@
# If maxResults evaluates as False, get all items in batches
if not maxResults:
page_size = max_results_from_response or len(items)
if batch_size is not None and page_size < batch_size:
self.log.warning(
"'batch_size' set to %s, but only received %s items in batch. Falling back to %s.",
batch_size,
page_size,
page_size,
)
page_start = (startAt or start_at_from_response or 0) + page_size
if (
async_class is not None
@@ -765,6 +788,9 @@
and (total is None or page_start < total)
and len(next_items_page) == page_size
):
page_params = (
params.copy() if params else {}
) # Hack necessary for mock-calls to not change
page_params["startAt"] = page_start
page_params["maxResults"] = page_size
resource = self._get_json(
@@ -805,6 +831,25 @@ def _get_items_from_page(
# improving the error text so we know why it happened
raise KeyError(str(e) + " : " + json.dumps(resource))

def _get_batch_size(self, item_type: Type[ResourceType]) -> Optional[int]:
"""
Return the batch size for the given resource type from the options.
Check whether the specified item type has a mapped batch size; otherwise fall back to the batch size assigned to `Resource`; otherwise let the JIRA backend determine the batch size.
Returns:
Optional[int]: The batch size to use. When the configured batch size is None, the batch size is determined by the JIRA backend.
"""
batch_sizes: Dict[Type[Resource], Optional[int]] = self._options[
"default_batch_size"
]
try:
item_type_batch_size = batch_sizes[item_type]
except KeyError:
# No mapping for this item type -> fall back to the `Resource` entry (None means the JIRA backend determines the batch size)
item_type_batch_size = batch_sizes.get(Resource, None)
return item_type_batch_size
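
To make the fallback order above concrete, a small sketch (the item types and sizes are illustrative, and `_get_batch_size` is a private helper, so this is for explanation only; `get_server_info=False` keeps the construction offline):

```python
from jira import JIRA
from jira.resources import Dashboard, Issue, Resource

client = JIRA(
    server="https://jira.example.com",  # placeholder URL, not contacted here
    get_server_info=False,
    default_batch_sizes={Issue: 500, Resource: None},
)

client._get_batch_size(Issue)      # 500  -> explicit mapping for Issue wins
client._get_batch_size(Dashboard)  # None -> no Dashboard entry, so the Resource
                                   #         entry applies; None means the JIRA
                                   #         backend decides the batch size
```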

# Information about this client

def client_info(self) -> str:
@@ -1131,7 +1176,12 @@ def dashboards(
if filter is not None:
params["filter"] = filter
return self._fetch_pages(
Dashboard, "dashboards", "dashboard", startAt, maxResults, params
Dashboard,
"dashboards",
"dashboard",
startAt,
maxResults,
params,
)

def dashboard(self, id: str) -> Dashboard:
@@ -3028,7 +3078,11 @@ def user(self, id: str, expand: Optional[Any] = None) -> User:
return user

def search_assignable_users_for_projects(
self, username: str, projectKeys: str, startAt: int = 0, maxResults: int = 50
self,
username: str,
projectKeys: str,
startAt: int = 0,
maxResults: int = 50,
) -> ResultList:
"""Get a list of user Resources that match the search string and can be assigned issues for projects.
@@ -3086,6 +3140,11 @@ def search_assignable_users_for_issues(
Returns:
ResultList
"""
if not username and not query:
raise ValueError(
"Either 'username' or 'query' arguments must be specified."
)

if username is not None:
params = {"username": username}
if query is not None:
@@ -3097,13 +3156,13 @@
if expand is not None:
params["expand"] = expand

if not username and not query:
raise ValueError(
"Either 'username' or 'query' arguments must be specified."
)

return self._fetch_pages(
User, None, "user/assignable/search", startAt, maxResults, params
User,
None,
"user/assignable/search",
startAt,
maxResults,
params,
)

# non-resource
1 change: 1 addition & 0 deletions setup.cfg
@@ -94,6 +94,7 @@ test =
wheel>=0.24.0 # MIT
xmlrunner>=1.7.7 # LGPL
yanc>=0.3.3 # GPL
parameterized>=0.8.1 # BSD-3-Clause

[options.entry_points]
console_scripts =
10 changes: 10 additions & 0 deletions tests/conftest.py
@@ -343,3 +343,13 @@ def find_by_name(seq, name):
for seq_item in seq:
if seq_item["name"] == name:
return seq_item


@pytest.fixture()
def no_fields(monkeypatch):
"""When we want to test the __init__ method of the jira.client.JIRA
we don't need any external calls to get the fields.
We don't need the features of a MagicMock, hence we don't use it here.
"""
monkeypatch.setattr(JIRA, "fields", lambda *args, **kwargs: [])
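
As a rough sketch (not part of this commit), a test that needs a client but no live server could combine this fixture with an offline construction along these lines; the test name, server URL, and assertion are hypothetical:

```python
from jira import JIRA


def test_client_init_without_field_lookup(no_fields):
    # JIRA.fields is monkeypatched to return [], and get_server_info=False
    # skips the initial server_info() request, so no real Jira instance is needed.
    client = JIRA(server="https://jira.example.com", get_server_info=False)
    assert client.fields() == []
```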
10 changes: 0 additions & 10 deletions tests/test_client.py
@@ -52,16 +52,6 @@ def remove_by_slug():
return slug


@pytest.fixture()
def no_fields(monkeypatch):
"""When we want to test the __init__ method of the jira.client.JIRA
we don't need any external calls to get the fields.
We don't need the features of a MagicMock, hence we don't use it here.
"""
monkeypatch.setattr(jira.client.JIRA, "fields", lambda *args, **kwargs: [])


def test_delete_project(cl_admin, cl_normal, slug):

assert cl_admin.delete_project(slug)