diff --git a/constraints.txt b/constraints.txt
index b3fc41a77..076ecc61b 100644
--- a/constraints.txt
+++ b/constraints.txt
@@ -83,6 +83,8 @@ packaging==21.3
     #   pytest
     #   pytest-sugar
     #   sphinx
+parameterized==0.8.1
+    # via jira (setup.cfg)
 parso==0.8.3
     # via jedi
 pickleshare==0.7.5
diff --git a/jira/client.py b/jira/client.py
index 468fad4b5..495814e5c 100644
--- a/jira/client.py
+++ b/jira/client.py
@@ -360,6 +360,9 @@ class JIRA:
             # 'Expires': 'Thu, 01 Jan 1970 00:00:00 GMT'
             "X-Atlassian-Token": "no-check",
         },
+        "default_batch_size": {
+            Resource: 100,
+        },
     }
 
     checked_version = False
@@ -387,6 +390,7 @@ def __init__(
         proxies: Any = None,
         timeout: Optional[Union[Union[float, int], Tuple[float, float]]] = None,
         auth: Tuple[str, str] = None,
+        default_batch_sizes: Optional[Dict[Type[Resource], Optional[int]]] = None,
     ):
         """Construct a Jira client instance.
 
@@ -462,11 +466,18 @@ def __init__(
             proxies (Optional[Any]): Sets the proxies for the HTTP session.
             auth (Optional[Tuple[str,str]]): Set a cookie auth token if this is required.
             logging (bool): Determine whether or not logging should be enabled. (Default: True)
+            default_batch_sizes (Optional[Dict[Type[Resource], Optional[int]]]): Manually specify the batch-sizes for
+              the paginated retrieval of different item types. `Resource` is used as a fallback for every item type not
+              specified. If an item type is mapped to `None` no fallback occurs, instead the JIRA-backend will use its
+              default batch-size. By default all Resources will be queried in batches of 100. E.g., setting this to
+              ``{Issue: 500, Resource: None}`` will make :py:meth:`search_issues` query Issues in batches of 500, while
+              every other item type's batch-size will be controlled by the backend. (Default: None)
+
         """
         # force a copy of the tuple to be used in __del__() because
         # sys.version_info could have already been deleted in __del__()
-        self.sys_version_info = tuple(sys.version_info)
 
+        self.sys_version_info = tuple(sys.version_info)
         if options is None:
             options = {}
             if server and isinstance(server, dict):
@@ -486,7 +497,10 @@ def __init__(
         LOG.setLevel(_logging.INFO if logging else _logging.CRITICAL)
         self.log = LOG
 
-        self._options: Dict[str, Any] = copy.copy(JIRA.DEFAULT_OPTIONS)
+        self._options: Dict[str, Any] = copy.deepcopy(JIRA.DEFAULT_OPTIONS)
+
+        if default_batch_sizes:
+            self._options["default_batch_size"].update(default_batch_sizes)
 
         if "headers" in options:
             headers = copy.copy(options["headers"])
@@ -710,6 +724,8 @@ def _fetch_pages(
             page_params["startAt"] = startAt
         if maxResults:
             page_params["maxResults"] = maxResults
+        elif batch_size := self._get_batch_size(item_type):
+            page_params["maxResults"] = batch_size
 
         resource = self._get_json(request_path, params=page_params, base=base)
         next_items_page = self._get_items_from_page(item_type, items_key, resource)
@@ -734,6 +750,13 @@ def _fetch_pages(
             # If maxResults evaluates as False, get all items in batches
             if not maxResults:
                 page_size = max_results_from_response or len(items)
+                if batch_size is not None and page_size < batch_size:
+                    self.log.warning(
+                        "'batch_size' set to %s, but only received %s items in batch. Falling back to %s.",
+                        batch_size,
+                        page_size,
+                        page_size,
+                    )
                 page_start = (startAt or start_at_from_response or 0) + page_size
                 if (
                     async_class is not None
@@ -765,6 +788,9 @@ def _fetch_pages(
                     and (total is None or page_start < total)
                     and len(next_items_page) == page_size
                 ):
+                    page_params = (
+                        params.copy() if params else {}
+                    )  # re-copy params each page so pagination never mutates the caller's dict (keeps recorded mock calls intact)
                     page_params["startAt"] = page_start
                     page_params["maxResults"] = page_size
                     resource = self._get_json(
@@ -805,6 +831,25 @@ def _get_items_from_page(
             # improving the error text so we know why it happened
             raise KeyError(str(e) + " : " + json.dumps(resource))
 
+    def _get_batch_size(self, item_type: Type[ResourceType]) -> Optional[int]:
+        """
+        Return the batch size for the given resource type from the options.
+
+        Check if the given item type has a mapped batch-size; otherwise fall back to the batch-size assigned to `Resource`; otherwise let the JIRA backend determine the batch-size.
+
+        Returns:
+           Optional[int]: The batch size to use. When the configured batch size is None, the batch size should be determined by the JIRA-Backend.
+        """
+        batch_sizes: Dict[Type[Resource], Optional[int]] = self._options[
+            "default_batch_size"
+        ]
+        try:
+            item_type_batch_size = batch_sizes[item_type]
+        except KeyError:
+            # Item type not mapped -> fall back to the `Resource` entry (None if absent, letting the JIRA backend decide)
+            item_type_batch_size = batch_sizes.get(Resource, None)
+        return item_type_batch_size
+
     # Information about this client
 
     def client_info(self) -> str:
@@ -1131,7 +1176,12 @@ def dashboards(
         if filter is not None:
             params["filter"] = filter
         return self._fetch_pages(
-            Dashboard, "dashboards", "dashboard", startAt, maxResults, params
+            Dashboard,
+            "dashboards",
+            "dashboard",
+            startAt,
+            maxResults,
+            params,
         )
 
     def dashboard(self, id: str) -> Dashboard:
@@ -3028,7 +3078,11 @@ def user(self, id: str, expand: Optional[Any] = None) -> User:
         return user
 
     def search_assignable_users_for_projects(
-        self, username: str, projectKeys: str, startAt: int = 0, maxResults: int = 50
+        self,
+        username: str,
+        projectKeys: str,
+        startAt: int = 0,
+        maxResults: int = 50,
     ) -> ResultList:
         """Get a list of user Resources that match the search string and can be assigned issues for projects.
 
@@ -3086,6 +3140,11 @@ def search_assignable_users_for_issues(
         Returns:
             ResultList
         """
+        if not username and not query:
+            raise ValueError(
+                "Either 'username' or 'query' arguments must be specified."
+            )
+
         if username is not None:
             params = {"username": username}
         if query is not None:
@@ -3097,13 +3156,13 @@ def search_assignable_users_for_issues(
         if expand is not None:
             params["expand"] = expand
 
-        if not username and not query:
-            raise ValueError(
-                "Either 'username' or 'query' arguments must be specified."
-            )
-
         return self._fetch_pages(
-            User, None, "user/assignable/search", startAt, maxResults, params
+            User,
+            None,
+            "user/assignable/search",
+            startAt,
+            maxResults,
+            params,
         )
 
     # non-resource
diff --git a/setup.cfg b/setup.cfg
index b71094d41..6ac9f3a6b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -94,6 +94,7 @@ test =
     wheel>=0.24.0  # MIT
     xmlrunner>=1.7.7  # LGPL
     yanc>=0.3.3  # GPL
+    parameterized>=0.8.1  # BSD-3-Clause
 
 [options.entry_points]
 console_scripts =
diff --git a/tests/conftest.py b/tests/conftest.py
index 1b240ab16..e6da6217e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -343,3 +343,13 @@ def find_by_name(seq, name):
     for seq_item in seq:
         if seq_item["name"] == name:
             return seq_item
+
+
+@pytest.fixture()
+def no_fields(monkeypatch):
+    """When we want to test the __init__ method of the jira.client.JIRA
+    we don't need any external calls to get the fields.
+
+    We don't need the features of a MagicMock, hence we don't use it here.
+    """
+    monkeypatch.setattr(JIRA, "fields", lambda *args, **kwargs: [])
diff --git a/tests/test_client.py b/tests/test_client.py
index 743bb4f79..0fedd8f8f 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -52,16 +52,6 @@ def remove_by_slug():
     return slug
 
 
-@pytest.fixture()
-def no_fields(monkeypatch):
-    """When we want to test the __init__ method of the jira.client.JIRA
-    we don't need any external calls to get the fields.
-
-    We don't need the features of a MagicMock, hence we don't use it here.
-    """
-    monkeypatch.setattr(jira.client.JIRA, "fields", lambda *args, **kwargs: [])
-
-
 def test_delete_project(cl_admin, cl_normal, slug):
 
     assert cl_admin.delete_project(slug)
diff --git a/tests/tests.py b/tests/tests.py
index 20ef9a5bf..45c97a0ba 100755
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -12,15 +12,16 @@
 import os
 import pickle
 from time import sleep
-from typing import cast
+from typing import Optional, cast
 from unittest import mock
 
 import pytest
 import requests
+from parameterized import parameterized
 
 from jira import JIRA, Issue, JIRAError
 from jira.client import ResultList
-from jira.resources import cls_for_resource
+from jira.resources import Dashboard, Resource, cls_for_resource
 from tests.conftest import JiraTestCase, rndpassword
 
 LOGGER = logging.getLogger(__name__)
@@ -286,6 +287,9 @@ def test_session_server_offline(self):
         self.assertTrue(False, "Instantiation of invalid JIRA instance succeeded.")
 
 
+MIMICKED_BACKEND_BATCH_SIZE = 10
+
+
 class AsyncTests(JiraTestCase):
     def setUp(self):
         self.jira = JIRA(
@@ -296,40 +300,109 @@ def setUp(self):
             get_server_info=False,
         )
 
-    def test_fetch_pages(self):
+    @parameterized.expand(
+        [
+            (
+                0,
+                26,
+                {Issue: None},
+                False,
+            ),  # original behaviour, fetch all with jira's original return size
+            (0, 26, {Issue: 20}, False),  # set batch size to 20
+            (5, 26, {Issue: 20}, False),  # test start_at
+            (5, 26, {Issue: 20}, 50),  # test maxResults set (one request)
+        ]
+    )
+    def test_fetch_pages(
+        self, start_at: int, total: int, default_batch_sizes: dict, max_results: int
+    ):
         """Tests that the JIRA._fetch_pages method works as expected."""
         params = {"startAt": 0}
-        total = 26
+        self.jira._options["default_batch_size"] = default_batch_sizes
+        batch_size = self.jira._get_batch_size(Issue)
+        expected_calls = _calculate_calls_for_fetch_pages(
+            "https://jira.atlassian.com/rest/api/2/search",
+            start_at,
+            total,
+            max_results,
+            batch_size,
+            MIMICKED_BACKEND_BATCH_SIZE,
+        )
+        batch_size = batch_size or MIMICKED_BACKEND_BATCH_SIZE
         expected_results = []
         for i in range(0, total):
             result = _create_issue_result_json(i, "summary %s" % i, key="KEY-%s" % i)
             expected_results.append(result)
-        result_one = _create_issue_search_results_json(
-            expected_results[:10], max_results=10, total=total
-        )
-        result_two = _create_issue_search_results_json(
-            expected_results[10:20], max_results=10, total=total
-        )
-        result_three = _create_issue_search_results_json(
-            expected_results[20:], max_results=6, total=total
-        )
+
+        if not max_results:
+            mocked_api_results = []
+            for i in range(start_at, total, batch_size):
+                mocked_api_result = _create_issue_search_results_json(
+                    expected_results[i : i + batch_size],
+                    max_results=batch_size,
+                    total=total,
+                )
+                mocked_api_results.append(mocked_api_result)
+        else:
+            mocked_api_results = [
+                _create_issue_search_results_json(
+                    expected_results[start_at : max_results + start_at],
+                    max_results=max_results,
+                    total=total,
+                )
+            ]
+
         mock_session = mock.Mock(name="mock_session")
         responses = mock.Mock(name="responses")
         responses.content = "_filler_"
-        responses.json.side_effect = [result_one, result_two, result_three]
+        responses.json.side_effect = mocked_api_results
         responses.status_code = 200
         mock_session.request.return_value = responses
         mock_session.get.return_value = responses
         self.jira._session.close()
         self.jira._session = mock_session
-        items = self.jira._fetch_pages(Issue, "issues", "search", 0, False, params)
-        self.assertEqual(len(items), total)
+        items = self.jira._fetch_pages(
+            Issue, "issues", "search", start_at, max_results, params=params
+        )
+
+        actual_calls = [[kall[1], kall[2]] for kall in self.jira._session.method_calls]
+        self.assertEqual(actual_calls, expected_calls)
+        self.assertEqual(len(items), total - start_at)
         self.assertEqual(
             {item.key for item in items},
-            {expected_r["key"] for expected_r in expected_results},
+            {expected_r["key"] for expected_r in expected_results[start_at:]},
         )
 
 
+@pytest.mark.parametrize(
+    "default_batch_sizes, item_type, expected",
+    [
+        ({Issue: 2}, Issue, 2),
+        ({Resource: 1}, Resource, 1),
+        (
+            {Resource: 1, Issue: None},
+            Issue,
+            None,
+        ),
+        ({Resource: 1}, Dashboard, 1),
+        ({}, Issue, 100),
+        ({}, Resource, 100),
+    ],
+    ids=[
+        "modify Issue default",
+        "modify Resource default",
+        "let backend decide for Issue",
+        "fallback",
+        "default for Issue",
+        "default value for everything else",
+    ],
+)
+def test_get_batch_size(default_batch_sizes, item_type, expected, no_fields):
+    jira = JIRA(default_batch_sizes=default_batch_sizes, get_server_info=False)
+
+    assert jira._get_batch_size(item_type) == expected
+
+
 def _create_issue_result_json(issue_id, summary, key, **kwargs):
     """Returns a minimal json object for an issue."""
     return {
@@ -350,6 +423,38 @@ def _create_issue_search_results_json(issues, **kwargs):
     }
 
 
+def _calculate_calls_for_fetch_pages(
+    url: str,
+    start_at: int,
+    total: int,
+    max_results: int,
+    batch_size: Optional[int],
+    default: Optional[int] = 10,
+):
+    """Returns expected query parameters for specified search-issues arguments."""
+    if not max_results:
+        call_list = []
+        if batch_size is None:
+            # When batch-size is `None` we deliberately omit maxResults on the first request and let
+            # the server choose the page size (here we mimic a server default of 10 issues per batch).
+            call_ = [(url,), {"params": {"startAt": start_at}}]
+            call_list.append(call_)
+            start_at += default
+            batch_size = default
+        for index, start_at in enumerate(range(start_at, total, batch_size)):
+            call_ = [
+                (url,),
+                {"params": {"startAt": start_at, "maxResults": batch_size}},
+            ]
+
+            call_list.append(call_)
+    else:
+        call_list = [
+            [(url,), {"params": {"startAt": start_at, "maxResults": max_results}}]
+        ]
+    return call_list
+
+
 class WebsudoTests(JiraTestCase):
     def test_kill_websudo(self):
         self.jira.kill_websudo()