diff --git a/README.md b/README.md index 94af44b8..5c77a581 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,7 @@ sqlalchemy to communicate directly with ICAT's database. - [Database Backend](#database-backend) - [Mapped Classes](#mapped-classes) - [Python ICAT Backend](#python-icat-backend) + - [Client Handling](#client-handling) - [ICATQuery](#icatquery) - [Generating the OpenAPI Specification](#generating-the-openapi-specification) - [Utilities](#utilities) @@ -658,11 +659,74 @@ the API supporting multiple authentication mechanisms. Meta attributes such as ` are dealt by Python ICAT, rather than the API.
+### Client Handling +Python ICAT uses +[client objects](https://python-icat.readthedocs.io/en/stable/client.html) to +authenticate users and provide interaction with ICAT (e.g. querying icatdb). A client +object has a high creation cost (often taking several seconds), so it's unsuitable to +create a new client object at the start of each request. In a similar vein, it would +also be unsuitable to use a single client object for the entire API, due to collisions +between different users.
+ +Client objects are handled using an +[LRU cache](https://docs.python.org/3/library/functools.html#functools.lru_cache), +fetching clients from an [object pool](https://object-pool.readthedocs.io/en/latest/) +when a new client is requested for the cache.
+ +#### Caching +The cache is extended from Cachetools' implementation (although the documentation for +the builtin LRU cache is more detailed, hence that's linked above) to allow a client +object to be placed back into the object pool once it becomes 'least recently used' and +is therefore removed from the cache (in place of another item). Each cache item is +differentiated by the arguments of the function the cache is applied to, which in this +case is the session ID. The client pool object is also passed into the function, but this is a +singleton object (mandated by the library it comes from), so it won't change +throughout the lifetime of the API.
+ +#### Pooling +The object pool has an initial pool size that will be created at startup, and a maximum +size that the pool can grow to if needed, where both values are configurable. The +clients within the pool do not expire and have unlimited reuses, so clients created at +startup can be used for the lifespan of the API. Python ICAT's `Client` class is +extended (to `ICATClient`) to rename `cleanup()` to a function name that the object pool +recognises for cleaning up resources, and to disable the auto logout feature so that +sessions aren't logged out when the client is reused.
+ +#### Attributes of the Design +Combining caching and pooling into one design gives the following high-level results. +There is a 1 client to 1 session ID ratio, which prevents collisions between users +and doesn't require an excessive amount of resources (as a 1 client to 1 request +ratio would). Since the object pool is created at startup, this design can cause the API +to be slow to start as the pool of objects needs to be created. A rough guide is to +multiply the configured initial pool size by around 5 or 6 seconds to get a time +estimate for pool creation; for example, an initial pool size of 2 suggests a startup +time of roughly 10 to 12 seconds.
+ +#### Configuring Client Handling +When configuring the cache size and the client pool, the following should be considered. +The pool's max size should be configured to the maximum number of concurrent users +expected for the API. The cache size must not exceed the pool's maximum size. If +this does happen, the cache could attempt to acquire a client from an empty pool that +cannot grow, causing the request to never respond because the API will wait +indefinitely. The pool's initial size should be configured to strike a balance between +a reasonable startup time and not slowing down requests when the pool grows beyond its +initial size. NOTE: when the pool exceeds the initial size and a client is requested by +the cache, a client is created on the fly, so that request (and any others sent before +the client is created and cached) WILL be slow. For development, the following +settings (as also set in the example config) give an acceptable startup time while +still allowing multiple session IDs to be used if required.
+ +```json +"client_cache_size": 5, +"client_pool_init_size": 2, +"client_pool_max_size": 5, +``` + +
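To make the combination of caching and pooling concrete, below is a minimal, runnable sketch of the pattern described in this section. It is illustrative only: `FakeClient`, `PoolReturningLRUCache` and `get_client` are hypothetical stand-ins for the `ICATClient`, `ExtendedLRUCache` and `get_cached_client` introduced later in this diff, and a plain list stands in for the object pool.

```python
from cachetools import LRUCache, cached


class FakeClient:
    """Cheap stand-in for an ICAT client; creating real ones is expensive."""

    def __init__(self, number):
        self.name = f"client-{number}"
        self.sessionId = None


class PoolReturningLRUCache(LRUCache):
    """On eviction, flush the client's session ID and return it to the pool."""

    def __init__(self, maxsize, pool):
        super().__init__(maxsize=maxsize)
        self._pool = pool

    def popitem(self):
        key, client = super().popitem()
        client.sessionId = None  # flush so the next user starts with a clean client
        self._pool.append(client)  # the client becomes available to the pool again
        return key, client


pool = [FakeClient(number) for number in range(3)]  # 'initial pool size' of 3


@cached(cache=PoolReturningLRUCache(maxsize=2, pool=pool))
def get_client(session_id):
    client = pool.pop()  # fetch an unused client from the pool
    client.sessionId = session_id
    return client


first = get_client("session-a")
assert get_client("session-a") is first  # cache hit: no pool fetch, no creation cost
get_client("session-b")  # the cache is now full
get_client("session-c")  # evicts 'session-a', whose client goes back to the pool
assert len(pool) == 1 and pool[0].sessionId is None
```

The final assertions capture the key property of the design: eviction from the cache is what flushes a client's session ID and returns it to the pool.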
### ICATQuery The ICATQuery class is in `datagateway_api.common.icat.query`. This class stores a query created with Python ICAT -[documentation for the query](https://python-icat.readthedocs.io/en/stable/query.html). -The `execute_query()` function executes the query and returns either results in either a +([documentation](https://python-icat.readthedocs.io/en/stable/query.html)). The +`execute_query()` function executes the query and returns results in either a JSON format, or a list of [Python ICAT entities](https://python-icat.readthedocs.io/en/stable/entity.html) (this is defined using the `return_json_formattable` flag). Other functions within that class
diff --git a/datagateway_api/common/backend.py b/datagateway_api/common/backend.py index a6ca914a..7177fc3c 100644 --- a/datagateway_api/common/backend.py +++ b/datagateway_api/common/backend.py @@ -10,7 +10,9 @@ class Backend(ABC): def login(self, credentials): """ Attempt to log a user in using the provided credentials - :param credentials: The user's credentials (including mechanism) + :param credentials: The user's credentials (including mechanism). Credentials + should take the following format in JSON: + {"username": "value", "password": "value", "mechanism": "value"} :returns: a session ID """ pass
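As an illustration of that credentials format in use, a login request against the `/sessions` endpoint (registered later in this diff) could look like the following. The host and port are assumptions for a local deployment, not something this change defines:

```python
import requests

# POST /sessions returns 201 with a session ID, or 403 for incorrect credentials
response = requests.post(
    "http://localhost:5000/sessions",  # assumed local address of the API
    json={"username": "user", "password": "password", "mechanism": "simple"},
)
session_id = response.json()["sessionID"]
```

If `mechanism` is omitted, the sessions endpoint (see `sessions_endpoints.py` further down) defaults it to `"simple"`.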
diff --git a/datagateway_api/common/config.py b/datagateway_api/common/config.py index a4519011..80665aab 100644 --- a/datagateway_api/common/config.py +++ b/datagateway_api/common/config.py @@ -17,6 +17,9 @@ class APIConfigOptions(Enum): """ BACKEND = "backend" + CLIENT_CACHE_SIZE = "client_cache_size" + CLIENT_POOL_INIT_SIZE = "client_pool_init_size" + CLIENT_POOL_MAX_SIZE = "client_pool_max_size" DB_URL = "db_url" DEBUG_MODE = "debug_mode" FLASK_RELOADER = "flask_reloader" @@ -65,6 +68,9 @@ def _check_config_items_exist(self): if self.get_config_value(APIConfigOptions.BACKEND) == "python_icat": icat_backend_specific_config_keys = [ + APIConfigOptions.CLIENT_CACHE_SIZE, + APIConfigOptions.CLIENT_POOL_INIT_SIZE, + APIConfigOptions.CLIENT_POOL_MAX_SIZE, APIConfigOptions.ICAT_CHECK_CERT, APIConfigOptions.ICAT_URL, ]
diff --git a/datagateway_api/common/database/backend.py b/datagateway_api/common/database/backend.py index 927e9659..f4b8409a 100644 --- a/datagateway_api/common/database/backend.py +++ b/datagateway_api/common/database/backend.py @@ -32,7 +32,7 @@ class DatabaseBackend(Backend): Class that contains functions to access and modify data in an ICAT database directly """ - def login(self, credentials): + def login(self, credentials, **kwargs): if credentials["username"] == "user" and credentials["password"] == "password": session_id = str(uuid.uuid1()) insert_row_into_table( @@ -48,84 +48,84 @@ def login(self, credentials): raise AuthenticationError("Username and password are incorrect") @requires_session_id - def get_session_details(self, session_id): + def get_session_details(self, session_id, **kwargs): return get_row_by_id(SESSION, session_id).to_dict() @requires_session_id - def refresh(self, session_id): + def refresh(self, session_id, **kwargs): return session_id @requires_session_id @queries_records - def logout(self, session_id): + def logout(self, session_id, **kwargs): return delete_row_by_id(SESSION, session_id) @requires_session_id @queries_records - def get_with_filters(self, session_id, entity_type, filters): + def get_with_filters(self, session_id, entity_type, filters, **kwargs): table = get_entity_object_from_name(entity_type) return get_rows_by_filter(table, filters) @requires_session_id @queries_records - def create(self, session_id, entity_type, data): + def create(self, session_id, entity_type, data, **kwargs): table = get_entity_object_from_name(entity_type) return create_rows_from_json(table, data) @requires_session_id @queries_records - def update(self, session_id, entity_type, data): + def update(self, session_id, entity_type, data, **kwargs): table = get_entity_object_from_name(entity_type) return patch_entities(table, data) @requires_session_id @queries_records - def get_one_with_filters(self, session_id, entity_type, filters): + def get_one_with_filters(self, session_id, entity_type, filters, **kwargs): table = get_entity_object_from_name(entity_type) return get_first_filtered_row(table, filters) @requires_session_id @queries_records - def count_with_filters(self, session_id, entity_type, filters): + def count_with_filters(self, session_id, entity_type, filters, **kwargs): table = get_entity_object_from_name(entity_type) return get_filtered_row_count(table, filters) @requires_session_id @queries_records - def get_with_id(self, session_id, entity_type, id_): + def get_with_id(self, session_id, entity_type,
id_, **kwargs): table = get_entity_object_from_name(entity_type) return get_row_by_id(table, id_).to_dict() @requires_session_id @queries_records - def delete_with_id(self, session_id, entity_type, id_): + def delete_with_id(self, session_id, entity_type, id_, **kwargs): table = get_entity_object_from_name(entity_type) return delete_row_by_id(table, id_) @requires_session_id @queries_records - def update_with_id(self, session_id, entity_type, id_, data): + def update_with_id(self, session_id, entity_type, id_, data, **kwargs): table = get_entity_object_from_name(entity_type) return update_row_from_id(table, id_, data) @requires_session_id @queries_records def get_facility_cycles_for_instrument_with_filters( - self, session_id, instrument_id, filters, + self, session_id, instrument_id, filters, **kwargs, ): return get_facility_cycles_for_instrument(instrument_id, filters) @requires_session_id @queries_records def get_facility_cycles_for_instrument_count_with_filters( - self, session_id, instrument_id, filters, + self, session_id, instrument_id, filters, **kwargs, ): return get_facility_cycles_for_instrument_count(instrument_id, filters) @requires_session_id @queries_records def get_investigations_for_instrument_facility_cycle_with_filters( - self, session_id, instrument_id, facilitycycle_id, filters, + self, session_id, instrument_id, facilitycycle_id, filters, **kwargs, ): return get_investigations_for_instrument_in_facility_cycle( instrument_id, facilitycycle_id, filters, @@ -134,7 +134,7 @@ def get_investigations_for_instrument_facility_cycle_with_filters( @requires_session_id @queries_records def get_investigation_count_instrument_facility_cycle_with_filters( - self, session_id, instrument_id, facilitycycle_id, filters, + self, session_id, instrument_id, facilitycycle_id, filters, **kwargs, ): return get_investigations_for_instrument_in_facility_cycle_count( instrument_id, facilitycycle_id, filters, diff --git a/datagateway_api/common/icat/backend.py b/datagateway_api/common/icat/backend.py index 2770f190..83461672 100644 --- a/datagateway_api/common/icat/backend.py +++ b/datagateway_api/common/icat/backend.py @@ -6,9 +6,9 @@ from datagateway_api.common.exceptions import AuthenticationError from datagateway_api.common.helpers import queries_records from datagateway_api.common.icat.helpers import ( - create_client, create_entities, delete_entity_by_id, + get_cached_client, get_count_with_filters, get_entity_by_id, get_entity_with_filters, @@ -37,9 +37,13 @@ class PythonICATBackend(Backend): def __init__(self): pass - def login(self, credentials): + def login(self, credentials, **kwargs): log.info("Logging in to get session ID") - client = create_client() + client_pool = kwargs.get("client_pool") + + # There is no session ID required for this endpoint, a client object will be + # fetched from cache with a blank `sessionId` attribute + client = get_cached_client(None, client_pool) # Syntax for Python ICAT login_details = { @@ -48,6 +52,11 @@ def login(self, credentials): } try: session_id = client.login(credentials["mechanism"], login_details) + # Flushing client's session ID so the session ID returned in this request + # won't be logged out next time `client.login()` is used in this function. 
+ # `login()` calls `self.logout()` if `sessionId` is set + client.sessionId = None + return session_id except ICATSessionError: raise AuthenticationError("User credentials are incorrect") @@ -55,94 +64,84 @@ def login(self, credentials): @requires_session_id def get_session_details(self, session_id, **kwargs): log.info("Getting session details for session: %s", session_id) - client = kwargs["client"] if kwargs["client"] else create_client() - return get_session_details_helper(client) + return get_session_details_helper(kwargs.get("client")) @requires_session_id def refresh(self, session_id, **kwargs): log.info("Refreshing session: %s", session_id) - client = kwargs["client"] if kwargs["client"] else create_client() - return refresh_client_session(client) + return refresh_client_session(kwargs.get("client")) @requires_session_id @queries_records def logout(self, session_id, **kwargs): log.info("Logging out of the Python ICAT client") - client = kwargs["client"] if kwargs["client"] else create_client() - return logout_icat_client(client) + return logout_icat_client(kwargs.get("client")) @requires_session_id @queries_records def get_with_filters(self, session_id, entity_type, filters, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_entity_with_filters(client, entity_type, filters) + return get_entity_with_filters(kwargs.get("client"), entity_type, filters) @requires_session_id @queries_records def create(self, session_id, entity_type, data, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return create_entities(client, entity_type, data) + return create_entities(kwargs.get("client"), entity_type, data) @requires_session_id @queries_records def update(self, session_id, entity_type, data, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return update_entities(client, entity_type, data) + return update_entities(kwargs.get("client"), entity_type, data) @requires_session_id @queries_records def get_one_with_filters(self, session_id, entity_type, filters, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_first_result_with_filters(client, entity_type, filters) + return get_first_result_with_filters(kwargs.get("client"), entity_type, filters) @requires_session_id @queries_records def count_with_filters(self, session_id, entity_type, filters, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_count_with_filters(client, entity_type, filters) + return get_count_with_filters(kwargs.get("client"), entity_type, filters) @requires_session_id @queries_records def get_with_id(self, session_id, entity_type, id_, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_entity_by_id(client, entity_type, id_, True) + return get_entity_by_id(kwargs.get("client"), entity_type, id_, True) @requires_session_id @queries_records def delete_with_id(self, session_id, entity_type, id_, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return delete_entity_by_id(client, entity_type, id_) + return delete_entity_by_id(kwargs.get("client"), entity_type, id_) @requires_session_id @queries_records def update_with_id(self, session_id, entity_type, id_, data, **kwargs): - client = kwargs["client"] if kwargs["client"] else create_client() - return update_entity_by_id(client, entity_type, id_, data) + return update_entity_by_id(kwargs.get("client"), entity_type, id_, 
data) @requires_session_id @queries_records def get_facility_cycles_for_instrument_with_filters( self, session_id, instrument_id, filters, **kwargs, ): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_facility_cycles_for_instrument(client, instrument_id, filters) + return get_facility_cycles_for_instrument( + kwargs.get("client"), instrument_id, filters, + ) @requires_session_id @queries_records def get_facility_cycles_for_instrument_count_with_filters( self, session_id, instrument_id, filters, **kwargs, ): - client = kwargs["client"] if kwargs["client"] else create_client() - return get_facility_cycles_for_instrument_count(client, instrument_id, filters) + return get_facility_cycles_for_instrument_count( + kwargs.get("client"), instrument_id, filters, + ) @requires_session_id @queries_records def get_investigations_for_instrument_facility_cycle_with_filters( self, session_id, instrument_id, facilitycycle_id, filters, **kwargs, ): - client = kwargs["client"] if kwargs["client"] else create_client() return get_investigations_for_instrument_in_facility_cycle( - client, instrument_id, facilitycycle_id, filters, + kwargs.get("client"), instrument_id, facilitycycle_id, filters, ) @requires_session_id @@ -150,7 +149,6 @@ def get_investigations_for_instrument_facility_cycle_with_filters( def get_investigation_count_instrument_facility_cycle_with_filters( self, session_id, instrument_id, facilitycycle_id, filters, **kwargs, ): - client = kwargs["client"] if kwargs["client"] else create_client() return get_investigations_for_instrument_in_facility_cycle_count( - client, instrument_id, facilitycycle_id, filters, + kwargs.get("client"), instrument_id, facilitycycle_id, filters, )
diff --git a/datagateway_api/common/icat/filters.py b/datagateway_api/common/icat/filters.py index a1606e40..58ee92b9 100644 --- a/datagateway_api/common/icat/filters.py +++ b/datagateway_api/common/icat/filters.py @@ -117,22 +117,30 @@ def __init__(self, fields): def apply_filter(self, query): try: log.info("Adding ICAT distinct filter to ICAT query") - if ( - query.aggregate == "COUNT" - or query.aggregate == "AVG" - or query.aggregate == "SUM" - ): + log.debug("Fields for distinct filter: %s", self.fields) + + # These aggregate keywords are not currently used in the API, but the + # conditional is present in case they're used in the future + if query.aggregate == "AVG" or query.aggregate == "SUM": # Distinct can be combined with other aggregate functions query.setAggregate(f"{query.aggregate}:DISTINCT") + elif query.aggregate == "COUNT": + # When count and distinct keywords are used together while selecting + # multiple attributes, Python ICAT will always throw an error on query + # execution (more info: + # https://github.com/icatproject/python-icat/issues/76). This appears to + # be a JPQL limitation, something that cannot be fixed in Python ICAT. + # As a result, the API will get the distinct results and manually + # perform `len()` on the list, using `manual_count` as a flag to + # recognise this situation + query.setAggregate("DISTINCT") + log.debug("Manual count flag enabled") + query.manual_count = True else: query.setAggregate("DISTINCT") - # Using where filters to identify which fields to apply distinct too - for field in self.fields: - where_filter = PythonICATWhereFilter(field, "null", "ne") - where_filter.apply_filter(query) + query.setAttributes(self.fields) - log.debug("Fields for distinct filter: %s", self.fields) except ValueError as e: raise FilterError(e)
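The workaround described in the comment above replaces a server-side `COUNT:DISTINCT` with a client-side count of the distinct rows. A toy illustration of the idea, with made-up data in place of a real query result:

```python
# Shape of the results Python ICAT could return for a query such as
# SELECT DISTINCT i.title, i.name FROM Investigation i
distinct_results = [
    ("Title 1", "INV 1"),
    ("Title 2", "INV 2"),
    ("Title 2", "INV 3"),  # distinct as a pair, despite the duplicated title
]

# COUNT combined with DISTINCT over multiple attributes fails on execution, so
# when `manual_count` is set, the distinct rows are fetched and counted instead
assert len(distinct_results) == 3
```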
diff --git a/datagateway_api/common/icat/helpers.py b/datagateway_api/common/icat/helpers.py index f782e2eb..343f1985 100644 --- a/datagateway_api/common/icat/helpers.py +++ b/datagateway_api/common/icat/helpers.py @@ -2,7 +2,7 @@ from functools import wraps import logging -import icat.client +from cachetools import cached from icat.entities import getTypeMap from icat.exception import ( ICATInternalError, @@ -13,7 +13,6 @@ ICATValidationError, ) -from datagateway_api.common.config import APIConfigOptions, config from datagateway_api.common.date_handler import DateHandler from datagateway_api.common.exceptions import ( AuthenticationError, @@ -26,6 +25,7 @@ PythonICATLimitFilter, PythonICATWhereFilter, ) +from datagateway_api.common.icat.lru_cache import ExtendedLRUCache from datagateway_api.common.icat.query import ICATQuery @@ -54,7 +54,9 @@ def requires_session_id(method): @wraps(method) def wrapper_requires_session(*args, **kwargs): try: - client = create_client() + client_pool = kwargs.get("client_pool") + + client = get_cached_client(args[1], client_pool) client.sessionId = args[1] # Client object put into kwargs so it can be accessed by backend functions kwargs["client"] = client @@ -66,17 +68,38 @@ def wrapper_requires_session(*args, **kwargs): raise AuthenticationError("Forbidden") else: return method(*args, **kwargs) - except ICATSessionError: - raise AuthenticationError("Forbidden") + except ICATSessionError as e: + raise AuthenticationError(e) return wrapper_requires_session -def create_client(): - client = icat.client.Client( - config.get_config_value(APIConfigOptions.ICAT_URL), - checkCert=config.get_config_value(APIConfigOptions.ICAT_CHECK_CERT), - ) +@cached(cache=ExtendedLRUCache()) +def get_cached_client(session_id, client_pool): + """ + Get a client from cache using session ID as the cache parameter (client_pool will + always be given the same object, so it won't affect argument hashing) + + An available client is fetched from the object pool, given a session ID, and kept + around in this cache until it becomes 'least recently used'. At this point, the + session ID is flushed and the client is returned to the pool. More details about + client handling can be found in the README + + :param session_id: The user's session ID + :type session_id: :class:`str` + :param client_pool: Client object pool used to fetch an unused client + :type client_pool: :class:`ObjectPool` + """ + + # Get a client from the pool + client, stats = client_pool._get_resource() + + # `session_id` of None suggests this function is being called from an endpoint that + # doesn't use the `requires_session_id` decorator (e.g. POST /sessions) + log.info("Caching, session ID: %s", session_id) + if session_id: + client.sessionId = session_id + return client
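Because the pool is a singleton, the cache key only ever varies by session ID. A quick standalone check of that claim, using cachetools' default key function (`pool_singleton` here is just a stand-in object):

```python
from cachetools.keys import hashkey  # the default key function used by @cached

pool_singleton = object()  # stands in for the ObjectPool singleton

# Same session ID and same pool object produce the same cache key (a cache hit)
assert hashkey("session-a", pool_singleton) == hashkey("session-a", pool_singleton)
# Different session IDs produce different cache keys (one client per session)
assert hashkey("session-a", pool_singleton) != hashkey("session-b", pool_singleton)
```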
@@ -554,6 +577,7 @@ def get_facility_cycles_for_instrument( query_aggregate = "COUNT:DISTINCT" if count_query else "DISTINCT" query = ICATQuery(client, "FacilityCycle", aggregate=query_aggregate) + query.isis_endpoint = True instrument_id_check = PythonICATWhereFilter( "facility.instruments.id", instrument_id, "eq", @@ -634,6 +658,7 @@ def get_investigations_for_instrument_in_facility_cycle( query_aggregate = "COUNT:DISTINCT" if count_query else "DISTINCT" query = ICATQuery(client, "Investigation", aggregate=query_aggregate) + query.isis_endpoint = True instrument_id_check = PythonICATWhereFilter( "facility.instruments.id", instrument_id, "eq",
diff --git a/datagateway_api/common/icat/icat_client_pool.py b/datagateway_api/common/icat/icat_client_pool.py new file mode 100644 index 00000000..cd8c2ff0 --- /dev/null +++ b/datagateway_api/common/icat/icat_client_pool.py @@ -0,0 +1,43 @@ +import logging + +from icat.client import Client +from object_pool import ObjectPool + +from datagateway_api.common.config import APIConfigOptions, config + +log = logging.getLogger() + + +class ICATClient(Client): + """Wrapper class to allow an object pool of client objects to be created""" + + def __init__(self): + super().__init__( + config.get_config_value(APIConfigOptions.ICAT_URL), + checkCert=config.get_config_value(APIConfigOptions.ICAT_CHECK_CERT), + ) + # When clients are cleaned up, sessions won't be logged out + self.autoLogout = False + + def clean_up(self): + """ + Allows object pool to cleanup the client's resources, using the existing Python + ICAT functionality + """ + super().cleanup() + + +def create_client_pool(): + """ + Function to create an object pool for ICAT client objects + + The ObjectPool class uses the singleton design pattern + """ + + return ObjectPool( + ICATClient, + min_init=config.get_config_value(APIConfigOptions.CLIENT_POOL_INIT_SIZE), + max_capacity=config.get_config_value(APIConfigOptions.CLIENT_POOL_MAX_SIZE), + max_reusable=0, + expires=0, + )
diff --git a/datagateway_api/common/icat/lru_cache.py b/datagateway_api/common/icat/lru_cache.py new file mode 100644 index 00000000..441c9b6e --- /dev/null +++ b/datagateway_api/common/icat/lru_cache.py @@ -0,0 +1,36 @@ +import logging + +from cachetools.lru import LRUCache + +from datagateway_api.common.config import APIConfigOptions, config + +log = logging.getLogger() + + +class ExtendedLRUCache(LRUCache): + """ + An extension to cachetools' LRUCache class to allow client objects to be pushed back + into the pool + + This version of LRU cache was chosen instead of the builtin LRU cache as it allows + for additional actions to be added when an item leaves the cache (controlled by + `popitem()`). Since the builtin version was just a function (using a couple of + wrapper functions), adding additional functionality wasn't possible. + """ + + def __init__(self): + super().__init__( + maxsize=config.get_config_value(APIConfigOptions.CLIENT_CACHE_SIZE), + ) + + def popitem(self): + key, client = super().popitem() + session_id, client_pool = key + log.debug("Client popped from LRU cache with session: %s", session_id) + + # Flushing session ID so next time the client object is used, there's no issues + client.sessionId = None + + # Put client back into pool - resource stats aren't used in the API, so defaults + # are passed in + client_pool._queue_resource(client, client_pool._get_default_stats())
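Together, `icat_client_pool.py` and `lru_cache.py` implement a borrow/return cycle against the pool. The sketch below walks through that cycle using the same `py-object-pool` calls the two modules use; `DummyClient` is a stand-in for `ICATClient` so the cycle can be exercised without a live ICAT server:

```python
from object_pool import ObjectPool


class DummyClient:
    """Stand-in for ICATClient, avoiding the need for a reachable ICAT server."""

    def __init__(self):
        self.sessionId = None

    def clean_up(self):
        # ObjectPool looks for this method name, mirroring ICATClient.clean_up()
        pass


pool = ObjectPool(DummyClient, min_init=2, max_capacity=5, max_reusable=0, expires=0)

client, stats = pool._get_resource()  # what get_cached_client() does on a cache miss
client.sessionId = "some-session-id"
# ... the client now sits in the LRU cache while the session is in use ...
client.sessionId = None  # what ExtendedLRUCache.popitem() does on eviction...
pool._queue_resource(client, pool._get_default_stats())  # ...before returning it
```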
diff --git a/datagateway_api/common/icat/query.py b/datagateway_api/common/icat/query.py index 4721523f..97e1af41 100644 --- a/datagateway_api/common/icat/query.py +++ b/datagateway_api/common/icat/query.py @@ -5,9 +5,8 @@ from icat.exception import ICATInternalError, ICATValidationError from icat.query import Query -from datagateway_api.common.constants import Constants from datagateway_api.common.date_handler import DateHandler -from datagateway_api.common.exceptions import FilterError, PythonICATError +from datagateway_api.common.exceptions import PythonICATError log = logging.getLogger() @@ -37,6 +36,9 @@ def __init__( :raises PythonICATError: If a ValueError is raised when creating a Query(), 500 will be returned as a response """ + # Needed for ISIS endpoints as they use the DISTINCT keyword but don't select + # multiple attributes + self.isis_endpoint = False try: log.info("Creating ICATQuery for entity: %s", entity_name) @@ -47,6 +49,8 @@ aggregate=aggregate, includes=includes, ) + # Initialising flag for distinct filter on count endpoints + self.query.manual_count = False except ValueError: raise PythonICATError( "An issue has occurred while creating a Python ICAT Query object," ) @@ -77,7 +81,6 @@ def execute_query(self, client, return_json_formattable=False): raise PythonICATError(e) flat_query_includes = self.flatten_query_included_fields(self.query.includes) - mapped_distinct_fields = None # If the query has a COUNT function applied to it, some of these steps can be # skipped @@ -87,29 +90,45 @@ count_query = True log.debug("This ICATQuery is used for COUNT purposes") - if self.query.aggregate == "DISTINCT" and not count_query: + distinct_query = False + if ( + self.query.aggregate == "DISTINCT" + and not count_query + and not self.query.manual_count + and not self.isis_endpoint + ): + distinct_query = True log.info("Extracting the distinct fields from query's conditions") # Check query's conditions for the ones created by the distinct filter - distinct_attributes = self.iterate_query_conditions_for_distinctiveness() - if distinct_attributes != []: - mapped_distinct_fields = self.map_distinct_attributes_to_entity_names( - distinct_attributes, flat_query_includes, - ) - log.debug( - "Attribute names used in the distinct filter, mapped to the entity" - " they are a part of: %s", - mapped_distinct_fields, - ) + distinct_attributes = self.get_distinct_attributes() if return_json_formattable: log.info("Query results will be returned in a JSON format") data = [] + if self.query.manual_count: + # Manually count the number of results + data.append(len(query_result)) + return data + for result in query_result: - if not count_query: - dict_result = self.entity_to_dict( - result, flat_query_includes, mapped_distinct_fields, + if distinct_query: + # When multiple attributes are given in a distinct filter, Python + # ICAT returns the results in a nested
list. This doesn't happen + # when a single attribute is given, so the result is encased in a + # list as `map_distinct_attributes_to_results()` assumes a list as + # input + if not isinstance(result, tuple): + result = [result] + + # Map distinct attributes and result + data.append( + self.map_distinct_attributes_to_results( + distinct_attributes, result, + ), ) + elif not count_query: + dict_result = self.entity_to_dict(result, flat_query_includes) data.append(dict_result) else: data.append(result) @@ -119,37 +138,10 @@ def execute_query(self, client, return_json_formattable=False): log.info("Query results will be returned as ICAT entities") return query_result - def iterate_query_conditions_for_distinctiveness(self): - distinct_attributes = [] - for attribute_name, where_statement in self.query.conditions.items(): - if isinstance(where_statement, list): - for sub_value in where_statement: - self.check_attribute_name_for_distinct( - distinct_attributes, attribute_name, sub_value, - ) - elif isinstance(where_statement, str): - self.check_attribute_name_for_distinct( - distinct_attributes, attribute_name, where_statement, - ) - - return distinct_attributes - - def check_attribute_name_for_distinct(self, attribute_list, key, value): - """ - Check the attribute name to see if its associated value is used to signify the - attribute is requested in a distinct filter and if so, append it to the list of - attribute names - - :param key: Name of an attribute - :type key: :class:`str` - :param value: Expression that should be applied to the associated attribute - e.g. "= 'Metadata'" - :type value: :class:`str` - """ - if value == Constants.PYTHON_ICAT_DISTNCT_CONDITION: - attribute_list.append(key) + def get_distinct_attributes(self): + return self.query.attributes - def entity_to_dict(self, entity, includes, distinct_fields=None): + def entity_to_dict(self, entity, includes): """ This expands on Python ICAT's implementation of `icat.entity.Entity.as_dict()` to use set operators to create a version of the entity as a dictionary @@ -176,13 +168,6 @@ def entity_to_dict(self, entity, includes, distinct_fields=None): include_set = (entity.InstRel | entity.InstMRel) & set(includes) for key in entity.InstAttr | entity.MetaAttr | include_set: if key in includes: - # Make a copy of distinct_fields when calling this function again later - if distinct_fields is not None: - distinct_fields_copy = self.prepare_distinct_fields( - key, distinct_fields, - ) - else: - distinct_fields_copy = None target = getattr(entity, key) # Copy and remove don't return values so must be done separately @@ -195,131 +180,92 @@ def entity_to_dict(self, entity, includes, distinct_fields=None): " cause an issue further on in the request", ) if isinstance(target, Entity): - d[key] = self.entity_to_dict( - target, includes_copy, distinct_fields_copy, - ) + d[key] = self.entity_to_dict(target, includes_copy) # Related fields with one-many relationships are stored as EntityLists elif isinstance(target, EntityList): d[key] = [] for e in target: - d[key].append( - self.entity_to_dict(e, includes_copy, distinct_fields_copy), - ) + d[key].append(self.entity_to_dict(e, includes_copy)) # Add actual piece of data to the dictionary else: - entity_data = None - - if distinct_fields is None or key in distinct_fields["base"]: - entity_data = getattr(entity, key) - # Convert datetime objects to strings ready to be outputted as JSON - if isinstance(entity_data, datetime): - # Remove timezone data which isn't utilised in ICAT - entity_data = 
DateHandler.datetime_object_to_str(entity_data) + entity_data = getattr(entity, key) + # Convert datetime objects to strings ready to be outputted as JSON + if isinstance(entity_data, datetime): + # Remove timezone data which isn't utilised in ICAT + entity_data = DateHandler.datetime_object_to_str(entity_data) - d[key] = entity_data + d[key] = entity_data return d - def map_distinct_attributes_to_entity_names(self, distinct_fields, included_fields): + def map_distinct_attributes_to_results(self, distinct_attributes, query_result): """ - This function looks at a list of dot-separated fields and maps them to which - entity they belong to - - The result of this function will be a dictionary that has a data structure - similar to the example below. The values assigned to the 'base' key are the - fields that belong to the entity the request is being sent to (e.g. the base - values of `/users` would be fields belonging to the User entity). - - Example return value: - `{'base': ['id', 'modTime'], 'userGroups': ['id', 'fullName'], - 'investigationUser': ['id', 'role']}` - - For distinct fields that are part of included entities (e.g. userGroups.id), it - is assumed that the relevant entities have been specified in an include filter. - This is checked, and a suitable exception is thrown. Without this, the query - would execute, and the user would get a 200 response, but they wouldn't receive - the data they're expecting, hence it's more sensible to raise a 400 to alert - them to their probable mistake, rather than to just log a warning. - - :param distinct_fields: List of fields that should be distinctive in the request - response, as per the distinct filters in the request - :type distinct_fields: :class:`list` - :param included_fields: List of fields that have been included in the ICAT - query. It is assumed each element has been checked for multiple fields - separated by dots, split them accordingly and flattened the resulting list. - Note: ICATQuery.flatten_query_included_fields performs this functionality. - :type included_fields: :class:`list` - :return: Dictionary of fields, where the key denotes which entity they belong to + Maps the attribute names from a distinct filter onto the results given by the + query constructed and executed using Python ICAT + + When selecting multiple (but not all) attributes in a JPQL query, the results + are returned in a list and not mapped to an entity object. As a result, + `entity_to_dict()` cannot be used as that function assumes an entity object + input. Within the API, selecting multiple attributes happens when a distinct + filter is applied to a request. This function is the alternative for processing + data ready for output + + :param distinct_attributes: List of distinct attributes from the distinct + filter of the incoming request + :type distinct_attributes: :class:`list` + :param query_result: Results fetched from Python ICAT + :type query_result: :class:`tuple` or :class:`list` when a single attribute is + given + :return: Dictionary of attribute names paired with the results, ready to be + returned to the user """ + result_dict = {} + for attr_name, data in zip(distinct_attributes, query_result): + # Splitting attribute names in case it's from a related entity + split_attr_name = attr_name.split(".") + + if isinstance(data, datetime): + data = DateHandler.datetime_object_to_str(data) + + # Attribute name is from the 'origin' entity (i.e. 
not a related entity) + if len(split_attr_name) == 1: + result_dict[attr_name] = data + # Attribute name is a related entity, dictionary needs to be nested + else: + result_dict.update(self.map_nested_attrs({}, split_attr_name, data)) + + return result_dict - # Mapping which entities have distinct fields - distinct_field_dict = {"base": []} - - for field in distinct_fields: - split_fields = field.split(".") - # Single element list means the field belongs to the entity which the - # request has been sent to - if len(split_fields) == 1: - # Conventional list assignment causes IndexError because -2 is out of - # range of a list with a single element - split_fields.insert(0, "base") - - # Check that only an entity name, and attribute name exist - # Code within loop is used for when `split_fields` = - # ['dataset', 'investigation', 'name'] for example - while len(split_fields) > 2: - # If a key doesn't exist in the dictionary, create it and assign an - # empty list to it - distinct_field_dict.setdefault(split_fields[0], []) - split_fields.pop(0) - - distinct_field_dict.setdefault(split_fields[0], []) - distinct_field_dict[split_fields[0]].append(split_fields[-1]) - - # Remove "base" key as this isn't a valid entity name in Python ICAT - distinct_entities = list(distinct_field_dict.keys()) - distinct_entities.remove("base") - - # Search through entity names that have distinct fields for the request and - # ensure these same entity names are in the query's includes - for entity in distinct_entities: - if entity not in included_fields: - raise FilterError( - "A distinct field that has a relationship with another entity does" - " not have the included entity within an include filter in this" - " request. Please add all related entities which are required for" - " the fields in the distinct filter distinct to an include filter.", - ) - - return distinct_field_dict - - def prepare_distinct_fields(self, entity_name, distinct_fields): + def map_nested_attrs(self, nested_dict, split_attr_name, query_data): """ - Copy `distinct_fields` and move the data held in `entity_name` portion of the - dictionary to the "base" section of the dictionary. This function is called in - preparation for recursive calls occurring in entity_to_dict() - - See map_distinct_attribute_to_entity_names() for an explanation regarding - `distinct_fields` and its data structure - - :param entity_name: Name of the Python ICAT entity - :type entity_name: :class:`str` - :param distinct_fields: Names of fields in Python ICAT which should be outputted - in the response, separated by which entities they belong to as the keys - :type distinct_fields: :class:`dict` - :return: A copy of `distinct_fields`, with the data from the entity name put - into the base portion of the dictionary + A function that can be called recursively to map attributes from related + entities to the associated data + + :param nested_dict: Dictionary to insert data into + :type nested_dict: :class:`dict` + :param split_attr_name: List of parts to an attribute name, that have been split + by "." 
+ :type split_attr_name: :class:`list` + :param query_data: Data to be added to the dictionary + :type query_data: :class:`str` or :class:`datetime.datetime` + :return: Dictionary to be added to the result dictionary, e.g. calling + `map_nested_attrs({}, ["investigation", "name"], "Investigation A")` gives + `{"investigation": {"name": "Investigation A"}}` """ - log.debug("Entity Name: %s, Distinct Fields: %s", entity_name, distinct_fields) - - distinct_fields_copy = distinct_fields.copy() - - # Reset base fields - distinct_fields_copy["base"] = [] - if entity_name in distinct_fields_copy.keys(): - distinct_fields_copy["base"] = distinct_fields_copy[entity_name] + # Popping LHS of related attribute name to see if it's an attribute name or part + # of a path to a related entity + attr_name_pop = split_attr_name.pop(0) + + # Related attribute name, ready to insert data into dictionary + if len(split_attr_name) == 0: + # Reached the attribute name, so insert the data + nested_dict[attr_name_pop] = query_data + # Part of the path for related entity, need to recurse to get to attribute name + else: + nested_dict[attr_name_pop] = {} + self.map_nested_attrs( + nested_dict[attr_name_pop], split_attr_name, query_data, + ) - return distinct_fields_copy + return nested_dict def flatten_query_included_fields(self, includes): """
diff --git a/datagateway_api/config.json.example b/datagateway_api/config.json.example index 9ca8b44a..68137ce9 100644 --- a/datagateway_api/config.json.example +++ b/datagateway_api/config.json.example @@ -1,5 +1,8 @@ { "backend": "db", + "client_cache_size": 5, + "client_pool_init_size": 2, + "client_pool_max_size": 5, "db_url": "mysql+pymysql://icatdbuser:icatdbuserpw@localhost:3306/icatdb", "flask_reloader": false, "icat_url": "https://localhost:8181",
diff --git a/datagateway_api/src/api_start_utils.py b/datagateway_api/src/api_start_utils.py index 8a3e306d..25cbe69a 100644 --- a/datagateway_api/src/api_start_utils.py +++ b/datagateway_api/src/api_start_utils.py @@ -10,6 +10,7 @@ from datagateway_api.common.backends import create_backend from datagateway_api.common.config import APIConfigOptions, config from datagateway_api.common.database.helpers import db +from datagateway_api.common.icat.icat_client_pool import create_client_pool from datagateway_api.src.resources.entities.entity_endpoint import ( get_count_endpoint, get_endpoint, @@ -86,27 +87,32 @@ def create_api_endpoints(flask_app, api, spec): backend = create_backend(backend_type) + icat_client_pool = None + if backend_type == "python_icat": + # Create client pool + icat_client_pool = create_client_pool() + for entity_name in endpoints: get_endpoint_resource = get_endpoint( - entity_name, endpoints[entity_name], backend, + entity_name, endpoints[entity_name], backend, client_pool=icat_client_pool, ) api.add_resource(get_endpoint_resource, f"/{entity_name.lower()}") spec.path(resource=get_endpoint_resource, api=api) get_id_endpoint_resource = get_id_endpoint( - entity_name, endpoints[entity_name], backend, + entity_name, endpoints[entity_name], backend, client_pool=icat_client_pool, ) api.add_resource(get_id_endpoint_resource, f"/{entity_name.lower()}/<int:id_>") spec.path(resource=get_id_endpoint_resource, api=api) get_count_endpoint_resource = get_count_endpoint( - entity_name, endpoints[entity_name], backend, + entity_name, endpoints[entity_name], backend, client_pool=icat_client_pool, ) api.add_resource(get_count_endpoint_resource, f"/{entity_name.lower()}/count") spec.path(resource=get_count_endpoint_resource, api=api) get_find_one_endpoint_resource = get_find_one_endpoint( - entity_name, endpoints[entity_name], backend, + entity_name, endpoints[entity_name], backend,
client_pool=icat_client_pool, ) api.add_resource( get_find_one_endpoint_resource, f"/{entity_name.lower()}/findone", ) spec.path(resource=get_find_one_endpoint_resource, api=api) # Session endpoint - session_endpoint_resource = session_endpoints(backend) + session_endpoint_resource = session_endpoints(backend, client_pool=icat_client_pool) api.add_resource(session_endpoint_resource, "/sessions") spec.path(resource=session_endpoint_resource, api=api) # Table specific endpoints - instrument_facility_cycle_resource = instrument_facility_cycles_endpoint(backend) + instrument_facility_cycle_resource = instrument_facility_cycles_endpoint( + backend, client_pool=icat_client_pool, + ) api.add_resource( instrument_facility_cycle_resource, "/instruments/<int:id_>/facilitycycles", ) spec.path(resource=instrument_facility_cycle_resource, api=api) count_instrument_facility_cycle_res = count_instrument_facility_cycles_endpoint( - backend, + backend, client_pool=icat_client_pool, ) api.add_resource( count_instrument_facility_cycle_res, @@ -134,7 +142,9 @@ ) spec.path(resource=count_instrument_facility_cycle_res, api=api) - instrument_investigation_resource = instrument_investigation_endpoint(backend) + instrument_investigation_resource = instrument_investigation_endpoint( + backend, client_pool=icat_client_pool, + ) api.add_resource( instrument_investigation_resource, "/instruments/<int:instrument_id>/facilitycycles/<int:cycle_id>/investigations", ) @@ -142,7 +152,7 @@ spec.path(resource=instrument_investigation_resource, api=api) count_instrument_investigation_resource = count_instrument_investigation_endpoint( - backend, + backend, client_pool=icat_client_pool, ) api.add_resource( count_instrument_investigation_resource,
diff --git a/datagateway_api/src/resources/entities/entity_endpoint.py b/datagateway_api/src/resources/entities/entity_endpoint.py index ef26e844..5a278e4b 100644 --- a/datagateway_api/src/resources/entities/entity_endpoint.py +++ b/datagateway_api/src/resources/entities/entity_endpoint.py @@ -7,7 +7,7 @@ ) -def get_endpoint(name, entity_type, backend): +def get_endpoint(name, entity_type, backend, **kwargs): """ Given an entity name generate a flask_restful Resource class. In main.py these generated classes are registered with the api e.g @@ -31,6 +31,7 @@ def get(self): return ( backend.get_with_filters( get_session_id_from_auth_header(), entity_type, get_filters_from_query_string(), + **kwargs, ), 200, ) @@ -72,7 +73,10 @@ def get(self): def post(self): return ( backend.create( - get_session_id_from_auth_header(), entity_type, request.json, + get_session_id_from_auth_header(), + entity_type, + request.json, + **kwargs, ), 200, ) @@ -115,7 +119,10 @@ def post(self): def patch(self): return ( backend.update( - get_session_id_from_auth_header(), entity_type, request.json, + get_session_id_from_auth_header(), + entity_type, + request.json, + **kwargs, ), 200, ) @@ -159,7 +166,7 @@ def patch(self): return Endpoint -def get_id_endpoint(name, entity_type, backend): +def get_id_endpoint(name, entity_type, backend, **kwargs): """ Given an entity name generate a flask_restful Resource class.
In main.py these generated classes are registered with the api e.g @@ -180,7 +187,7 @@ class EndpointWithID(Resource): def get(self, id_): return ( backend.get_with_id( - get_session_id_from_auth_header(), entity_type, id_, + get_session_id_from_auth_header(), entity_type, id_, **kwargs, ), 200, ) @@ -216,7 +223,9 @@ def get(self, id_): """ def delete(self, id_): - backend.delete_with_id(get_session_id_from_auth_header(), entity_type, id_) + backend.delete_with_id( + get_session_id_from_auth_header(), entity_type, id_, **kwargs, + ) return "", 204 delete.__doc__ = f""" @@ -248,8 +257,10 @@ def delete(self, id_): def patch(self, id_): session_id = get_session_id_from_auth_header() - backend.update_with_id(session_id, entity_type, id_, request.json) - return backend.get_with_id(session_id, entity_type, id_), 200 + backend.update_with_id( + session_id, entity_type, id_, request.json, **kwargs, + ) + return backend.get_with_id(session_id, entity_type, id_, **kwargs), 200 patch.__doc__ = f""" --- @@ -293,7 +304,7 @@ def patch(self, id_): return EndpointWithID -def get_count_endpoint(name, entity_type, backend): +def get_count_endpoint(name, entity_type, backend, **kwargs): """ Given an entity name generate a flask_restful Resource class. In main.py these generated classes are registered with the api e.g @@ -313,7 +324,7 @@ def get(self): filters = get_filters_from_query_string() return ( backend.count_with_filters( - get_session_id_from_auth_header(), entity_type, filters, + get_session_id_from_auth_header(), entity_type, filters, **kwargs, ), 200, ) @@ -350,7 +361,7 @@ def get(self): return CountEndpoint -def get_find_one_endpoint(name, entity_type, backend): +def get_find_one_endpoint(name, entity_type, backend, **kwargs): """ Given an entity name generate a flask_restful Resource class. In main.py these generated classes are registered with the api e.g @@ -372,7 +383,7 @@ def get(self): filters = get_filters_from_query_string() return ( backend.get_one_with_filters( - get_session_id_from_auth_header(), entity_type, filters, + get_session_id_from_auth_header(), entity_type, filters, **kwargs, ), 200, ) diff --git a/datagateway_api/src/resources/non_entities/sessions_endpoints.py b/datagateway_api/src/resources/non_entities/sessions_endpoints.py index 7e5b2a6b..651f5e5c 100644 --- a/datagateway_api/src/resources/non_entities/sessions_endpoints.py +++ b/datagateway_api/src/resources/non_entities/sessions_endpoints.py @@ -10,7 +10,7 @@ log = logging.getLogger() -def session_endpoints(backend): +def session_endpoints(backend, **kwargs): """ Generate a flask_restful Resource class using the configured backend. In main.py these generated classes are registered with the api e.g. 
@@ -74,7 +74,7 @@ def post(self): if not ("mechanism" in request.json): request.json["mechanism"] = "simple" try: - return {"sessionID": backend.login(request.json)}, 201 + return {"sessionID": backend.login(request.json, **kwargs)}, 201 except AuthenticationError: return "Forbidden", 403 @@ -99,7 +99,7 @@ def delete(self): 404: description: Not Found - Unable to find session ID """ - backend.logout(get_session_id_from_auth_header()) + backend.logout(get_session_id_from_auth_header(), **kwargs) return "", 200 def get(self): @@ -136,7 +136,12 @@ def get(self): 403: description: Forbidden - The session ID provided is invalid """ - return backend.get_session_details(get_session_id_from_auth_header()), 200 + return ( + backend.get_session_details( + get_session_id_from_auth_header(), **kwargs, + ), + 200, + ) def put(self): """ @@ -161,6 +166,6 @@ def put(self): 403: description: Forbidden - The session ID provided is invalid """ - return backend.refresh(get_session_id_from_auth_header()), 200 + return backend.refresh(get_session_id_from_auth_header(), **kwargs), 200 return Sessions diff --git a/datagateway_api/src/resources/table_endpoints/table_endpoints.py b/datagateway_api/src/resources/table_endpoints/table_endpoints.py index 3c73c406..ee5819b3 100644 --- a/datagateway_api/src/resources/table_endpoints/table_endpoints.py +++ b/datagateway_api/src/resources/table_endpoints/table_endpoints.py @@ -6,7 +6,7 @@ ) -def instrument_facility_cycles_endpoint(backend): +def instrument_facility_cycles_endpoint(backend, **kwargs): """ Generate a flask_restful Resource class using the configured backend. In main.py these generated classes are registered with the api e.g. @@ -66,6 +66,7 @@ def get(self, id_): get_session_id_from_auth_header(), id_, get_filters_from_query_string(), + **kwargs, ), 200, ) @@ -73,7 +74,7 @@ def get(self, id_): return InstrumentsFacilityCycles -def count_instrument_facility_cycles_endpoint(backend): +def count_instrument_facility_cycles_endpoint(backend, **kwargs): """ Generate a flask_restful Resource class using the configured backend. In main.py these generated classes are registered with the api e.g. @@ -126,6 +127,7 @@ def get(self, id_): get_session_id_from_auth_header(), id_, get_filters_from_query_string(), + **kwargs, ), 200, ) @@ -133,7 +135,7 @@ def get(self, id_): return InstrumentsFacilityCyclesCount -def instrument_investigation_endpoint(backend): +def instrument_investigation_endpoint(backend, **kwargs): """ Generate a flask_restful Resource class using the configured backend. In main.py these generated classes are registered with the api e.g. @@ -201,6 +203,7 @@ def get(self, instrument_id, cycle_id): instrument_id, cycle_id, get_filters_from_query_string(), + **kwargs, ), 200, ) @@ -208,7 +211,7 @@ def get(self, instrument_id, cycle_id): return InstrumentsFacilityCyclesInvestigations -def count_instrument_investigation_endpoint(backend): +def count_instrument_investigation_endpoint(backend, **kwargs): """ Generate a flask_restful Resource class using the configured backend. In main.py these generated classes are registered with the api e.g. 
@@ -270,6 +273,7 @@ def get(self, instrument_id, cycle_id): instrument_id, cycle_id, get_filters_from_query_string(), + **kwargs, ), 200, ) diff --git a/poetry.lock b/poetry.lock index 8b1de70c..46829252 100644 --- a/poetry.lock +++ b/poetry.lock @@ -90,6 +90,14 @@ typed-ast = ">=1.4.0" [package.extras] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +[[package]] +name = "cachetools" +version = "4.2.1" +description = "Extensible memoizing collections and decorators" +category = "main" +optional = false +python-versions = "~=3.5" + [[package]] name = "certifi" version = "2020.12.5" @@ -371,14 +379,14 @@ flask = "*" [[package]] name = "gitdb" -version = "4.0.5" +version = "4.0.7" description = "Git Object Database" category = "dev" optional = false python-versions = ">=3.4" [package.dependencies] -smmap = ">=3.0.1,<4" +smmap = ">=3.0.1,<5" [[package]] name = "gitpython" @@ -420,7 +428,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "3.7.3" +version = "3.10.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -432,7 +440,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -564,6 +572,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "py-object-pool" +version = "1.1" +description = "Object pool creation library" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "pycodestyle" version = "2.7.0" @@ -574,7 +590,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyflakes" -version = "2.3.0" +version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false @@ -602,7 +618,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "pytest" -version = "6.2.2" +version = "6.2.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -663,7 +679,7 @@ six = ">=1.5" [[package]] name = "python-icat" -version = "0.17.0" +version = "0.18.1" description = "Python interface to ICAT and IDS" category = "main" optional = false @@ -687,7 +703,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "regex" -version = "2021.3.17" +version = "2021.4.4" description = "Alternative regular expression module, to replace re." 
category = "dev" optional = false @@ -735,15 +751,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "3.0.5" +version = "4.0.0" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "sqlalchemy" -version = "1.4.1" +version = "1.4.5" description = "Database Abstraction Library" category = "main" optional = false @@ -755,6 +771,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [package.extras] aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] +aiosqlite = ["greenlet (!=0.4.17)", "aiosqlite"] asyncio = ["greenlet (!=0.4.17)"] mariadb_connector = ["mariadb (>=1.0.1)"] mssql = ["pyodbc"] @@ -770,6 +787,7 @@ postgresql_pg8000 = ["pg8000 (>=1.16.6)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql (<1)", "pymysql"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "stevedore" @@ -863,7 +881,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "8544fcdd39d43fefe311167bd7981c5518ae5e0f39a8be5524bf924cb1510278" +content-hash = "8e6c3af6795dbb3c0d0f27e870b4d895cf399ca51418ebbb2636ccc561caf293" [metadata.files] aniso8601 = [ @@ -894,6 +912,10 @@ black = [ {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] +cachetools = [ + {file = "cachetools-4.2.1-py3-none-any.whl", hash = "sha256:1d9d5f567be80f7c07d765e21b814326d78c61eb0c3a637dffc0e5d1796cb2e2"}, + {file = "cachetools-4.2.1.tar.gz", hash = "sha256:f469e29e7aa4cff64d8de4aad95ce76de8ea1125a16c68e0d93f65c3c3dc92e9"}, +] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, @@ -1033,8 +1055,8 @@ flask-swagger-ui = [ {file = "flask-swagger-ui-3.25.0.tar.gz", hash = "sha256:42d098997e06b04f992609c4945cc990738b269c153d8388fc59a91a5dfcee9e"}, ] gitdb = [ - {file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, - {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, + {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, + {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, @@ -1093,8 +1115,8 @@ idna = [ {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.7.3-py3-none-any.whl", hash = "sha256:b74159469b464a99cb8cc3e21973e4d96e05d3024d337313fedb618a6e86e6f4"}, - {file = "importlib_metadata-3.7.3.tar.gz", hash = "sha256:742add720a20d0467df2f444ae41704000f50e1234f46174b51f9c6031a1bd71"}, + {file = "importlib_metadata-3.10.0-py3-none-any.whl", hash = 
"sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe"}, + {file = "importlib_metadata-3.10.0.tar.gz", hash = "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1198,13 +1220,17 @@ py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] +py-object-pool = [ + {file = "py-object-pool-1.1.tar.gz", hash = "sha256:fa3a41f363a50b8bf346880bd75f45d6a0391f24a2533a140ed531316782352c"}, + {file = "py_object_pool-1.1-py3-none-any.whl", hash = "sha256:9be717f00b861bbecc45f38108a96d7251bcaba4e02b24bbcc5115ffb9d32104"}, +] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pyflakes = [ - {file = "pyflakes-2.3.0-py2.py3-none-any.whl", hash = "sha256:910208209dcea632721cb58363d0f72913d9e8cf64dc6f8ae2e02a3609aba40d"}, - {file = "pyflakes-2.3.0.tar.gz", hash = "sha256:e59fd8e750e588358f1b8885e5a4751203a0516e0ee6d34811089ac294c8806f"}, + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pymysql = [ {file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"}, @@ -1215,8 +1241,8 @@ pyparsing = [ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pytest = [ - {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, - {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, + {file = "pytest-6.2.3-py3-none-any.whl", hash = "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc"}, + {file = "pytest-6.2.3.tar.gz", hash = "sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634"}, ] pytest-cov = [ {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, @@ -1230,7 +1256,7 @@ python-dateutil = [ {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] python-icat = [ - {file = "python-icat-0.17.0.tar.gz", hash = "sha256:92942ce5e4b4c7b7db8179b78c07c58b56091a1d275385f69dd99d19a58a9396"}, + {file = "python-icat-0.18.1.tar.gz", hash = "sha256:d8b8fc1a535a78e1aac263594851c2dada798b027c60cc9bb20411fb6835650c"}, ] pytz = [ {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, @@ -1260,47 +1286,47 @@ pyyaml = [ {file = "PyYAML-5.4.tar.gz", hash = "sha256:3c49e39ac034fd64fd576d63bb4db53cda89b362768a67f07749d55f128ac18a"}, ] regex = [ - {file = "regex-2021.3.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b97ec5d299c10d96617cc851b2e0f81ba5d9d6248413cd374ef7f3a8871ee4a6"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_i686.whl", hash 
= "sha256:cb4ee827857a5ad9b8ae34d3c8cc51151cb4a3fe082c12ec20ec73e63cc7c6f0"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:633497504e2a485a70a3268d4fc403fe3063a50a50eed1039083e9471ad0101c"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a59a2ee329b3de764b21495d78c92ab00b4ea79acef0f7ae8c1067f773570afa"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f85d6f41e34f6a2d1607e312820971872944f1661a73d33e1e82d35ea3305e14"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4651f839dbde0816798e698626af6a2469eee6d9964824bb5386091255a1694f"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:39c44532d0e4f1639a89e52355b949573e1e2c5116106a395642cbbae0ff9bcd"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3d9a7e215e02bd7646a91fb8bcba30bc55fd42a719d6b35cf80e5bae31d9134e"}, - {file = "regex-2021.3.17-cp36-cp36m-win32.whl", hash = "sha256:159fac1a4731409c830d32913f13f68346d6b8e39650ed5d704a9ce2f9ef9cb3"}, - {file = "regex-2021.3.17-cp36-cp36m-win_amd64.whl", hash = "sha256:13f50969028e81765ed2a1c5fcfdc246c245cf8d47986d5172e82ab1a0c42ee5"}, - {file = "regex-2021.3.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9d8d286c53fe0cbc6d20bf3d583cabcd1499d89034524e3b94c93a5ab85ca90"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:201e2619a77b21a7780580ab7b5ce43835e242d3e20fef50f66a8df0542e437f"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d47d359545b0ccad29d572ecd52c9da945de7cd6cf9c0cfcb0269f76d3555689"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea2f41445852c660ba7c3ebf7d70b3779b20d9ca8ba54485a17740db49f46932"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:486a5f8e11e1f5bbfcad87f7c7745eb14796642323e7e1829a331f87a713daaa"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e25e0afe1cf0f62781a150c1454b2113785401ba285c745acf10c8ca8917df"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a2ee026f4156789df8644d23ef423e6194fad0bc53575534101bb1de5d67e8ce"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:4c0788010a93ace8a174d73e7c6c9d3e6e3b7ad99a453c8ee8c975ddd9965643"}, - {file = "regex-2021.3.17-cp37-cp37m-win32.whl", hash = "sha256:575a832e09d237ae5fedb825a7a5bc6a116090dd57d6417d4f3b75121c73e3be"}, - {file = "regex-2021.3.17-cp37-cp37m-win_amd64.whl", hash = "sha256:8e65e3e4c6feadf6770e2ad89ad3deb524bcb03d8dc679f381d0568c024e0deb"}, - {file = "regex-2021.3.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a0df9a0ad2aad49ea3c7f65edd2ffb3d5c59589b85992a6006354f6fb109bb18"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b98bc9db003f1079caf07b610377ed1ac2e2c11acc2bea4892e28cc5b509d8d5"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:808404898e9a765e4058bf3d7607d0629000e0a14a6782ccbb089296b76fa8fe"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5770a51180d85ea468234bc7987f5597803a4c3d7463e7323322fe4a1b181578"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:976a54d44fd043d958a69b18705a910a8376196c6b6ee5f2596ffc11bff4420d"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:63f3ca8451e5ff7133ffbec9eda641aeab2001be1a01878990f6c87e3c44b9d5"}, - {file = 
"regex-2021.3.17-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bcd945175c29a672f13fce13a11893556cd440e37c1b643d6eeab1988c8b209c"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3d9356add82cff75413bec360c1eca3e58db4a9f5dafa1f19650958a81e3249d"}, - {file = "regex-2021.3.17-cp38-cp38-win32.whl", hash = "sha256:f5d0c921c99297354cecc5a416ee4280bd3f20fd81b9fb671ca6be71499c3fdf"}, - {file = "regex-2021.3.17-cp38-cp38-win_amd64.whl", hash = "sha256:14de88eda0976020528efc92d0a1f8830e2fb0de2ae6005a6fc4e062553031fa"}, - {file = "regex-2021.3.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c2e364491406b7888c2ad4428245fc56c327e34a5dfe58fd40df272b3c3dab3"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8bd4f91f3fb1c9b1380d6894bd5b4a519409135bec14c0c80151e58394a4e88a"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:882f53afe31ef0425b405a3f601c0009b44206ea7f55ee1c606aad3cc213a52c"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:07ef35301b4484bce843831e7039a84e19d8d33b3f8b2f9aab86c376813d0139"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:360a01b5fa2ad35b3113ae0c07fb544ad180603fa3b1f074f52d98c1096fa15e"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:709f65bb2fa9825f09892617d01246002097f8f9b6dde8d1bb4083cf554701ba"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c66221e947d7207457f8b6f42b12f613b09efa9669f65a587a2a71f6a0e4d106"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c782da0e45aff131f0bed6e66fbcfa589ff2862fc719b83a88640daa01a5aff7"}, - {file = "regex-2021.3.17-cp39-cp39-win32.whl", hash = "sha256:dc9963aacb7da5177e40874585d7407c0f93fb9d7518ec58b86e562f633f36cd"}, - {file = "regex-2021.3.17-cp39-cp39-win_amd64.whl", hash = "sha256:a0d04128e005142260de3733591ddf476e4902c0c23c1af237d9acf3c96e1b38"}, - {file = "regex-2021.3.17.tar.gz", hash = "sha256:4b8a1fb724904139149a43e172850f35aa6ea97fb0545244dc0b805e0154ed68"}, + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = 
"regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, @@ -1315,44 +1341,44 @@ six = [ {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, - {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, + {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, + {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.4.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:afe330ee70b01c5f88ccd4c4aef5b284368ace76ecfdf380aa10c9432218578f"}, - {file = "SQLAlchemy-1.4.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:fbe998667bb2d4c8e818a089ba3bf5815e0528c5454fc7fb2dbeef68a0fdc649"}, - {file = "SQLAlchemy-1.4.1-cp27-cp27m-win32.whl", hash = "sha256:ec4dd88ab810a6135abecf1e40006b3534e43434380f87e9ddd698ae1ed6c026"}, - {file = "SQLAlchemy-1.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e6e0fe0c5374eb192ac3cb04a0028f82630282e4e15cf7c5fb2c10ed93ea1f0b"}, - {file = "SQLAlchemy-1.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:486afe754505a17c5b6b6f74537bc7f2a854f808b6b5fe899a8cfba9ccbe731b"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:df3d12d7809b8db69ca52e178e2b6a08f1776f4373a2f98c62b0c48e9096ed25"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a6ff349c87094c91f3459a5c5af85e7199ac59835dd6d4726a40bbd517b590a6"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:03caff28c0598f05fdb7419fc7a2d2177dbe6a0d480f89c949a0c210bd413700"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c97a4f7ca058e04286dcfb46af700f9313b145429521083c50db57dfa19bc341"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:387ed375feef43ad35f11cd4eb000b22bedeea1a88ebae52f8e92ad09a3390c3"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:fd0813d175cb85618a1b32ca5f2610531c54c9235036cd77700301fada9d97cb"}, - {file = "SQLAlchemy-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f535e5e226ba3b17d40414fef11f324690881b200c5c5f2461b589ec1c7b257a"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:d5edf0fdb97f7af661131547bece6655a64912f51839ab2eeedbb539ed3115b4"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ec261c87728a54a7f9c7e45c3fa36b88b2ed32eda37122ae16c596e0801c5b6d"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bcdc64bb09820fbe44b60e9fa3755014909f874cbf525b388101a7e3d25992d4"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:62ce8ca8e7c4d0cb4e208a3d2f3c221ad8a8c3bafe9e5be3917f61617d4dc99f"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7b64432987dab6cf5cd89213d6a81f85f4de2ee5085d10b255a6303e470ea2d2"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:1ea14c62ad5e0952b45e1b2a8b80a6948af0be01088fc87b79b61fe8b0facb7d"}, - {file = "SQLAlchemy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f84d0b098cb2bb1f2e16a9e4aecb96d32cd91458a8e7a8b00f87c8aa9e12b487"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:be015f6d872b1d12510b46badf7cb616c4840077ade9545a49897b450bf0a399"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:837c55d55c56c1a132ee059474d7ecf259f82cd3864c5ef6ad6201b3cf5500e5"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:5a972a2facb69c559c5297930a8a43f348d567a846b031aaff0e696dcbf993f1"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:1edb425535d280c21bf505a054857790d2cef302acea8578f1c5208c0bf89492"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:c7a702e0893134c7a1fcb9306b051a658a01d70e80bfa7cdf9c97ab9573404ca"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-win32.whl", hash = "sha256:7c5687b8e056bb105c2665b7d7db3df81d463ed6a9e3f34c98b814ecdd5d0da0"}, - {file = "SQLAlchemy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:b41aa302ecb636ded2f28e7a0b55f8e3cf2cc4c460d7dda5cbd91c8aef41c1f1"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a774dbc9c82f5a62b4a9c6f7b47b48371d44c0b087143e07883182392103ef0a"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:140104ecf20ee0e5594c1d3ec637ec5101b6330fab42a2211afd7e8e3991fe78"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f37abd52706b6de55fdabbab368306b1a87fae1eac7b37af53d75bb79d45f2dd"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ca861642bf1ac2b161ae623327874dcae8789713c188d9f78320cadaef6e5f6f"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:42b19fae479a012d775383b5395d55568017872551f93601e75f8bf67b3bbab3"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a4fa7217a7e09aa2e89604813b9ba2b874ed106c5be4ac98fc93547c4472e0c0"}, - {file = "SQLAlchemy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:396621702babc3e92b93aa4fc6a858d6e41b6e10676e5e5e7a79869c59733c4e"}, - {file = "SQLAlchemy-1.4.1.tar.gz", hash = "sha256:a6afdab2d70ef9f9905eba5ba93cf78ed9ed188cde3abda3231cdb8fce005a84"}, + {file = "SQLAlchemy-1.4.5-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c3810ebcf1d42c532c8f5c3f442c705d94442a27a32f2df5344f0857306ab321"}, + {file = "SQLAlchemy-1.4.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7481f9c2c832a3bf37c80bee44d91ac9938b815cc06f7e795b976e300914aab9"}, + {file = "SQLAlchemy-1.4.5-cp27-cp27m-win32.whl", hash = "sha256:94040a92b6676f9ffdab6c6b479b3554b927a635c90698c761960b266b04fc88"}, + {file = 
"SQLAlchemy-1.4.5-cp27-cp27m-win_amd64.whl", hash = "sha256:02b039e0e7e6de2f15ea2d2de3995e31a170e700ec0b37b4eded662171711d19"}, + {file = "SQLAlchemy-1.4.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f16801795f1ffe9472360589a04301018c79e4582a85e68067275bb4f765e4e2"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:82f11b679df91275788be6734dd4a9dfa29bac67b85326992609f62b05bdab37"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a08027ae84efc563f0f2f341dda572eadebeca38c0ae028a009988f27e9e6230"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:70a1387396ea5b3022539b560c287daf79403d8b4b365f89b56d660e625a4457"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4f7ce3bfdab6520554af4a5b1df4513d45388624d015ba4d921daf48ce1d6503"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:08943201a1e3c6238e48f4d5d56c27ea1e1b39d3d9f36a9d81fc3cfb0e1b83bd"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-win32.whl", hash = "sha256:fbb0fda1c574975807aceb0e2332e0ecfe9e5656c191ed482c1a5eafe7a33823"}, + {file = "SQLAlchemy-1.4.5-cp36-cp36m-win_amd64.whl", hash = "sha256:8d6a9feb5efd2fdab25c6d5a0a5589fed9d789f5ec57ec12263fd0e60ce1dea6"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:c22bfac8d3b955cdb13f0fcd6343156bf56d925196cf7d9ab9ce9f61d3f1e11c"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7c0c7bb49167ac738ca6ee6e7f94a9988a7e4e261d8da335341e8c8c8f3b2e9b"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:344b58b4b4193b72e8b768a51ef6eb5a4c948ce313a0f23e2ea081e71ce8ac0e"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:48540072f43b3c080159ec1f24a4b014c0ee83d3b73795399974aa358a8cf71b"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:81badd7d3e0e6aba70a5d1b50fabe8112e9835a6fdb0684054c3fe5378ce0d01"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:a103294583383660d9e06dbd82037dc8e94c184bdcb27b2be44ae4457dafc6b4"}, + {file = "SQLAlchemy-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5361e25181b9872d6906c8c9be7dc05cb0a0951d71ee59ee5a71c1deb301b8a8"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7f5087104c3c5af11ea59e49ae66c33ca98b14a47d3796ae97498fca53f84aef"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:11e7a86209f69273e75d2dd64b06c0c2660e39cd942fce2170515c404ed7358a"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8301ecf3e819eb5dbc171e84654ff60872807775301a55fe35b0ab2ba3742031"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:44e11a06168782b6d485daef197783366ce7ab0d5eea0066c899ae06cef47bbc"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6f8fdad2f335d2f3ca2f3ee3b01404f7abcf519b03de2c510f1f42d16e39ffb4"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-win32.whl", hash = "sha256:f62c57ceadedeb8e7b98b48ac4d684bf2b0f73b9d882fed3ca260d9aedf6403f"}, + {file = "SQLAlchemy-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:301d0cd6ef1dc73b607748183da857e712d6f743de8d92b1e1f8facfb0ba2aa2"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:915d4fa08776c0252dc5a34fa15c6490f66f411ea1ac9492022f98875d6baf20"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:7de84feb31af3d8fdf819cac2042928d0b60d3cb16f49c4b2f48d88db46e79f6"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:45b091ccbf94374ed14abde17e9a04522b0493a17282eaaf4383efdd413f5243"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4df07161897191ed8d4a0cfc92425c81296160e5c5f76c9256716d3085172883"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ee4ddc904fb6414b5118af5b8d45e428aac2ccda01326b2ba2fe4354b0d8d1ae"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-win32.whl", hash = "sha256:2f11b5783933bff55291ca06496124347627d211ff2e509e846af1c35de0a3fb"}, + {file = "SQLAlchemy-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:0ee0054d4a598d2920cae14bcbd33e200e02c5e3b47b902627f8cf5d4c9a2a4b"}, + {file = "SQLAlchemy-1.4.5.tar.gz", hash = "sha256:1294f05916c044631fd626a4866326bbfbd17f62bd37510d000afaef4b35bd74"}, ] stevedore = [ {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, diff --git a/pyproject.toml b/pyproject.toml index 663ef326..88c6f5c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,8 +17,10 @@ Flask-Cors = "3.0.9" apispec = "3.3.0" flask-swagger-ui = "3.25.0" PyYAML = "5.4" -python-icat = "0.17.0" +python-icat = "0.18.1" suds-community = "^0.8.4" +py-object-pool = "^1.1" +cachetools = "^4.2.1" Flask-SQLAlchemy = "^2.4.4" requests = "^2.25.1" python-dateutil = "^2.8.1" diff --git a/test/icat/endpoints/test_count_with_filters_icat.py b/test/icat/endpoints/test_count_with_filters_icat.py index 3f4d09c1..8b112c8e 100644 --- a/test/icat/endpoints/test_count_with_filters_icat.py +++ b/test/icat/endpoints/test_count_with_filters_icat.py @@ -2,24 +2,44 @@ class TestICATCountWithFilters: - @pytest.mark.usefixtures("single_investigation_test_data") + @pytest.mark.parametrize( + "query_params, expected_result", + [ + pytest.param( + '?where={"title": {"like": "Test data for the Python ICAT Backend on' + ' DataGateway API"}}', + 5, + id="Filter on test data", + ), + pytest.param( + '?where={"title": {"like": "Test data for the Python ICAT Backend on' + ' DataGateway API"}}&distinct=["startDate"]', + 1, + id="Distinct test data", + ), + ], + ) + @pytest.mark.usefixtures("multiple_investigation_test_data") def test_valid_count_with_filters( - self, flask_test_app_icat, valid_icat_credentials_header, + self, + flask_test_app_icat, + valid_icat_credentials_header, + query_params, + expected_result, ): test_response = flask_test_app_icat.get( - '/investigations/count?where={"title": {"like": "Test data for the Python' - ' ICAT Backend on DataGateway API"}}', + f"/investigations/count{query_params}", headers=valid_icat_credentials_header, ) - assert test_response.json == 1 + assert test_response.json == expected_result def test_valid_no_results_count_with_filters( self, flask_test_app_icat, valid_icat_credentials_header, ): test_response = flask_test_app_icat.get( - '/investigations/count?where={"title": {"like": "This filter should cause a' - '404 for testing purposes..."}}', + '/investigations/count?where={"title": {"like": "This filter should cause 0' + ' results to be found for testing purposes..."}}', headers=valid_icat_credentials_header, ) diff --git a/test/icat/filters/test_distinct_filter.py b/test/icat/filters/test_distinct_filter.py index 66255b3a..4adf3ae8 100644 --- a/test/icat/filters/test_distinct_filter.py +++ b/test/icat/filters/test_distinct_filter.py @@ -5,12 +5,19 @@ class TestICATDistinctFilter: - def 
test_valid_str_field_input(self, icat_query):
-        test_filter = PythonICATDistinctFieldFilter("name")
+    @pytest.mark.parametrize(
+        "attribute_name",
+        [
+            pytest.param("name", id="Attribute for own entity"),
+            pytest.param("investigationUsers.role", id="Related attribute name"),
+        ],
+    )
+    def test_valid_str_field_input(self, icat_query, attribute_name):
+        test_filter = PythonICATDistinctFieldFilter(attribute_name)
         test_filter.apply_filter(icat_query)
 
         assert (
-            icat_query.conditions == {"name": "!= null"}
+            icat_query.attributes == [attribute_name]
             and icat_query.aggregate == "DISTINCT"
         )
 
@@ -19,8 +26,7 @@ def test_valid_list_fields_input(self, icat_query):
         test_filter.apply_filter(icat_query)
 
         assert (
-            icat_query.conditions
-            == {"doi": "!= null", "name": "!= null", "title": "!= null"}
+            icat_query.attributes == ["doi", "name", "title"]
             and icat_query.aggregate == "DISTINCT"
         )
 
@@ -35,11 +41,30 @@ def test_distinct_aggregate_added(self, icat_query):
         assert icat_query.aggregate == "DISTINCT"
 
-    @pytest.mark.parametrize("existing_aggregate", ["COUNT", "AVG", "SUM"])
-    def test_existing_aggregate_appended(self, icat_query, existing_aggregate):
+    @pytest.mark.parametrize(
+        "existing_aggregate, expected_instance_aggregate",
+        [
+            pytest.param(
+                "COUNT", "DISTINCT", id="Existing count aggregate (count endpoints)",
+            ),
+            pytest.param("AVG", "AVG:DISTINCT", id="Existing avg aggregate"),
+            pytest.param("SUM", "SUM:DISTINCT", id="Existing sum aggregate"),
+        ],
+    )
+    def test_existing_aggregate_on_query(
+        self, icat_query, existing_aggregate, expected_instance_aggregate,
+    ):
         icat_query.setAggregate(existing_aggregate)
         test_filter = PythonICATDistinctFieldFilter("name")
         test_filter.apply_filter(icat_query)
 
-        assert icat_query.aggregate == f"{existing_aggregate}:DISTINCT"
+        assert icat_query.aggregate == expected_instance_aggregate
+
+    def test_manual_count_flag(self, icat_query):
+        icat_query.setAggregate("COUNT")
+
+        test_filter = PythonICATDistinctFieldFilter("name")
+        test_filter.apply_filter(icat_query)
+
+        assert icat_query.manual_count
diff --git a/test/icat/test_lru_cache.py b/test/icat/test_lru_cache.py
new file mode 100644
index 00000000..3f27d603
--- /dev/null
+++ b/test/icat/test_lru_cache.py
@@ -0,0 +1,40 @@
+from unittest.mock import MagicMock
+
+from cachetools import cached
+from icat.client import Client
+
+from datagateway_api.common.config import APIConfigOptions, config
+from datagateway_api.common.icat.icat_client_pool import create_client_pool
+from datagateway_api.common.icat.lru_cache import ExtendedLRUCache
+
+
+class TestLRUCache:
+    def test_valid_cache_creation(self):
+        test_cache = ExtendedLRUCache()
+        assert test_cache.maxsize == config.get_config_value(
+            APIConfigOptions.CLIENT_CACHE_SIZE,
+        )
+
+    def test_valid_popitem(self):
+        test_cache = ExtendedLRUCache()
+        test_pool = create_client_pool()
+        test_client = Client(
+            config.get_config_value(APIConfigOptions.ICAT_URL),
+            checkCert=config.get_config_value(APIConfigOptions.ICAT_CHECK_CERT),
+        )
+
+        # Wrap popitem() so the eviction can be asserted while keeping its behaviour
+        test_cache.popitem = MagicMock(side_effect=test_cache.popitem)
+
+        @cached(cache=test_cache)
+        def get_cached_client(cache_number, client_pool):
+            return test_client
+
+        # Request one more client than the cache can hold so the least recently
+        # used entry is evicted, triggering popitem()
+        for cache_number in range(
+            config.get_config_value(APIConfigOptions.CLIENT_CACHE_SIZE) + 1,
+        ):
+            get_cached_client(cache_number, test_pool)
+
+        assert test_cache.popitem.called
diff --git a/test/icat/test_query.py b/test/icat/test_query.py
index d154ad7e..869c6445 100644
--- a/test/icat/test_query.py
+++ b/test/icat/test_query.py
@@ -1,10 +1,10 @@
-from datetime import datetime
+from datetime import datetime, timezone
 
 from icat.entity import Entity
 import pytest
 
 from datagateway_api.common.date_handler import DateHandler
-from datagateway_api.common.exceptions import FilterError, PythonICATError
+from datagateway_api.common.exceptions import PythonICATError
 from datagateway_api.common.icat.filters import (
     PythonICATSkipFilter,
     PythonICATWhereFilter,
@@ -12,7 +12,7 @@
 from datagateway_api.common.icat.query import ICATQuery
 
 
-def prepare_icat_data_for_assertion(data, remove_id=False):
+def prepare_icat_data_for_assertion(data, remove_id=False, remove_visit_id=False):
     """
     Remove meta attributes from ICAT data. Meta attributes contain data about data
     creation/modification, and should be removed to ensure correct assertion values
@@ -38,6 +38,8 @@ def prepare_icat_data_for_assertion(data, remove_id=False):
         # meta_attributes is immutable
         if remove_id:
             entity.pop("id")
+        if remove_visit_id:
+            entity.pop("visitId")
 
         assertable_data.append(entity)
 
@@ -45,29 +47,272 @@
 class TestICATQuery:
-    def test_valid_query_creation(self, icat_client):
-        # Paramatise and add inputs for conditions, aggregate and includes
-        test_query = ICATQuery(icat_client, "User")
+    @pytest.mark.parametrize(
+        "input_conditions, input_aggregate, input_includes, expected_conditions,"
+        " expected_aggregate, expected_includes",
+        [
+            pytest.param(
+                {"fullName": "like Bob"},
+                None,
+                None,
+                {"fullName": "like Bob"},
+                None,
+                set(),
+                id="Query with condition",
+            ),
+            pytest.param(
+                None,
+                "DISTINCT",
+                None,
+                {},
+                "DISTINCT",
+                set(),
+                id="Query with aggregate",
+            ),
+            pytest.param(
+                None,
+                None,
+                ["instrumentScientists"],
+                {},
+                None,
+                {"instrumentScientists"},
+                id="Query with included entity",
+            ),
+        ],
+    )
+    def test_valid_query_creation(
+        self,
+        icat_client,
+        input_conditions,
+        input_aggregate,
+        input_includes,
+        expected_conditions,
+        expected_aggregate,
+        expected_includes,
+    ):
+        test_query = ICATQuery(
+            icat_client,
+            "User",
+            conditions=input_conditions,
+            aggregate=input_aggregate,
+            includes=input_includes,
+        )
 
         assert test_query.query.entity == icat_client.getEntityClass("User")
+        assert test_query.query.conditions == expected_conditions
+        assert test_query.query.aggregate == expected_aggregate
+        assert test_query.query.includes == expected_includes
+
+    def test_valid_manual_count_flag_init(self, icat_client):
+        """
+        Flag required for distinct filters used on count endpoints should be initialised
+        in `__init__()` of `ICATQuery`
+        """
+        test_query = ICATQuery(icat_client, "User")
+
+        assert not test_query.query.manual_count
 
     def test_invalid_query_creation(self, icat_client):
         with pytest.raises(PythonICATError):
             ICATQuery(icat_client, "User", conditions={"invalid": "invalid"})
 
+    @pytest.mark.parametrize(
+        "query_conditions, query_aggregate, query_includes, query_attributes"
+        ", manual_count, return_json_format_flag, expected_query_result",
+        [
+            pytest.param(
+                {
+                    "title": "like '%Test data for the Python ICAT Backend on"
+                    " DataGateway API%'",
+                },
+                None,
+                None,
+                None,
+                False,
+                True,
+                [
+                    {
+                        "doi": None,
+                        "endDate": "2020-01-08 01:01:01+00:00",
+                        "name": "Test Data for DataGateway API Testing 0",
+                        "releaseDate": None,
+                        "startDate": "2020-01-04 01:01:01+00:00",
+                        "summary": None,
+                        "title": "Test data for the Python ICAT Backend on DataGateway"
+                        " API 0",
+                    },
+                ],
+                id="Ordinary query",
+            ),
+            pytest.param(
+                {
+                    "title": "like '%Test data for the Python ICAT Backend
on" + " DataGateway API%'", + }, + None, + ["facility"], + None, + False, + True, + [ + { + "doi": None, + "endDate": "2020-01-08 01:01:01+00:00", + "name": "Test Data for DataGateway API Testing 0", + "releaseDate": None, + "startDate": "2020-01-04 01:01:01+00:00", + "summary": None, + "title": "Test data for the Python ICAT Backend on DataGateway" + " API 0", + "facility": { + "createId": "user", + "createTime": "2011-01-29 06:19:43+00:00", + "daysUntilRelease": 10, + "description": "Lorem ipsum light source", + "fullName": None, + "id": 1, + "modId": "user", + "modTime": "2008-10-15 12:05:09+00:00", + "name": "LILS", + "url": None, + }, + }, + ], + id="Query with included entity", + ), + pytest.param( + { + "title": "like '%Test data for the Python ICAT Backend on" + " DataGateway API%'", + }, + "COUNT", + None, + None, + False, + True, + [1], + id="Count query", + ), + pytest.param( + { + "title": "like '%Test data for the Python ICAT Backend on" + " DataGateway API%'", + }, + None, + None, + None, + False, + False, + [ + { + "doi": None, + "endDate": "2020-01-08 01:01:01+00:00", + "name": "Test Data for DataGateway API Testing 0", + "releaseDate": None, + "startDate": "2020-01-04 01:01:01+00:00", + "summary": None, + "title": "Test data for the Python ICAT Backend on DataGateway" + " API 0", + }, + ], + id="Data returned as entity objects", + ), + pytest.param( + { + "title": "like '%Test data for the Python ICAT Backend on" + " DataGateway API%'", + }, + "DISTINCT", + None, + "title", + False, + True, + [ + { + "title": "Test data for the Python ICAT Backend on DataGateway" + " API 0", + }, + ], + id="Single distinct field", + ), + pytest.param( + { + "title": "like '%Test data for the Python ICAT Backend on" + " DataGateway API%'", + }, + "DISTINCT", + None, + ["title", "name"], + False, + True, + [ + { + "title": "Test data for the Python ICAT Backend on DataGateway" + " API 0", + "name": "Test Data for DataGateway API Testing 0", + }, + ], + id="Multiple distinct fields", + ), + pytest.param( + { + "title": "like '%Test data for the Python ICAT Backend on" + " DataGateway API%'", + }, + "DISTINCT", + None, + ["title", "name"], + True, + True, + [1], + id="Multiple distinct fields on count query", + ), + pytest.param( + {"title": "like '%Unknown testing data for DG API%'"}, + "DISTINCT", + None, + ["title", "name"], + True, + True, + [0], + id="Multiple distinct fields on count query to return 0 matches", + ), + ], + ) + @pytest.mark.usefixtures("single_investigation_test_data") def test_valid_query_exeuction( - self, icat_client, single_investigation_test_data, + self, + icat_client, + query_conditions, + query_aggregate, + query_includes, + query_attributes, + manual_count, + return_json_format_flag, + expected_query_result, ): - test_query = ICATQuery(icat_client, "Investigation") - test_data_filter = PythonICATWhereFilter( - "title", "Test data for the Python ICAT Backend on DataGateway API", "like", + test_query = ICATQuery( + icat_client, + "Investigation", + conditions=query_conditions, + aggregate=query_aggregate, + includes=query_includes, + ) + test_query.query.setAttributes(query_attributes) + test_query.query.manual_count = manual_count + query_data = test_query.execute_query( + icat_client, return_json_formattable=return_json_format_flag, ) - test_data_filter.apply_filter(test_query.query) - query_data = test_query.execute_query(icat_client) - query_output_dicts = prepare_icat_data_for_assertion(query_data) + if ( + test_query.query.aggregate != "COUNT" + and 
test_query.query.aggregate != "DISTINCT" + ): + query_data = prepare_icat_data_for_assertion( + query_data, remove_id=True, remove_visit_id=True, + ) - assert query_output_dicts == single_investigation_test_data + assert query_data == expected_query_result def test_invalid_query_execution(self, icat_client): test_query = ICATQuery(icat_client, "Investigation") @@ -94,150 +339,71 @@ def test_json_format_execution_output( assert query_output_json == single_investigation_test_data - @pytest.mark.parametrize( - "input_distinct_fields, included_fields, expected_output", - [ - pytest.param( - ["id"], - [], - {"base": ["id"]}, - id="Base only distinct attribute, no included attributes", - ), - pytest.param( - ["id", "doi", "name", "createTime"], - [], - {"base": ["id", "doi", "name", "createTime"]}, - id="Multiple base only distinct attributes, no included attributes", - ), - pytest.param( - ["id"], - ["investigation"], - {"base": ["id"]}, - id="Base only distinct attribute, single, unnested included attributes", - ), - pytest.param( - ["id"], - ["investigation", "parameters", "type"], - {"base": ["id"]}, - id="Base only distinct attribute, multiple, unnested included" - " attributes", - ), - pytest.param( - ["dataset.investigation.name"], - ["dataset", "investigation"], - {"base": [], "dataset": [], "investigation": ["name"]}, - id="Single nested-include distinct attribute", - ), - pytest.param( - ["dataset.investigation.name", "datafileFormat.facility.url"], - ["dataset", "investigation", "datafileFormat", "facility"], - { - "base": [], - "dataset": [], - "investigation": ["name"], - "datafileFormat": [], - "facility": ["url"], - }, - id="Multiple nested-include distinct attributes", - ), - ], - ) - def test_valid_distinct_attribute_mapping( - self, icat_client, input_distinct_fields, included_fields, expected_output, - ): - # Entity name passed to ICATQuery is irrelevant for this test - test_query = ICATQuery(icat_client, "Datafile") - - mapped_attributes = test_query.map_distinct_attributes_to_entity_names( - input_distinct_fields, included_fields, - ) - - assert mapped_attributes == expected_output - - @pytest.mark.parametrize( - "input_distinct_fields, included_fields", - [ - pytest.param( - ["investigation.id"], - [], - id="Single nested-include distinct attribute, included entity not" - " added", - ), - ], - ) - def test_invalid_distinct_attribute_mapping( - self, icat_client, input_distinct_fields, included_fields, - ): - """ - Test that when the appropriate included fields are not present, a `FilterError` - will be raised - """ - test_query = ICATQuery(icat_client, "Datafile") + def test_valid_get_distinct_attributes(self, icat_client): + test_query = ICATQuery(icat_client, "Investigation") + test_query.query.setAttributes(["summary", "name"]) - with pytest.raises(FilterError): - test_query.map_distinct_attributes_to_entity_names( - input_distinct_fields, included_fields, - ) + assert test_query.get_distinct_attributes() == ["summary", "name"] @pytest.mark.parametrize( - "included_entity_name, input_fields, expected_fields", + "distinct_attrs, result, expected_output", [ pytest.param( - "dataset", - {"base": ["id"]}, - {"base": []}, - id="Include filter used but no included attributes on distinct filter," - " no entity name match", + ["summary"], + ["Summary 1"], + {"summary": "Summary 1"}, + id="Single attribute", ), pytest.param( - "no match", - {"base": ["id"], "dataset": ["name"]}, - {"base": [], "dataset": ["name"]}, - id="Distinct filter contains included attributes, no entity 
name match", + ["startDate"], + ( + datetime( + year=2020, + month=1, + day=4, + hour=1, + minute=1, + second=1, + tzinfo=timezone.utc, + ), + ), + {"startDate": "2020-01-04 01:01:01+00:00"}, + id="Single date attribute", ), pytest.param( - "dataset", - {"base": ["id"], "dataset": ["name"]}, - {"base": ["name"], "dataset": ["name"]}, - id="Distinct filter contains included attributes, entity name match", + ["summary", "title"], + ("Summary 1", "Title 1"), + {"summary": "Summary 1", "title": "Title 1"}, + id="Multiple attributes", ), pytest.param( - "dataset", - {"base": ["id"], "dataset": [], "investigation": ["name"]}, - {"base": [], "dataset": [], "investigation": ["name"]}, - id="Distinct filter contains nested included attributes, no entity name" - " match", + ["summary", "investigationUsers.role"], + ("Summary 1", "PI"), + {"summary": "Summary 1", "investigationUsers": {"role": "PI"}}, + id="Multiple attributes with related attribute", ), pytest.param( - "investigation", - {"base": ["id"], "dataset": [], "investigation": ["name"]}, - {"base": ["name"], "dataset": [], "investigation": ["name"]}, - id="Distinct filter contains nested included attributes, entity name" - " match", + ["summary", "investigationUsers.investigation.name"], + ("Summary 1", "Investigation Name 1"), + { + "summary": "Summary 1", + "investigationUsers": { + "investigation": {"name": "Investigation Name 1"}, + }, + }, + id="Multiple attributes with 2-level nested related attribute", ), ], ) - def test_prepare_distinct_fields( - self, icat_client, included_entity_name, input_fields, expected_fields, + def test_valid_map_distinct_attributes_to_results( + self, icat_client, distinct_attrs, result, expected_output, ): - """ - The function tested here should move the list from - `input_fields[included_entity_name]` to `input_fields["base"]` ready for when - `entity_to_dict()` is called as part of a recursive call, but the original - `input_fields` should not be modified. This caused a bug previously - """ - unmodded_distinct_fields = input_fields.copy() - test_query = ICATQuery(icat_client, "Datafile") - - distinct_fields_for_recursive_call = test_query.prepare_distinct_fields( - included_entity_name, input_fields, + test_query = ICATQuery(icat_client, "Investigation") + test_output = test_query.map_distinct_attributes_to_results( + distinct_attrs, result, ) - print(distinct_fields_for_recursive_call) - print(input_fields) - assert distinct_fields_for_recursive_call == expected_fields - # prepare_distinct_fields() should not modify the original `distinct_fields` - assert input_fields == unmodded_distinct_fields + assert test_output == expected_output def test_include_fields_list_flatten(self, icat_client): included_field_set = {