diff --git a/datagateway_api/src/api_start_utils.py b/datagateway_api/src/api_start_utils.py
index 866f12e3..5972fc15 100644
--- a/datagateway_api/src/api_start_utils.py
+++ b/datagateway_api/src/api_start_utils.py
@@ -12,7 +12,7 @@
 # Only attempt to create a DataGateway API backend if the datagateway_api object
 # is present in the config. This ensures that the API does not error on startup
 # due to an AttributeError exception being thrown if the object is missing.
-if Config.config.datagateway_api is not None:
+if Config.config.datagateway_api:
     from datagateway_api.src.datagateway_api.backends import create_backend
     from datagateway_api.src.datagateway_api.database.helpers import db  # noqa: I202
     from datagateway_api.src.datagateway_api.icat.icat_client_pool import create_client_pool
@@ -27,14 +27,16 @@
 from datagateway_api.src.resources.non_entities.sessions_endpoints import (
     session_endpoints,
 )
-from datagateway_api.src.resources.search_api_endpoints import (
-    get_files_endpoint,
-    get_number_count_endpoint,
-    get_number_count_files_endpoint,
-    get_search_endpoint,
-    get_single_endpoint,
-)
-from datagateway_api.src.resources.table_endpoints.table_endpoints import (
+
+if Config.config.search_api:
+    from datagateway_api.src.resources.search_api_endpoints import (
+        get_files_endpoint,
+        get_number_count_endpoint,
+        get_number_count_files_endpoint,
+        get_search_endpoint,
+        get_single_endpoint,
+    )
+from datagateway_api.src.resources.table_endpoints.table_endpoints import (  # noqa: I202, B950
     count_instrument_facility_cycles_endpoint,
     count_instrument_investigation_endpoint,
     instrument_facility_cycles_endpoint,
diff --git a/datagateway_api/src/search_api/filters.py b/datagateway_api/src/search_api/filters.py
index 425d433f..b2cb7f03 100644
--- a/datagateway_api/src/search_api/filters.py
+++ b/datagateway_api/src/search_api/filters.py
@@ -83,18 +83,21 @@
     def __str__(self):
         if isinstance(self.search_api_query, SearchAPIQuery):
             log.info("__str__ for SearchAPIWhereFilter, SearchAPIQuery found")
-            query = self.search_api_query
-            self.apply_filter(query)
+            self.apply_filter(self.search_api_query)
 
             # Replicating the condition in Python ICAT format so it can be searched on
             # the query and return as string representation
             conds_dict = self.create_filter()
-            a, jpql_func = query.icat_query.query._split_db_functs(self.field)
-            conds_dict[self.field] = query.icat_query.query._cond_value(
+            a, jpql_func = self.search_api_query.icat_query.query._split_db_functs(
+                self.field,
+            )
+            conds_dict[self.field] = self.search_api_query.icat_query.query._cond_value(
                 conds_dict[self.field],
                 jpql_func,
             )
-            str_conds = query.icat_query.query.search_conditions(self.field, conds_dict)
+            str_conds = self.search_api_query.icat_query.query.search_conditions(
+                self.field, conds_dict,
+            )
 
             try:
                 return str_conds[0]
diff --git a/datagateway_api/src/search_api/panosc_mappings.py b/datagateway_api/src/search_api/panosc_mappings.py
index e7500735..b0e5f4fc 100644
--- a/datagateway_api/src/search_api/panosc_mappings.py
+++ b/datagateway_api/src/search_api/panosc_mappings.py
@@ -1,7 +1,9 @@
 import json
 import logging
 from pathlib import Path
+import sys
 
+from datagateway_api.src.common.config import Config
 from datagateway_api.src.common.exceptions import FilterError, SearchAPIError
 
 log = logging.getLogger()
@@ -17,7 +19,12 @@ def __init__(
                 log.info("Loading PaNOSC to ICAT mappings from %s", path)
                 self.mappings = json.load(target)
         except IOError as e:
-            raise SearchAPIError(e)
+            # The API shouldn't exit on an exception (e.g. file not found) if the
+            # user is only using DataGateway API and not the search API
+            if Config.config.search_api:
+                sys.exit(
+                    f"An error occurred while trying to load the PaNOSC mappings: {e}",
+                )
 
     def get_icat_mapping(self, panosc_entity_name, field_name):
         """
@@ -60,7 +67,7 @@
             # delegated to other code in this repo so the entire list is returned here
             icat_field_name = icat_mapping
 
-        return (panosc_entity_name, icat_field_name)
+        return panosc_entity_name, icat_field_name
 
     def get_panosc_related_entity_name(
         self, panosc_entity_name, panosc_related_field_name,
diff --git a/datagateway_api/src/search_api/query_filter_factory.py b/datagateway_api/src/search_api/query_filter_factory.py
index e930475d..eecf3f7b 100644
--- a/datagateway_api/src/search_api/query_filter_factory.py
+++ b/datagateway_api/src/search_api/query_filter_factory.py
@@ -1,4 +1,3 @@
-import importlib
 import logging
 
 from datagateway_api.src.common.base_query_filter_factory import QueryFilterFactory
@@ -9,6 +8,7 @@
     SearchAPISkipFilter,
     SearchAPIWhereFilter,
 )
+import datagateway_api.src.search_api.models as search_api_models
 from datagateway_api.src.search_api.nested_where_filters import NestedWhereFilters
 from datagateway_api.src.search_api.panosc_mappings import mappings
 from datagateway_api.src.search_api.query import SearchAPIQuery
@@ -133,9 +133,6 @@ def get_where_filter(where_filter_input, entity_name):
         elif list(where_filter_input.keys())[0] == "text":
             log.debug("Text operator found within JSON where object")
             try:
-                search_api_models = importlib.import_module(
-                    "datagateway_api.src.search_api.models",
-                )
                 entity_class = getattr(search_api_models, entity_name)
             except AttributeError as e:
                 raise SearchAPIError(
@@ -143,21 +140,22 @@
                     f", {e.args}",
                 )
 
-            try:
-                or_conditional_filters = []
-                field_names = entity_class._text_operator_fields
-                log.debug(
-                    "Text operators found for PaNOSC %s: %s", entity_name, field_names,
+            or_conditional_filters = []
+            field_names = entity_class._text_operator_fields
+            log.debug(
+                "Text operators found for PaNOSC %s: %s", entity_name, field_names,
+            )
+            if not field_names:
+                # No text operator fields present; simply log and move on. Text
+                # operator queries should be ignored on entities where
+                # `_text_operator_fields` is empty (meaning they are not present in
+                # the original PaNOSC data model)
+                log.info(
+                    "No text operator fields found for PaNOSC entity %s, will"
+                    " ignore",
+                    entity_name,
                 )
-                if not field_names:
-                    # No text operator fields present, raise KeyError to be caught in
-                    # this try/except block
-                    log.warning(
-                        "No text operator fields found for PaNOSC entity %s, will"
-                        " ignore",
-                        entity_name,
-                    )
-                    raise KeyError()
+            else:
                 for field_name in field_names:
                     or_conditional_filters.append(
                         {field_name: {"like": where_filter_input["text"]}},
@@ -171,11 +169,6 @@
                         where_filter, entity_name,
                     ),
                 )
-            except KeyError:
-                # Do not raise FilterError nor attempt to create filters. Simply
-                # ignore text operator queries on fields that are not part of the
-                # text_operator_fields dict.
-                pass
         else:
             log.info("Basic where filter found, extracting field, value and operation")
             filter_data = SearchAPIQueryFilterFactory.get_condition_values(
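
As a quick illustration of the guarded error handling added to panosc_mappings.py above, here is a minimal, self-contained sketch. The names load_mappings and search_api_enabled are hypothetical stand-ins for this example only and are not part of the DataGateway API codebase; the point is simply that a missing or unreadable mappings file is fatal only when the search API is configured.

import json
import sys


def load_mappings(path, search_api_enabled):
    """Load PaNOSC to ICAT mappings; treat failures as fatal only for the search API."""
    try:
        with open(path) as target:
            return json.load(target)
    except IOError as e:
        if search_api_enabled:
            # Mappings are essential for the search API, so fail fast at startup
            # with a clear message instead of erroring later at request time
            sys.exit(f"An error occurred while trying to load the PaNOSC mappings: {e}")
        # A DataGateway API-only deployment can start without the mappings file
        return None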