diff --git a/README.md b/README.md index ba3e6e21..3eeffab9 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ ICAT API to interface with the Data Gateway - [Requirements](#requirements) - [Setup and running the API](#setup-and-running-the-api) - [Project structure](#project-structure) - - [Main:](#main) - - [Endpoints:](#endpoints) - - [Mapped classes:](#mapped-classes) + - [Main](#main) + - [Endpoints](#endpoints) + - [Mapped classes](#mapped-classes) - [Querying and filtering](#querying-and-filtering) - [Swagger Generation](#generating-the-swagger-spec-openapiyaml) - [Authentication](#authentication) @@ -26,7 +26,6 @@ The required python libraries: - [SQLAlchemy](https://www.sqlalchemy.org/) - [flask-restful](https://github.com/flask-restful/flask-restful/) - [pymysql](https://pymysql.readthedocs.io/en/latest/) - - [requests](https://2.python-requests.org/en/master/) - [pyyaml](https://pyyaml.org/wiki/PyYAMLDocumentation) (For the swagger generation) - [pip-tools](https://github.com/jazzband/pip-tools) (For generating requirements.txt) @@ -118,7 +117,7 @@ This is illustrated below. ├── logs.log └── config.json ````` -#### Main: +#### Main `main.py` is where the flask_restful api is set up. This is where each endpoint resource class is generated and mapped to an endpoint. @@ -126,7 +125,7 @@ Example: `api.add_resource(get_endpoint(entity_name, endpoints[entity_name]), f"/{entity_name.lower()}")` -#### Endpoints: +#### Endpoints The logic for each endpoint are within `/src/resources`. They are split into entities, non_entities and table_endpoints. The entities package contains `entities_map` which maps entity names to their sqlalchemy model. The `entity_endpoint` module contains the function that is used to generate endpoints at start up. @@ -134,7 +133,7 @@ model. The `entity_endpoint` module contains the function that is used to genera session endpoint. 
-#### Mapped classes: +#### Mapped classes The classes mapped from the database are stored in `/common/models/db_models.py`. Each model was automatically generated using sqlacodegen. A class `EntityHelper` is defined so that each model may inherit two methods `to_dict()` and `update_from_dict(dictionary)`, both used for returning entities @@ -188,4 +187,10 @@ class DataCollectionDatasets(Resource): ## Running Tests To run the tests use `python -m unittest discover` +## Linter +When writing code for this repository, [Black](https://black.readthedocs.io/en/stable/) +is used as the code linter/formatter to ensure the code is kept Pythonic. Installing +the dev requirements will ensure this package is installed. This repository uses the +default settings for Black; to use, execute the following command on the root directory of this repo: +`black .` diff --git a/common/backend.py b/common/backend.py index 7704ed08..89fe0bd1 100644 --- a/common/backend.py +++ b/common/backend.py @@ -44,7 +44,9 @@ def logout(self, session_id): @abstractmethod def get_with_filters(self, session_id, entity_type, filters): """ - Given a list of filters supplied in json format, returns entities that match the filters for the given entity type + Given a list of filters supplied in json format, returns entities that match the + filters for the given entity type + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param filters: The list of filters to be applied @@ -55,7 +57,9 @@ def get_with_filters(self, session_id, entity_type, filters): @abstractmethod def create(self, session_id, entity_type, data): """ - Create one or more entities, from the given list containing json. Each entity must not contain its ID + Create one or more entities, from the given list containing json. 
Each entity + must not contain its ID + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param data: The entities to be created @@ -66,7 +70,9 @@ def create(self, session_id, entity_type, data): @abstractmethod def update(self, session_id, entity_type, data): """ - Update one or more entities, from the given list containing json. Each entity must contain its ID + Update one or more entities, from the given list containing json. Each entity + must contain its ID + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param data: the list of updated values or a dictionary @@ -77,7 +83,8 @@ def update(self, session_id, entity_type, data): @abstractmethod def get_one_with_filters(self, session_id, entity_type, filters): """ - returns the first entity that matches a given filter, for a given entity type + Returns the first entity that matches a given filter, for a given entity type + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param filters: the filter to be applied to the query @@ -88,7 +95,9 @@ def get_one_with_filters(self, session_id, entity_type, filters): @abstractmethod def count_with_filters(self, session_id, entity_type, filters): """ - returns the count of the entities that match a given filter for a given entity type + Returns the count of the entities that match a given filter for a given entity + type + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param filters: the filters to be applied to the query @@ -100,6 +109,7 @@ def count_with_filters(self, session_id, entity_type, filters): def get_with_id(self, session_id, entity_type, id_): """ Gets the entity matching the given ID for the given entity type + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param id_: the id of the record to find @@ -111,6 +121,7 @@ def 
get_with_id(self, session_id, entity_type, id_): def delete_with_id(self, session_id, entity_type, id_): """ Deletes the row matching the given ID for the given entity type + :param session_id: The session id of the requesting user :param table: the table to be searched :param id_: the id of the record to delete @@ -121,6 +132,7 @@ def delete_with_id(self, session_id, entity_type, id_): def update_with_id(self, session_id, entity_type, id_, data): """ Updates the row matching the given ID for the given entity type + :param session_id: The session id of the requesting user :param entity_type: The type of entity :param id_: the id of the record to update @@ -130,9 +142,13 @@ def update_with_id(self, session_id, entity_type, id_, data): pass @abstractmethod - def get_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def get_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): """ - Given an instrument_id get facility cycles where the instrument has investigations that occur within that cycle + Given an instrument_id get facility cycles where the instrument has + investigations that occur within that cycle + :param session_id: The session id of the requesting user :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument @@ -141,10 +157,13 @@ def get_instrument_facilitycycles_with_filters(self, session_id, instrument_id, pass @abstractmethod - def count_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def count_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): """ - Given an instrument_id get the facility cycles count where the instrument has investigations that occur within - that cycle + Given an instrument_id get the facility cycles count where the instrument has + investigations that occur within that cycle + :param session_id: The session id of the requesting user :param filters: 
The filters to be applied to the query :param instrument_id: The id of the instrument @@ -153,9 +172,13 @@ def count_instrument_facilitycycles_with_filters(self, session_id, instrument_id pass @abstractmethod - def get_instrument_facilitycycle_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): + def get_instrument_facilitycycle_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): """ - Given an instrument id and facility cycle id, get investigations that use the given instrument in the given cycle + Given an instrument id and facility cycle id, get investigations that use the + given instrument in the given cycle + :param session_id: The session id of the requesting user :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument @@ -165,10 +188,13 @@ def get_instrument_facilitycycle_investigations_with_filters(self, session_id, i pass @abstractmethod - def count_instrument_facilitycycles_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): + def count_instrument_facilitycycles_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): """ - Given an instrument id and facility cycle id, get the count of the investigations that use the given instrument in - the given cycle + Given an instrument id and facility cycle id, get the count of the + investigations that use the given instrument in the given cycle + :param session_id: The session id of the requesting user :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument diff --git a/common/backends.py b/common/backends.py index cb8b43df..cc65e915 100644 --- a/common/backends.py +++ b/common/backends.py @@ -11,6 +11,5 @@ elif backend_type == "python_icat": backend = PythonICATBackend() else: - sys.exit( - f"Invalid config value '{backend_type}' for config option backend") + 
sys.exit(f"Invalid config value '{backend_type}' for config option backend") backend = Backend() diff --git a/common/config.py b/common/config.py index 57e6aef4..aa8a0601 100644 --- a/common/config.py +++ b/common/config.py @@ -4,7 +4,6 @@ class Config(object): - def __init__(self): config_path = Path(__file__).parent.parent / "config.json" with open(config_path) as target: @@ -35,7 +34,7 @@ def get_icat_check_cert(self): return self.config["icat_check_cert"] except: # This could be set to true if there's no value, and log a warning - # that no value has been found from the config - save app from + # that no value has been found from the config - save app from # exiting sys.exit("Missing config value, icat_check_cert") diff --git a/common/database/backend.py b/common/database/backend.py index 9e5e6a15..60e52eb8 100644 --- a/common/database/backend.py +++ b/common/database/backend.py @@ -1,8 +1,20 @@ from common.backend import Backend -from common.database.helpers import get_facility_cycles_for_instrument, get_facility_cycles_for_instrument_count, \ - get_investigations_for_instrument_in_facility_cycle, get_investigations_for_instrument_in_facility_cycle_count, \ - get_rows_by_filter, create_rows_from_json, patch_entities, get_row_by_id, insert_row_into_table, \ - delete_row_by_id, update_row_from_id, get_filtered_row_count, get_first_filtered_row, requires_session_id +from common.database.helpers import ( + get_facility_cycles_for_instrument, + get_facility_cycles_for_instrument_count, + get_investigations_for_instrument_in_facility_cycle, + get_investigations_for_instrument_in_facility_cycle_count, + get_rows_by_filter, + create_rows_from_json, + patch_entities, + get_row_by_id, + insert_row_into_table, + delete_row_by_id, + update_row_from_id, + get_filtered_row_count, + get_first_filtered_row, + requires_session_id, +) from common.helpers import queries_records from common.models.db_models import SESSION import uuid @@ -10,8 +22,10 @@ import datetime import 
logging + log = logging.getLogger() + class DatabaseBackend(Backend): """ Class that contains functions to access and modify data in an ICAT database directly @@ -20,8 +34,14 @@ class DatabaseBackend(Backend): def login(self, credentials): if credentials["username"] == "user" and credentials["password"] == "password": session_id = str(uuid.uuid1()) - insert_row_into_table(SESSION, SESSION(ID=session_id, USERNAME=f"{credentials['mechanism']}/root", - EXPIREDATETIME=datetime.datetime.now() + datetime.timedelta(days=1))) + insert_row_into_table( + SESSION, + SESSION( + ID=session_id, + USERNAME=f"{credentials['mechanism']}/root", + EXPIREDATETIME=datetime.datetime.now() + datetime.timedelta(days=1), + ), + ) return session_id else: raise AuthenticationError("Username and password are incorrect") @@ -81,20 +101,32 @@ def update_with_id(self, session_id, table, id_, data): @requires_session_id @queries_records - def get_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def get_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): return get_facility_cycles_for_instrument(instrument_id, filters) @requires_session_id @queries_records - def count_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def count_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): return get_facility_cycles_for_instrument_count(instrument_id, filters) @requires_session_id @queries_records - def get_instrument_facilitycycle_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): - return get_investigations_for_instrument_in_facility_cycle(instrument_id, facilitycycle_id, filters) + def get_instrument_facilitycycle_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): + return get_investigations_for_instrument_in_facility_cycle( + instrument_id, facilitycycle_id, filters + ) 
@requires_session_id @queries_records - def count_instrument_facilitycycles_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): - return get_investigations_for_instrument_in_facility_cycle_count(instrument_id, facilitycycle_id, filters) + def count_instrument_facilitycycles_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): + return get_investigations_for_instrument_in_facility_cycle_count( + instrument_id, facilitycycle_id, filters + ) diff --git a/common/database/filters.py b/common/database/filters.py index 06fa915d..34a9c324 100644 --- a/common/database/filters.py +++ b/common/database/filters.py @@ -1,7 +1,14 @@ -from common.filters import WhereFilter, DistinctFieldFilter, OrderFilter, SkipFilter, LimitFilter, \ - IncludeFilter +from common.filters import ( + WhereFilter, + DistinctFieldFilter, + OrderFilter, + SkipFilter, + LimitFilter, + IncludeFilter, +) from common.exceptions import FilterError + class DatabaseWhereFilter(WhereFilter): def __init__(self, field, value, operation): super().__init__(field, value, operation) @@ -15,10 +22,10 @@ def apply_filter(self, query): if self.included_included_field: included_table = getattr(db_models, self.field) included_included_table = getattr(db_models, self.included_field) - query.base_query = query.base_query.join( - included_table).join(included_included_table) - field = getattr(included_included_table, - self.included_included_field) + query.base_query = query.base_query.join(included_table).join( + included_included_table + ) + field = getattr(included_included_table, self.included_included_field) elif self.included_field: included_table = getattr(db_models, self.field) @@ -42,7 +49,8 @@ def apply_filter(self, query): query.base_query = query.base_query.filter(field.in_(self.value)) else: raise FilterError( - f" Bad operation given to where filter. operation: {self.operation}") + f" Bad operation given to where filter. 
operation: {self.operation}" + ) class DatabaseDistinctFieldFilter(DistinctFieldFilter): @@ -52,8 +60,7 @@ def __init__(self, fields): def apply_filter(self, query): query.is_distinct_fields_query = True try: - self.fields = [getattr(query.table, field) - for field in self.fields] + self.fields = [getattr(query.table, field) for field in self.fields] except AttributeError: raise FilterError("Bad field requested") query.base_query = query.session.query(*self.fields).distinct() @@ -65,11 +72,9 @@ def __init__(self, field, direction): def apply_filter(self, query): if self.direction.upper() == "ASC": - query.base_query = query.base_query.order_by( - asc(self.field.upper())) + query.base_query = query.base_query.order_by(asc(self.field.upper())) elif self.direction.upper() == "DESC": - query.base_query = query.base_query.order_by( - desc(self.field.upper())) + query.base_query = query.base_query.order_by(desc(self.field.upper())) else: raise FilterError(f" Bad filter: {self.direction}") diff --git a/common/database/helpers.py b/common/database/helpers.py index 1596bd92..57c57480 100644 --- a/common/database/helpers.py +++ b/common/database/helpers.py @@ -6,33 +6,61 @@ from sqlalchemy import asc, desc from sqlalchemy.orm import aliased -from common.exceptions import ApiError, AuthenticationError, MissingRecordError, FilterError, \ - BadRequestError, MultipleIncludeError +from common.exceptions import ( + ApiError, + AuthenticationError, + MissingRecordError, + FilterError, + BadRequestError, + MultipleIncludeError, +) from common.models import db_models -from common.models.db_models import INVESTIGATIONUSER, INVESTIGATION, INSTRUMENT, FACILITYCYCLE, \ - INVESTIGATIONINSTRUMENT, FACILITY, SESSION +from common.models.db_models import ( + INVESTIGATIONUSER, + INVESTIGATION, + INSTRUMENT, + FACILITYCYCLE, + INVESTIGATIONINSTRUMENT, + FACILITY, + SESSION, +) from common.session_manager import session_manager from common.filters import FilterOrderHandler from common.config 
import config backend_type = config.get_backend_type() if backend_type == "db": - from common.database.filters import DatabaseWhereFilter as WhereFilter, DatabaseDistinctFieldFilter as DistinctFieldFilter, \ - DatabaseOrderFilter as OrderFilter, DatabaseSkipFilter as SkipFilter, DatabaseLimitFilter as LimitFilter, \ - DatabaseIncludeFilter as IncludeFilter + from common.database.filters import ( + DatabaseWhereFilter as WhereFilter, + DatabaseDistinctFieldFilter as DistinctFieldFilter, + DatabaseOrderFilter as OrderFilter, + DatabaseSkipFilter as SkipFilter, + DatabaseLimitFilter as LimitFilter, + DatabaseIncludeFilter as IncludeFilter, + ) elif backend_type == "python_icat": - from common.icat.filters import PythonICATWhereFilter as WhereFilter, PythonICATDistinctFieldFilter as DistinctFieldFilter, \ - PythonICATOrderFilter as OrderFilter, PythonICATSkipFilter as SkipFilter, PythonICATLimitFilter as LimitFilter, \ - PythonICATIncludeFilter as IncludeFilter + from common.icat.filters import ( + PythonICATWhereFilter as WhereFilter, + PythonICATDistinctFieldFilter as DistinctFieldFilter, + PythonICATOrderFilter as OrderFilter, + PythonICATSkipFilter as SkipFilter, + PythonICATLimitFilter as LimitFilter, + PythonICATIncludeFilter as IncludeFilter, + ) else: - raise ApiError("Cannot select which implementation of filters to import, check the config file has a valid backend type") + raise ApiError( + "Cannot select which implementation of filters to import, check the config file" + " has a valid backend type" + ) log = logging.getLogger() + def requires_session_id(method): """ - Decorator for database backend methods that makes sure a valid session_id is provided - It expects that session_id is the second argument supplied to the function + Decorator for database backend methods that makes sure a valid session_id is + provided. 
It expects that session_id is the second argument supplied to the function + :param method: The method for the backend operation :raises AuthenticationError, if a valid session_id is not provided with the request """ @@ -41,8 +69,7 @@ def requires_session_id(method): def wrapper_requires_session(*args, **kwargs): log.info(" Authenticating consumer") session = session_manager.get_icat_db_session() - query = session.query(SESSION).filter( - SESSION.ID == args[1]).first() + query = session.query(SESSION).filter(SESSION.ID == args[1]).first() if query is not None: log.info(" Closing DB session") session.close() @@ -59,9 +86,11 @@ def wrapper_requires_session(*args, **kwargs): class Query(ABC): """ - The base query class that all other queries extend from. This defines the enter and exit methods, used to handle - sessions. It is expected that all queries would be used with the 'with' keyword in most cases for this reason. + The base query class that all other queries extend from. This defines the enter and + exit methods, used to handle sessions. It is expected that all queries would be used + with the 'with' keyword in most cases for this reason. 
""" + @abstractmethod def __init__(self, table): self.session = session_manager.get_icat_db_session() @@ -88,7 +117,6 @@ def commit_changes(self): class CountQuery(Query): - def __init__(self, table): super().__init__(table) self.include_related_entities = False @@ -104,7 +132,6 @@ def get_count(self): class ReadQuery(Query): - def __init__(self, table): super().__init__(table) self.include_related_entities = False @@ -130,14 +157,15 @@ def get_all_results(self): class CreateQuery(Query): - def __init__(self, table, row): super().__init__(table) self.row = row self.inserted_row = None def execute_query(self): - """Determines if the row is a row object or dictionary then commits it to the table""" + """ + Determines if the row is a row object or dictionary then commits it to the table + """ if type(self.row) is not dict: record = self.row else: @@ -154,7 +182,6 @@ def execute_query(self): class UpdateQuery(Query): - def __init__(self, table, row, new_values): super().__init__(table) self.row = row @@ -168,7 +195,6 @@ def execute_query(self): class DeleteQuery(Query): - def __init__(self, table, row): super().__init__(table) self.row = row @@ -185,10 +211,10 @@ def get_query_filter(filter): """ Given a filter return a matching Query filter object - This factory is not in common.filters so the created filter can be for the correct backend. - Moving the factory into that file would mean the filters would be based off the abstract - classes (because they're in the same file) which won't enable filters to be unique to the - backend + This factory is not in common.filters so the created filter can be for the + correct backend. 
Moving the factory into that file would mean the filters would + be based off the abstract classes (because they're in the same file) which won't + enable filters to be unique to the backend :param filter: dict - The filter to create the QueryFilter for :return: The QueryFilter object created @@ -239,8 +265,10 @@ def create_row_from_json(table, data): def create_rows_from_json(table, data): """ - Given a List containing dictionary representations of entities, or a dictionary representation of an entity, insert - the entities into the table and return the created entities + Given a List containing dictionary representations of entities, or a dictionary + representation of an entity, insert the entities into the table and return the + created entities + :param table: The table to insert the entities in :param data: The entities to be inserted :return: The inserted entities @@ -252,7 +280,9 @@ def create_rows_from_json(table, data): def get_row_by_id(table, id_): """ - Gets the row matching the given ID from the given table, raises MissingRecordError if it can not be found + Gets the row matching the given ID from the given table, raises MissingRecordError + if it can not be found + :param table: the table to be searched :param id_: the id of the record to find :return: the record retrieved @@ -266,7 +296,9 @@ def get_row_by_id(table, id_): def delete_row_by_id(table, id_): """ - Deletes the row matching the given ID from the given table, raises MissingRecordError if it can not be found + Deletes the row matching the given ID from the given table, raises + MissingRecordError if it can not be found + :param table: the table to be searched :param id_: the id of the record to delete """ @@ -279,6 +311,7 @@ def delete_row_by_id(table, id_): def update_row_from_id(table, id_, new_values): """ Updates a record in a table + :param table: The table the record is in :param id_: The id of the record :param new_values: A JSON string containing what columns are to be updated @@ 
-290,7 +323,9 @@ def update_row_from_id(table, id_, new_values): def get_filtered_read_query_results(filter_handler, filters, query): """ - Given a filter handler, list of filters and a query. Apply the filters and execute the query + Given a filter handler, list of filters and a query. Apply the filters and execute + the query + :param filter_handler: The filter handler to apply the filters :param filters: The filters to be applied :param query: The query for the filters to be applied to @@ -308,8 +343,9 @@ def get_filtered_read_query_results(filter_handler, filters, query): def _get_results_with_include(filters, results): """ - Given a list of entities and a list of filters, use the include filter to nest the included entities requested in - the include filter given + Given a list of entities and a list of filters, use the include filter to nest the + included entities requested in the include filter given + :param filters: The list of filters :param results: The list of entities :return: A list of nested dictionaries representing the entity results @@ -321,8 +357,9 @@ def _get_results_with_include(filters, results): def _get_distinct_fields_as_dicts(results): """ - Given a list of column results return a list of dictionaries where each column name is the key and the column value - is the dictionary key value + Given a list of column results return a list of dictionaries where each column name + is the key and the column value is the dictionary key value + :param results: A list of sql alchemy result objects :return: A list of dictionary representations of the sqlalchemy result objects """ @@ -335,7 +372,9 @@ def _get_distinct_fields_as_dicts(results): def get_rows_by_filter(table, filters): """ - Given a list of filters supplied in json format, returns entities that match the filters from the given table + Given a list of filters supplied in json format, returns entities that match the + filters from the given table + :param table: The table to checked :param 
filters: The list of filters to be applied :return: A list of the rows returned in dictionary form @@ -374,7 +413,9 @@ def get_filtered_row_count(table, filters): def patch_entities(table, json_list): """ - Update one or more rows in the given table, from the given list containing json. Each entity must contain its ID + Update one or more rows in the given table, from the given list containing json. + Each entity must contain its ID + :param table: The table of the entities :param json_list: the list of updated values or a dictionary :return: The list of updated rows. @@ -404,21 +445,24 @@ class InstrumentFacilityCyclesQuery(ReadQuery): def __init__(self, instrument_id): super().__init__(FACILITYCYCLE) investigation_instrument = aliased(INSTRUMENT) - self.base_query = self.base_query \ - .join(FACILITYCYCLE.FACILITY) \ - .join(FACILITY.INSTRUMENT) \ - .join(FACILITY.INVESTIGATION) \ - .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) \ - .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) \ - .filter(INSTRUMENT.ID == instrument_id) \ - .filter(investigation_instrument.ID == INSTRUMENT.ID) \ - .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) \ + self.base_query = ( + self.base_query.join(FACILITYCYCLE.FACILITY) + .join(FACILITY.INSTRUMENT) + .join(FACILITY.INVESTIGATION) + .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) + .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) + .filter(INSTRUMENT.ID == instrument_id) + .filter(investigation_instrument.ID == INSTRUMENT.ID) + .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) .filter(INVESTIGATION.STARTDATE <= FACILITYCYCLE.ENDDATE) + ) def get_facility_cycles_for_instrument(instrument_id, filters): """ - Given an instrument_id get facility cycles where the instrument has investigations that occur within that cycle + Given an instrument_id get facility cycles where the instrument has investigations + that occur within that cycle + :param filters: The filters to be applied to 
the query :param instrument_id: The id of the instrument :return: A list of facility cycle entities @@ -429,26 +473,27 @@ def get_facility_cycles_for_instrument(instrument_id, filters): class InstrumentFacilityCyclesCountQuery(CountQuery): - def __init__(self, instrument_id): super().__init__(FACILITYCYCLE) investigation_instrument = aliased(INSTRUMENT) - self.base_query = self.base_query\ - .join(FACILITYCYCLE.FACILITY) \ - .join(FACILITY.INSTRUMENT) \ - .join(FACILITY.INVESTIGATION) \ - .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) \ - .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) \ - .filter(INSTRUMENT.ID == instrument_id) \ - .filter(investigation_instrument.ID == INSTRUMENT.ID) \ - .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) \ + self.base_query = ( + self.base_query.join(FACILITYCYCLE.FACILITY) + .join(FACILITY.INSTRUMENT) + .join(FACILITY.INVESTIGATION) + .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) + .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) + .filter(INSTRUMENT.ID == instrument_id) + .filter(investigation_instrument.ID == INSTRUMENT.ID) + .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) .filter(INVESTIGATION.STARTDATE <= FACILITYCYCLE.ENDDATE) + ) def get_facility_cycles_for_instrument_count(instrument_id, filters): """ - Given an instrument_id get the facility cycles count where the instrument has investigations that occur within - that cycle + Given an instrument_id get the facility cycles count where the instrument has + investigations that occur within that cycle + :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument :return: The count of the facility cycles @@ -464,29 +509,36 @@ class InstrumentFacilityCycleInvestigationsQuery(ReadQuery): def __init__(self, instrument_id, facility_cycle_id): super().__init__(INVESTIGATION) investigation_instrument = aliased(INSTRUMENT) - self.base_query = self.base_query \ - 
.join(INVESTIGATION.FACILITY) \ - .join(FACILITY.FACILITYCYCLE) \ - .join(FACILITY.INSTRUMENT) \ - .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) \ - .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) \ - .filter(INSTRUMENT.ID == instrument_id) \ - .filter(FACILITYCYCLE.ID == facility_cycle_id) \ - .filter(investigation_instrument.ID == INSTRUMENT.ID) \ - .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) \ + self.base_query = ( + self.base_query.join(INVESTIGATION.FACILITY) + .join(FACILITY.FACILITYCYCLE) + .join(FACILITY.INSTRUMENT) + .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) + .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) + .filter(INSTRUMENT.ID == instrument_id) + .filter(FACILITYCYCLE.ID == facility_cycle_id) + .filter(investigation_instrument.ID == INSTRUMENT.ID) + .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) .filter(INVESTIGATION.STARTDATE <= FACILITYCYCLE.ENDDATE) + ) -def get_investigations_for_instrument_in_facility_cycle(instrument_id, facility_cycle_id, filters): +def get_investigations_for_instrument_in_facility_cycle( + instrument_id, facility_cycle_id, filters +): """ - Given an instrument id and facility cycle id, get investigations that use the given instrument in the given cycle + Given an instrument id and facility cycle id, get investigations that use the given + instrument in the given cycle + :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument :param facility_cycle_id: the ID of the facility cycle :return: The investigations """ filter_handler = FilterOrderHandler() - with InstrumentFacilityCycleInvestigationsQuery(instrument_id, facility_cycle_id) as query: + with InstrumentFacilityCycleInvestigationsQuery( + instrument_id, facility_cycle_id + ) as query: return get_filtered_read_query_results(filter_handler, filters, query) @@ -494,29 +546,35 @@ class InstrumentFacilityCycleInvestigationsCountQuery(CountQuery): def __init__(self, 
instrument_id, facility_cycle_id): super().__init__(INVESTIGATION) investigation_instrument = aliased(INSTRUMENT) - self.base_query = self.base_query \ - .join(INVESTIGATION.FACILITY) \ - .join(FACILITY.FACILITYCYCLE) \ - .join(FACILITY.INSTRUMENT) \ - .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) \ - .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) \ - .filter(INSTRUMENT.ID == instrument_id) \ - .filter(FACILITYCYCLE.ID == facility_cycle_id) \ - .filter(investigation_instrument.ID == INSTRUMENT.ID) \ - .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) \ + self.base_query = ( + self.base_query.join(INVESTIGATION.FACILITY) + .join(FACILITY.FACILITYCYCLE) + .join(FACILITY.INSTRUMENT) + .join(INVESTIGATION.INVESTIGATIONINSTRUMENT) + .join(investigation_instrument, INVESTIGATIONINSTRUMENT.INSTRUMENT) + .filter(INSTRUMENT.ID == instrument_id) + .filter(FACILITYCYCLE.ID == facility_cycle_id) + .filter(investigation_instrument.ID == INSTRUMENT.ID) + .filter(INVESTIGATION.STARTDATE >= FACILITYCYCLE.STARTDATE) .filter(INVESTIGATION.STARTDATE <= FACILITYCYCLE.ENDDATE) + ) -def get_investigations_for_instrument_in_facility_cycle_count(instrument_id, facility_cycle_id, filters): +def get_investigations_for_instrument_in_facility_cycle_count( + instrument_id, facility_cycle_id, filters +): """ - Given an instrument id and facility cycle id, get the count of the investigations that use the given instrument in - the given cycle + Given an instrument id and facility cycle id, get the count of the investigations + that use the given instrument in the given cycle + :param filters: The filters to be applied to the query :param instrument_id: The id of the instrument :param facility_cycle_id: the ID of the facility cycle :return: The investigations count """ - with InstrumentFacilityCycleInvestigationsCountQuery(instrument_id, facility_cycle_id) as query: + with InstrumentFacilityCycleInvestigationsCountQuery( + instrument_id, facility_cycle_id + ) as 
query: filter_handler = FilterOrderHandler() filter_handler.add_filters(filters) filter_handler.apply_filters(query) diff --git a/common/exceptions.py b/common/exceptions.py index bee4718f..0a75d1d5 100644 --- a/common/exceptions.py +++ b/common/exceptions.py @@ -6,48 +6,53 @@ class ApiError(Exception): class MissingRecordError(ApiError): - def __init__(self, msg='No such record in table', *args, **kwargs): + def __init__(self, msg="No such record in table", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 404 class FilterError(ApiError): - def __init__(self, msg='Invalid filter requested', *args, **kwargs): + def __init__(self, msg="Invalid filter requested", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 400 class MultipleIncludeError(FilterError): - def __init__(self, msg='Bad request, only one include filter may be given per request', *args, **kwargs): + def __init__( + self, + msg="Bad request, only one include filter may be given per request", + *args, + **kwargs + ): super().__init__(msg, *args, **kwargs) self.status_code = 400 class AuthenticationError(ApiError): - def __init__(self, msg='Authentication error', *args, **kwargs): + def __init__(self, msg="Authentication error", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 403 class MissingCredentialsError(AuthenticationError): - def __init__(self, msg='No credentials provided in auth header', *args, **kwargs): + def __init__(self, msg="No credentials provided in auth header", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 401 class BadRequestError(ApiError): - def __init__(self, msg='Bad request', *args, **kwargs): + def __init__(self, msg="Bad request", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 400 class DatabaseError(ApiError): - def __init__(self, msg='Database error', *args, **kwargs): + def __init__(self, msg="Database error", *args, **kwargs): 
super().__init__(msg, *args, **kwargs) self.status_code = 500 class PythonICATError(ApiError): - def __init__(self, msg='Python ICAT error', *args, **kwargs): + def __init__(self, msg="Python ICAT error", *args, **kwargs): super().__init__(msg, *args, **kwargs) self.status_code = 500 diff --git a/common/filters.py b/common/filters.py index a0ad0532..a49741f5 100644 --- a/common/filters.py +++ b/common/filters.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod + class QueryFilter(ABC): @property @abstractmethod @@ -21,7 +22,7 @@ def __init__(self, field, value, operation): self._set_filter_fields() self.value = value self.operation = operation - #super().__init__() + # super().__init__() def _set_filter_fields(self): if self.field.count(".") == 1: @@ -33,6 +34,7 @@ def _set_filter_fields(self): self.included_field = self.field.split(".")[1] self.field = self.field.split(".")[0] + class DistinctFieldFilter(QueryFilter): precedence = 0 @@ -40,6 +42,7 @@ def __init__(self, fields): # This allows single string distinct filters self.fields = fields if type(fields) is list else [fields] + class OrderFilter(QueryFilter): precedence = 2 @@ -47,18 +50,21 @@ def __init__(self, field, direction): self.field = field self.direction = direction + class SkipFilter(QueryFilter): precedence = 3 def __init__(self, skip_value): self.skip_value = skip_value + class LimitFilter(QueryFilter): precedence = 4 def __init__(self, limit_value): self.limit_value = limit_value + class IncludeFilter(QueryFilter): precedence = 5 @@ -68,7 +74,8 @@ def __init__(self, included_filters): class FilterOrderHandler(object): """ - The FilterOrderHandler takes in filters, sorts them according to the order of operations, then applies them. + The FilterOrderHandler takes in filters, sorts them according to the order of + operations, then applies them. 
""" def __init__(self): diff --git a/common/helpers.py b/common/helpers.py index 8adfa2d7..14d5abfa 100644 --- a/common/helpers.py +++ b/common/helpers.py @@ -7,7 +7,15 @@ from sqlalchemy.exc import IntegrityError from common.database.helpers import QueryFilterFactory -from common.exceptions import ApiError, AuthenticationError, FilterError, BadRequestError, MissingCredentialsError, MissingRecordError, MultipleIncludeError +from common.exceptions import ( + ApiError, + AuthenticationError, + FilterError, + BadRequestError, + MissingCredentialsError, + MissingRecordError, + MultipleIncludeError, +) log = logging.getLogger() @@ -36,6 +44,7 @@ def wrapper_gets_records(*args, **kwargs): except IntegrityError as e: log.exception(e) raise BadRequestError() + return wrapper_gets_records @@ -48,14 +57,15 @@ def get_session_id_from_auth_header(): parser = reqparse.RequestParser() parser.add_argument("Authorization", location="headers") args = parser.parse_args() - auth_header = args["Authorization"].split( - " ") if args["Authorization"] is not None else "" + auth_header = ( + args["Authorization"].split(" ") if args["Authorization"] is not None else "" + ) if auth_header == "": - raise MissingCredentialsError( - f"No credentials provided in auth header") + raise MissingCredentialsError(f"No credentials provided in auth header") if len(auth_header) != 2 or auth_header[0] != "Bearer": raise AuthenticationError( - f" Could not authenticate consumer with auth header {auth_header}") + f" Could not authenticate consumer with auth header {auth_header}" + ) return auth_header[1] @@ -76,7 +86,9 @@ def is_valid_json(string): def get_filters_from_query_string(): """ - Gets a list of filters from the query_strings arg,value pairs, and returns a list of QueryFilter Objects + Gets a list of filters from the query_strings arg,value pairs, and returns a list of + QueryFilter Objects + :return: The list of filters """ log.info(" Getting filters from query string") @@ -84,8 +96,9 @@ def 
get_filters_from_query_string(): filters = [] for arg in request.args: for value in request.args.getlist(arg): - filters.append(QueryFilterFactory.get_query_filter( - {arg: json.loads(value)})) + filters.append( + QueryFilterFactory.get_query_filter({arg: json.loads(value)}) + ) return filters except: raise FilterError() diff --git a/common/icat/backend.py b/common/icat/backend.py index c9eef5ba..a9cacf44 100644 --- a/common/icat/backend.py +++ b/common/icat/backend.py @@ -5,32 +5,50 @@ from common.backend import Backend from common.helpers import queries_records -from common.icat.helpers import requires_session_id, get_session_details_helper, logout_icat_client, \ - refresh_client_session, get_entity_by_id, update_entity_by_id, delete_entity_by_id, get_entity_with_filters - +from common.icat.helpers import ( + requires_session_id, + get_session_details_helper, + logout_icat_client, + refresh_client_session, + get_entity_by_id, + update_entity_by_id, + delete_entity_by_id, + get_entity_with_filters, +) + from common.config import config from common.exceptions import AuthenticationError from common.models.db_models import SESSION log = logging.getLogger() + class PythonICATBackend(Backend): """ Class that contains functions to access and modify data in an ICAT database directly """ - + def __init__(self): - # Client object is created here as well as in login() to avoid uncaught exceptions - # where the object is None. This could happen where a user tries to use an endpoint before - # logging in. Also helps to give a bit of certainty to what's stored here - self.client = icat.client.Client(config.get_icat_url(), checkCert=config.get_icat_check_cert()) + # Client object is created here as well as in login() to avoid uncaught + # exceptions where the object is None. This could happen where a user tries to + # use an endpoint before logging in. 
Also helps to give a bit of certainty to + # what's stored here + self.client = icat.client.Client( + config.get_icat_url(), checkCert=config.get_icat_check_cert() + ) def login(self, credentials): - # Client object is re-created here so session IDs aren't overwritten in the database - self.client = icat.client.Client(config.get_icat_url(), checkCert=config.get_icat_check_cert()) + # Client object is re-created here so session IDs aren't overwritten in the + # database + self.client = icat.client.Client( + config.get_icat_url(), checkCert=config.get_icat_check_cert() + ) # Syntax for Python ICAT - login_details = {'username': credentials['username'], 'password': credentials['password']} + login_details = { + "username": credentials["username"], + "password": credentials["password"], + } try: session_id = self.client.login(credentials["mechanism"], login_details) return session_id @@ -95,23 +113,31 @@ def update_with_id(self, session_id, table, id_, data): @requires_session_id @queries_records - def get_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def get_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): pass @requires_session_id @queries_records - def count_instrument_facilitycycles_with_filters(self, session_id, instrument_id, filters): + def count_instrument_facilitycycles_with_filters( + self, session_id, instrument_id, filters + ): pass - #return get_facility_cycles_for_instrument_count(instrument_id, filters) + # return get_facility_cycles_for_instrument_count(instrument_id, filters) @requires_session_id @queries_records - def get_instrument_facilitycycle_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): + def get_instrument_facilitycycle_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): pass - #return get_investigations_for_instrument_in_facility_cycle(instrument_id, facilitycycle_id, filters) + # 
return get_investigations_for_instrument_in_facility_cycle(instrument_id, facilitycycle_id, filters) @requires_session_id @queries_records - def count_instrument_facilitycycles_investigations_with_filters(self, session_id, instrument_id, facilitycycle_id, filters): + def count_instrument_facilitycycles_investigations_with_filters( + self, session_id, instrument_id, facilitycycle_id, filters + ): pass - #return get_investigations_for_instrument_in_facility_cycle_count(instrument_id, facilitycycle_id, filters) + # return get_investigations_for_instrument_in_facility_cycle_count(instrument_id, facilitycycle_id, filters) diff --git a/common/icat/filters.py b/common/icat/filters.py index 8e57c607..bcdbee99 100644 --- a/common/icat/filters.py +++ b/common/icat/filters.py @@ -1,7 +1,13 @@ import logging -from common.filters import WhereFilter, DistinctFieldFilter, OrderFilter, SkipFilter, LimitFilter, \ - IncludeFilter +from common.filters import ( + WhereFilter, + DistinctFieldFilter, + OrderFilter, + SkipFilter, + LimitFilter, + IncludeFilter, +) from common.exceptions import FilterError from common.icat.helpers import create_condition @@ -13,23 +19,25 @@ def __init__(self, field, value, operation): super().__init__(field, value, operation) def apply_filter(self, query): - + if self.operation == "eq": - where_filter = create_condition(self.field, '=', self.value) + where_filter = create_condition(self.field, "=", self.value) elif self.operation == "like": - where_filter = create_condition(self.field, 'like', self.value) + where_filter = create_condition(self.field, "like", self.value) elif self.operation == "lt": - where_filter = create_condition(self.field, '<', self.value) + where_filter = create_condition(self.field, "<", self.value) elif self.operation == "lte": - where_filter = create_condition(self.field, '<=', self.value) + where_filter = create_condition(self.field, "<=", self.value) elif self.operation == "gt": - where_filter = create_condition(self.field, '>', 
self.value) + where_filter = create_condition(self.field, ">", self.value) elif self.operation == "gte": - where_filter = create_condition(self.field, '>=', self.value) + where_filter = create_condition(self.field, ">=", self.value) elif self.operation == "in": - where_filter = create_condition(self.field, 'in', tuple(self.value)) + where_filter = create_condition(self.field, "in", tuple(self.value)) else: - raise FilterError(f"Bad operation given to where filter: {self.operation}") + raise FilterError( + f"Bad operation given to where filter: {self.operation}" + ) try: query.addConditions(where_filter) diff --git a/common/icat/helpers.py b/common/icat/helpers.py index 0dba073e..5d24c9bd 100644 --- a/common/icat/helpers.py +++ b/common/icat/helpers.py @@ -4,18 +4,24 @@ from icat.query import Query from icat.exception import ICATSessionError, ICATValidationError -from common.exceptions import AuthenticationError, BadRequestError, MissingRecordError, PythonICATError +from common.exceptions import ( + AuthenticationError, + BadRequestError, + MissingRecordError, + PythonICATError, +) from common.filters import FilterOrderHandler from common.constants import Constants log = logging.getLogger() + def requires_session_id(method): """ - Decorator for Python ICAT backend methods that looks out for session errors when using the API. - The API call runs and an ICATSessionError may be raised due to an expired session, invalid - session ID etc. + Decorator for Python ICAT backend methods that looks out for session errors when + using the API. The API call runs and an ICATSessionError may be raised due to an + expired session, invalid session ID etc. 
:param method: The method for the backend operation :raises AuthenticationError: If a valid session_id is not provided with the request @@ -45,15 +51,20 @@ def get_session_details_helper(client): :param client: ICAT client containing an authenticated user :type client: :class:`icat.client.Client` - :return: Details of the user's session, ready to be converted into a JSON response body + :return: Details of the user's session, ready to be converted into a JSON response + body """ - # Remove rounding + # Remove rounding session_time_remaining = client.getRemainingMinutes() session_expiry_time = datetime.now() + timedelta(minutes=session_time_remaining) username = client.getUserName() - return {"ID": client.sessionId, "EXPIREDATETIME": str(session_expiry_time), "USERNAME": username} + return { + "ID": client.sessionId, + "EXPIREDATETIME": str(session_expiry_time), + "USERNAME": username, + } def logout_icat_client(client): @@ -77,7 +88,9 @@ def refresh_client_session(client): client.refresh() -def construct_icat_query(client, entity_name, conditions=None, aggregate=None, includes=None): +def construct_icat_query( + client, entity_name, conditions=None, aggregate=None, includes=None +): """ Create a Query object within Python ICAT @@ -87,37 +100,48 @@ def construct_icat_query(client, entity_name, conditions=None, aggregate=None, i :type entity_name: :class:`suds.sax.text.Text` :param conditions: Constraints used when an entity is queried :type conditions: :class:`dict` - :param aggregate: Name of the aggregate function to apply. Operations such as counting the - number of records. See `icat.query.setAggregate` for valid values. + :param aggregate: Name of the aggregate function to apply. Operations such as + counting the number of records. See `icat.query.setAggregate` for valid values. 
:type aggregate: :class:`str` - :param includes: List of related entity names to add to the query so related entities (and - their data) can be returned with the query result + :param includes: List of related entity names to add to the query so related + entities (and their data) can be returned with the query result :type includes: :class:`str` or iterable of :class:`str` :return: Query object from Python ICAT - :raises PythonICATError: If a ValueError is raised when creating a Query(), 500 will be returned as a response + :raises PythonICATError: If a ValueError is raised when creating a Query(), 500 will + be returned as a response """ try: - query = Query(client, entity_name, conditions=conditions, aggregate=aggregate, includes=includes) + query = Query( + client, + entity_name, + conditions=conditions, + aggregate=aggregate, + includes=includes, + ) except ValueError: - raise PythonICATError(f"An issue has occurred while creating a Python ICAT Query object, suggesting an invalid argument") + raise PythonICATError( + "An issue has occurred while creating a Python ICAT Query object," + " suggesting an invalid argument" + ) return query def execute_icat_query(client, query, return_json_formattable=False): """ - Execute a previously created ICAT Query object and return in the format specified by the - return_json_formattable flag + Execute a previously created ICAT Query object and return in the format specified + by the return_json_formattable flag :param client: ICAT client containing an authenticated user :type client: :class:`icat.client.Client` :param query: ICAT Query object to execute within Python ICAT :type query: :class:`icat.query.Query` - :param return_json_formattable: Flag to determine whether the data from the query should be - returned as a list of data ready to be converted straight to JSON (i.e. if the data will be - used as a response for an API call) or whether to leave the data in a Python ICAT format - (i.e. 
if it's going to be manipulated at some point) + :param return_json_formattable: Flag to determine whether the data from the query + should be returned as a list of data ready to be converted straight to JSON + (i.e. if the data will be used as a response for an API call) or whether to + leave the data in a Python ICAT format (i.e. if it's going to be manipulated at + some point) :type return_json_formattable_data: :class:`bool` :return: Data (of type list) from the executed query """ @@ -145,18 +169,19 @@ def execute_icat_query(client, query, return_json_formattable=False): def get_python_icat_entity_name(client, database_table_name): """ - From the database table name, this function returns the correctly cased entity name relating - to the table name + From the database table name, this function returns the correctly cased entity name + relating to the table name - Due to the case sensitivity of Python ICAT, the table name must be compared with each of the - valid entity names within Python ICAT to get the correctly cased entity name. This is done by - putting everything to lowercase and comparing from there + Due to the case sensitivity of Python ICAT, the table name must be compared with + each of the valid entity names within Python ICAT to get the correctly cased entity + name. 
This is done by putting everything to lowercase and comparing from there :param client: ICAT client containing an authenticated user :type client: :class:`icat.client.Client` :param database_table_name: Table name (from icatdb) to be interacted with :type database_table_name: :class:`str` - :return: Entity name (of type string) in the correct casing ready to be passed into Python ICAT + :return: Entity name (of type string) in the correct casing ready to be passed into + Python ICAT :raises BadRequestError: If the entity cannot be found """ @@ -171,14 +196,18 @@ def get_python_icat_entity_name(client, database_table_name): # Raise a 400 if a valid entity cannot be found if python_icat_entity_name is None: - raise BadRequestError(f"Bad request made, cannot find {database_table_name} entity within Python ICAT") + raise BadRequestError( + f"Bad request made, cannot find {database_table_name} entity within Python" + " ICAT" + ) return python_icat_entity_name def create_condition(attribute_name, operator, value): """ - Construct and return a Python dictionary containing conditions to be used in a Query object + Construct and return a Python dictionary containing conditions to be used in a + Query object :param attribute_name: Attribute name to search :type attribute_name: :class:`str` @@ -186,7 +215,8 @@ def create_condition(attribute_name, operator, value): :type operator: :class:`str` :param value: What ICAT will use to filter the data :type value: :class:`str` or :class:`tuple` (when using an IN expression) - :return: Condition (of type :class:`dict`) ready to be added to a Python ICAT Query object + :return: Condition (of type :class:`dict`) ready to be added to a Python ICAT Query + object """ conditions = {} @@ -199,13 +229,15 @@ def create_condition(attribute_name, operator, value): def str_to_datetime_object(icat_attribute, data): """ - Where data is stored as dates in ICAT (which this function determines), convert strings (i.e. 
- user data from PATCH/POST requests) into datetime objects so they can be stored in ICAT + Where data is stored as dates in ICAT (which this function determines), convert + strings (i.e. user data from PATCH/POST requests) into datetime objects so they can + be stored in ICAT - Python 3.7+ has support for `datetime.fromisoformat()` which would be a more elegant solution - to this conversion operation since dates are converted into ISO format within this file, - however, the production instance of this API is typically built on Python 3.6, and it doesn't - seem of enough value to mandate 3.7 for a single line of code + Python 3.7+ has support for `datetime.fromisoformat()` which would be a more elegant + solution to this conversion operation since dates are converted into ISO format + within this file, however, the production instance of this API is typically built on + Python 3.6, and it doesn't seem of enough value to mandate 3.7 for a single line of + code :param icat_attribute: Attribute that will be updated with new data :type icat_attribute: Any valid data type that can be stored in Python ICAT @@ -219,34 +251,47 @@ def str_to_datetime_object(icat_attribute, data): try: data = datetime.strptime(data, Constants.ACCEPTED_DATE_FORMAT) except ValueError: - raise BadRequestError(f"Bad request made, the date entered is not in the correct format. Use the {Constants.ACCEPTED_DATE_FORMAT} format to submit dates to the API") + raise BadRequestError( + "Bad request made, the date entered is not in the correct format. 
Use the"
+                f" {Constants.ACCEPTED_DATE_FORMAT} format to submit dates to the API"
+            )
 
     return data
 
 
 def update_attributes(old_entity, new_entity):
     """
-    Updates the attribute(s) of a given object which is a record of an entity from Python ICAT
+    Updates the attribute(s) of a given object which is a record of an entity from
+    Python ICAT
 
     :param old_entity: An existing entity record from Python ICAT
     :type object: :class:`icat.entities.ENTITY`
     :param new_entity: Dictionary containing the new data to be modified
     :type new_entity: :class:`dict`
-    :raises BadRequestError: If the attribute cannot be found, or if it cannot be edited
-    - typically if Python ICAT doesn't allow an attribute to be edited (e.g. modId & modTime)
+    :raises BadRequestError: If the attribute cannot be found, or if it cannot be edited
+    - typically if Python ICAT doesn't allow an attribute to be edited (e.g. modId &
+    modTime)
     """
     for key in new_entity:
         try:
             original_data_attribute = getattr(old_entity, key)
             if isinstance(original_data_attribute, datetime):
-                new_entity[key] = str_to_datetime_object(original_data_attribute, new_entity[key])
+                new_entity[key] = str_to_datetime_object(
+                    original_data_attribute, new_entity[key]
+                )
         except AttributeError:
-            raise BadRequestError(f"Bad request made, cannot find attribute '{key}' within the {old_entity.BeanName} entity")
+            raise BadRequestError(
+                f"Bad request made, cannot find attribute '{key}' within the"
+                f" {old_entity.BeanName} entity"
+            )
         try:
             setattr(old_entity, key, new_entity[key])
         except AttributeError:
-            raise BadRequestError(f"Bad request made, cannot modify attribute '{key}' within the {old_entity.BeanName} entity")
+            raise BadRequestError(
+                f"Bad request made, cannot modify attribute '{key}' within the"
+                f" {old_entity.BeanName} entity"
+            )
 
     try:
         old_entity.update()
:type id_: :class:`int` - :param return_json_formattable_data: Flag to determine whether the data should be returned as a - list of data ready to be converted straight to JSON (i.e. if the data will be used as a - response for an API call) or whether to leave the data in a Python ICAT format + :param return_json_formattable_data: Flag to determine whether the data should be + returned as a list of data ready to be converted straight to JSON (i.e. if the + data will be used as a response for an API call) or whether to leave the data in + a Python ICAT format :type return_json_formattable_data: :class:`bool` :return: The record of the specified ID from the given entity :raises: MissingRecordError: If Python ICAT cannot find a record of the specified ID """ # Set query condition for the selected ID - id_condition = create_condition('id', '=', id_) + id_condition = create_condition("id", "=", id_) selected_entity_name = get_python_icat_entity_name(client, table_name) - id_query = construct_icat_query(client, selected_entity_name, conditions=id_condition, includes="1") - entity_by_id_data = execute_icat_query(client, id_query, return_json_formattable_data) + id_query = construct_icat_query( + client, selected_entity_name, conditions=id_condition, includes="1" + ) + entity_by_id_data = execute_icat_query( + client, id_query, return_json_formattable_data + ) if not entity_by_id_data: # Cannot find any data matching the given ID @@ -313,19 +363,19 @@ def update_entity_by_id(client, table_name, id_, new_data): :type table_name: :class:`str` :param id_: ID number of the entity to retrieve :type id_: :class:`int` - :param new_data: JSON from request body providing new data to update the record with the - specified ID + :param new_data: JSON from request body providing new data to update the record with + the specified ID :return: The updated record of the specified ID from the given entity """ entity_id_data = get_entity_by_id(client, table_name, id_, False) - # There will 
only ever be one record associated with a single ID - if a record with the - # specified ID cannot be found, it'll be picked up by the MissingRecordError in + # There will only ever be one record associated with a single ID - if a record with + # the specified ID cannot be found, it'll be picked up by the MissingRecordError in # get_entity_by_id() update_attributes(entity_id_data, new_data) - # The record is re-obtained from Python ICAT (rather than using entity_id_data) to show to the - # user whether the change has actually been applied + # The record is re-obtained from Python ICAT (rather than using entity_id_data) to + # show to the user whether the change has actually been applied return get_entity_by_id(client, table_name, id_, True) @@ -335,7 +385,7 @@ def get_entity_with_filters(client, table_name, filters): filter_handler = FilterOrderHandler() filter_handler.add_filters(filters) filter_handler.apply_filters(query) - + data = execute_icat_query(client, query, True) if not data: diff --git a/common/logger_setup.py b/common/logger_setup.py index 7d9e939d..2efcad5d 100644 --- a/common/logger_setup.py +++ b/common/logger_setup.py @@ -7,23 +7,24 @@ LOG_FILE_NAME = Path(__file__).parent.parent / "logs.log" logger_config = { "version": 1, - "formatters": {"default": { - "format": "[%(asctime)s] {%(module)s:%(filename)s:%(funcName)s:%(lineno)d} %(levelname)s -%(message)s ", - }}, - "handlers": {"default": { - "level": config.get_log_level(), - "formatter": "default", - "class": "logging.handlers.RotatingFileHandler", - "filename": LOG_FILE_NAME, - "maxBytes": 5000000, - "backupCount": 10 - }}, - "root": { - "level": config.get_log_level(), - "handlers": ["default"] - } + "formatters": { + "default": { + "format": "[%(asctime)s] {%(module)s:%(filename)s:%(funcName)s:%(lineno)d} %(levelname)s -%(message)s ", + } + }, + "handlers": { + "default": { + "level": config.get_log_level(), + "formatter": "default", + "class": "logging.handlers.RotatingFileHandler", + 
"filename": LOG_FILE_NAME, + "maxBytes": 5000000, + "backupCount": 10, + } + }, + "root": {"level": config.get_log_level(), "handlers": ["default"]}, } def setup_logger(): - logging.config.dictConfig(logger_config) \ No newline at end of file + logging.config.dictConfig(logger_config) diff --git a/common/models/db_models.py b/common/models/db_models.py index 9fe97c2e..aa0303ef 100644 --- a/common/models/db_models.py +++ b/common/models/db_models.py @@ -3,8 +3,19 @@ from datetime import datetime from decimal import Decimal -from sqlalchemy import Index, Column, BigInteger, String, DateTime, ForeignKey, Integer, Float, FetchedValue, \ - TypeDecorator, Boolean +from sqlalchemy import ( + Index, + Column, + BigInteger, + String, + DateTime, + ForeignKey, + Integer, + Float, + FetchedValue, + TypeDecorator, + Boolean, +) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship from sqlalchemy.orm.collections import InstrumentedList @@ -18,6 +29,7 @@ class EnumAsInteger(TypeDecorator): """ Column type for storing Python enums in a database INTEGER column. """ + impl = Integer def __init__(self, enum_type): @@ -35,8 +47,7 @@ def process_result_value(self, value, dialect): return f"{self.enum_type(value)}".replace(f"{self.enum_type.__name__}.", "") except ValueError: # This will force a 500 response - raise DatabaseError( - f"value {value} not in {self.enum_type.__name__}") + raise DatabaseError(f"value {value} not in {self.enum_type.__name__}") def copy(self, **kwargs): return EnumAsInteger(self.enum_type) @@ -73,7 +84,9 @@ def _make_serializable(self, field): def to_nested_dict(self, includes): """ - Given related models return a nested dictionary with the child or parent rows nested. + Given related models return a nested dictionary with the child or parent rows + nested. + :param includes: string/list/dict - The related models to include. 
:return: A nested dictionary with the included models """ @@ -86,34 +99,38 @@ def to_nested_dict(self, includes): elif type(include) is dict: self._nest_dictionary_include(dictionary, include) except TypeError: - raise FilterError( - f" Bad include relations provided: {includes}") + raise FilterError(f" Bad include relations provided: {includes}") return dictionary def _nest_dictionary_include(self, dictionary, include): """ - Given a dictionary of related entities names, nest the related entities into the given dictionary representation, - of the original entity. + Given a dictionary of related entities names, nest the related entities into the + given dictionary representation, of the original entity. + :param dictionary: The dictionary representation of the original entity :param include: The dictionary of related entity names to be nested. """ related_entity = self.get_related_entity(list(include)[0]) if not isinstance(related_entity, InstrumentedList): dictionary[related_entity.__tablename__] = related_entity.to_nested_dict( - include[list(include)[0]]) + include[list(include)[0]] + ) else: for entity in related_entity: if entity.__tablename__ in dictionary.keys(): dictionary[entity.__tablename__].append( - entity.to_nested_dict(include[list(include)[0]])) + entity.to_nested_dict(include[list(include)[0]]) + ) else: dictionary[entity.__tablename__] = [ - entity.to_nested_dict(include[list(include)[0]])] + entity.to_nested_dict(include[list(include)[0]]) + ] def _nest_string_include(self, dictionary, include): """ - Given the name of a single related entity, nest the related entity into the given dictionary representation of - the original entity. + Given the name of a single related entity, nest the related entity into the + given dictionary representation of the original entity. + :param dictionary: The dictionary representation of an entity to be nested in. 
:param include: The name of the related entity to be nested """ @@ -140,7 +157,9 @@ def get_related_entity(self, entity): def update_from_dict(self, dictionary): """ - Given a dictionary containing field names and variables, updates the entity from the given dictionary + Given a dictionary containing field names and variables, updates the entity from + the given dictionary + :param dictionary: dict: dictionary containing the new values :returns: The updated dict """ @@ -150,10 +169,8 @@ def update_from_dict(self, dictionary): class APPLICATION(Base, EntityHelper): - __tablename__ = 'APPLICATION' - __table_args__ = ( - Index('UNQ_APPLICATION_0', 'FACILITY_ID', 'NAME', 'VERSION'), - ) + __tablename__ = "APPLICATION" + __table_args__ = (Index("UNQ_APPLICATION_0", "FACILITY_ID", "NAME", "VERSION"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -162,14 +179,17 @@ class APPLICATION(Base, EntityHelper): MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) VERSION = Column(String(255), nullable=False) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) FACILITY = relationship( - 'FACILITY', primaryjoin='APPLICATION.FACILITY_ID == FACILITY.ID', backref='APPLICATION') + "FACILITY", + primaryjoin="APPLICATION.FACILITY_ID == FACILITY.ID", + backref="APPLICATION", + ) class FACILITY(Base, EntityHelper): - __tablename__ = 'FACILITY' + __tablename__ = "FACILITY" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -184,7 +204,7 @@ class FACILITY(Base, EntityHelper): class DATACOLLECTION(Base, EntityHelper): - __tablename__ = 'DATACOLLECTION' + __tablename__ = "DATACOLLECTION" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -195,10 +215,9 @@ class DATACOLLECTION(Base, EntityHelper): class DATACOLLECTIONDATAFILE(Base, EntityHelper): - 
__tablename__ = 'DATACOLLECTIONDATAFILE' + __tablename__ = "DATACOLLECTIONDATAFILE" __table_args__ = ( - Index('UNQ_DATACOLLECTIONDATAFILE_0', - 'DATACOLLECTION_ID', 'DATAFILE_ID'), + Index("UNQ_DATACOLLECTIONDATAFILE_0", "DATACOLLECTION_ID", "DATAFILE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -206,21 +225,25 @@ class DATACOLLECTIONDATAFILE(Base, EntityHelper): CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) - DATACOLLECTION_ID = Column(ForeignKey('DATACOLLECTION.ID'), nullable=False) - DATAFILE_ID = Column(ForeignKey('DATAFILE.ID'), nullable=False, index=True) + DATACOLLECTION_ID = Column(ForeignKey("DATACOLLECTION.ID"), nullable=False) + DATAFILE_ID = Column(ForeignKey("DATAFILE.ID"), nullable=False, index=True) - DATACOLLECTION = relationship('DATACOLLECTION', - primaryjoin='DATACOLLECTIONDATAFILE.DATACOLLECTION_ID == DATACOLLECTION.ID', - backref='DATACOLLECTIONDATAFILE') - DATAFILE = relationship('DATAFILE', primaryjoin='DATACOLLECTIONDATAFILE.DATAFILE_ID == DATAFILE.ID', - backref='DATACOLLECTIONDATAFILE') + DATACOLLECTION = relationship( + "DATACOLLECTION", + primaryjoin="DATACOLLECTIONDATAFILE.DATACOLLECTION_ID == DATACOLLECTION.ID", + backref="DATACOLLECTIONDATAFILE", + ) + DATAFILE = relationship( + "DATAFILE", + primaryjoin="DATACOLLECTIONDATAFILE.DATAFILE_ID == DATAFILE.ID", + backref="DATACOLLECTIONDATAFILE", + ) class DATACOLLECTIONDATASET(Base, EntityHelper): - __tablename__ = 'DATACOLLECTIONDATASET' + __tablename__ = "DATACOLLECTIONDATASET" __table_args__ = ( - Index('UNQ_DATACOLLECTIONDATASET_0', - 'DATACOLLECTION_ID', 'DATASET_ID'), + Index("UNQ_DATACOLLECTIONDATASET_0", "DATACOLLECTION_ID", "DATASET_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -228,21 +251,27 @@ class DATACOLLECTIONDATASET(Base, EntityHelper): CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, 
nullable=False) - DATACOLLECTION_ID = Column(ForeignKey('DATACOLLECTION.ID'), nullable=False) - DATASET_ID = Column(ForeignKey('DATASET.ID'), nullable=False, index=True) + DATACOLLECTION_ID = Column(ForeignKey("DATACOLLECTION.ID"), nullable=False) + DATASET_ID = Column(ForeignKey("DATASET.ID"), nullable=False, index=True) - DATACOLLECTION = relationship('DATACOLLECTION', - primaryjoin='DATACOLLECTIONDATASET.DATACOLLECTION_ID == DATACOLLECTION.ID', - backref='DATACOLLECTIONDATASET') - DATASET = relationship('DATASET', primaryjoin='DATACOLLECTIONDATASET.DATASET_ID == DATASET.ID', - backref='DATACOLLECTIONDATASET') + DATACOLLECTION = relationship( + "DATACOLLECTION", + primaryjoin="DATACOLLECTIONDATASET.DATACOLLECTION_ID == DATACOLLECTION.ID", + backref="DATACOLLECTIONDATASET", + ) + DATASET = relationship( + "DATASET", + primaryjoin="DATACOLLECTIONDATASET.DATASET_ID == DATASET.ID", + backref="DATACOLLECTIONDATASET", + ) class DATACOLLECTIONPARAMETER(Base, EntityHelper): - __tablename__ = 'DATACOLLECTIONPARAMETER' + __tablename__ = "DATACOLLECTIONPARAMETER" __table_args__ = ( - Index('UNQ_DATACOLLECTIONPARAMETER_0', - 'DATACOLLECTION_ID', 'PARAMETER_TYPE_ID'), + Index( + "UNQ_DATACOLLECTIONPARAMETER_0", "DATACOLLECTION_ID", "PARAMETER_TYPE_ID" + ), ) ID = Column(BigInteger, primary_key=True) @@ -256,23 +285,26 @@ class DATACOLLECTIONPARAMETER(Base, EntityHelper): RANGEBOTTOM = Column(Float(asdecimal=True)) RANGETOP = Column(Float(asdecimal=True)) STRING_VALUE = Column(String(4000)) - DATACOLLECTION_ID = Column(ForeignKey('DATACOLLECTION.ID'), nullable=False) - PARAMETER_TYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + DATACOLLECTION_ID = Column(ForeignKey("DATACOLLECTION.ID"), nullable=False) + PARAMETER_TYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - DATACOLLECTION = relationship('DATACOLLECTION', - primaryjoin='DATACOLLECTIONPARAMETER.DATACOLLECTION_ID == DATACOLLECTION.ID', - 
backref='DATACOLLECTIONPARAMETER') - PARAMETERTYPE = relationship('PARAMETERTYPE', - primaryjoin='DATACOLLECTIONPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID', - backref='DATACOLLECTIONPARAMETER') + DATACOLLECTION = relationship( + "DATACOLLECTION", + primaryjoin="DATACOLLECTIONPARAMETER.DATACOLLECTION_ID == DATACOLLECTION.ID", + backref="DATACOLLECTIONPARAMETER", + ) + PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="DATACOLLECTIONPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID", + backref="DATACOLLECTIONPARAMETER", + ) class DATAFILE(Base, EntityHelper): - __tablename__ = 'DATAFILE' - __table_args__ = ( - Index('UNQ_DATAFILE_0', 'DATASET_ID', 'NAME'), - ) + __tablename__ = "DATAFILE" + __table_args__ = (Index("UNQ_DATAFILE_0", "DATASET_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) CHECKSUM = Column(String(255)) @@ -287,20 +319,22 @@ class DATAFILE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) - DATAFILEFORMAT_ID = Column(ForeignKey('DATAFILEFORMAT.ID'), index=True) - DATASET_ID = Column(ForeignKey('DATASET.ID'), nullable=False) + DATAFILEFORMAT_ID = Column(ForeignKey("DATAFILEFORMAT.ID"), index=True) + DATASET_ID = Column(ForeignKey("DATASET.ID"), nullable=False) - DATAFILEFORMAT = relationship('DATAFILEFORMAT', primaryjoin='DATAFILE.DATAFILEFORMAT_ID == DATAFILEFORMAT.ID', - backref='DATAFILE') + DATAFILEFORMAT = relationship( + "DATAFILEFORMAT", + primaryjoin="DATAFILE.DATAFILEFORMAT_ID == DATAFILEFORMAT.ID", + backref="DATAFILE", + ) DATASET = relationship( - 'DATASET', primaryjoin='DATAFILE.DATASET_ID == DATASET.ID', backref='DATAFILE') + "DATASET", primaryjoin="DATAFILE.DATASET_ID == DATASET.ID", backref="DATAFILE" + ) class DATAFILEFORMAT(Base, EntityHelper): - __tablename__ = 'DATAFILEFORMAT' - __table_args__ = ( - Index('UNQ_DATAFILEFORMAT_0', 'FACILITY_ID', 'NAME', 'VERSION'), - ) + __tablename__ = "DATAFILEFORMAT" + 
__table_args__ = (Index("UNQ_DATAFILEFORMAT_0", "FACILITY_ID", "NAME", "VERSION"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -311,16 +345,19 @@ class DATAFILEFORMAT(Base, EntityHelper): NAME = Column(String(255), nullable=False) TYPE = Column(String(255)) VERSION = Column(String(255), nullable=False) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) - FACILITY = relationship('FACILITY', primaryjoin='DATAFILEFORMAT.FACILITY_ID == FACILITY.ID', - backref='DATAFILEFORMAT') + FACILITY = relationship( + "FACILITY", + primaryjoin="DATAFILEFORMAT.FACILITY_ID == FACILITY.ID", + backref="DATAFILEFORMAT", + ) class DATAFILEPARAMETER(Base, EntityHelper): - __tablename__ = 'DATAFILEPARAMETER' + __tablename__ = "DATAFILEPARAMETER" __table_args__ = ( - Index('UNQ_DATAFILEPARAMETER_0', 'DATAFILE_ID', 'PARAMETER_TYPE_ID'), + Index("UNQ_DATAFILEPARAMETER_0", "DATAFILE_ID", "PARAMETER_TYPE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -334,21 +371,26 @@ class DATAFILEPARAMETER(Base, EntityHelper): RANGEBOTTOM = Column(Float(asdecimal=True)) RANGETOP = Column(Float(asdecimal=True)) STRING_VALUE = Column(String(4000)) - DATAFILE_ID = Column(ForeignKey('DATAFILE.ID'), nullable=False) - PARAMETER_TYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + DATAFILE_ID = Column(ForeignKey("DATAFILE.ID"), nullable=False) + PARAMETER_TYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - DATAFILE = relationship('DATAFILE', primaryjoin='DATAFILEPARAMETER.DATAFILE_ID == DATAFILE.ID', - backref='DATAFILEPARAMETER') - PARAMETERTYPE = relationship('PARAMETERTYPE', primaryjoin='DATAFILEPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID', - backref='DATAFILEPARAMETER') + DATAFILE = relationship( + "DATAFILE", + primaryjoin="DATAFILEPARAMETER.DATAFILE_ID == DATAFILE.ID", + backref="DATAFILEPARAMETER", + ) + 
PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="DATAFILEPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID", + backref="DATAFILEPARAMETER", + ) class DATASET(Base, EntityHelper): - __tablename__ = 'DATASET' - __table_args__ = ( - Index('UNQ_DATASET_0', 'INVESTIGATION_ID', 'NAME'), - ) + __tablename__ = "DATASET" + __table_args__ = (Index("UNQ_DATASET_0", "INVESTIGATION_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) COMPLETE = Column(Boolean, nullable=False, server_default=FetchedValue()) @@ -362,22 +404,29 @@ class DATASET(Base, EntityHelper): MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) STARTDATE = Column(DateTime) - INVESTIGATION_ID = Column(ForeignKey('INVESTIGATION.ID'), nullable=False) - SAMPLE_ID = Column(ForeignKey('SAMPLE.ID'), index=True) - TYPE_ID = Column(ForeignKey('DATASETTYPE.ID'), nullable=False, index=True) - - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='DATASET.INVESTIGATION_ID == INVESTIGATION.ID', - backref='DATASET') + INVESTIGATION_ID = Column(ForeignKey("INVESTIGATION.ID"), nullable=False) + SAMPLE_ID = Column(ForeignKey("SAMPLE.ID"), index=True) + TYPE_ID = Column(ForeignKey("DATASETTYPE.ID"), nullable=False, index=True) + + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="DATASET.INVESTIGATION_ID == INVESTIGATION.ID", + backref="DATASET", + ) SAMPLE = relationship( - 'SAMPLE', primaryjoin='DATASET.SAMPLE_ID == SAMPLE.ID', backref='DATASET') + "SAMPLE", primaryjoin="DATASET.SAMPLE_ID == SAMPLE.ID", backref="DATASET" + ) DATASETTYPE = relationship( - 'DATASETTYPE', primaryjoin='DATASET.TYPE_ID == DATASETTYPE.ID', backref='DATASET') + "DATASETTYPE", + primaryjoin="DATASET.TYPE_ID == DATASETTYPE.ID", + backref="DATASET", + ) class DATASETPARAMETER(Base, EntityHelper): - __tablename__ = 'DATASETPARAMETER' + __tablename__ = "DATASETPARAMETER" __table_args__ = ( - Index('UNQ_DATASETPARAMETER_0', 'DATASET_ID', 'PARAMETER_TYPE_ID'), + 
Index("UNQ_DATASETPARAMETER_0", "DATASET_ID", "PARAMETER_TYPE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -391,21 +440,26 @@ class DATASETPARAMETER(Base, EntityHelper): RANGEBOTTOM = Column(Float(asdecimal=True)) RANGETOP = Column(Float(asdecimal=True)) STRING_VALUE = Column(String(4000)) - DATASET_ID = Column(ForeignKey('DATASET.ID'), nullable=False) - PARAMETER_TYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + DATASET_ID = Column(ForeignKey("DATASET.ID"), nullable=False) + PARAMETER_TYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - DATASET = relationship('DATASET', primaryjoin='DATASETPARAMETER.DATASET_ID == DATASET.ID', - backref='DATASETPARAMETER') - PARAMETERTYPE = relationship('PARAMETERTYPE', primaryjoin='DATASETPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID', - backref='DATASETPARAMETER') + DATASET = relationship( + "DATASET", + primaryjoin="DATASETPARAMETER.DATASET_ID == DATASET.ID", + backref="DATASETPARAMETER", + ) + PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="DATASETPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID", + backref="DATASETPARAMETER", + ) class DATASETTYPE(Base, EntityHelper): - __tablename__ = 'DATASETTYPE' - __table_args__ = ( - Index('UNQ_DATASETTYPE_0', 'FACILITY_ID', 'NAME'), - ) + __tablename__ = "DATASETTYPE" + __table_args__ = (Index("UNQ_DATASETTYPE_0", "FACILITY_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -414,17 +468,18 @@ class DATASETTYPE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) FACILITY = relationship( - 'FACILITY', primaryjoin='DATASETTYPE.FACILITY_ID == FACILITY.ID', backref='DATASETTYPE') + "FACILITY", + 
primaryjoin="DATASETTYPE.FACILITY_ID == FACILITY.ID", + backref="DATASETTYPE", + ) class FACILITYCYCLE(Base, EntityHelper): - __tablename__ = 'FACILITYCYCLE' - __table_args__ = ( - Index('UNQ_FACILITYCYCLE_0', 'FACILITY_ID', 'NAME'), - ) + __tablename__ = "FACILITYCYCLE" + __table_args__ = (Index("UNQ_FACILITYCYCLE_0", "FACILITY_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -435,14 +490,17 @@ class FACILITYCYCLE(Base, EntityHelper): MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) STARTDATE = Column(DateTime) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) - FACILITY = relationship('FACILITY', primaryjoin='FACILITYCYCLE.FACILITY_ID == FACILITY.ID', - backref='FACILITYCYCLE') + FACILITY = relationship( + "FACILITY", + primaryjoin="FACILITYCYCLE.FACILITY_ID == FACILITY.ID", + backref="FACILITYCYCLE", + ) class GROUPING(Base, EntityHelper): - __tablename__ = 'GROUPING' + __tablename__ = "GROUPING" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -453,10 +511,8 @@ class GROUPING(Base, EntityHelper): class INSTRUMENT(Base, EntityHelper): - __tablename__ = 'INSTRUMENT' - __table_args__ = ( - Index('UNQ_INSTRUMENT_0', 'FACILITY_ID', 'NAME'), - ) + __tablename__ = "INSTRUMENT" + __table_args__ = (Index("UNQ_INSTRUMENT_0", "FACILITY_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -468,38 +524,42 @@ class INSTRUMENT(Base, EntityHelper): NAME = Column(String(255), nullable=False) TYPE = Column(String(255)) URL = Column(String(255)) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) FACILITY = relationship( - 'FACILITY', primaryjoin='INSTRUMENT.FACILITY_ID == FACILITY.ID', backref='INSTRUMENT') + "FACILITY", + 
primaryjoin="INSTRUMENT.FACILITY_ID == FACILITY.ID", + backref="INSTRUMENT", + ) class INSTRUMENTSCIENTIST(Base, EntityHelper): - __tablename__ = 'INSTRUMENTSCIENTIST' - __table_args__ = ( - Index('UNQ_INSTRUMENTSCIENTIST_0', 'USER_ID', 'INSTRUMENT_ID'), - ) + __tablename__ = "INSTRUMENTSCIENTIST" + __table_args__ = (Index("UNQ_INSTRUMENTSCIENTIST_0", "USER_ID", "INSTRUMENT_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) - INSTRUMENT_ID = Column(ForeignKey('INSTRUMENT.ID'), - nullable=False, index=True) - USER_ID = Column(ForeignKey('USER_.ID'), nullable=False) + INSTRUMENT_ID = Column(ForeignKey("INSTRUMENT.ID"), nullable=False, index=True) + USER_ID = Column(ForeignKey("USER_.ID"), nullable=False) - INSTRUMENT = relationship('INSTRUMENT', primaryjoin='INSTRUMENTSCIENTIST.INSTRUMENT_ID == INSTRUMENT.ID', - backref='INSTRUMENTSCIENTIST') + INSTRUMENT = relationship( + "INSTRUMENT", + primaryjoin="INSTRUMENTSCIENTIST.INSTRUMENT_ID == INSTRUMENT.ID", + backref="INSTRUMENTSCIENTIST", + ) USER_ = relationship( - 'USER', primaryjoin='INSTRUMENTSCIENTIST.USER_ID == USER.ID', backref='INSTRUMENTSCIENTIST') + "USER", + primaryjoin="INSTRUMENTSCIENTIST.USER_ID == USER.ID", + backref="INSTRUMENTSCIENTIST", + ) class INVESTIGATION(Base, EntityHelper): - __tablename__ = 'INVESTIGATION' - __table_args__ = ( - Index('UNQ_INVESTIGATION_0', 'FACILITY_ID', 'NAME', 'VISIT_ID'), - ) + __tablename__ = "INVESTIGATION" + __table_args__ = (Index("UNQ_INVESTIGATION_0", "FACILITY_ID", "NAME", "VISIT_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -514,21 +574,25 @@ class INVESTIGATION(Base, EntityHelper): SUMMARY = Column(String(4000)) TITLE = Column(String(255), nullable=False) VISIT_ID = Column(String(255), nullable=False) - FACILITY_ID = 
Column(ForeignKey('FACILITY.ID'), nullable=False) - TYPE_ID = Column(ForeignKey('INVESTIGATIONTYPE.ID'), - nullable=False, index=True) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) + TYPE_ID = Column(ForeignKey("INVESTIGATIONTYPE.ID"), nullable=False, index=True) - FACILITY = relationship('FACILITY', primaryjoin='INVESTIGATION.FACILITY_ID == FACILITY.ID', - backref='INVESTIGATION') - INVESTIGATIONTYPE = relationship('INVESTIGATIONTYPE', primaryjoin='INVESTIGATION.TYPE_ID == INVESTIGATIONTYPE.ID', - backref='INVESTIGATION') + FACILITY = relationship( + "FACILITY", + primaryjoin="INVESTIGATION.FACILITY_ID == FACILITY.ID", + backref="INVESTIGATION", + ) + INVESTIGATIONTYPE = relationship( + "INVESTIGATIONTYPE", + primaryjoin="INVESTIGATION.TYPE_ID == INVESTIGATIONTYPE.ID", + backref="INVESTIGATION", + ) class INVESTIGATIONGROUP(Base, EntityHelper): - __tablename__ = 'INVESTIGATIONGROUP' + __tablename__ = "INVESTIGATIONGROUP" __table_args__ = ( - Index('UNQ_INVESTIGATIONGROUP_0', 'GROUP_ID', - 'INVESTIGATION_ID', 'ROLE'), + Index("UNQ_INVESTIGATIONGROUP_0", "GROUP_ID", "INVESTIGATION_ID", "ROLE"), ) ID = Column(BigInteger, primary_key=True) @@ -537,21 +601,27 @@ class INVESTIGATIONGROUP(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) ROLE = Column(String(255), nullable=False) - GROUP_ID = Column(ForeignKey('GROUPING.ID'), nullable=False) - INVESTIGATION_ID = Column(ForeignKey( - 'INVESTIGATION.ID'), nullable=False, index=True) + GROUP_ID = Column(ForeignKey("GROUPING.ID"), nullable=False) + INVESTIGATION_ID = Column( + ForeignKey("INVESTIGATION.ID"), nullable=False, index=True + ) - GROUPING = relationship('GROUPING', primaryjoin='INVESTIGATIONGROUP.GROUP_ID == GROUPING.ID', - backref='INVESTIGATIONGROUP') - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='INVESTIGATIONGROUP.INVESTIGATION_ID == INVESTIGATION.ID', - backref='INVESTIGATIONGROUP') + GROUPING = relationship( + 
"GROUPING", + primaryjoin="INVESTIGATIONGROUP.GROUP_ID == GROUPING.ID", + backref="INVESTIGATIONGROUP", + ) + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="INVESTIGATIONGROUP.INVESTIGATION_ID == INVESTIGATION.ID", + backref="INVESTIGATIONGROUP", + ) class INVESTIGATIONINSTRUMENT(Base, EntityHelper): - __tablename__ = 'INVESTIGATIONINSTRUMENT' + __tablename__ = "INVESTIGATIONINSTRUMENT" __table_args__ = ( - Index('UNQ_INVESTIGATIONINSTRUMENT_0', - 'INVESTIGATION_ID', 'INSTRUMENT_ID'), + Index("UNQ_INVESTIGATIONINSTRUMENT_0", "INVESTIGATION_ID", "INSTRUMENT_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -559,22 +629,25 @@ class INVESTIGATIONINSTRUMENT(Base, EntityHelper): CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) - INSTRUMENT_ID = Column(ForeignKey('INSTRUMENT.ID'), - nullable=False, index=True) - INVESTIGATION_ID = Column(ForeignKey('INVESTIGATION.ID'), nullable=False) + INSTRUMENT_ID = Column(ForeignKey("INSTRUMENT.ID"), nullable=False, index=True) + INVESTIGATION_ID = Column(ForeignKey("INVESTIGATION.ID"), nullable=False) - INSTRUMENT = relationship('INSTRUMENT', primaryjoin='INVESTIGATIONINSTRUMENT.INSTRUMENT_ID == INSTRUMENT.ID', - backref='INVESTIGATIONINSTRUMENT') - INVESTIGATION = relationship('INVESTIGATION', - primaryjoin='INVESTIGATIONINSTRUMENT.INVESTIGATION_ID == INVESTIGATION.ID', - backref='INVESTIGATIONINSTRUMENT') + INSTRUMENT = relationship( + "INSTRUMENT", + primaryjoin="INVESTIGATIONINSTRUMENT.INSTRUMENT_ID == INSTRUMENT.ID", + backref="INVESTIGATIONINSTRUMENT", + ) + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="INVESTIGATIONINSTRUMENT.INVESTIGATION_ID == INVESTIGATION.ID", + backref="INVESTIGATIONINSTRUMENT", + ) class INVESTIGATIONPARAMETER(Base, EntityHelper): - __tablename__ = 'INVESTIGATIONPARAMETER' + __tablename__ = "INVESTIGATIONPARAMETER" __table_args__ = ( - Index('UNQ_INVESTIGATIONPARAMETER_0', - 
'INVESTIGATION_ID', 'PARAMETER_TYPE_ID'), + Index("UNQ_INVESTIGATIONPARAMETER_0", "INVESTIGATION_ID", "PARAMETER_TYPE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -588,23 +661,26 @@ class INVESTIGATIONPARAMETER(Base, EntityHelper): RANGEBOTTOM = Column(Float(asdecimal=True)) RANGETOP = Column(Float(asdecimal=True)) STRING_VALUE = Column(String(4000)) - INVESTIGATION_ID = Column(ForeignKey('INVESTIGATION.ID'), nullable=False) - PARAMETER_TYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + INVESTIGATION_ID = Column(ForeignKey("INVESTIGATION.ID"), nullable=False) + PARAMETER_TYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - INVESTIGATION = relationship('INVESTIGATION', - primaryjoin='INVESTIGATIONPARAMETER.INVESTIGATION_ID == INVESTIGATION.ID', - backref='INVESTIGATIONPARAMETER') - PARAMETERTYPE = relationship('PARAMETERTYPE', - primaryjoin='INVESTIGATIONPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID', - backref='INVESTIGATIONPARAMETER') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="INVESTIGATIONPARAMETER.INVESTIGATION_ID == INVESTIGATION.ID", + backref="INVESTIGATIONPARAMETER", + ) + PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="INVESTIGATIONPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID", + backref="INVESTIGATIONPARAMETER", + ) class INVESTIGATIONTYPE(Base, EntityHelper): - __tablename__ = 'INVESTIGATIONTYPE' - __table_args__ = ( - Index('UNQ_INVESTIGATIONTYPE_0', 'NAME', 'FACILITY_ID'), - ) + __tablename__ = "INVESTIGATIONTYPE" + __table_args__ = (Index("UNQ_INVESTIGATIONTYPE_0", "NAME", "FACILITY_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -613,16 +689,19 @@ class INVESTIGATIONTYPE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False, 
index=True) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False, index=True) - FACILITY = relationship('FACILITY', primaryjoin='INVESTIGATIONTYPE.FACILITY_ID == FACILITY.ID', - backref='INVESTIGATIONTYPE') + FACILITY = relationship( + "FACILITY", + primaryjoin="INVESTIGATIONTYPE.FACILITY_ID == FACILITY.ID", + backref="INVESTIGATIONTYPE", + ) class INVESTIGATIONUSER(Base, EntityHelper): - __tablename__ = 'INVESTIGATIONUSER' + __tablename__ = "INVESTIGATIONUSER" __table_args__ = ( - Index('UNQ_INVESTIGATIONUSER_0', 'USER_ID', 'INVESTIGATION_ID', 'ROLE'), + Index("UNQ_INVESTIGATIONUSER_0", "USER_ID", "INVESTIGATION_ID", "ROLE"), ) ID = Column(BigInteger, primary_key=True) @@ -631,18 +710,25 @@ class INVESTIGATIONUSER(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) ROLE = Column(String(255), nullable=False) - INVESTIGATION_ID = Column(ForeignKey( - 'INVESTIGATION.ID'), nullable=False, index=True) - USER_ID = Column(ForeignKey('USER_.ID'), nullable=False) + INVESTIGATION_ID = Column( + ForeignKey("INVESTIGATION.ID"), nullable=False, index=True + ) + USER_ID = Column(ForeignKey("USER_.ID"), nullable=False) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='INVESTIGATIONUSER.INVESTIGATION_ID == INVESTIGATION.ID', - backref='INVESTIGATIONUSER') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="INVESTIGATIONUSER.INVESTIGATION_ID == INVESTIGATION.ID", + backref="INVESTIGATIONUSER", + ) USER_ = relationship( - 'USER', primaryjoin='INVESTIGATIONUSER.USER_ID == USER.ID', backref='INVESTIGATIONUSER') + "USER", + primaryjoin="INVESTIGATIONUSER.USER_ID == USER.ID", + backref="INVESTIGATIONUSER", + ) class JOB(Base, EntityHelper): - __tablename__ = 'JOB' + __tablename__ = "JOB" ID = Column(BigInteger, primary_key=True) ARGUMENTS = Column(String(255)) @@ -650,24 +736,23 @@ class JOB(Base, EntityHelper): CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), 
nullable=False) MOD_TIME = Column(DateTime, nullable=False) - APPLICATION_ID = Column(ForeignKey('APPLICATION.ID'), - nullable=False, index=True) - INPUTDATACOLLECTION_ID = Column( - ForeignKey('DATACOLLECTION.ID'), index=True) - OUTPUTDATACOLLECTION_ID = Column( - ForeignKey('DATACOLLECTION.ID'), index=True) + APPLICATION_ID = Column(ForeignKey("APPLICATION.ID"), nullable=False, index=True) + INPUTDATACOLLECTION_ID = Column(ForeignKey("DATACOLLECTION.ID"), index=True) + OUTPUTDATACOLLECTION_ID = Column(ForeignKey("DATACOLLECTION.ID"), index=True) APPLICATION = relationship( - 'APPLICATION', primaryjoin='JOB.APPLICATION_ID == APPLICATION.ID', backref='JOB') - DATACOLLECTION = relationship('DATACOLLECTION', primaryjoin='JOB.INPUTDATACOLLECTION_ID == DATACOLLECTION.ID', - backref='JOB') + "APPLICATION", primaryjoin="JOB.APPLICATION_ID == APPLICATION.ID", backref="JOB" + ) + DATACOLLECTION = relationship( + "DATACOLLECTION", + primaryjoin="JOB.INPUTDATACOLLECTION_ID == DATACOLLECTION.ID", + backref="JOB", + ) class KEYWORD(Base, EntityHelper): - __tablename__ = 'KEYWORD' - __table_args__ = ( - Index('UNQ_KEYWORD_0', 'NAME', 'INVESTIGATION_ID'), - ) + __tablename__ = "KEYWORD" + __table_args__ = (Index("UNQ_KEYWORD_0", "NAME", "INVESTIGATION_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -675,18 +760,20 @@ class KEYWORD(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) - INVESTIGATION_ID = Column(ForeignKey( - 'INVESTIGATION.ID'), nullable=False, index=True) + INVESTIGATION_ID = Column( + ForeignKey("INVESTIGATION.ID"), nullable=False, index=True + ) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='KEYWORD.INVESTIGATION_ID == INVESTIGATION.ID', - backref='KEYWORD') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="KEYWORD.INVESTIGATION_ID == INVESTIGATION.ID", + backref="KEYWORD", + ) 
class PARAMETERTYPE(Base, EntityHelper): - __tablename__ = 'PARAMETERTYPE' - __table_args__ = ( - Index('UNQ_PARAMETERTYPE_0', 'FACILITY_ID', 'NAME', 'UNITS'), - ) + __tablename__ = "PARAMETERTYPE" + __table_args__ = (Index("UNQ_PARAMETERTYPE_0", "FACILITY_ID", "NAME", "UNITS"),) class ValueTypeEnum(enum.Enum): DATE_AND_TIME = 0 @@ -712,16 +799,19 @@ class ValueTypeEnum(enum.Enum): UNITSFULLNAME = Column(String(255)) VALUETYPE = Column(EnumAsInteger(ValueTypeEnum), nullable=False) VERIFIED = Column(Boolean, server_default=FetchedValue()) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) - FACILITY = relationship('FACILITY', primaryjoin='PARAMETERTYPE.FACILITY_ID == FACILITY.ID', - backref='PARAMETERTYPE') + FACILITY = relationship( + "FACILITY", + primaryjoin="PARAMETERTYPE.FACILITY_ID == FACILITY.ID", + backref="PARAMETERTYPE", + ) class PERMISSIBLESTRINGVALUE(Base, EntityHelper): - __tablename__ = 'PERMISSIBLESTRINGVALUE' + __tablename__ = "PERMISSIBLESTRINGVALUE" __table_args__ = ( - Index('UNQ_PERMISSIBLESTRINGVALUE_0', 'VALUE', 'PARAMETERTYPE_ID'), + Index("UNQ_PERMISSIBLESTRINGVALUE_0", "VALUE", "PARAMETERTYPE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -730,16 +820,19 @@ class PERMISSIBLESTRINGVALUE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) VALUE = Column(String(255), nullable=False) - PARAMETERTYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + PARAMETERTYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - PARAMETERTYPE = relationship('PARAMETERTYPE', - primaryjoin='PERMISSIBLESTRINGVALUE.PARAMETERTYPE_ID == PARAMETERTYPE.ID', - backref='PERMISSIBLESTRINGVALUE') + PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="PERMISSIBLESTRINGVALUE.PARAMETERTYPE_ID == PARAMETERTYPE.ID", + backref="PERMISSIBLESTRINGVALUE", + ) class 
PUBLICATION(Base, EntityHelper): - __tablename__ = 'PUBLICATION' + __tablename__ = "PUBLICATION" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -751,18 +844,20 @@ class PUBLICATION(Base, EntityHelper): REPOSITORY = Column(String(255)) REPOSITORYID = Column(String(255)) URL = Column(String(255)) - INVESTIGATION_ID = Column(ForeignKey( - 'INVESTIGATION.ID'), nullable=False, index=True) + INVESTIGATION_ID = Column( + ForeignKey("INVESTIGATION.ID"), nullable=False, index=True + ) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='PUBLICATION.INVESTIGATION_ID == INVESTIGATION.ID', - backref='PUBLICATION') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="PUBLICATION.INVESTIGATION_ID == INVESTIGATION.ID", + backref="PUBLICATION", + ) class PUBLICSTEP(Base, EntityHelper): - __tablename__ = 'PUBLICSTEP' - __table_args__ = ( - Index('UNQ_PUBLICSTEP_0', 'ORIGIN', 'FIELD'), - ) + __tablename__ = "PUBLICSTEP" + __table_args__ = (Index("UNQ_PUBLICSTEP_0", "ORIGIN", "FIELD"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -774,9 +869,9 @@ class PUBLICSTEP(Base, EntityHelper): class RELATEDDATAFILE(Base, EntityHelper): - __tablename__ = 'RELATEDDATAFILE' + __tablename__ = "RELATEDDATAFILE" __table_args__ = ( - Index('UNQ_RELATEDDATAFILE_0', 'SOURCE_DATAFILE_ID', 'DEST_DATAFILE_ID'), + Index("UNQ_RELATEDDATAFILE_0", "SOURCE_DATAFILE_ID", "DEST_DATAFILE_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -785,16 +880,18 @@ class RELATEDDATAFILE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) RELATION = Column(String(255), nullable=False) - DEST_DATAFILE_ID = Column(ForeignKey( - 'DATAFILE.ID'), nullable=False, index=True) - SOURCE_DATAFILE_ID = Column(ForeignKey('DATAFILE.ID'), nullable=False) + DEST_DATAFILE_ID = Column(ForeignKey("DATAFILE.ID"), nullable=False, index=True) + SOURCE_DATAFILE_ID = 
Column(ForeignKey("DATAFILE.ID"), nullable=False) - DATAFILE = relationship('DATAFILE', primaryjoin='RELATEDDATAFILE.DEST_DATAFILE_ID == DATAFILE.ID', - backref='RELATEDDATAFILE') + DATAFILE = relationship( + "DATAFILE", + primaryjoin="RELATEDDATAFILE.DEST_DATAFILE_ID == DATAFILE.ID", + backref="RELATEDDATAFILE", + ) class RULE(Base, EntityHelper): - __tablename__ = 'RULE_' + __tablename__ = "RULE_" ID = Column(BigInteger, primary_key=True) ATTRIBUTE = Column(String(255)) @@ -813,17 +910,16 @@ class RULE(Base, EntityHelper): SEARCHJPQL = Column(String(1024)) U = Column(Integer, server_default=FetchedValue()) WHAT = Column(String(1024), nullable=False) - GROUPING_ID = Column(ForeignKey('GROUPING.ID'), index=True) + GROUPING_ID = Column(ForeignKey("GROUPING.ID"), index=True) GROUPING = relationship( - 'GROUPING', primaryjoin='RULE.GROUPING_ID == GROUPING.ID', backref='RULE') + "GROUPING", primaryjoin="RULE.GROUPING_ID == GROUPING.ID", backref="RULE" + ) class SAMPLE(Base, EntityHelper): - __tablename__ = 'SAMPLE' - __table_args__ = ( - Index('UNQ_SAMPLE_0', 'INVESTIGATION_ID', 'NAME'), - ) + __tablename__ = "SAMPLE" + __table_args__ = (Index("UNQ_SAMPLE_0", "INVESTIGATION_ID", "NAME"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -831,20 +927,24 @@ class SAMPLE(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) NAME = Column(String(255), nullable=False) - INVESTIGATION_ID = Column(ForeignKey('INVESTIGATION.ID'), nullable=False) - SAMPLETYPE_ID = Column(ForeignKey('SAMPLETYPE.ID'), index=True) + INVESTIGATION_ID = Column(ForeignKey("INVESTIGATION.ID"), nullable=False) + SAMPLETYPE_ID = Column(ForeignKey("SAMPLETYPE.ID"), index=True) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='SAMPLE.INVESTIGATION_ID == INVESTIGATION.ID', - backref='SAMPLE') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="SAMPLE.INVESTIGATION_ID == 
INVESTIGATION.ID", + backref="SAMPLE", + ) SAMPLETYPE = relationship( - 'SAMPLETYPE', primaryjoin='SAMPLE.SAMPLETYPE_ID == SAMPLETYPE.ID', backref='SAMPLE') + "SAMPLETYPE", + primaryjoin="SAMPLE.SAMPLETYPE_ID == SAMPLETYPE.ID", + backref="SAMPLE", + ) class SAMPLEPARAMETER(Base, EntityHelper): - __tablename__ = 'SAMPLEPARAMETER' - __table_args__ = ( - Index('UNQ_SAMPLEPARAMETER_0', 'SAMPLE_ID', 'PARAMETER_TYPE_ID'), - ) + __tablename__ = "SAMPLEPARAMETER" + __table_args__ = (Index("UNQ_SAMPLEPARAMETER_0", "SAMPLE_ID", "PARAMETER_TYPE_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -857,18 +957,25 @@ class SAMPLEPARAMETER(Base, EntityHelper): RANGEBOTTOM = Column(Float(asdecimal=True)) RANGETOP = Column(Float(asdecimal=True)) STRING_VALUE = Column(String(4000)) - SAMPLE_ID = Column(ForeignKey('SAMPLE.ID'), nullable=False) - PARAMETER_TYPE_ID = Column(ForeignKey( - 'PARAMETERTYPE.ID'), nullable=False, index=True) + SAMPLE_ID = Column(ForeignKey("SAMPLE.ID"), nullable=False) + PARAMETER_TYPE_ID = Column( + ForeignKey("PARAMETERTYPE.ID"), nullable=False, index=True + ) - PARAMETERTYPE = relationship('PARAMETERTYPE', primaryjoin='SAMPLEPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID', - backref='SAMPLEPARAMETER') + PARAMETERTYPE = relationship( + "PARAMETERTYPE", + primaryjoin="SAMPLEPARAMETER.PARAMETER_TYPE_ID == PARAMETERTYPE.ID", + backref="SAMPLEPARAMETER", + ) SAMPLE = relationship( - 'SAMPLE', primaryjoin='SAMPLEPARAMETER.SAMPLE_ID == SAMPLE.ID', backref='SAMPLEPARAMETER') + "SAMPLE", + primaryjoin="SAMPLEPARAMETER.SAMPLE_ID == SAMPLE.ID", + backref="SAMPLEPARAMETER", + ) class SESSION(Base, EntityHelper): - __tablename__ = 'SESSION_' + __tablename__ = "SESSION_" ID = Column(String(255), primary_key=True) EXPIREDATETIME = Column(DateTime) @@ -876,10 +983,8 @@ class SESSION(Base, EntityHelper): class SHIFT(Base, EntityHelper): - __tablename__ = 'SHIFT' - __table_args__ = ( - Index('UNQ_SHIFT_0', 
'INVESTIGATION_ID', 'STARTDATE', 'ENDDATE'), - ) + __tablename__ = "SHIFT" + __table_args__ = (Index("UNQ_SHIFT_0", "INVESTIGATION_ID", "STARTDATE", "ENDDATE"),) ID = Column(BigInteger, primary_key=True) COMMENT = Column(String(255)) @@ -889,14 +994,17 @@ class SHIFT(Base, EntityHelper): MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) STARTDATE = Column(DateTime, nullable=False) - INVESTIGATION_ID = Column(ForeignKey('INVESTIGATION.ID'), nullable=False) + INVESTIGATION_ID = Column(ForeignKey("INVESTIGATION.ID"), nullable=False) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='SHIFT.INVESTIGATION_ID == INVESTIGATION.ID', - backref='SHIFT') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="SHIFT.INVESTIGATION_ID == INVESTIGATION.ID", + backref="SHIFT", + ) class USER(Base, EntityHelper): - __tablename__ = 'USER_' + __tablename__ = "USER_" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -910,29 +1018,29 @@ class USER(Base, EntityHelper): class USERGROUP(Base, EntityHelper): - __tablename__ = 'USERGROUP' - __table_args__ = ( - Index('UNQ_USERGROUP_0', 'USER_ID', 'GROUP_ID'), - ) + __tablename__ = "USERGROUP" + __table_args__ = (Index("UNQ_USERGROUP_0", "USER_ID", "GROUP_ID"),) ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) - GROUP_ID = Column(ForeignKey('GROUPING.ID'), nullable=False, index=True) - USER_ID = Column(ForeignKey('USER_.ID'), nullable=False) + GROUP_ID = Column(ForeignKey("GROUPING.ID"), nullable=False, index=True) + USER_ID = Column(ForeignKey("USER_.ID"), nullable=False) GROUPING = relationship( - 'GROUPING', primaryjoin='USERGROUP.GROUP_ID == GROUPING.ID', backref='USERGROUP') + "GROUPING", primaryjoin="USERGROUP.GROUP_ID == GROUPING.ID", backref="USERGROUP" + ) 
USER_ = relationship( - 'USER', primaryjoin='USERGROUP.USER_ID == USER.ID', backref='USERGROUP') + "USER", primaryjoin="USERGROUP.USER_ID == USER.ID", backref="USERGROUP" + ) class STUDYINVESTIGATION(Base, EntityHelper): - __tablename__ = 'STUDYINVESTIGATION' + __tablename__ = "STUDYINVESTIGATION" __table_args__ = ( - Index('UNQ_STUDYINVESTIGATION_0', 'STUDY_ID', 'INVESTIGATION_ID'), + Index("UNQ_STUDYINVESTIGATION_0", "STUDY_ID", "INVESTIGATION_ID"), ) ID = Column(BigInteger, primary_key=True) @@ -940,18 +1048,25 @@ class STUDYINVESTIGATION(Base, EntityHelper): CREATE_TIME = Column(DateTime, nullable=False) MOD_ID = Column(String(255), nullable=False) MOD_TIME = Column(DateTime, nullable=False) - INVESTIGATION_ID = Column(ForeignKey( - 'INVESTIGATION.ID'), nullable=False, index=True) - STUDY_ID = Column(ForeignKey('STUDY.ID'), nullable=False) + INVESTIGATION_ID = Column( + ForeignKey("INVESTIGATION.ID"), nullable=False, index=True + ) + STUDY_ID = Column(ForeignKey("STUDY.ID"), nullable=False) - INVESTIGATION = relationship('INVESTIGATION', primaryjoin='STUDYINVESTIGATION.INVESTIGATION_ID == INVESTIGATION.ID', - backref='STUDYINVESTIGATION') + INVESTIGATION = relationship( + "INVESTIGATION", + primaryjoin="STUDYINVESTIGATION.INVESTIGATION_ID == INVESTIGATION.ID", + backref="STUDYINVESTIGATION", + ) STUDY = relationship( - 'STUDY', primaryjoin='STUDYINVESTIGATION.STUDY_ID == STUDY.ID', backref='STUDYINVESTIGATION') + "STUDY", + primaryjoin="STUDYINVESTIGATION.STUDY_ID == STUDY.ID", + backref="STUDYINVESTIGATION", + ) class STUDY(Base, EntityHelper): - __tablename__ = 'STUDY' + __tablename__ = "STUDY" ID = Column(BigInteger, primary_key=True) CREATE_ID = Column(String(255), nullable=False) @@ -962,16 +1077,17 @@ class STUDY(Base, EntityHelper): NAME = Column(String(255), nullable=False) STARTDATE = Column(DateTime) STATUS = Column(Integer) - USER_ID = Column(ForeignKey('USER_.ID'), index=True) + USER_ID = Column(ForeignKey("USER_.ID"), index=True) USER_ = 
relationship( - 'USER', primaryjoin='STUDY.USER_ID == USER.ID', backref='STUDY') + "USER", primaryjoin="STUDY.USER_ID == USER.ID", backref="STUDY" + ) class SAMPLETYPE(Base, EntityHelper): - __tablename__ = 'SAMPLETYPE' + __tablename__ = "SAMPLETYPE" __table_args__ = ( - Index('UNQ_SAMPLETYPE_0', 'FACILITY_ID', 'NAME', 'MOLECULARFORMULA'), + Index("UNQ_SAMPLETYPE_0", "FACILITY_ID", "NAME", "MOLECULARFORMULA"), ) ID = Column(BigInteger, primary_key=True) @@ -982,7 +1098,10 @@ class SAMPLETYPE(Base, EntityHelper): MOLECULARFORMULA = Column(String(255), nullable=False) NAME = Column(String(255), nullable=False) SAFETYINFORMATION = Column(String(4000)) - FACILITY_ID = Column(ForeignKey('FACILITY.ID'), nullable=False) + FACILITY_ID = Column(ForeignKey("FACILITY.ID"), nullable=False) FACILITY = relationship( - 'FACILITY', primaryjoin='SAMPLETYPE.FACILITY_ID == FACILITY.ID', backref='SAMPLETYPE') + "FACILITY", + primaryjoin="SAMPLETYPE.FACILITY_ID == FACILITY.ID", + backref="SAMPLETYPE", + ) diff --git a/common/session_manager.py b/common/session_manager.py index f4459b02..dcf13e12 100644 --- a/common/session_manager.py +++ b/common/session_manager.py @@ -4,12 +4,13 @@ from common.constants import Constants -engine = create_engine(Constants.DATABASE_URL, poolclass=QueuePool, pool_size=100, max_overflow=0) +engine = create_engine( + Constants.DATABASE_URL, poolclass=QueuePool, pool_size=100, max_overflow=0 +) session_factory = sessionmaker(engine) class SessionManager(object): - def __init__(self): self.Session = scoped_session(session_factory) diff --git a/dev-requirements.in b/dev-requirements.in index f02bcfa9..8fbfcbed 100644 --- a/dev-requirements.in +++ b/dev-requirements.in @@ -1 +1,3 @@ -Faker == 2.0.2 \ No newline at end of file +pip-tools == 5.3.1 +Faker == 2.0.2 +black == 19.10b0 \ No newline at end of file diff --git a/dev-requirements.txt b/dev-requirements.txt index 4a7bf267..4b2fe87a 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,7 +4,19 
@@ # # pip-compile dev-requirements.in # -faker==2.0.2 +appdirs==1.4.4 # via black +attrs==19.3.0 # via black +black==19.10b0 # via -r dev-requirements.in +click==7.1.2 # via black, pip-tools +faker==2.0.2 # via -r dev-requirements.in +pathspec==0.8.0 # via black +pip-tools==5.3.1 # via -r dev-requirements.in python-dateutil==2.8.0 # via faker -six==1.12.0 # via faker, python-dateutil +regex==2020.7.14 # via black +six==1.12.0 # via faker, pip-tools, python-dateutil text-unidecode==1.3 # via faker +toml==0.10.1 # via black +typed-ast==1.4.1 # via black + +# The following packages are considered to be unsafe in a requirements file: +# pip diff --git a/requirements.txt b/requirements.txt index 30725d73..752e3265 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,18 +5,18 @@ # pip-compile requirements.in # aniso8601==8.0.0 # via flask-restful -apispec==3.3.0 +apispec==3.3.0 # via -r requirements.in click==7.0 # via flask -flask-cors==3.0.8 -flask-swagger-ui==3.25.0 +flask-cors==3.0.8 # via -r requirements.in +flask-swagger-ui==3.25.0 # via -r requirements.in flask==1.1.1 # via flask-cors, flask-restful, flask-swagger-ui -flask_restful==0.3.7 +flask_restful==0.3.7 # via -r requirements.in itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask markupsafe==1.1.1 # via jinja2 -pymysql==0.9.3 +pymysql==0.9.3 # via -r requirements.in pytz==2019.2 # via flask-restful -pyyaml==5.1.2 +pyyaml==5.1.2 # via -r requirements.in six==1.12.0 # via flask-cors, flask-restful -sqlalchemy==1.3.8 +sqlalchemy==1.3.8 # via -r requirements.in werkzeug==0.16.0 # via flask diff --git a/src/main.py b/src/main.py index 8ffa6699..a19c2b94 100644 --- a/src/main.py +++ b/src/main.py @@ -5,12 +5,20 @@ from common.config import config from common.logger_setup import setup_logger -from src.resources.entities.entity_endpoint import get_endpoint, get_id_endpoint, get_count_endpoint, \ - get_find_one_endpoint +from src.resources.entities.entity_endpoint import ( + get_endpoint, + 
get_id_endpoint, + get_count_endpoint, + get_find_one_endpoint, +) from src.resources.entities.entity_map import endpoints from src.resources.non_entities.sessions_endpoints import * -from src.resources.table_endpoints.table_endpoints import InstrumentsFacilityCycles, InstrumentsFacilityCyclesCount, \ - InstrumentsFacilityCyclesInvestigations, InstrumentsFacilityCyclesInvestigationsCount +from src.resources.table_endpoints.table_endpoints import ( + InstrumentsFacilityCycles, + InstrumentsFacilityCyclesCount, + InstrumentsFacilityCyclesInvestigations, + InstrumentsFacilityCyclesInvestigationsCount, +) from common.exceptions import ApiError from apispec import APISpec from pathlib import Path @@ -19,8 +27,13 @@ from src.swagger.initialise_spec import initialise_spec -spec = APISpec(title="DataGateway API", version="1.0", openapi_version="3.0.3", - plugins=[RestfulPlugin()], security=[{"session_id": []}]) +spec = APISpec( + title="DataGateway API", + version="1.0", + openapi_version="3.0.3", + plugins=[RestfulPlugin()], + security=[{"session_id": []}], +) app = Flask(__name__) cors = CORS(app) @@ -36,11 +49,7 @@ def handle_error(e): swaggerui_blueprint = get_swaggerui_blueprint( - "", - "/openapi.json", - config={ - 'app_name': "DataGateway API OpenAPI Spec" - }, + "", "/openapi.json", config={"app_name": "DataGateway API OpenAPI Spec"}, ) app.register_blueprint(swaggerui_blueprint, url_prefix="/") @@ -54,22 +63,20 @@ def handle_error(e): api.add_resource(get_endpoint_resource, f"/{entity_name.lower()}") spec.path(resource=get_endpoint_resource, api=api) - get_id_endpoint_resource = get_id_endpoint( - entity_name, endpoints[entity_name]) - api.add_resource(get_id_endpoint_resource, - f"/{entity_name.lower()}/") + get_id_endpoint_resource = get_id_endpoint(entity_name, endpoints[entity_name]) + api.add_resource(get_id_endpoint_resource, f"/{entity_name.lower()}/") spec.path(resource=get_id_endpoint_resource, api=api) get_count_endpoint_resource = get_count_endpoint( - 
entity_name, endpoints[entity_name]) - api.add_resource(get_count_endpoint_resource, - f"/{entity_name.lower()}/count") + entity_name, endpoints[entity_name] + ) + api.add_resource(get_count_endpoint_resource, f"/{entity_name.lower()}/count") spec.path(resource=get_count_endpoint_resource, api=api) get_find_one_endpoint_resource = get_find_one_endpoint( - entity_name, endpoints[entity_name]) - api.add_resource(get_find_one_endpoint_resource, - f"/{entity_name.lower()}/findone") + entity_name, endpoints[entity_name] + ) + api.add_resource(get_find_one_endpoint_resource, f"/{entity_name.lower()}/findone") spec.path(resource=get_find_one_endpoint_resource, api=api) @@ -78,17 +85,21 @@ def handle_error(e): spec.path(resource=Sessions, api=api) # Table specific endpoints -api.add_resource(InstrumentsFacilityCycles, - "/instruments//facilitycycles") +api.add_resource(InstrumentsFacilityCycles, "/instruments//facilitycycles") spec.path(resource=InstrumentsFacilityCycles, api=api) -api.add_resource(InstrumentsFacilityCyclesCount, - "/instruments//facilitycycles/count") +api.add_resource( + InstrumentsFacilityCyclesCount, "/instruments//facilitycycles/count" +) spec.path(resource=InstrumentsFacilityCyclesCount, api=api) -api.add_resource(InstrumentsFacilityCyclesInvestigations, - "/instruments//facilitycycles//investigations") +api.add_resource( + InstrumentsFacilityCyclesInvestigations, + "/instruments//facilitycycles//investigations", +) spec.path(resource=InstrumentsFacilityCyclesInvestigations, api=api) -api.add_resource(InstrumentsFacilityCyclesInvestigationsCount, - "/instruments//facilitycycles//investigations/count") +api.add_resource( + InstrumentsFacilityCyclesInvestigationsCount, + "/instruments//facilitycycles//investigations/count", +) spec.path(resource=InstrumentsFacilityCyclesInvestigationsCount, api=api) openapi_spec_path = Path(__file__).parent / "swagger/openapi.yaml" @@ -104,5 +115,6 @@ def specs(): if __name__ == "__main__": - 
app.run(host=config.get_host(), port=config.get_port(), - debug=config.is_debug_mode()) + app.run( + host=config.get_host(), port=config.get_port(), debug=config.is_debug_mode() + ) diff --git a/src/resources/entities/entity_endpoint.py b/src/resources/entities/entity_endpoint.py index fe6d8439..81795beb 100644 --- a/src/resources/entities/entity_endpoint.py +++ b/src/resources/entities/entity_endpoint.py @@ -1,9 +1,20 @@ from flask import request from flask_restful import Resource -from common.database.helpers import get_rows_by_filter, create_rows_from_json, patch_entities, get_row_by_id, \ - delete_row_by_id, update_row_from_id, get_filtered_row_count, get_first_filtered_row -from common.helpers import get_session_id_from_auth_header, get_filters_from_query_string +from common.database.helpers import ( + get_rows_by_filter, + create_rows_from_json, + patch_entities, + get_row_by_id, + delete_row_by_id, + update_row_from_id, + get_filtered_row_count, + get_first_filtered_row, +) +from common.helpers import ( + get_session_id_from_auth_header, + get_filters_from_query_string, +) from common.backends import backend @@ -12,13 +23,22 @@ def get_endpoint(name, table): Given an entity name generate a flask_restful Resource class. 
In main.py these generated classes are registered with the api e.g api.add_resource(get_endpoint("Datafiles", DATAFILE), "/datafiles") + :param name: The name of the entity :param table: The table the endpoint will use in queries :return: The generated endpoint class """ + class Endpoint(Resource): def get(self): - return backend.get_with_filters(get_session_id_from_auth_header(), table, get_filters_from_query_string()), 200 + return ( + backend.get_with_filters( + get_session_id_from_auth_header(), + table, + get_filters_from_query_string(), + ), + 200, + ) get.__doc__ = f""" --- @@ -53,7 +73,10 @@ def get(self): """ def post(self): - return backend.create(get_session_id_from_auth_header(), table, request.json), 200 + return ( + backend.create(get_session_id_from_auth_header(), table, request.json), + 200, + ) post.__doc__ = f""" --- @@ -94,7 +117,17 @@ def post(self): """ def patch(self): - return list(map(lambda x: x.to_dict(), backend.update(get_session_id_from_auth_header(), table, request.json))), 200 + return ( + list( + map( + lambda x: x.to_dict(), + backend.update( + get_session_id_from_auth_header(), table, request.json + ), + ) + ), + 200, + ) patch.__doc__ = f""" --- @@ -142,15 +175,19 @@ def get_id_endpoint(name, table): """ Given an entity name generate a flask_restful Resource class. 
In main.py these generated classes are registered with the api e.g - api.add_resource(get_endpoint("Datafiles", DATAFILE), "/datafiles/") + api.add_resource(get_endpoint("Datafiles", DATAFILE), "/datafiles/") + :param name: The name of the entity :param table: The table the endpoint will use in queries :return: The generated id endpoint class """ - class EndpointWithID(Resource): + class EndpointWithID(Resource): def get(self, id_): - return backend.get_with_id(get_session_id_from_auth_header(), table, id_), 200 + return ( + backend.get_with_id(get_session_id_from_auth_header(), table, id_), + 200, + ) get.__doc__ = f""" --- @@ -183,8 +220,7 @@ def get(self, id_): """ def delete(self, id_): - backend.delete_with_id( - get_session_id_from_auth_header(), table, id_) + backend.delete_with_id(get_session_id_from_auth_header(), table, id_) return "", 204 delete.__doc__ = f""" @@ -264,15 +300,21 @@ def get_count_endpoint(name, table): Given an entity name generate a flask_restful Resource class. In main.py these generated classes are registered with the api e.g api.add_resource(get_endpoint("Datafiles", DATAFILE), "/datafiles/count") + :param name: The name of the entity :param table: The table the endpoint will use in queries :return: The generated count endpoint class """ - class CountEndpoint(Resource): + class CountEndpoint(Resource): def get(self): filters = get_filters_from_query_string() - return backend.count_with_filters(get_session_id_from_auth_header(), table, filters), 200 + return ( + backend.count_with_filters( + get_session_id_from_auth_header(), table, filters + ), + 200, + ) get.__doc__ = f""" --- @@ -310,15 +352,21 @@ def get_find_one_endpoint(name, table): Given an entity name generate a flask_restful Resource class. 
In main.py these generated classes are registered with the api e.g api.add_resource(get_endpoint("Datafiles", DATAFILE), "/datafiles/findone") + :param name: The name of the entity :param table: The table the endpoint will use in queries :return: The generated findOne endpoint class """ - class FindOneEndpoint(Resource): + class FindOneEndpoint(Resource): def get(self): filters = get_filters_from_query_string() - return backend.get_one_with_filters(get_session_id_from_auth_header(), table, filters), 200 + return ( + backend.get_one_with_filters( + get_session_id_from_auth_header(), table, filters + ), + 200, + ) get.__doc__ = f""" --- diff --git a/src/resources/entities/entity_map.py b/src/resources/entities/entity_map.py index bb73d0e1..e1939444 100644 --- a/src/resources/entities/entity_map.py +++ b/src/resources/entities/entity_map.py @@ -1,27 +1,87 @@ -from common.models.db_models import APPLICATION, DATACOLLECTIONDATAFILE, DATACOLLECTIONPARAMETER, DATACOLLECTIONDATASET, \ - DATACOLLECTION, DATAFILEFORMAT, DATAFILE, FACILITYCYCLE, DATASETTYPE, GROUPING, INSTRUMENT, INSTRUMENTSCIENTIST, \ - INVESTIGATIONGROUP, INVESTIGATIONINSTRUMENT, INVESTIGATIONTYPE, INVESTIGATION, JOB, KEYWORD, PARAMETERTYPE, \ - INVESTIGATIONPARAMETER, INVESTIGATIONUSER, PUBLICSTEP, RULE, SAMPLE, USERGROUP, STUDYINVESTIGATION, SAMPLETYPE, \ - RELATEDDATAFILE, SAMPLEPARAMETER, PUBLICATION, STUDY, USER, SHIFT, PERMISSIBLESTRINGVALUE, FACILITY, \ - DATAFILEPARAMETER, DATASET, DATASETPARAMETER +from common.models.db_models import ( + APPLICATION, + DATACOLLECTIONDATAFILE, + DATACOLLECTIONPARAMETER, + DATACOLLECTIONDATASET, + DATACOLLECTION, + DATAFILEFORMAT, + DATAFILE, + FACILITYCYCLE, + DATASETTYPE, + GROUPING, + INSTRUMENT, + INSTRUMENTSCIENTIST, + INVESTIGATIONGROUP, + INVESTIGATIONINSTRUMENT, + INVESTIGATIONTYPE, + INVESTIGATION, + JOB, + KEYWORD, + PARAMETERTYPE, + INVESTIGATIONPARAMETER, + INVESTIGATIONUSER, + PUBLICSTEP, + RULE, + SAMPLE, + USERGROUP, + STUDYINVESTIGATION, + SAMPLETYPE, 
+ RELATEDDATAFILE, + SAMPLEPARAMETER, + PUBLICATION, + STUDY, + USER, + SHIFT, + PERMISSIBLESTRINGVALUE, + FACILITY, + DATAFILEPARAMETER, + DATASET, + DATASETPARAMETER, +) import datetime from sqlalchemy.inspection import inspect -endpoints = {'Applications': APPLICATION, 'DataCollectionDatafiles': DATACOLLECTIONDATAFILE, - 'DataCollectionDatasets': DATACOLLECTIONDATASET, 'DataCollectionParameters': DATACOLLECTIONPARAMETER, - 'DataCollections': DATACOLLECTION, 'DatafileFormats': DATAFILEFORMAT, - 'DatafileParameters': DATAFILEPARAMETER, 'Datafiles': DATAFILE, 'DatasetParameters': DATASETPARAMETER, 'DatasetTypes': DATASETTYPE, - 'Datasets': DATASET, 'Facilities': FACILITY, 'FacilityCycles': FACILITYCYCLE, 'Groupings': GROUPING, - 'InstrumentScientists': INSTRUMENTSCIENTIST, 'Instruments': INSTRUMENT, - 'InvestigationGroups': INVESTIGATIONGROUP, - 'InvestigationInstruments': INVESTIGATIONINSTRUMENT, 'InvestigationParameters': INVESTIGATIONPARAMETER, - 'InvestigationTypes': INVESTIGATIONTYPE, 'InvestigationUsers': INVESTIGATIONUSER, - 'Investigations': INVESTIGATION, 'Jobs': JOB, 'Keywords': KEYWORD, 'ParameterTypes': PARAMETERTYPE, - 'PermissibleStringValues': PERMISSIBLESTRINGVALUE, 'PublicSteps': PUBLICSTEP, - 'Publications': PUBLICATION, 'RelatedDatafiles': RELATEDDATAFILE, 'Rules': RULE, - 'SampleParameters': SAMPLEPARAMETER, 'SampleTypes': SAMPLETYPE, 'Samples': SAMPLE, 'Shifts': SHIFT, - 'Studies': STUDY, 'StudyInvestigations': STUDYINVESTIGATION, 'UserGroups': USERGROUP, 'Users': USER} +endpoints = { + "Applications": APPLICATION, + "DataCollectionDatafiles": DATACOLLECTIONDATAFILE, + "DataCollectionDatasets": DATACOLLECTIONDATASET, + "DataCollectionParameters": DATACOLLECTIONPARAMETER, + "DataCollections": DATACOLLECTION, + "DatafileFormats": DATAFILEFORMAT, + "DatafileParameters": DATAFILEPARAMETER, + "Datafiles": DATAFILE, + "DatasetParameters": DATASETPARAMETER, + "DatasetTypes": DATASETTYPE, + "Datasets": DATASET, + "Facilities": FACILITY, + 
"FacilityCycles": FACILITYCYCLE, + "Groupings": GROUPING, + "InstrumentScientists": INSTRUMENTSCIENTIST, + "Instruments": INSTRUMENT, + "InvestigationGroups": INVESTIGATIONGROUP, + "InvestigationInstruments": INVESTIGATIONINSTRUMENT, + "InvestigationParameters": INVESTIGATIONPARAMETER, + "InvestigationTypes": INVESTIGATIONTYPE, + "InvestigationUsers": INVESTIGATIONUSER, + "Investigations": INVESTIGATION, + "Jobs": JOB, + "Keywords": KEYWORD, + "ParameterTypes": PARAMETERTYPE, + "PermissibleStringValues": PERMISSIBLESTRINGVALUE, + "PublicSteps": PUBLICSTEP, + "Publications": PUBLICATION, + "RelatedDatafiles": RELATEDDATAFILE, + "Rules": RULE, + "SampleParameters": SAMPLEPARAMETER, + "SampleTypes": SAMPLETYPE, + "Samples": SAMPLE, + "Shifts": SHIFT, + "Studies": STUDY, + "StudyInvestigations": STUDYINVESTIGATION, + "UserGroups": USERGROUP, + "Users": USER, +} def type_conversion(python_type): @@ -34,13 +94,13 @@ def type_conversion(python_type): if python_type is int: return {"type": "integer"} if python_type is float: - return {"type": 'number', "format": "float"} + return {"type": "number", "format": "float"} if python_type is bool: - return {"type": 'boolean'} + return {"type": "boolean"} if python_type is datetime.datetime: - return {"type": 'string', "format": "datetime"} + return {"type": "string", "format": "datetime"} if python_type is datetime.date: - return {"type": 'string', "format": "date"} + return {"type": "string", "format": "date"} return {"type": "string"} @@ -57,8 +117,11 @@ def create_entity_models(): required = [] endpoint_inspection = inspect(endpoints[endpoint]) for column in endpoint_inspection.columns: - python_type = column.type.impl.python_type if hasattr( - column.type, 'impl') else column.type.python_type + python_type = ( + column.type.impl.python_type + if hasattr(column.type, "impl") + else column.type.python_type + ) param = type_conversion(python_type) if column.name == "ID": @@ -69,18 +132,30 @@ def create_entity_models(): 
required.append(column.name) params[column.name] = param - for (relationship_name, relationship_class) in endpoint_inspection.relationships.items(): - if relationship_class.direction.name == "MANYTOONE" or relationship_class.direction.name == "ONETOONE": + for ( + relationship_name, + relationship_class, + ) in endpoint_inspection.relationships.items(): + if ( + relationship_class.direction.name == "MANYTOONE" + or relationship_class.direction.name == "ONETOONE" + ): params[relationship_name] = { - "$ref": f"#/components/schemas/{relationship_name.strip('_')}"} - if relationship_class.direction.name == "MANYTOMANY" or relationship_class.direction.name == "ONETOMANY": + "$ref": f"#/components/schemas/{relationship_name.strip('_')}" + } + if ( + relationship_class.direction.name == "MANYTOMANY" + or relationship_class.direction.name == "ONETOMANY" + ): params[relationship_name] = { "type": "array", "items": { "$ref": f"#/components/schemas/{relationship_name.strip('_')}" - } + }, } endpoint_models[endpoints[endpoint].__name__] = { - "properties": params, "required": required} + "properties": params, + "required": required, + } return endpoint_models diff --git a/src/resources/non_entities/sessions_endpoints.py b/src/resources/non_entities/sessions_endpoints.py index 21716530..492700e1 100644 --- a/src/resources/non_entities/sessions_endpoints.py +++ b/src/resources/non_entities/sessions_endpoints.py @@ -4,7 +4,11 @@ from flask import request from flask_restful import Resource, reqparse -from common.database.helpers import insert_row_into_table, delete_row_by_id, get_row_by_id +from common.database.helpers import ( + insert_row_into_table, + delete_row_by_id, + get_row_by_id, +) from common.helpers import get_session_id_from_auth_header from common.models.db_models import SESSION from common.backends import backend @@ -12,8 +16,8 @@ log = logging.getLogger() -class Sessions(Resource): +class Sessions(Resource): def post(self): """ Generates a sessionID if the user has 
correct credentials @@ -56,7 +60,9 @@ def post(self): 403: description: Forbidden. User credentials were invalid """ - if not (request.data and "username" in request.json and "password" in request.json): + if not ( + request.data and "username" in request.json and "password" in request.json + ): return "Bad request", 400 # If no mechanism is present in request body, default to simple if not ("mechanism" in request.json): diff --git a/src/resources/table_endpoints/table_endpoints.py b/src/resources/table_endpoints/table_endpoints.py index 60eca7cc..4de8a945 100644 --- a/src/resources/table_endpoints/table_endpoints.py +++ b/src/resources/table_endpoints/table_endpoints.py @@ -1,8 +1,15 @@ from flask_restful import Resource -from common.database.helpers import get_facility_cycles_for_instrument, get_facility_cycles_for_instrument_count, \ - get_investigations_for_instrument_in_facility_cycle, get_investigations_for_instrument_in_facility_cycle_count -from common.helpers import get_session_id_from_auth_header, get_filters_from_query_string +from common.database.helpers import ( + get_facility_cycles_for_instrument, + get_facility_cycles_for_instrument_count, + get_investigations_for_instrument_in_facility_cycle, + get_investigations_for_instrument_in_facility_cycle_count, +) +from common.helpers import ( + get_session_id_from_auth_header, + get_filters_from_query_string, +) from common.backends import backend @@ -45,7 +52,12 @@ def get(self, id_): 404: description: No such record - Unable to find a record in the database """ - return backend.get_facility_cycles_for_instrument(get_session_id_from_auth_header(), id_, get_filters_from_query_string()), 200 + return ( + backend.get_facility_cycles_for_instrument( + get_session_id_from_auth_header(), id_, get_filters_from_query_string() + ), + 200, + ) class InstrumentsFacilityCyclesCount(Resource): @@ -81,7 +93,12 @@ def get(self, id_): 404: description: No such record - Unable to find a record in the database """ - return 
backend.get_facility_cycles_for_instrument_count(get_session_id_from_auth_header(), id_, get_filters_from_query_string()), 200 + return ( + backend.get_facility_cycles_for_instrument_count( + get_session_id_from_auth_header(), id_, get_filters_from_query_string() + ), + 200, + ) class InstrumentsFacilityCyclesInvestigations(Resource): @@ -129,8 +146,15 @@ def get(self, instrument_id, cycle_id): 404: description: No such record - Unable to find a record in the database """ - return backend.get_investigations_for_instrument_in_facility_cycle(get_session_id_from_auth_header(), instrument_id, cycle_id, - get_filters_from_query_string()), 200 + return ( + backend.get_investigations_for_instrument_in_facility_cycle( + get_session_id_from_auth_header(), + instrument_id, + cycle_id, + get_filters_from_query_string(), + ), + 200, + ) class InstrumentsFacilityCyclesInvestigationsCount(Resource): @@ -172,5 +196,12 @@ def get(self, instrument_id, cycle_id): 404: description: No such record - Unable to find a record in the database """ - return backend.get_investigations_for_instrument_in_facility_cycle_count(get_session_id_from_auth_header(), instrument_id, cycle_id, - get_filters_from_query_string()), 200 + return ( + backend.get_investigations_for_instrument_in_facility_cycle_count( + get_session_id_from_auth_header(), + instrument_id, + cycle_id, + get_filters_from_query_string(), + ), + 200, + ) diff --git a/src/swagger/apispec_flask_restful.py b/src/swagger/apispec_flask_restful.py index cc9c8d1c..6d3fe3fa 100644 --- a/src/swagger/apispec_flask_restful.py +++ b/src/swagger/apispec_flask_restful.py @@ -1,4 +1,5 @@ -# TODO: when apispec-flask-restful updates to support apispec v3, use the version from PyPi instead +# TODO: when apispec-flask-restful updates to support apispec v3, use the version from +# PyPi instead import logging import re @@ -11,38 +12,38 @@ def deduce_path(resource, **kwargs): """Find resource path using provided API or path itself""" - api = 
kwargs.get('api', None) + api = kwargs.get("api", None) if not api: # flask-restful resource url passed - return kwargs.get('path') + return kwargs.get("path") # flask-restful API passed # Require MethodView - if not getattr(resource, 'endpoint', None): - raise APISpecError('Flask-RESTful resource needed') + if not getattr(resource, "endpoint", None): + raise APISpecError("Flask-RESTful resource needed") if api.blueprint: # it is required to have Flask app to be able enumerate routes - app = kwargs.get('app') + app = kwargs.get("app") if app: for rule in app.url_map.iter_rules(): - if rule.endpoint.endswith('.' + resource.endpoint): + if rule.endpoint.endswith("." + resource.endpoint): break else: raise APISpecError( - 'Cannot find blueprint resource {}'.format(resource.endpoint)) + "Cannot find blueprint resource {}".format(resource.endpoint) + ) else: # Application not initialized yet, fallback to path - return kwargs.get('path') + return kwargs.get("path") else: for rule in api.app.url_map.iter_rules(): if rule.endpoint == resource.endpoint: - rule.endpoint.endswith('.' + resource.endpoint) + rule.endpoint.endswith("." 
+ resource.endpoint) break else: - raise APISpecError( - 'Cannot find resource {}'.format(resource.endpoint)) + raise APISpecError("Cannot find resource {}".format(resource.endpoint)) return rule.rule @@ -58,7 +59,8 @@ def parse_operations(resource, operations): operation = None if not operation: logging.getLogger(__name__).warning( - 'Cannot load docstring for {}/{}'.format(resource, method)) + "Cannot load docstring for {}/{}".format(resource, method) + ) operations[method.lower()] = operation or dict() @@ -68,22 +70,24 @@ class RestfulPlugin(apispec.BasePlugin): def path_helper(self, path=None, operations=None, parameters=None, **kwargs): kwargs["path"] = path try: - resource = kwargs.pop('resource') + resource = kwargs.pop("resource") path = deduce_path(resource, **kwargs) - path = re.sub(r'<(?:[^:<>]+:)?([^<>]+)>', r'{\1}', path) + path = re.sub(r"<(?:[^:<>]+:)?([^<>]+)>", r"{\1}", path) return path except Exception as exc: logging.getLogger(__name__).exception( - 'Exception parsing APISpec', exc_info=exc) + "Exception parsing APISpec", exc_info=exc + ) raise def operation_helper(self, path=None, operations=None, **kwargs): if operations is None: return try: - resource = kwargs.pop('resource') + resource = kwargs.pop("resource") parse_operations(resource, operations) except Exception as exc: logging.getLogger(__name__).exception( - 'Exception parsing APISpec', exc_info=exc) + "Exception parsing APISpec", exc_info=exc + ) raise diff --git a/src/swagger/initialise_spec.py b/src/swagger/initialise_spec.py index 7915c6b3..5147b3ff 100644 --- a/src/swagger/initialise_spec.py +++ b/src/swagger/initialise_spec.py @@ -3,321 +3,223 @@ def initialise_spec(spec): """ - Given a apispec spec object, will initialise it with the security scheme, models and parameters we use + Given a apispec spec object, will initialise it with the security scheme, models and + parameters we use + :spec: ApiSpec: spec object to initialise :return: void """ spec.components.security_scheme( - 
"session_id", {"type": "http", "scheme": "bearer", "bearerFormat": "uuid"}) + "session_id", {"type": "http", "scheme": "bearer", "bearerFormat": "uuid"} + ) entity_schemas = create_entity_models() for (schema_name, schema) in entity_schemas.items(): spec.components.schema(schema_name, schema) - spec.components.parameter("WHERE_FILTER", "query", { - "in": "query", - "name": "where", - "description": "Apply where filters to the query. The possible operators are like, gte, lte, in and eq", - "schema": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": { + spec.components.parameter( + "WHERE_FILTER", + "query", + { + "in": "query", + "name": "where", + "description": "Apply where filters to the query. The possible operators are like, gte, lte, in and eq", + "schema": { + "type": "array", + "items": { "type": "object", - "minProperties": 1, - "maxProperties": 1, - "title": "Column", - "description": "Name of the column to apply the filter on", - "oneOf": [ - { - "type": "object", - "title": "Equality", - "properties": { - "eq": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "integer" - }, - { - "type": "boolean" - } - ] - } - } - }, - { - "type": "object", - "title": "Greater than or equal", - "properties": { - "gte": { - "oneOf": [ - { - "type": "number" - }, - { - "type": "integer" - } - ] - } - } - }, - { - "type": "object", - "title": "Less than or equal", - "properties": { - "lte": { - "oneOf": [ - { - "type": "number" - }, - { - "type": "integer" - } - ] - } - } - }, - { - "type": "object", - "title": "Substring equality", - "properties": { - "like": { - "type": "string" - } - } - }, - { - "type": "object", - "title": "Equality from a list of values", - "properties": { - "in": { - "type": "array", - "items": { + "additionalProperties": { + "type": "object", + "minProperties": 1, + "maxProperties": 1, + "title": "Column", + "description": "Name of the column to apply the filter on", + "oneOf": [ + { 
+ "type": "object", + "title": "Equality", + "properties": { + "eq": { "oneOf": [ - { - "type": "string" - }, - { - "type": "number" - }, - { - "type": "integer" - } + {"type": "string"}, + {"type": "number"}, + {"type": "integer"}, + {"type": "boolean"}, ] } - } - } - } - ] - } - } - }, - "examples": { - "eq": { - "value": [ - { - "ID": { - "eq": 1 - } - } - ] - }, - "like": { - "value": [ - { - "NAME": { - "like": "dog" - } - } - ] + }, + }, + { + "type": "object", + "title": "Greater than or equal", + "properties": { + "gte": { + "oneOf": [ + {"type": "number"}, + {"type": "integer"}, + ] + } + }, + }, + { + "type": "object", + "title": "Less than or equal", + "properties": { + "lte": { + "oneOf": [ + {"type": "number"}, + {"type": "integer"}, + ] + } + }, + }, + { + "type": "object", + "title": "Substring equality", + "properties": {"like": {"type": "string"}}, + }, + { + "type": "object", + "title": "Equality from a list of values", + "properties": { + "in": { + "type": "array", + "items": { + "oneOf": [ + {"type": "string"}, + {"type": "number"}, + {"type": "integer"}, + ] + }, + } + }, + }, + ], + }, + }, }, - "gte": { - "value": [ - { - "ID": { - "gte": 50 - } - } - ] + "examples": { + "eq": {"value": [{"ID": {"eq": 1}}]}, + "like": {"value": [{"NAME": {"like": "dog"}}]}, + "gte": {"value": [{"ID": {"gte": 50}}]}, + "lte": {"value": [{"ID": {"lte": 50}}]}, + "in": {"value": [{"ID": {"in": [1, 2, 3]}}]}, }, - "lte": { - "value": [ - { - "ID": { - "lte": 50 - } - } - ] - }, - "in": { - "value": [ - { - "ID": { - "in": [ - 1, - 2, - 3 - ] - } - } - ] - } - } - }) + }, + ) - spec.components.parameter("ORDER_FILTER", "query", { - "in": "query", - "name": "order", - "description": "Apply order filters to the query. 
Given a field and direction, order the returned entities.", - "schema": { - "type": "array", - "items": { - "type": "string" - } + spec.components.parameter( + "ORDER_FILTER", + "query", + { + "in": "query", + "name": "order", + "description": "Apply order filters to the query. Given a field and direction, order the returned entities.", + "schema": {"type": "array", "items": {"type": "string"}}, + "examples": {"asc": {"value": ["ID asc"]}, "desc": {"value": ["ID desc"]}}, }, - "examples": { - "asc": { - "value": [ - "ID asc" - ] - }, - "desc": { - "value": [ - "ID desc" - ] - } - } - }) + ) - spec.components.parameter("LIMIT_FILTER", "query", { - "in": "query", - "name": "limit", - "description": "Apply limit filter to the query. Limit the number of entities returned.", - "schema": { - "type": "integer" - } - }) + spec.components.parameter( + "LIMIT_FILTER", + "query", + { + "in": "query", + "name": "limit", + "description": "Apply limit filter to the query. Limit the number of entities returned.", + "schema": {"type": "integer"}, + }, + ) - spec.components.parameter("SKIP_FILTER", "query", { - "in": "query", - "name": "skip", - "description": "Apply skip filter to the query. Offset the returned entities by a given number.", - "schema": { - "type": "integer" - } - }) - spec.components.parameter("DISTINCT_FILTER", "query", { - "in": "query", - "name": "distinct", - "description": "Apply distinct filter to the query. Return unique values for the fields requested.", - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - }) - spec.components.parameter("INCLUDE_FILTER", "query", { - "in": "query", - "name": "include", - "description": "Apply include filter to the query. Given the names of related entities, include them in the results. Only one include parameter is allowed.", - "schema": { + spec.components.parameter( + "SKIP_FILTER", + "query", + { + "in": "query", + "name": "skip", + "description": "Apply skip filter to the query. 
Offset the returned entities by a given number.", + "schema": {"type": "integer"}, + }, + ) + spec.components.parameter( + "DISTINCT_FILTER", + "query", + { + "in": "query", + "name": "distinct", + "description": "Apply distinct filter to the query. Return unique values for the fields requested.", + "schema": {"type": "array", "items": {"type": "string"}}, + }, + ) + spec.components.parameter( + "INCLUDE_FILTER", + "query", + { + "in": "query", + "name": "include", + "description": "Apply include filter to the query. Given the names of related entities, include them in the results. Only one include parameter is allowed.", + "schema": { "oneOf": [ - { - "type": "string" - }, + {"type": "string"}, { "type": "array", "items": { "oneOf": [ - { - "type": "string" - }, + {"type": "string"}, { "type": "object", "additionalProperties": { "oneOf": [ - { - "type": "string" - }, + {"type": "string"}, { "type": "array", - "items": [ - { - "type": "string" - } - ] - } + "items": [{"type": "string"}], + }, ] - } - } + }, + }, ] - } + }, }, { "type": "object", "additionalProperties": { "oneOf": [ - { - "type": "string" - }, - { - "type": "array", - "items": [ - { - "type": "string" - } - ] - } + {"type": "string"}, + {"type": "array", "items": [{"type": "string"}]}, ] - } - } - ] - }, - "examples": { - "single": { - "value": "RELATED_COLUMN" - }, - "array": { - "value": [ - "RELATED_COLUMN_1", - "RELATED_COLUMN_2" + }, + }, ] }, - "multi-level": { - "value": { - "RELATED_COLUMN": "RELATED_COLUMN_RELATED_COLUMN" - } - }, - "multi-level array": { - "value": { - "RELATED_COLUMN": [ - "RELATED_COLUMN_RELATED_COLUMN_1", - "RELATED_COLUMN_RELATED_COLUMN_2" - ] - } - }, - "array of multi-level": { - "value": [ - "RELATED_COLUMN_1", - { - "RELATED_COLUMN_2": "RELATED_COLUMN_2_RELATED_COLUMN_1" - }, - { - "RELATED_COLUMN_3": [ - "RELATED_COLUMN_3_RELATED_COLUMN_1", - "RELATED_COLUMN_3_RELATED_COLUMN_2" + "examples": { + "single": {"value": "RELATED_COLUMN"}, + "array": {"value": 
["RELATED_COLUMN_1", "RELATED_COLUMN_2"]}, + "multi-level": { + "value": {"RELATED_COLUMN": "RELATED_COLUMN_RELATED_COLUMN"} + }, + "multi-level array": { + "value": { + "RELATED_COLUMN": [ + "RELATED_COLUMN_RELATED_COLUMN_1", + "RELATED_COLUMN_RELATED_COLUMN_2", ] } - ] - } - } - }) + }, + "array of multi-level": { + "value": [ + "RELATED_COLUMN_1", + {"RELATED_COLUMN_2": "RELATED_COLUMN_2_RELATED_COLUMN_1"}, + { + "RELATED_COLUMN_3": [ + "RELATED_COLUMN_3_RELATED_COLUMN_1", + "RELATED_COLUMN_3_RELATED_COLUMN_2", + ] + }, + ] + }, + }, + }, + ) diff --git a/test/test_database_helpers.py b/test/test_database_helpers.py index a91ea4d3..d7ee2173 100644 --- a/test/test_database_helpers.py +++ b/test/test_database_helpers.py @@ -6,41 +6,93 @@ backend_type = config.get_backend_type() if backend_type == "db": - from common.database.filters import DatabaseWhereFilter as WhereFilter, DatabaseDistinctFieldFilter as DistinctFieldFilter, \ - DatabaseOrderFilter as OrderFilter, DatabaseSkipFilter as SkipFilter, DatabaseLimitFilter as LimitFilter, \ - DatabaseIncludeFilter as IncludeFilter + from common.database.filters import ( + DatabaseWhereFilter as WhereFilter, + DatabaseDistinctFieldFilter as DistinctFieldFilter, + DatabaseOrderFilter as OrderFilter, + DatabaseSkipFilter as SkipFilter, + DatabaseLimitFilter as LimitFilter, + DatabaseIncludeFilter as IncludeFilter, + ) elif backend_type == "python_icat": # TODO - Adapt these tests for the ICAT implementation of filters - from common.icat.filters import PythonICATWhereFilter as WhereFilter, PythonICATDistinctFieldFilter as DistinctFieldFilter, \ - PythonICATOrderFilter as OrderFilter, PythonICATSkipFilter as SkipFilter, PythonICATLimitFilter as LimitFilter, \ - PythonICATIncludeFilter as IncludeFilter + from common.icat.filters import ( + PythonICATWhereFilter as WhereFilter, + PythonICATDistinctFieldFilter as DistinctFieldFilter, + PythonICATOrderFilter as OrderFilter, + PythonICATSkipFilter as SkipFilter, + 
PythonICATLimitFilter as LimitFilter, + PythonICATIncludeFilter as IncludeFilter, + ) else: - raise ApiError("Cannot select which implementation of filters to import, check the config file has a valid backend type") + raise ApiError( + "Cannot select which implementation of filters to import, check the config file" + " has a valid backend type" + ) class TestQueryFilterFactory(TestCase): def test_order_filter(self): - self.assertIs(OrderFilter, type(QueryFilterFactory.get_query_filter({"order": "ID DESC"}))) + self.assertIs( + OrderFilter, type(QueryFilterFactory.get_query_filter({"order": "ID DESC"})) + ) def test_limit_filter(self): - self.assertIs(LimitFilter, type(QueryFilterFactory.get_query_filter({"limit": 10}))) + self.assertIs( + LimitFilter, type(QueryFilterFactory.get_query_filter({"limit": 10})) + ) def test_skip_filter(self): - self.assertIs(SkipFilter, type(QueryFilterFactory.get_query_filter({"skip": 10}))) + self.assertIs( + SkipFilter, type(QueryFilterFactory.get_query_filter({"skip": 10})) + ) def test_where_filter(self): - self.assertIs(WhereFilter, type(QueryFilterFactory.get_query_filter({"where": {"ID": {"eq": "1"}}}))) - self.assertIs(WhereFilter, type(QueryFilterFactory.get_query_filter({"where": {"ID": {"lte": "1"}}}))) - self.assertIs(WhereFilter, type(QueryFilterFactory.get_query_filter({"where": {"ID": {"gte": "1"}}}))) - self.assertIs(WhereFilter, type(QueryFilterFactory.get_query_filter({"where": {"ID": {"like": "3"}}}))) - self.assertIs(WhereFilter, - type(QueryFilterFactory.get_query_filter({"where": {"ID": {"in": ["1", "2", "3"]}}}))) + self.assertIs( + WhereFilter, + type(QueryFilterFactory.get_query_filter({"where": {"ID": {"eq": "1"}}})), + ) + self.assertIs( + WhereFilter, + type(QueryFilterFactory.get_query_filter({"where": {"ID": {"lte": "1"}}})), + ) + self.assertIs( + WhereFilter, + type(QueryFilterFactory.get_query_filter({"where": {"ID": {"gte": "1"}}})), + ) + self.assertIs( + WhereFilter, + 
type(QueryFilterFactory.get_query_filter({"where": {"ID": {"like": "3"}}})), + ) + self.assertIs( + WhereFilter, + type( + QueryFilterFactory.get_query_filter( + {"where": {"ID": {"in": ["1", "2", "3"]}}} + ) + ), + ) def test_include_filter(self): - self.assertIs(IncludeFilter, type(QueryFilterFactory.get_query_filter({"include": "DATAFILE"}))) - self.assertIs(IncludeFilter, type(QueryFilterFactory.get_query_filter({"include": ["TEST"]}))) - self.assertIs(IncludeFilter, - type(QueryFilterFactory.get_query_filter({"include": {"Test": ["TEST1", "Test2"]}}))) + self.assertIs( + IncludeFilter, + type(QueryFilterFactory.get_query_filter({"include": "DATAFILE"})), + ) + self.assertIs( + IncludeFilter, + type(QueryFilterFactory.get_query_filter({"include": ["TEST"]})), + ) + self.assertIs( + IncludeFilter, + type( + QueryFilterFactory.get_query_filter( + {"include": {"Test": ["TEST1", "Test2"]}} + ) + ), + ) def test_distinct_filter(self): - self.assertIs(DistinctFieldFilter, type(QueryFilterFactory.get_query_filter({"distinct": "TEST"}))) + self.assertIs( + DistinctFieldFilter, + type(QueryFilterFactory.get_query_filter({"distinct": "TEST"})), + ) diff --git a/test/test_entityHelper.py b/test/test_entityHelper.py index 2d1b7a1f..780bc2d7 100644 --- a/test/test_entityHelper.py +++ b/test/test_entityHelper.py @@ -45,7 +45,7 @@ def test_to_dict(self): "CREATE_ID": "test create id", "MOD_ID": "test mod id", "DATAFILEFORMAT_ID": 1, - "CREATE_TIME": str(datetime.datetime(2000, 1, 1)) + "CREATE_TIME": str(datetime.datetime(2000, 1, 1)), } self.assertEqual(expected_dict, self.datafile.to_dict()) @@ -82,7 +82,7 @@ def test_to_nested_dict(self): "STARTDATE": None, "SAMPLE_ID": None, "TYPE_ID": None, - } + }, } self.assertEqual(expected_dict, self.datafile.to_nested_dict("DATASET")) expected_dict = { @@ -132,11 +132,13 @@ def test_to_nested_dict(self): "TITLE": None, "VISIT_ID": None, "FACILITY_ID": None, - "TYPE_ID": None - } - } + "TYPE_ID": None, + }, + }, } - 
self.assertEqual(expected_dict, self.datafile.to_nested_dict({"DATASET": "INVESTIGATION"})) + self.assertEqual( + expected_dict, self.datafile.to_nested_dict({"DATASET": "INVESTIGATION"}) + ) def test_get_related_entity(self): self.assertEqual(self.dataset, self.datafile.get_related_entity("DATASET")) @@ -158,7 +160,7 @@ def test_update_from_dict(self): "CREATE_ID": "test create id", "MOD_ID": "test mod id", "DATAFILEFORMAT_ID": 1, - "CREATE_TIME": str(datetime.datetime(2000, 1, 1)) + "CREATE_TIME": str(datetime.datetime(2000, 1, 1)), } datafile.update_from_dict(dictionary) self.assertEqual(dictionary, datafile.to_dict()) diff --git a/test/test_helpers.py b/test/test_helpers.py index 140cd2d2..ce4d517b 100644 --- a/test/test_helpers.py +++ b/test/test_helpers.py @@ -2,12 +2,29 @@ from sqlalchemy.exc import IntegrityError -from common.database.helpers import delete_row_by_id, insert_row_into_table, LimitFilter, DistinctFieldFilter, \ - IncludeFilter, SkipFilter, WhereFilter, OrderFilter -from common.exceptions import MissingRecordError, FilterError, BadRequestError, MissingCredentialsError, \ - AuthenticationError -from common.helpers import is_valid_json, queries_records, get_session_id_from_auth_header, \ - get_filters_from_query_string +from common.database.helpers import ( + delete_row_by_id, + insert_row_into_table, + LimitFilter, + DistinctFieldFilter, + IncludeFilter, + SkipFilter, + WhereFilter, + OrderFilter, +) +from common.exceptions import ( + MissingRecordError, + FilterError, + BadRequestError, + MissingCredentialsError, + AuthenticationError, +) +from common.helpers import ( + is_valid_json, + queries_records, + get_session_id_from_auth_header, + get_filters_from_query_string, +) from common.models.db_models import SESSION from test.test_base import FlaskAppTest @@ -55,16 +72,26 @@ def test_missing_credentials(self): self.assertEqual(401, self.app.get("/datafiles").status_code) def test_invalid_credentials(self): - self.assertEqual(403, self.app.get( 
- "/datafiles", headers=self.invalid_credentials_header).status_code) + self.assertEqual( + 403, + self.app.get( + "/datafiles", headers=self.invalid_credentials_header + ).status_code, + ) def test_bad_credentials(self): - self.assertEqual(403, self.app.get( - "/datafiles", headers=self.bad_credentials_header).status_code) + self.assertEqual( + 403, + self.app.get("/datafiles", headers=self.bad_credentials_header).status_code, + ) def test_good_credentials(self): - self.assertEqual(200, self.app.get("/datafiles?limit=0", - headers=self.good_credentials_header).status_code) + self.assertEqual( + 200, + self.app.get( + "/datafiles?limit=0", headers=self.good_credentials_header + ).status_code, + ) class TestQueries_records(TestCase): @@ -135,18 +162,15 @@ def raise_bad_request_error(): class TestGet_session_id_from_auth_header(FlaskAppTest): - def test_no_session_in_header(self): with self.app: self.app.get("/") - self.assertRaises(MissingCredentialsError, - get_session_id_from_auth_header) + self.assertRaises(MissingCredentialsError, get_session_id_from_auth_header) def test_with_bad_header(self): with self.app: self.app.get("/", headers={"Authorization": "test"}) - self.assertRaises(AuthenticationError, - get_session_id_from_auth_header) + self.assertRaises(AuthenticationError, get_session_id_from_auth_header) def test_with_good_header(self): with self.app: @@ -162,7 +186,7 @@ def test_no_filters(self): def test_bad_filter(self): with self.app: - self.app.get("/?test=\"test\"") + self.app.get('/?test="test"') self.assertRaises(FilterError, get_filters_from_query_string) def test_limit_filter(self): @@ -170,54 +194,66 @@ def test_limit_filter(self): self.app.get("/?limit=10") filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Returned incorrect number of filters") - self.assertIs(LimitFilter, type( - filters[0]), msg="Incorrect type of filter") + 1, len(filters), msg="Returned incorrect number of filters" + ) + 
self.assertIs(LimitFilter, type(filters[0]), msg="Incorrect type of filter") def test_order_filter(self): with self.app: - self.app.get("/?order=\"ID DESC\"") + self.app.get('/?order="ID DESC"') filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Returned incorrect number of filters") - self.assertIs(OrderFilter, type( - filters[0]), msg="Incorrect type of filter returned") + 1, len(filters), msg="Returned incorrect number of filters" + ) + self.assertIs( + OrderFilter, type(filters[0]), msg="Incorrect type of filter returned" + ) def test_where_filter(self): with self.app: self.app.get('/?where={"ID":{"eq":3}}') filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Returned incorrect number of filters") - self.assertIs(WhereFilter, type( - filters[0]), msg="Incorrect type of filter returned") + 1, len(filters), msg="Returned incorrect number of filters" + ) + self.assertIs( + WhereFilter, type(filters[0]), msg="Incorrect type of filter returned" + ) def test_skip_filter(self): with self.app: - self.app.get('/?skip=10') + self.app.get("/?skip=10") filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Returned incorrect number of filters") - self.assertIs(SkipFilter, type( - filters[0]), msg="Incorrect type of filter returned") + 1, len(filters), msg="Returned incorrect number of filters" + ) + self.assertIs( + SkipFilter, type(filters[0]), msg="Incorrect type of filter returned" + ) def test_include_filter(self): with self.app: - self.app.get("/?include=\"TEST\"") + self.app.get('/?include="TEST"') filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Incorrect number of filters returned") - self.assertIs(IncludeFilter, type( - filters[0]), msg="Incorrect type of filter returned") + 1, len(filters), msg="Incorrect number of filters returned" + ) + self.assertIs( + IncludeFilter, type(filters[0]), msg="Incorrect type of filter returned" + ) def 
test_distinct_filter(self): with self.app: - self.app.get("/?distinct=\"ID\"") + self.app.get('/?distinct="ID"') filters = get_filters_from_query_string() self.assertEqual( - 1, len(filters), msg="Incorrect number of filters returned") - self.assertIs(DistinctFieldFilter, type( - filters[0]), msg="Incorrect type of filter returned") + 1, len(filters), msg="Incorrect number of filters returned" + ) + self.assertIs( + DistinctFieldFilter, + type(filters[0]), + msg="Incorrect type of filter returned", + ) def test_multiple_filters(self): with self.app: diff --git a/util/icat_db_generator.py b/util/icat_db_generator.py index 1724a4a7..8c559633 100644 --- a/util/icat_db_generator.py +++ b/util/icat_db_generator.py @@ -10,8 +10,22 @@ from common.session_manager import session_manager parser = argparse.ArgumentParser() -parser.add_argument("--seed", "-s", dest="seed", help="Provide seed for random and faker", type=int, default=1) -parser.add_argument("--years", "-y", dest="years", help="Provide number of years to generate", type=int, default=20) +parser.add_argument( + "--seed", + "-s", + dest="seed", + help="Provide seed for random and faker", + type=int, + default=1, +) +parser.add_argument( + "--years", + "-y", + dest="years", + help="Provide number of years to generate", + type=int, + default=20, +) args = parser.parse_args() SEED = args.seed YEARS = args.years # 4 Cycles per years generated @@ -38,8 +52,10 @@ def get_date_time(): Generates a datetime :return: the datetime """ - return faker.date_time_between_dates(datetime_start=datetime.datetime(2000, 10, 4), - datetime_end=datetime.datetime(2019, 10, 5)) + return faker.date_time_between_dates( + datetime_start=datetime.datetime(2000, 10, 4), + datetime_end=datetime.datetime(2019, 10, 5), + ) def get_start_date(i): @@ -48,11 +64,15 @@ def get_start_date(i): :param i: :return: """ - return datetime.datetime(2000 + i // 4, ((i + 1) * (i + 1)) % 11 + 1, ((i + 1) * (i + 2) % 28 + 1)) + return datetime.datetime( + 2000 + 
i // 4, ((i + 1) * (i + 1)) % 11 + 1, ((i + 1) * (i + 2) % 28 + 1) + ) def get_end_date(i): - return datetime.datetime(2000 + i // 4, (((i + 1) * (i + 2)) % 11) + 1, ((i + 1) ** 2) % 28 + 1) + return datetime.datetime( + 2000 + i // 4, (((i + 1) * (i + 2)) % 11) + 1, ((i + 1) ** 2) % 28 + 1 + ) def apply_common_attributes(entity, iterator): @@ -78,7 +98,6 @@ def apply_common_parameter_attributes(entity, i): class Generator(ABC): - @property @abstractmethod def tier(self): @@ -333,7 +352,9 @@ class InvestigationInstrumentGenerator(Generator): amount = InvestigationGenerator.amount # Must equal number of investigations def generate(self): - self.pool_map(InvestigationInstrumentGenerator.generate_investigation_instrument) + self.pool_map( + InvestigationInstrumentGenerator.generate_investigation_instrument + ) @staticmethod def generate_investigation_instrument(i): @@ -483,7 +504,9 @@ def generate_investigation_parameter(i): apply_common_attributes(investigation_parameter, i) apply_common_parameter_attributes(investigation_parameter, i) investigation_parameter.INVESTIGATION_ID = i - investigation_parameter.PARAMETER_TYPE_ID = randrange(1, ParameterTypeGenerator.amount) + investigation_parameter.PARAMETER_TYPE_ID = randrange( + 1, ParameterTypeGenerator.amount + ) post_entity(investigation_parameter) @@ -533,7 +556,11 @@ def generate_dataset(i): dataset.COMPLETE = randrange(2) dataset.LOCATION = faker.file_path() investigation_id = i % InvestigationGenerator.amount - dataset.INVESTIGATION_ID = investigation_id if investigation_id != 0 else InvestigationGenerator.amount - 1 + dataset.INVESTIGATION_ID = ( + investigation_id + if investigation_id != 0 + else InvestigationGenerator.amount - 1 + ) sample_id = i % SampleGenerator.amount dataset.SAMPLE_ID = sample_id if sample_id != 0 else SampleGenerator.amount - 1 dataset.TYPE_ID = randrange(1, DatasetTypeGenerator.amount) @@ -600,7 +627,9 @@ class DataCollectionParameterGenerator(Generator): amount = 
DataCollectionGenerator.amount def generate(self): - self.pool_map(DataCollectionParameterGenerator.generate_data_collection_parameter) + self.pool_map( + DataCollectionParameterGenerator.generate_data_collection_parameter + ) @staticmethod def generate_data_collection_parameter(i): @@ -608,7 +637,9 @@ def generate_data_collection_parameter(i): apply_common_attributes(datacollection_parameter, i) apply_common_parameter_attributes(datacollection_parameter, i) datacollection_parameter.DATACOLLECTION_ID = i - datacollection_parameter.PARAMETER_TYPE_ID = randrange(1, ParameterTypeGenerator.amount) + datacollection_parameter.PARAMETER_TYPE_ID = randrange( + 1, ParameterTypeGenerator.amount + ) post_entity(datacollection_parameter) @@ -650,7 +681,10 @@ def generate_all(i, generators): processes = [] for generator in generators: if generator.tier == i: - print(f"Adding {type(generator).__name__.replace('Generator', '') + 's'} of tier {generator.tier}") + print( + f"Adding {type(generator).__name__.replace('Generator', '') + 's'} of" + f" tier {generator.tier}" + ) processes.append(Process(target=generator.generate)) [process.start() for process in processes] @@ -666,7 +700,9 @@ def main(): generate_all(i, generators) print( - f"Added {sum(generator.amount for generator in generators)} entities in {datetime.datetime.now() - start_time}") + f"Added {sum(generator.amount for generator in generators)} entities in" + f" {datetime.datetime.now() - start_time}" + ) if __name__ == "__main__":