diff --git a/bin/adhoc_db b/bin/adhoc_db
index 4a6bf86..539dfb1 100755
--- a/bin/adhoc_db
+++ b/bin/adhoc_db
@@ -23,8 +23,8 @@ import re
 import subprocess
 import sys
 
-from pint_server.database import init_db
-from pint_server.models import (
+from pint_models.database import init_db
+from pint_models.models import (
     AlibabaImagesModel,
     AmazonImagesModel,
     AmazonServersModel,
diff --git a/pint_server/__init__.py b/pint_server/__init__.py
index e582f0d..13332c1 100644
--- a/pint_server/__init__.py
+++ b/pint_server/__init__.py
@@ -18,10 +18,10 @@
 # NOTE(gyee): must update the version here on a new release
 __VERSION__ = '2.0.15'
 
-from pint_server.database import (
+from pint_models.database import (
     init_db, create_postgres_url_from_config, get_psql_server_version
 )
-from pint_server.models import (
+from pint_models.models import (
     AlibabaImagesModel, AmazonImagesModel, AmazonServersModel,
     GoogleImagesModel, GoogleServersModel, ImageState,
diff --git a/pint_server/app.py b/pint_server/app.py
index 52f6591..3e1658a 100644
--- a/pint_server/app.py
+++ b/pint_server/app.py
@@ -42,8 +42,8 @@
 import xml.etree.ElementTree as ET
 
 import pint_server
-from pint_server.database import init_db, get_psql_server_version
-from pint_server.models import (ImageState, AmazonImagesModel,
+from pint_models.database import init_db, get_psql_server_version
+from pint_models.models import (ImageState, AmazonImagesModel,
                                 OracleImagesModel, AlibabaImagesModel,
                                 MicrosoftImagesModel, GoogleImagesModel,
                                 AmazonServersModel, MicrosoftServersModel,
diff --git a/pint_server/data_update.py b/pint_server/data_update.py
index df7946e..08e7972 100644
--- a/pint_server/data_update.py
+++ b/pint_server/data_update.py
@@ -27,8 +27,8 @@
 import subprocess
 import sys
 
-from pint_server.database import init_db
-from pint_server.models import (
+from pint_models.database import init_db
+from pint_models.models import (
     AlibabaImagesModel,
     AmazonImagesModel,
     AmazonServersModel,
diff --git a/pint_server/database.py b/pint_server/database.py
deleted file mode 100644
index 1a84395..0000000
--- a/pint_server/database.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright (c) 2021 SUSE LLC
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of version 3 of the GNU General Public License as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, contact SUSE LLC.
-#
-# To contact SUSE about this file by physical or electronic mail,
-# you may find current contact information at www.suse.com
-
-# NOTE(gyee):
-# see https://flask.palletsprojects.com/en/1.1.x/patterns/sqlalchemy/
-
-import enum
-import logging
-import os
-import re
-
-from sqlalchemy import create_engine
-from sqlalchemy.orm import scoped_session, sessionmaker
-from sqlalchemy.ext.declarative import declarative_base
-from pint_server.models import (
-    AlibabaImagesModel,
-    AmazonImagesModel,
-    AmazonServersModel,
-    Base,
-    GoogleImagesModel,
-    GoogleServersModel,
-    ImageState,
-    MicrosoftImagesModel,
-    MicrosoftRegionMapModel,
-    MicrosoftServersModel,
-    OracleImagesModel,
-    ServerType,
-    VersionsModel
-    )
-
-
-def get_environ_or_bust(key_name):
-    assert key_name in os.environ, 'Environment variable %s is required.' % (
-        key_name)
-    return os.environ.get(key_name)
-
-
-def create_db_logger(outputfile):
-    """Function to setup logging of SQL statements
-
-    Args:
-        outputfile (filepath): The filepath where the SQL statements will be
-            logged
-    """
-    if not outputfile:
-        return
-
-    db_log_file_name = outputfile
-    db_handler_log_level = logging.INFO
-    db_logger_log_level = logging.DEBUG
-
-    db_handler = logging.FileHandler(db_log_file_name)
-    db_handler.setLevel(db_handler_log_level)
-
-    db_logger = logging.getLogger('sqlalchemy')
-    db_logger.addHandler(db_handler)
-    db_logger.setLevel(db_logger_log_level)
-
-
-def _create_postgres_url(db_user, db_password, db_name, db_host,
-                         db_port=5432, db_ssl_mode=None,
-                         db_root_cert=None):
-    """Helper function to construct the URL connection string
-
-    Args:
-        db_user: (string): the username to connect to the Postgres
-            DB as
-        db_password: (string): the password associated with the
-            username being used to connect to the Postgres DB
-        db_name: (string): the name of the Postgres DB to connect
-            to
-        db_host: (string): the host where the Postgres DB is
-            running
-        db_host: (number, optional): the port to connect to the
-            Postgres DB at
-        db_ssl_mode: (string, optional): the SSL mode to use when
-            connecting to the Postgres DB
-        db_root_cert: (string, optional): the root cert to use when
-            connecting to the Postgres DB
-
-    Returns:
-        [string]: Postgres connection string
-    """
-
-    ssl_mode = ''
-    if db_ssl_mode:
-        # see
-        # https://www.postgresql.org/docs/11/libpq-connect.html#
-        # LIBPQ-CONNECT-SSLMODE
-        ssl_mode = '?sslmode=%s' % (db_ssl_mode)
-        if db_root_cert:
-            ssl_mode += '&sslrootcert=%s' % (db_root_cert)
-
-    return ('postgresql://%(user)s:%(password)s@%(host)s:%(port)s/'
-            '%(db)s%(ssl)s' % {
-                'user': db_user,
-                'password': db_password,
-                'db': db_name,
-                'host': db_host,
-                'port': db_port,
-                'ssl': ssl_mode})
-
-
-def create_postgres_url_from_config(dbconfig):
-    """Create postgres connection string from provided config
-
-    Args:
-        dbconfig: (dict): A dictionary of config settings
-            that are required to connect to the Postgres DB
-
-    Returns:
-        [string]: Postgres connection string
-    """
-
-    return _create_postgres_url(
-        db_user = dbconfig.get('user'),
-        db_password = dbconfig.get('password'),
-        db_name = dbconfig.get('dbname'),
-        db_host = dbconfig.get('host'),
-        db_port = dbconfig.get('port'),
-        # change the SSL stuff once we have a real DB to connect to
-        db_ssl_mode = '',
-        db_root_cert = ''
-    )
-
-
-def create_postgres_url_from_env():
-    """Create postgres connection string from environment settings
-
-    Returns:
-        [string]: Postgres connection string
-    """
-
-    return _create_postgres_url(
-        db_user=get_environ_or_bust('POSTGRES_USER'),
-        db_password=get_environ_or_bust('POSTGRES_PASSWORD'),
-        db_name=get_environ_or_bust('POSTGRES_DB'),
-        db_host=get_environ_or_bust('POSTGRES_HOST'),
-        db_port=os.environ.get('POSTGRES_PORT', 5432),
-        db_ssl_mode=os.environ.get('POSTGRES_SSL_MODE', None),
-        db_root_cert=os.environ.get('POSTGRES_SSL_ROOT_CERTIFICATE', None)
-    )
-
-
-def init_db(dbconfig=None, outputfile=None, echo=None,
-            hide_parameters=None, create_all=False):
-    # import all modules here that might define models so that
-    # they will be registered properly on the metadata. Otherwise
-    # you will have to import them first before calling init_db()
-    """Setup DB scoped session
-
-    Args:
-        config (dict): A dictionary of config settings
-            that are required to connect to the Postgres DB
-        outputfile (filepath): File location to log SQL statements
-        echo (bool): Whether or not all statements are logged to the
-            default log handler
-            https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.echo
-        hide_parameters (bool): if false then statement parameters
-            will not be logged to INFO leverl log messages or in
-            logged representation of error reports.
-            https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.hide_parameters
-
-    Returns:
-        [scoped_session]: DB scoped_session to use for DB SQL operations
-    """
-
-    # Setup a dedicated DB logger if a target output file was provided
-    create_db_logger(outputfile)
-
-    # Create the DB engine, either from provided settings, or
-    # using relevant environment settings.
-    if dbconfig:
-        engine_url = create_postgres_url_from_config(dbconfig)
-    elif os.environ.get('DATABASE_URI', None):
-        engine_url = os.environ['DATABASE_URI']
-    else:
-        engine_url = create_postgres_url_from_env()
-
-    # TODO(rtamalin): Remove this try/except hackery once we move forward
-    # to being based on SLE 15 SP3 or later.
-    try:
-        engine = create_engine(engine_url, convert_unicode=True,
-                               echo=echo, hide_parameters=hide_parameters)
-    except TypeError as e:
-        # If we failed because of the hide_parameters argument then
-        # try again without it.
-        if 'hide_parameters' in str(e):
-            engine = create_engine(engine_url, convert_unicode=True,
-                                   echo=echo)
-        else:
-            raise
-
-    db_session = scoped_session(sessionmaker(autocommit=False,
-                                             autoflush=False,
-                                             bind=engine))
-    Base.query = db_session.query_property()
-
-    if create_all:
-        Base.metadata.create_all(bind=engine)
-
-    return db_session
-
-
-def get_psql_server_version(db_session):
-    """Return the psql server version"""
-    result = db_session.execute("select version()")
-    for row in result:
-        version = re.search(r'PostgreSQL\s+\d+.\d+', str(row))
-        if version:
-            break
-    return version.group(0)
diff --git a/pint_server/models.py b/pint_server/models.py
deleted file mode 100644
index 70c5318..0000000
--- a/pint_server/models.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# Copyright (c) 2021 SUSE LLC
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of version 3 of the GNU General Public License as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, contact SUSE LLC.
-#
-# To contact SUSE about this file by physical or electronic mail,
-# you may find current contact information at www.suse.com
-
-import enum
-import logging
-
-from sqlalchemy import Column, Date, Enum, Integer, Numeric, String, UniqueConstraint, Index
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.dialects import postgresql
-from sqlalchemy.orm import validates
-
-
-logger = logging.getLogger(__name__)
-
-Base = declarative_base()
-
-
-class ImageState(enum.Enum):
-    __enum_name__ = 'image_state'
-
-    deleted = 'deleted'
-    deprecated = 'deprecated'
-    inactive = 'inactive'
-    active = 'active'
-
-    def __str__(self):
-        return str(self.value)
-
-
-class ServerType(enum.Enum):
-    __enum_name__ = 'server_type'
-
-    region = 'region'
-    update = 'update'
-
-
-class PintBase(object):
-    @property
-    def tablename(self):
-        """Return table name."""
-        return self.__tablename__
-
-    @classmethod
-    def unique_constraints(cls):
-        """Return the table's unique constraint's column names, or empty list."""
-        return [u for u in cls.__table__.constraints
-                if isinstance(u, UniqueConstraint)]
-
-    def __repr__(self):
-        return "<%s(%s)>" % (self.__class__.__name__,
-                             ", ".join(["%s=%s" % (k, repr(getattr(self, k)))
-                                        for k in self.__table__.
-                                        columns.keys()]))
-
-
-class ProviderImageBase(PintBase):
-    state = Column(Enum(ImageState, name=ImageState.__enum_name__),
-                   nullable=False)
-    replacementname = Column(String(255))
-    publishedon = Column(Date, nullable=False)
-    deprecatedon = Column(Date)
-    deletedon = Column(Date)
-    changeinfo = Column(String(255))
-
-    @validates('publishedon', 'deprecatedon', 'deletedon')
-    def validate_image_dates(self, key, value):
-        publishedon = value if key == 'publishedon' else self.publishedon
-        deprecatedon = value if key == 'deprecatedon' else self.deprecatedon
-        deletedon = value if key == 'deletedon' else self.deletedon
-
-        # If called for deprecatedon or deletedon before publishedon
-        # has been set we have nothing to compare against, so just
-        # fall through and accept the provided value for now.
-        # Since the validator will be triggered for all 3 fields and
-        # performs the same checks each time, even if publishedon is
-        # the last field we are called for, the validator will still
-        # fail if either deprecatedon or deletedon is not valid with
-        # respect to that publishedon value.
-        if publishedon:
-            if deprecatedon and deprecatedon < publishedon:
-                raise ValueError('Image %s invalid dates specified - '
-                                 'publishedon(%s) should not be after '
-                                 'deprecatedon(%s)' % (self.name,
-                                 str(publishedon), str(deprecatedon)))
-
-            if deletedon and deletedon < publishedon:
-                raise ValueError('Image %s invalid dates specified - '
-                                 'publishedon(%s) should not be after '
-                                 'deletedon(%s)' % (self.name,
-                                 str(publishedon), str(deletedon)))
-
-            if deprecatedon and deletedon and deletedon < deprecatedon:
-                raise ValueError('Image %s invalid dates specified - '
-                                 'deprecatedon(%s) should not be after '
-                                 'deletedon(%s)' % (self.name,
-                                 str(deprecatedon), str(deletedon)))
-
-        return value
-
-
-    @validates("changeinfo")
-    def validate_changeinfo(self, key, value):
-        if value and not value.endswith('/'):
-            value = value + '/'
-            logger.info('%s.%s = %s (updated)', self.tablename, key, repr(value))
-
-        return value
-
-
-class ProviderServerBase(PintBase):
-    id = Column(Integer, primary_key=True, autoincrement=True)
-    type = Column(Enum(ServerType, name=ServerType.__enum_name__),
-                  nullable=False)
-    shape = Column(String(10))
-    name = Column(String(100))
-
-    @validates("name")
-    def validate_name(self, key, value):
-        if self.type == ServerType.update:
-            if not value:
-                raise ValueError("%s.%s cannot be null/empty for an update server." % (self.tablename, key))
-        return value
-
-
-class AmazonImagesModel(Base, ProviderImageBase):
-    __tablename__ = 'amazonimages'
-
-    name = Column(String(255), nullable=False)
-    id = Column(String(100), primary_key=True)
-    replacementid = Column(String(100))
-    region = Column(String(100), nullable=False)
-
-
-class AlibabaImagesModel(Base, ProviderImageBase):
-    __tablename__ = 'alibabaimages'
-
-    name = Column(String(255), nullable=False)
-    id = Column(String(100), primary_key=True)
-    replacementid = Column(String(100))
-    region = Column(String(100), nullable=False)
-
-
-class GoogleImagesModel(Base, ProviderImageBase):
-    __tablename__ = 'googleimages'
-
-    name = Column(String(255), primary_key=True)
-    project = Column(String(50), nullable=False)
-
-
-class MicrosoftImagesModel(Base, ProviderImageBase):
-    __tablename__ = 'microsoftimages'
-    __table_args__ = (UniqueConstraint('name', 'environment'),)
-
-    id = Column(Integer, primary_key=True)
-    name = Column(String(255), nullable=False)
-    environment = Column(String(50), nullable=False)
-    urn = Column(String(100))
-
-
-class OracleImagesModel(Base, ProviderImageBase):
-    __tablename__ = 'oracleimages'
-
-    name = Column(String(255), nullable=False)
-    id = Column(String(100), primary_key=True)
-    replacementid = Column(String(100))
-
-
-class AmazonServersModel(Base, ProviderServerBase):
-    __tablename__ = 'amazonservers'
-
-    # NOTE(gyee): the INET type is specific to PostgreSQL. If in the future
-    # we decided to support other vendors, we'll need to update this
-    # column type accordingly.
-    ip = Column(postgresql.INET)
-    region = Column(String(100), nullable=False)
-    ipv6 = Column(postgresql.INET)
-
-    __table_args__ = (
-        Index('uix_amazonservers_region_ip_not_null', 'region', 'ip', unique=True, postgresql_where=ip.isnot(None)),
-        Index('uix_amazonservers_region_ipv6_not_null', 'region', 'ipv6', unique=True, postgresql_where=ipv6.isnot(None)),
-    )
-
-
-class GoogleServersModel(Base, ProviderServerBase):
-    __tablename__ = 'googleservers'
-
-    # NOTE(gyee): the INET type is specific to PostgreSQL. If in the future
-    # we decided to support other vendors, we'll need to update this
-    # column type accordingly.
-    ip = Column(postgresql.INET)
-    region = Column(String(100), nullable=False)
-    ipv6 = Column(postgresql.INET)
-
-    __table_args__ = (
-        Index('uix_googleservers_region_ip_not_null', 'region', 'ip', unique=True, postgresql_where=ip.isnot(None)),
-        Index('uix_googleservers_region_ipv6_not_null', 'region', 'ipv6', unique=True, postgresql_where=ipv6.isnot(None)),
-    )
-
-
-class MicrosoftServersModel(Base, ProviderServerBase):
-    __tablename__ = 'microsoftservers'
-
-    # NOTE(gyee): the INET type is specific to PostgreSQL. If in the future
-    # we decided to support other vendors, we'll need to update this
-    # column type accordingly.
-    ip = Column(postgresql.INET)
-    region = Column(String(100), nullable=False)
-    ipv6 = Column(postgresql.INET)
-
-    __table_args__ = (
-        Index('uix_microsoftservers_region_ip_not_null', 'region', 'ip', unique=True, postgresql_where=ip.isnot(None)),
-        Index('uix_microsoftservers_region_ipv6_not_null', 'region', 'ipv6', unique=True, postgresql_where=ipv6.isnot(None)),
-    )
-
-
-class MicrosoftRegionMapModel(Base, PintBase):
-    __tablename__ = 'microsoftregionmap'
-
-    environment = Column(String(50), primary_key=True)
-    region = Column(String(100), primary_key=True)
-    canonicalname = Column(String(100), primary_key=True)
-
-
-class VersionsModel(Base, PintBase):
-    __tablename__ = 'versions'
-
-    tablename = Column(String(100), primary_key=True)
-    version = Column(Numeric, nullable=False)
diff --git a/pint_server/pint_db_migrate/env.py b/pint_server/pint_db_migrate/env.py
index 8fba610..a913ccf 100644
--- a/pint_server/pint_db_migrate/env.py
+++ b/pint_server/pint_db_migrate/env.py
@@ -17,7 +17,7 @@
 # for 'autogenerate' support
 # from myapp import mymodel
 # target_metadata = mymodel.Base.metadata
-from pint_server.models import Base
+from pint_models.models import Base
 target_metadata = Base.metadata
 
 # other values from the config, defined by the needs of env.py,
diff --git a/pint_server/tests/unit/mock_pint_data.py b/pint_server/tests/unit/mock_pint_data.py
index 42037ed..9c68745 100644
--- a/pint_server/tests/unit/mock_pint_data.py
+++ b/pint_server/tests/unit/mock_pint_data.py
@@ -22,7 +22,7 @@
 
 from lxml import etree
 
-from pint_server.models import ImageState
+from pint_models.models import ImageState
 
 DATE_FORMAT = '%Y%m%d'
diff --git a/requirements.txt b/requirements.txt
index 940950a..c8acf42 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,6 +18,7 @@ lxml==5.2.2
 Mako==1.1.6
 MarkupSafe==2.0.1
 pbr==6.0.0
+pint_models @ git+https://github.com/SUSE-Enceladus/public-cloud-info-models#egg=pint_models
 psycopg2-binary==2.9.8
 python-dateutil==2.9.0.post0
 python-editor==1.0.4
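
Reviewer note, not part of the patch: after this change every consumer imports the database helpers and ORM models from the external pint_models package (added to requirements.txt above) instead of from pint_server. The following is a minimal usage sketch, assuming pint_models keeps the init_db()/create_postgres_url_from_config() interface and model classes shown in the deleted pint_server/database.py and pint_server/models.py; the connection values are placeholders, not real credentials.

    # Sketch only: illustrates the new import paths after the migration.
    from pint_models.database import init_db
    from pint_models.models import AmazonImagesModel

    # Keys match what create_postgres_url_from_config() reads; the values
    # are hypothetical and must point at a reachable PostgreSQL instance.
    dbconfig = {
        'user': 'pint',
        'password': 'changeme',
        'dbname': 'pint',
        'host': 'localhost',
        'port': 5432,
    }

    db_session = init_db(dbconfig=dbconfig)
    try:
        # Standard SQLAlchemy scoped_session usage is unchanged.
        for image in db_session.query(AmazonImagesModel).limit(5):
            print(image.name, image.region, image.state)
    finally:
        db_session.remove()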