Skip to content

Commit

Permalink
Add tests for migration.
Browse files Browse the repository at this point in the history
  • Loading branch information
jonathangreen committed Nov 3, 2023
1 parent 593901c commit b1c284f
Show file tree
Hide file tree
Showing 3 changed files with 250 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
Create Date: 2023-11-01 22:42:06.754873+00:00
"""
import json
import logging

import sqlalchemy as sa
Expand Down Expand Up @@ -47,7 +46,7 @@ def upgrade() -> None:
)
conn.execute(
"UPDATE integration_configurations SET name = (%s) WHERE id = (%s)",
(row.collection_name, row.collection_name),
(row.collection_name, row.integration_id),
)

op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=True)
Expand Down Expand Up @@ -97,7 +96,9 @@ def upgrade() -> None:

registry = LicenseProvidersRegistry()
for row in rows:
settings_dict = json.loads(row.settings)
if row.external_account_id is None:
continue
settings_dict = row.settings.copy()
settings_dict["external_account_id"] = row.external_account_id
impl_class = registry.get(row.protocol)
if impl_class is None:
Expand All @@ -106,7 +107,7 @@ def upgrade() -> None:
)
settings_obj = impl_class.settings_class()(**settings_dict)
new_settings_dict = settings_obj.dict()
if settings_dict != new_settings_dict:
if row.settings != new_settings_dict:
new_settings = json_serializer(new_settings_dict)
log.info(
f"Updating settings for integration {row.integration_id} from {row.settings} to {new_settings}."
Expand Down
9 changes: 8 additions & 1 deletion core/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from pydantic.json import pydantic_encoder
from sqlalchemy import create_engine
from sqlalchemy.engine import Connection
from sqlalchemy.exc import IntegrityError
from sqlalchemy.exc import DatabaseError, IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
Expand Down Expand Up @@ -54,6 +54,13 @@ def pg_advisory_lock(
connection.execute(text(f"SELECT pg_advisory_lock({lock_id});"))
try:
yield
except IntegrityError:
# If there was an IntegrityError, and we are in a transaction,
# we need to roll it back before we are able to release the lock.
transaction = connection.get_transaction()
if transaction is not None:
transaction.rollback()
raise
finally:
# Close the lock
connection.execute(text(f"SELECT pg_advisory_unlock({lock_id});"))
Expand Down
237 changes: 237 additions & 0 deletions tests/migration/test_20231101_2d72d6876c52.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,237 @@
from typing import Any, Dict, Optional

import pytest
from pytest_alembic import MigrationContext
from sqlalchemy import inspect
from sqlalchemy.engine import Connection, Engine
from sqlalchemy.exc import IntegrityError

from core.model import json_serializer
from tests.migration.conftest import (
CreateConfigSetting,
CreateExternalIntegration,
CreateLibrary,
)


def create_integration_configuration(
    connection: Connection,
    name: str,
    protocol: str,
    goal: str,
    settings: Optional[Dict[str, Any]] = None,
) -> int:
    """Insert a row into ``integration_configurations`` and return its id.

    When *settings* is omitted an empty dict is stored; the settings are
    JSON-serialized before insertion. Fails the test via ``assert`` if the
    insert does not return an integer id.
    """
    serialized_settings = json_serializer({} if settings is None else settings)

    row = connection.execute(
        "INSERT INTO integration_configurations (name, protocol, goal, settings, self_test_results) "
        "VALUES (%s, %s, %s, %s, '{}') returning id",
        name,
        protocol,
        goal,
        serialized_settings,
    ).fetchone()
    assert row is not None
    assert isinstance(row.id, int)
    return row.id


def create_integration_library_configuration(
    connection: Connection,
    integration_id: int,
    library_id: int,
    settings: Optional[Dict[str, Any]] = None,
) -> None:
    """Link an integration configuration to a library.

    When *settings* is omitted an empty dict is stored; the settings are
    JSON-serialized before insertion.
    """
    serialized_settings = json_serializer({} if settings is None else settings)
    connection.execute(
        "INSERT INTO integration_library_configurations (parent_id, library_id, settings) "
        "VALUES (%s, %s, %s)",
        integration_id,
        library_id,
        serialized_settings,
    )


def create_collection_library(
    connection: Connection, collection_id: int, library_id: int
) -> None:
    """Insert a row linking a collection to a library."""
    insert_sql = (
        "INSERT INTO collections_libraries (collection_id, library_id) "
        "VALUES (%s, %s)"
    )
    connection.execute(insert_sql, collection_id, library_id)


def create_collection(
    connection: Connection,
    name: str,
    integration_configuration_id: int,
    external_account_id: Optional[str] = None,
    external_integration_id: Optional[int] = None,
) -> int:
    """Insert a ``collections`` row and return its generated id.

    Fails the test via ``assert`` if the insert does not return an
    integer id.
    """
    result = connection.execute(
        "INSERT INTO collections "
        "(name, external_account_id, integration_configuration_id, external_integration_id) VALUES "
        "(%s, %s, %s, %s) "
        "returning id",
        name,
        external_account_id,
        integration_configuration_id,
        external_integration_id,
    )
    row = result.fetchone()
    assert row is not None
    assert isinstance(row.id, int)
    return row.id


def column_exists(engine: Engine, table_name: str, column_name: str) -> bool:
    """Return True if *table_name* currently has a column named *column_name*."""
    return any(
        column["name"] == column_name
        for column in inspect(engine).get_columns(table_name)
    )


def test_migration(
    alembic_runner: MigrationContext,
    alembic_engine: Engine,
    create_library: CreateLibrary,
    create_external_integration: CreateExternalIntegration,
    create_config_setting: CreateConfigSetting,
) -> None:
    """Exercise migration 2d72d6876c52 in both directions.

    Steps down below the migration, seeds pre-migration data (libraries,
    integration configurations, external integrations, collections and
    their library links), runs the upgrade, and verifies that:

    * integration names are replaced by their collection's name,
    * each ``settings`` dict gains the collection's ``external_account_id``,
    * a ``context`` column is added (carrying ``token_auth_endpoint`` when
      the external integration had that config setting),
    * the foreign key constraints on ``collections_libraries`` are dropped.

    Finally verifies the downgrade fails while orphaned
    ``collections_libraries`` rows exist and succeeds once they are removed.
    """
    # Position the database just below the migration under test.
    alembic_runner.migrate_down_to("2d72d6876c52")
    alembic_runner.migrate_down_one()

    with alembic_engine.connect() as connection:
        # Test setup, create all the data we need for the migration
        library_1 = create_library(connection, "library_1")
        library_2 = create_library(connection, "library_2")

        integration_1_settings = {"data_source": "integration_1"}
        integration_1 = create_integration_configuration(
            connection,
            "integration_1",
            "OPDS Import",
            "LICENSE_GOAL",
            settings=integration_1_settings,
        )

        integration_2_settings = {
            "overdrive_website_id": "2",
            "overdrive_client_key": "3",
            "overdrive_client_secret": "4",
        }
        integration_2 = create_integration_configuration(
            connection,
            "collection_2",
            "Overdrive",
            "LICENSE_GOAL",
            settings=integration_2_settings,
        )

        external_1 = create_external_integration(connection)
        external_2 = create_external_integration(connection)

        # Only external_1 gets a token_auth_endpoint setting; the migration
        # should copy it into integration_1's new context column.
        create_config_setting(
            connection, "token_auth_endpoint", "http://token.com/auth", external_1
        )

        collection_1 = create_collection(
            connection, "collection_1", integration_1, "http://test.com", external_1
        )
        collection_2 = create_collection(
            connection, "collection_2", integration_2, "1", external_2
        )

        create_integration_library_configuration(connection, integration_1, library_1)
        create_integration_library_configuration(connection, integration_1, library_2)
        create_collection_library(connection, collection_1, library_1)
        create_collection_library(connection, collection_1, library_2)

        create_integration_library_configuration(connection, integration_2, library_2)
        create_collection_library(connection, collection_2, library_2)

        # Test that the collections_libraries table has the correct foreign key constraints
        with pytest.raises(IntegrityError) as excinfo:
            create_collection_library(connection, 99, 99)
        assert "violates foreign key constraint" in str(excinfo.value)

        # Make sure we have the data we expect before we run the migration
        integration_1_actual = connection.execute(
            "select name, settings from integration_configurations where id = (%s)",
            integration_1,
        ).fetchone()
        assert integration_1_actual is not None
        assert integration_1_actual.name == "integration_1"
        assert integration_1_actual.settings == integration_1_settings
        assert (
            column_exists(alembic_engine, "integration_configurations", "context")
            is False
        )

        integration_2_actual = connection.execute(
            "select name, settings from integration_configurations where id = (%s)",
            integration_2,
        ).fetchone()
        assert integration_2_actual is not None
        assert integration_2_actual.name == "collection_2"
        assert integration_2_actual.settings == integration_2_settings
        assert (
            column_exists(alembic_engine, "integration_configurations", "context")
            is False
        )

    # Run the migration
    alembic_runner.migrate_up_one()

    with alembic_engine.connect() as connection:
        # Make sure the migration updated the integration name, added the context column, and updated the settings
        # column to contain the external_account_id
        integration_1_actual = connection.execute(
            "select name, settings, context from integration_configurations where id = (%s)",
            integration_1,
        ).fetchone()
        assert integration_1_actual is not None
        assert integration_1_actual.name == "collection_1"
        assert integration_1_actual.settings != integration_1_settings
        assert integration_1_actual.settings == {
            "data_source": "integration_1",
            "external_account_id": "http://test.com",
        }
        assert integration_1_actual.context == {
            "token_auth_endpoint": "http://token.com/auth"
        }

        integration_2_actual = connection.execute(
            "select name, settings, context from integration_configurations where id = (%s)",
            integration_2,
        ).fetchone()
        assert integration_2_actual is not None
        assert integration_2_actual.name == "collection_2"
        assert integration_2_actual.settings != integration_2_settings
        assert integration_2_actual.settings == {
            "overdrive_website_id": "2",
            "overdrive_client_key": "3",
            "overdrive_client_secret": "4",
            "external_account_id": "1",
        }
        # external_2 had no token_auth_endpoint setting, so context is empty.
        assert integration_2_actual.context == {}

        # The foreign key constraints have been removed from the collections_libraries table
        create_collection_library(connection, 99, 99)

    # If we try to run the migration, it will fail when it tries to add back the foreign key constraints
    with pytest.raises(IntegrityError):
        alembic_runner.migrate_down_one()

    # But if we remove the data that violates the foreign key constraints, the migration will run successfully
    with alembic_engine.connect() as connection:
        connection.execute(
            "delete from collections_libraries where collection_id = 99 and library_id = 99"
        )
    alembic_runner.migrate_down_one()

0 comments on commit b1c284f

Please sign in to comment.