From c5444648743c1e06f93ffb0d0580c195345fd053 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:16:02 +0100 Subject: [PATCH 01/29] remove apply migration --- migration-engine/core/src/api.rs | 10 - migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 2 - .../core/src/commands/apply_migration.rs | 205 ------------------ migration-engine/core/src/lib.rs | 2 +- .../migration-engine-tests/src/test_api.rs | 30 +-- 6 files changed, 3 insertions(+), 250 deletions(-) delete mode 100644 migration-engine/core/src/commands/apply_migration.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 8f6887e5e614..175cd29accfa 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -41,7 +41,6 @@ where #[async_trait::async_trait] pub trait GenericApi: Send + Sync + 'static { async fn version(&self, input: &serde_json::Value) -> CoreResult; - async fn apply_migration(&self, input: &ApplyMigrationInput) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; async fn calculate_database_steps( @@ -88,15 +87,6 @@ where .await } - async fn apply_migration(&self, input: &ApplyMigrationInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!( - "ApplyMigration", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("ApplyMigrations")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 5c698c2c1acc..f425a3790a38 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -12,7 +12,6 @@ pub struct RpcApi { #[derive(Debug, Clone, Copy, PartialEq)] enum RpcCommand { - ApplyMigration, ApplyMigrations, ApplyScript, 
CalculateDatabaseSteps, @@ -38,7 +37,6 @@ enum RpcCommand { impl RpcCommand { fn name(&self) -> &'static str { match self { - RpcCommand::ApplyMigration => "applyMigration", RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", RpcCommand::CalculateDatabaseSteps => "calculateDatabaseSteps", @@ -64,7 +62,6 @@ impl RpcCommand { } const AVAILABLE_COMMANDS: &[RpcCommand] = &[ - RpcCommand::ApplyMigration, RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, RpcCommand::CalculateDatabaseSteps, @@ -139,7 +136,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(¶ms.parse()?).await?), RpcCommand::CreateMigration => render(executor.create_migration(¶ms.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::ApplyMigration => render(executor.apply_migration(¶ms.parse()?).await?), RpcCommand::CalculateDatabaseSteps => render(executor.calculate_database_steps(¶ms.parse()?).await?), RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(¶ms.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 9f5def98ea73..af5827bbf6c4 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -4,7 +4,6 @@ //! module. 
#[allow(missing_docs)] -mod apply_migration; mod apply_migrations; mod apply_script; #[allow(missing_docs)] @@ -33,7 +32,6 @@ mod schema_push; #[allow(missing_docs)] mod unapply_migration; -pub use apply_migration::*; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; pub use calculate_database_steps::*; diff --git a/migration-engine/core/src/commands/apply_migration.rs b/migration-engine/core/src/commands/apply_migration.rs deleted file mode 100644 index 71ab17f8a7fc..000000000000 --- a/migration-engine/core/src/commands/apply_migration.rs +++ /dev/null @@ -1,205 +0,0 @@ -use super::MigrationStepsResultOutput; -use crate::{commands::command::*, CoreError}; -use crate::{migration_engine::MigrationEngine, CoreResult}; -use datamodel::{ast::SchemaAst, Datamodel}; -use migration_connector::*; -use serde::Deserialize; - -pub struct ApplyMigrationCommand<'a> { - input: &'a ApplyMigrationInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for ApplyMigrationCommand<'a> { - type Input = ApplyMigrationInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let cmd = ApplyMigrationCommand { input }; - tracing::debug!("{:?}", cmd.input); - - let connector = engine.connector(); - let migration_persistence = connector.migration_persistence(); - migration_persistence.init().await?; - - match migration_persistence.last().await? 
{ - Some(ref last_migration) if last_migration.is_watch_migration() && !cmd.input.is_watch_migration() => { - cmd.handle_transition_out_of_watch_mode(&engine).await - } - _ => cmd.handle_normal_migration(&engine).await, - } - } -} - -impl<'a> ApplyMigrationCommand<'a> { - async fn handle_transition_out_of_watch_mode( - &self, - engine: &MigrationEngine, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - - let migration_persistence = connector.migration_persistence(); - - let last_migration = migration_persistence.last().await?; - let current_datamodel = last_migration - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let last_non_watch_datamodel = migration_persistence - .last_non_watch_migration() - .await? - .map(|m| m.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(&last_non_watch_datamodel, self.input.steps.as_slice())?; - - self.handle_migration(&engine, current_datamodel, next_datamodel_ast) - .await - } - - async fn handle_normal_migration( - &self, - engine: &MigrationEngine, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - let migration_persistence = connector.migration_persistence(); - - if migration_persistence - .migration_is_already_applied(&self.input.migration_id) - .await? 
- { - return Err(CoreError::Input(anyhow::anyhow!( - "Invariant violation: the migration with id `{migration_id}` has already been applied.", - migration_id = self.input.migration_id - ))); - } - - let last_migration = migration_persistence.last().await?; - let current_datamodel_ast = last_migration - .as_ref() - .map(|migration| migration.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let current_datamodel = last_migration - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(¤t_datamodel_ast, self.input.steps.as_slice())?; - - self.handle_migration(&engine, current_datamodel, next_datamodel_ast) - .await - } - - async fn handle_migration( - &self, - engine: &MigrationEngine, - current_datamodel: Datamodel, - next_schema_ast: SchemaAst, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - let next_datamodel = - datamodel::lift_ast_to_datamodel(&next_schema_ast).map_err(CoreError::ProducedBadDatamodel)?; - let migration_persistence = connector.migration_persistence(); - - let database_migration = connector - .database_migration_inferrer() - .infer(¤t_datamodel, &next_datamodel.subject, &self.input.steps) - .await?; - - let database_steps_json_pretty = connector - .database_migration_step_applier() - .render_steps_pretty(&database_migration)?; - - tracing::trace!(?database_steps_json_pretty); - - let database_migration_json = serde_json::Value::Null; - - let migration = Migration::new(NewMigration { - name: self.input.migration_id.clone(), - datamodel_steps: self.input.steps.clone(), - datamodel_string: datamodel::render_schema_ast_to_string(&next_schema_ast), - database_migration: database_migration_json, - }); - - let diagnostics = connector - 
.destructive_change_checker() - .check(&database_migration) - .await?; - - match ( - !diagnostics.unexecutable_migrations.is_empty(), - diagnostics.has_warnings(), - self.input.force.unwrap_or(false), - ) { - (true, _, _) => { - tracing::info!("There are unexecutable migration steps, the migration will not be applied.") - } - // We have no warnings, or the force flag is passed. - (_, false, _) | (_, true, true) => { - tracing::debug!("Applying the migration"); - let saved_migration = migration_persistence.create(migration).await?; - - connector - .migration_applier() - .apply(&saved_migration, &database_migration) - .await?; - - tracing::debug!("Migration applied"); - } - // We have warnings, but no force flag was passed. - (_, true, false) => tracing::info!("The force flag was not passed, the migration will not be applied."), - } - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = diagnostics; - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_datamodel_to_string(&next_datamodel.subject), - datamodel_steps: self.input.steps.clone(), - database_steps: database_steps_json_pretty, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ApplyMigrationInput { - pub migration_id: String, - pub steps: Vec, - pub force: Option, -} - -impl IsWatchMigration for ApplyMigrationInput { - fn is_watch_migration(&self) -> bool { - self.migration_id.starts_with("watch") - } -} diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index eff9a4285115..f1efd2cc8908 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{ApplyMigrationInput, InferMigrationStepsInput, MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::{InferMigrationStepsInput, 
MigrationStepsResultOutput, SchemaPushInput}; pub use core_error::{CoreError, CoreResult}; pub use gate_keeper::GateKeeper; diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index d96f47bf6b4d..8cd64c26c8b6 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -21,16 +21,13 @@ use super::{ assertions::SchemaAssertion, misc_helpers::{mysql_migration_connector, postgres_migration_connector, sqlite_migration_connector, test_api}, sql::barrel_migration_executor::BarrelMigrationExecutor, - InferAndApplyOutput, }; use crate::{connectors::Tags, test_api::list_migration_directories::ListMigrationDirectories, AssertionResult}; use enumflags2::BitFlags; -use migration_connector::{ - ImperativeMigrationsPersistence, MigrationConnector, MigrationPersistence, MigrationRecord, MigrationStep, -}; +use migration_connector::{ImperativeMigrationsPersistence, MigrationConnector, MigrationPersistence, MigrationRecord}; use migration_core::{ api::{GenericApi, MigrationApi}, - commands::{ApplyMigrationInput, ApplyScriptInput}, + commands::ApplyScriptInput, }; use quaint::{ prelude::{ConnectionInfo, Queryable, SqlFamily}, @@ -117,29 +114,6 @@ impl TestApi { Ok(tempfile::tempdir()?) 
} - pub async fn apply_migration(&self, steps: Vec, migration_id: &str) -> InferAndApplyOutput { - let input = ApplyMigrationInput { - migration_id: migration_id.into(), - steps, - force: None, - }; - - let migration_output = self.api.apply_migration(&input).await.expect("ApplyMigration failed"); - - assert!( - migration_output.general_errors.is_empty(), - format!( - "ApplyMigration returned unexpected errors: {:?}", - migration_output.general_errors - ) - ); - - InferAndApplyOutput { - sql_schema: self.describe_database().await.unwrap(), - migration_output, - } - } - pub fn apply_migrations<'a>(&'a self, migrations_directory: &'a TempDir) -> ApplyMigrations<'a> { ApplyMigrations::new(&self.api, migrations_directory) } From 628dd96b48a6124b7f49fc2d606c429b1ea4af1d Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:37:25 +0100 Subject: [PATCH 02/29] remove listmigrations --- migration-engine/core/src/api.rs | 7 --- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 4 -- .../core/src/commands/list_migrations.rs | 57 ------------------- 4 files changed, 72 deletions(-) delete mode 100644 migration-engine/core/src/commands/list_migrations.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 175cd29accfa..d80211a85f3e 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -57,7 +57,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult; async fn initialize(&self, input: &InitializeInput) -> CoreResult; - async fn list_migrations(&self, input: &serde_json::Value) -> CoreResult>; async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, @@ -163,12 +162,6 @@ where .await } - async fn list_migrations(&self, input: &serde_json::Value) -> CoreResult> { - 
self.handle_command::(input) - .instrument(tracing::info_span!("ListMigrations")) - .await - } - async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index f425a3790a38..aa3e4a2fa14f 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -24,7 +24,6 @@ enum RpcCommand { InferMigrationSteps, Initialize, ListMigrationDirectories, - ListMigrations, MarkMigrationApplied, MarkMigrationRolledBack, MigrationProgress, @@ -49,7 +48,6 @@ impl RpcCommand { RpcCommand::InferMigrationSteps => "inferMigrationSteps", RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", - RpcCommand::ListMigrations => "listMigrations", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", RpcCommand::MigrationProgress => "migrationProgress", @@ -74,7 +72,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::InferMigrationSteps, RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, - RpcCommand::ListMigrations, RpcCommand::MarkMigrationApplied, RpcCommand::MigrationProgress, RpcCommand::MarkMigrationRolledBack, @@ -148,7 +145,6 @@ impl RpcApi { RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(¶ms.parse()?).await?) 
} - RpcCommand::ListMigrations => render(executor.list_migrations(&serde_json::Value::Null).await?), RpcCommand::MarkMigrationApplied => render(executor.mark_migration_applied(¶ms.parse()?).await?), RpcCommand::MarkMigrationRolledBack => render(executor.mark_migration_rolled_back(¶ms.parse()?).await?), RpcCommand::MigrationProgress => render(executor.migration_progress(¶ms.parse()?).await?), diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index af5827bbf6c4..237cf5aca815 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -3,7 +3,6 @@ //! The commands exposed by the migration engine core are defined in this //! module. -#[allow(missing_docs)] mod apply_migrations; mod apply_script; #[allow(missing_docs)] @@ -20,8 +19,6 @@ mod get_database_version; mod infer_migration_steps; mod initialize; mod list_migration_directories; -#[allow(missing_docs)] -mod list_migrations; mod mark_migration_applied; mod mark_migration_rolled_back; #[allow(missing_docs)] @@ -48,7 +45,6 @@ pub use get_database_version::*; pub use infer_migration_steps::*; pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; -pub use list_migrations::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; pub use mark_migration_rolled_back::{ MarkMigrationRolledBackCommand, MarkMigrationRolledBackInput, MarkMigrationRolledBackOutput, diff --git a/migration-engine/core/src/commands/list_migrations.rs b/migration-engine/core/src/commands/list_migrations.rs deleted file mode 100644 index 781039ea953f..000000000000 --- a/migration-engine/core/src/commands/list_migrations.rs +++ /dev/null @@ -1,57 +0,0 @@ -use crate::migration_engine::MigrationEngine; -use crate::{commands::command::*, CoreResult}; -use migration_connector::steps::*; -use migration_connector::*; -use serde::Serialize; - -pub struct 
ListMigrationsCommand; - -#[async_trait::async_trait] -impl<'a> MigrationCommand for ListMigrationsCommand { - type Input = serde_json::Value; - type Output = Vec; - - async fn execute(_input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let migration_persistence = engine.connector().migration_persistence(); - migration_persistence.init().await?; - - let migrations: Self::Output = migration_persistence - .load_all() - .await? - .into_iter() - .map(convert_migration_to_list_migration_steps_output) - .collect(); - - tracing::info!( - "Returning {migrations_count} migrations ({pending_count} pending).", - migrations_count = migrations.len(), - pending_count = migrations.iter().filter(|mig| mig.status.is_pending()).count(), - ); - - Ok(migrations) - } -} - -pub fn convert_migration_to_list_migration_steps_output(migration: Migration) -> ListMigrationsOutput { - ListMigrationsOutput { - id: migration.name, - datamodel_steps: migration.datamodel_steps, - database_steps: vec![], - status: migration.status, - datamodel: migration.datamodel_string, - } -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ListMigrationsOutput { - pub id: String, - pub datamodel_steps: Vec, - pub database_steps: Vec, - pub status: MigrationStatus, - pub datamodel: String, -} From 5e9f194ece0870400ddbff8385ff607d79efc788 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:43:42 +0100 Subject: [PATCH 03/29] remove calculatedatabasesteps --- migration-engine/core/src/api.rs | 13 --- migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../src/commands/calculate_database_steps.rs | 107 ------------------ 4 files changed, 127 deletions(-) delete mode 100644 migration-engine/core/src/commands/calculate_database_steps.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 
d80211a85f3e..a640759dbf6f 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -43,10 +43,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn version(&self, input: &serde_json::Value) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; - async fn calculate_database_steps( - &self, - input: &CalculateDatabaseStepsInput, - ) -> CoreResult; async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult; async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult; async fn debug_panic(&self, input: &()) -> CoreResult<()>; @@ -98,15 +94,6 @@ where .await } - async fn calculate_database_steps( - &self, - input: &CalculateDatabaseStepsInput, - ) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!("CalculateDatabaseSteps")) - .await - } - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("CalculateDatamodel")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index aa3e4a2fa14f..1d630d4915a2 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -14,7 +14,6 @@ pub struct RpcApi { enum RpcCommand { ApplyMigrations, ApplyScript, - CalculateDatabaseSteps, CalculateDatamodel, CreateMigration, DebugPanic, @@ -38,7 +37,6 @@ impl RpcCommand { match self { RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", - RpcCommand::CalculateDatabaseSteps => "calculateDatabaseSteps", RpcCommand::CalculateDatamodel => "calculateDatamodel", RpcCommand::CreateMigration => "createMigration", RpcCommand::DebugPanic => "debugPanic", @@ -62,7 +60,6 @@ impl RpcCommand { const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, - 
RpcCommand::CalculateDatabaseSteps, RpcCommand::CalculateDatamodel, RpcCommand::CreateMigration, RpcCommand::DebugPanic, @@ -133,7 +130,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(¶ms.parse()?).await?), RpcCommand::CreateMigration => render(executor.create_migration(¶ms.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::CalculateDatabaseSteps => render(executor.calculate_database_steps(¶ms.parse()?).await?), RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(¶ms.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { render(executor.diagnose_migration_history(¶ms.parse()?).await?) diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 237cf5aca815..c83907325386 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -6,8 +6,6 @@ mod apply_migrations; mod apply_script; #[allow(missing_docs)] -mod calculate_database_steps; -#[allow(missing_docs)] mod calculate_datamodel; mod command; mod create_migration; @@ -31,7 +29,6 @@ mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; -pub use calculate_database_steps::*; pub use calculate_datamodel::*; pub use command::MigrationCommand; pub use create_migration::{CreateMigrationCommand, CreateMigrationInput, CreateMigrationOutput}; diff --git a/migration-engine/core/src/commands/calculate_database_steps.rs b/migration-engine/core/src/commands/calculate_database_steps.rs deleted file mode 100644 index 5df129be3b8b..000000000000 --- a/migration-engine/core/src/commands/calculate_database_steps.rs +++ /dev/null @@ -1,107 +0,0 @@ -//! The CalculateDatabaseSteps RPC method. -//! -//! Its purpose is to infer the database steps for a given migration without reference to a target -//! 
prisma schema/datamodel, based on the datamodel migration steps and previous already applied -//! migrations. - -use super::MigrationStepsResultOutput; -use crate::{commands::command::MigrationCommand, migration_engine::MigrationEngine, CoreError, CoreResult}; -use datamodel::ast::SchemaAst; -use migration_connector::{DatabaseMigrationMarker, DestructiveChangeDiagnostics, MigrationConnector, MigrationStep}; -use serde::Deserialize; - -pub struct CalculateDatabaseStepsCommand<'a> { - input: &'a CalculateDatabaseStepsInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for CalculateDatabaseStepsCommand<'a> { - type Input = CalculateDatabaseStepsInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let cmd = CalculateDatabaseStepsCommand { input }; - tracing::debug!(command_input = ?cmd.input); - - let connector = engine.connector(); - - let steps_to_apply = &cmd.input.steps_to_apply; - let assume_to_be_applied = cmd.applicable_steps(); - - let assumed_datamodel_ast = engine - .datamodel_calculator() - .infer(&SchemaAst::empty(), &assume_to_be_applied)?; - let assumed_datamodel = - datamodel::lift_ast_to_datamodel(&assumed_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(&assumed_datamodel_ast, &steps_to_apply)?; - let next_datamodel = - datamodel::lift_ast_to_datamodel(&next_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let database_migration = connector - .database_migration_inferrer() - .infer(&assumed_datamodel.subject, &next_datamodel.subject, &steps_to_apply) - .await?; - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = connector - .destructive_change_checker() - .check(&database_migration) - .await?; - - let database_steps_json = connector - 
.database_migration_step_applier() - .render_steps_pretty(&database_migration)?; - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_schema_ast_to_string(&next_datamodel_ast), - datamodel_steps: steps_to_apply.to_vec(), - database_steps: database_steps_json, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -impl CalculateDatabaseStepsCommand<'_> { - /// Returns assume_to_be_applied from the input, with the exception of the steps from - /// steps_to_apply that may have been sent by mistake. - fn applicable_steps(&self) -> &[MigrationStep] { - match self.input.assume_to_be_applied.as_ref() { - Some(all_steps) => { - let steps_to_apply = &self.input.steps_to_apply; - - if steps_to_apply.len() >= all_steps.len() { - return all_steps; - } - - let start_idx = all_steps.len() - (steps_to_apply.len()); - let sliced = &all_steps[start_idx..]; - - if sliced == steps_to_apply.as_slice() { - return &all_steps[..start_idx]; - } - - all_steps - } - None => &[], - } - } -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatabaseStepsInput { - pub steps_to_apply: Vec, - pub assume_to_be_applied: Option>, -} From 60c490de5ce37644e5cb82321739c785907accc7 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:47:08 +0100 Subject: [PATCH 04/29] remove calculateDatamodel --- migration-engine/core/src/api.rs | 7 ---- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 3 -- .../core/src/commands/calculate_datamodel.rs | 41 ------------------- 4 files changed, 55 deletions(-) delete mode 100644 migration-engine/core/src/commands/calculate_datamodel.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index a640759dbf6f..d1e69ec4c746 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -43,7 +43,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn version(&self, input: 
&serde_json::Value) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult; async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult; async fn debug_panic(&self, input: &()) -> CoreResult<()>; async fn diagnose_migration_history( @@ -94,12 +93,6 @@ where .await } - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!("CalculateDatamodel")) - .await - } - async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!( diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 1d630d4915a2..879f5dffb9e8 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -14,7 +14,6 @@ pub struct RpcApi { enum RpcCommand { ApplyMigrations, ApplyScript, - CalculateDatamodel, CreateMigration, DebugPanic, DiagnoseMigrationHistory, @@ -37,7 +36,6 @@ impl RpcCommand { match self { RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", - RpcCommand::CalculateDatamodel => "calculateDatamodel", RpcCommand::CreateMigration => "createMigration", RpcCommand::DebugPanic => "debugPanic", RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", @@ -60,7 +58,6 @@ impl RpcCommand { const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, - RpcCommand::CalculateDatamodel, RpcCommand::CreateMigration, RpcCommand::DebugPanic, RpcCommand::DiagnoseMigrationHistory, @@ -130,7 +127,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(¶ms.parse()?).await?), RpcCommand::CreateMigration => 
render(executor.create_migration(¶ms.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(¶ms.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { render(executor.diagnose_migration_history(¶ms.parse()?).await?) } diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index c83907325386..8d0cf4301680 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -5,8 +5,6 @@ mod apply_migrations; mod apply_script; -#[allow(missing_docs)] -mod calculate_datamodel; mod command; mod create_migration; mod debug_panic; @@ -29,7 +27,6 @@ mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; -pub use calculate_datamodel::*; pub use command::MigrationCommand; pub use create_migration::{CreateMigrationCommand, CreateMigrationInput, CreateMigrationOutput}; pub use debug_panic::DebugPanicCommand; diff --git a/migration-engine/core/src/commands/calculate_datamodel.rs b/migration-engine/core/src/commands/calculate_datamodel.rs deleted file mode 100644 index b55db030a87b..000000000000 --- a/migration-engine/core/src/commands/calculate_datamodel.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::migration_engine::MigrationEngine; -use crate::{commands::command::*, CoreResult}; -use datamodel::ast::SchemaAst; -use migration_connector::*; -use serde::{Deserialize, Serialize}; -use tracing::debug; - -pub struct CalculateDatamodelCommand; - -#[async_trait::async_trait] -impl MigrationCommand for CalculateDatamodelCommand { - type Input = CalculateDatamodelInput; - type Output = CalculateDatamodelOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - 
debug!("{:?}", input); - - let base_datamodel = SchemaAst::empty(); - let datamodel = engine.datamodel_calculator().infer(&base_datamodel, &input.steps)?; - - Ok(CalculateDatamodelOutput { - datamodel: datamodel::render_schema_ast_to_string(&datamodel), - }) - } -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatamodelInput { - pub steps: Vec, -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatamodelOutput { - pub datamodel: String, -} From bb573f9b382f5b97e2b6fae146418298287193fc Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:50:34 +0100 Subject: [PATCH 05/29] remove inferMigrationSteps --- migration-engine/core/src/api.rs | 10 - migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../src/commands/infer_migration_steps.rs | 185 ------------------ migration-engine/core/src/lib.rs | 2 +- 5 files changed, 1 insertion(+), 203 deletions(-) delete mode 100644 migration-engine/core/src/commands/infer_migration_steps.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index d1e69ec4c746..ab25303bdd49 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -50,7 +50,6 @@ pub trait GenericApi: Send + Sync + 'static { input: &DiagnoseMigrationHistoryInput, ) -> CoreResult; async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; - async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult; async fn initialize(&self, input: &InitializeInput) -> CoreResult; async fn list_migration_directories( &self, @@ -124,15 +123,6 @@ where .await } - async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!( - "InferMigrationSteps", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn initialize(&self, input: 
&InitializeInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!( diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 879f5dffb9e8..4372692f8b00 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -19,7 +19,6 @@ enum RpcCommand { DiagnoseMigrationHistory, EvaluateDataLoss, GetDatabaseVersion, - InferMigrationSteps, Initialize, ListMigrationDirectories, MarkMigrationApplied, @@ -41,7 +40,6 @@ impl RpcCommand { RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", RpcCommand::EvaluateDataLoss => "evaluateDataLoss", RpcCommand::GetDatabaseVersion => "getDatabaseVersion", - RpcCommand::InferMigrationSteps => "inferMigrationSteps", RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", @@ -63,7 +61,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::DiagnoseMigrationHistory, RpcCommand::EvaluateDataLoss, RpcCommand::GetDatabaseVersion, - RpcCommand::InferMigrationSteps, RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, @@ -132,7 +129,6 @@ impl RpcApi { } RpcCommand::EvaluateDataLoss => render(executor.evaluate_data_loss(¶ms.parse()?).await?), RpcCommand::GetDatabaseVersion => render(executor.version(&serde_json::Value::Null).await?), - RpcCommand::InferMigrationSteps => render(executor.infer_migration_steps(¶ms.parse()?).await?), RpcCommand::Initialize => render(executor.initialize(¶ms.parse()?).await?), RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(¶ms.parse()?).await?) 
diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 8d0cf4301680..7af4e3d0ba5c 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -11,8 +11,6 @@ mod debug_panic; mod diagnose_migration_history; mod evaluate_data_loss; mod get_database_version; -#[allow(missing_docs)] -mod infer_migration_steps; mod initialize; mod list_migration_directories; mod mark_migration_applied; @@ -36,7 +34,6 @@ pub use diagnose_migration_history::{ }; pub use evaluate_data_loss::*; pub use get_database_version::*; -pub use infer_migration_steps::*; pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; diff --git a/migration-engine/core/src/commands/infer_migration_steps.rs b/migration-engine/core/src/commands/infer_migration_steps.rs deleted file mode 100644 index e74502d5be79..000000000000 --- a/migration-engine/core/src/commands/infer_migration_steps.rs +++ /dev/null @@ -1,185 +0,0 @@ -//! The InferMigrationSteps RPC method. 
- -use super::MigrationStepsResultOutput; -use crate::{commands::command::*, migration_engine::MigrationEngine, *}; -use datamodel::ast::{parser::parse_schema, SchemaAst}; -use migration_connector::*; -use serde::Deserialize; -use tracing::debug; - -pub struct InferMigrationStepsCommand<'a> { - input: &'a InferMigrationStepsInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for InferMigrationStepsCommand<'a> { - type Input = InferMigrationStepsInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Sync + Send + 'static, - { - let cmd = InferMigrationStepsCommand { input }; - debug!(?cmd.input); - - let connector = engine.connector(); - let migration_persistence = connector.migration_persistence(); - migration_persistence.init().await?; - let database_migration_inferrer = connector.database_migration_inferrer(); - - let assume_to_be_applied = cmd.assume_to_be_applied(); - - cmd.validate_assumed_migrations_are_not_applied(migration_persistence) - .await?; - - let last_migration = migration_persistence.last().await?; - let current_datamodel_ast = if let Some(migration) = last_migration.as_ref() { - migration - .parse_schema_ast() - .map_err(CoreError::InvalidPersistedDatamodel)? 
- } else { - SchemaAst::empty() - }; - let assumed_datamodel_ast = engine - .datamodel_calculator() - .infer(¤t_datamodel_ast, assume_to_be_applied.as_slice())?; - let assumed_datamodel = - datamodel::lift_ast_to_datamodel(&assumed_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let next_datamodel = parse_datamodel(&cmd.input.datamodel)?; - if let Some(err) = connector.check_database_version_compatibility(&next_datamodel) { - return Err(ConnectorError::user_facing_error(err).into()); - }; - - let next_datamodel_ast = parse_schema(&cmd.input.datamodel) - .map_err(|err| CoreError::Input(anyhow::anyhow!("{}", err.to_pretty_string("", &cmd.input.datamodel))))?; - - let model_migration_steps = engine - .datamodel_migration_steps_inferrer() - .infer(&assumed_datamodel_ast, &next_datamodel_ast); - - let database_migration = database_migration_inferrer - .infer(&assumed_datamodel.subject, &next_datamodel, &model_migration_steps) - .await?; - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = connector - .destructive_change_checker() - .check(&database_migration) - .await?; - - let (returned_datamodel_steps, returned_database_migration) = - if !cmd.input.is_watch_migration() && last_migration.map(|mig| mig.is_watch_migration()).unwrap_or(false) { - // Transition out of watch mode - let last_non_watch_applied_migration = migration_persistence.last_non_watch_applied_migration().await?; - let last_non_watch_datamodel_ast = last_non_watch_applied_migration - .as_ref() - .map(|m| m.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let last_non_watch_datamodel = last_non_watch_applied_migration - .map(|m| m.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let datamodel_steps = engine - .datamodel_migration_steps_inferrer() - .infer(&last_non_watch_datamodel_ast, &next_datamodel_ast); - - // The database 
migration since the last non-watch migration, so we can render all the steps applied - // in watch mode to the migrations folder. - let full_database_migration = database_migration_inferrer.infer_from_datamodels( - &last_non_watch_datamodel, - &next_datamodel, - &datamodel_steps, - )?; - - (datamodel_steps, full_database_migration) - } else { - (model_migration_steps, database_migration) - }; - - let database_steps = connector - .database_migration_step_applier() - .render_steps_pretty(&returned_database_migration)?; - - debug!(?returned_datamodel_steps); - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_datamodel_to_string(&next_datamodel), - datamodel_steps: returned_datamodel_steps, - database_steps, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -impl InferMigrationStepsCommand<'_> { - fn assume_to_be_applied(&self) -> Vec { - self.input - .assume_to_be_applied - .clone() - .or_else(|| { - self.input.assume_applied_migrations.as_ref().map(|migrations| { - migrations - .iter() - .flat_map(|migration| migration.datamodel_steps.clone().into_iter()) - .collect() - }) - }) - .unwrap_or_else(Vec::new) - } - - async fn validate_assumed_migrations_are_not_applied( - &self, - migration_persistence: &dyn MigrationPersistence, - ) -> CoreResult<()> { - if let Some(migrations) = self.input.assume_applied_migrations.as_ref() { - for migration in migrations { - if migration_persistence - .migration_is_already_applied(&migration.migration_id) - .await? - { - return Err(CoreError::Generic(anyhow::anyhow!( - "Input is invalid. Migration {} is already applied.", - migration.migration_id - ))); - } - } - } - - Ok(()) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct InferMigrationStepsInput { - pub migration_id: String, - #[serde(alias = "dataModel")] - pub datamodel: String, - /// Migration steps from migrations that have been inferred but not applied yet. 
- /// - /// These steps must be provided and correct for migration inferrence to work. - pub assume_to_be_applied: Option>, - pub assume_applied_migrations: Option>, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct AppliedMigration { - pub migration_id: String, - pub datamodel_steps: Vec, -} - -impl IsWatchMigration for InferMigrationStepsInput { - fn is_watch_migration(&self) -> bool { - self.migration_id.starts_with("watch") - } -} diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index f1efd2cc8908..0902a30b78ce 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{InferMigrationStepsInput, MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::{MigrationStepsResultOutput, SchemaPushInput}; pub use core_error::{CoreError, CoreResult}; pub use gate_keeper::GateKeeper; From 32c295089f05f91a9c583fc7187377030a34850f Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:54:11 +0100 Subject: [PATCH 06/29] remove migrationProgress --- migration-engine/core/src/api.rs | 10 --- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 3 - .../core/src/commands/migration_progress.rs | 61 ------------------- 4 files changed, 78 deletions(-) delete mode 100644 migration-engine/core/src/commands/migration_progress.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index ab25303bdd49..4b69e7add8ac 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -61,7 +61,6 @@ pub trait GenericApi: Send + Sync + 'static { &self, input: &MarkMigrationRolledBackInput, ) -> CoreResult; - async fn migration_progress(&self, input: &MigrationProgressInput) -> CoreResult; async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult; async fn reset(&self, input: &()) -> 
CoreResult<()>; async fn schema_push(&self, input: &SchemaPushInput) -> CoreResult; @@ -165,15 +164,6 @@ where .await } - async fn migration_progress(&self, input: &MigrationProgressInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!( - "MigrationProgress", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("PlanMigration")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 4372692f8b00..2044f4516727 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -23,7 +23,6 @@ enum RpcCommand { ListMigrationDirectories, MarkMigrationApplied, MarkMigrationRolledBack, - MigrationProgress, PlanMigration, Reset, SchemaPush, @@ -44,7 +43,6 @@ impl RpcCommand { RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", - RpcCommand::MigrationProgress => "migrationProgress", RpcCommand::PlanMigration => "planMigration", RpcCommand::Reset => "reset", RpcCommand::SchemaPush => "schemaPush", @@ -64,7 +62,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, - RpcCommand::MigrationProgress, RpcCommand::MarkMigrationRolledBack, RpcCommand::PlanMigration, RpcCommand::Reset, @@ -135,7 +132,6 @@ impl RpcApi { } RpcCommand::MarkMigrationApplied => render(executor.mark_migration_applied(¶ms.parse()?).await?), RpcCommand::MarkMigrationRolledBack => render(executor.mark_migration_rolled_back(¶ms.parse()?).await?), - RpcCommand::MigrationProgress => render(executor.migration_progress(¶ms.parse()?).await?), RpcCommand::PlanMigration => render(executor.plan_migration(¶ms.parse()?).await?), 
RpcCommand::Reset => render(executor.reset(&()).await?), RpcCommand::SchemaPush => render(executor.schema_push(¶ms.parse()?).await?), diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 7af4e3d0ba5c..7f9555a8d630 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -15,8 +15,6 @@ mod initialize; mod list_migration_directories; mod mark_migration_applied; mod mark_migration_rolled_back; -#[allow(missing_docs)] -mod migration_progress; mod plan_migration; mod reset; mod schema_push; @@ -40,7 +38,6 @@ pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppli pub use mark_migration_rolled_back::{ MarkMigrationRolledBackCommand, MarkMigrationRolledBackInput, MarkMigrationRolledBackOutput, }; -pub use migration_progress::*; pub use plan_migration::{PlanMigrationCommand, PlanMigrationInput, PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; diff --git a/migration-engine/core/src/commands/migration_progress.rs b/migration-engine/core/src/commands/migration_progress.rs deleted file mode 100644 index 3fd8dbf2362f..000000000000 --- a/migration-engine/core/src/commands/migration_progress.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::{commands::command::*, migration_engine::MigrationEngine, CoreError, CoreResult}; -use chrono::{DateTime, Utc}; -use migration_connector::*; -use serde::{Deserialize, Serialize}; - -pub struct MigrationProgressCommand; - -#[async_trait::async_trait] -impl MigrationCommand for MigrationProgressCommand { - type Input = MigrationProgressInput; - type Output = MigrationProgressOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - let migration_persistence = engine.connector().migration_persistence(); - migration_persistence.init().await?; - - let 
migration = migration_persistence - .by_name(&input.migration_id) - .await? - .ok_or_else(|| { - let error = anyhow::anyhow!( - "Could not load migration from database. Migration name was: {}", - &input.migration_id - ); - - CoreError::Input(error) - })?; - - Ok(MigrationProgressOutput { - status: migration.status, - steps: migration.datamodel_steps.len(), - applied: migration.applied, - rolled_back: migration.rolled_back, - errors: migration.errors, - started_at: migration.started_at, - finished_at: migration.finished_at, - }) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MigrationProgressInput { - pub migration_id: String, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct MigrationProgressOutput { - status: MigrationStatus, - steps: usize, - applied: usize, - rolled_back: usize, - errors: Vec, - started_at: DateTime, - finished_at: Option>, -} From 01a7bec03c24824dc6f73fe79b89fceafe091156 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:57:54 +0100 Subject: [PATCH 07/29] remove unapplyMigrations --- migration-engine/core/src/api.rs | 7 -- migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../core/src/commands/unapply_migration.rs | 104 ------------------ 4 files changed, 118 deletions(-) delete mode 100644 migration-engine/core/src/commands/unapply_migration.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 4b69e7add8ac..d082c00553da 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -64,7 +64,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult; async fn reset(&self, input: &()) -> CoreResult<()>; async fn schema_push(&self, input: &SchemaPushInput) -> CoreResult; - async fn unapply_migration(&self, input: &UnapplyMigrationInput) -> CoreResult; } #[async_trait::async_trait] @@ -181,10 
+180,4 @@ where .instrument(tracing::info_span!("SchemaPush")) .await } - - async fn unapply_migration(&self, input: &UnapplyMigrationInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!("UnapplyMigration")) - .await - } } diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 2044f4516727..f52d2b656aeb 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -26,7 +26,6 @@ enum RpcCommand { PlanMigration, Reset, SchemaPush, - UnapplyMigration, } impl RpcCommand { @@ -46,7 +45,6 @@ impl RpcCommand { RpcCommand::PlanMigration => "planMigration", RpcCommand::Reset => "reset", RpcCommand::SchemaPush => "schemaPush", - RpcCommand::UnapplyMigration => "unapplyMigration", } } } @@ -66,7 +64,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::PlanMigration, RpcCommand::Reset, RpcCommand::SchemaPush, - RpcCommand::UnapplyMigration, ]; impl RpcApi { @@ -135,7 +132,6 @@ impl RpcApi { RpcCommand::PlanMigration => render(executor.plan_migration(¶ms.parse()?).await?), RpcCommand::Reset => render(executor.reset(&()).await?), RpcCommand::SchemaPush => render(executor.schema_push(¶ms.parse()?).await?), - RpcCommand::UnapplyMigration => render(executor.unapply_migration(¶ms.parse()?).await?), }) } } diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 7f9555a8d630..4f7065f262be 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -18,8 +18,6 @@ mod mark_migration_rolled_back; mod plan_migration; mod reset; mod schema_push; -#[allow(missing_docs)] -mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; @@ -41,7 +39,6 @@ pub use mark_migration_rolled_back::{ pub use plan_migration::{PlanMigrationCommand, PlanMigrationInput, 
PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; -pub use unapply_migration::*; use migration_connector::{MigrationStep, MigrationWarning, PrettyDatabaseMigrationStep, UnexecutableMigration}; use serde::{Deserialize, Serialize}; diff --git a/migration-engine/core/src/commands/unapply_migration.rs b/migration-engine/core/src/commands/unapply_migration.rs deleted file mode 100644 index 9cb1cb2fc757..000000000000 --- a/migration-engine/core/src/commands/unapply_migration.rs +++ /dev/null @@ -1,104 +0,0 @@ -use crate::{commands::command::*, CoreResult}; -use crate::{migration_engine::MigrationEngine, CoreError}; -use datamodel::{ast::SchemaAst, Datamodel}; -use migration_connector::*; -use serde::{Deserialize, Serialize}; - -pub struct UnapplyMigrationCommand<'a> { - input: &'a UnapplyMigrationInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for UnapplyMigrationCommand<'a> { - type Input = UnapplyMigrationInput; - type Output = UnapplyMigrationOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - let cmd = UnapplyMigrationCommand { input }; - tracing::debug!("{:?}", cmd.input); - let connector = engine.connector(); - let persistence = connector.migration_persistence(); - persistence.init().await?; - - let result = match persistence.last_two_migrations().await? 
{ - (None, _) => UnapplyMigrationOutput { - rolled_back: "not-applicable".to_string(), - active: None, - errors: vec!["There is no last migration that can be rolled back.".to_string()], - warnings: Vec::new(), - }, - (Some(migration_to_rollback), second_to_last) => { - let schema_ast_before_last_migration = second_to_last - .as_ref() - .map(|migration| migration.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let schema_before_last_migration = second_to_last - .as_ref() - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let last_schema_ast = migration_to_rollback - .parse_schema_ast() - .map_err(CoreError::InvalidPersistedDatamodel)?; - let last_schema = migration_to_rollback - .parse_datamodel() - .map_err(CoreError::InvalidPersistedDatamodel)?; - - // Generate backwards datamodel steps. - let datamodel_migration = - crate::migration::datamodel_differ::diff(&last_schema_ast, &schema_ast_before_last_migration); - - let database_migration = connector - .database_migration_inferrer() - .infer(&last_schema, &schema_before_last_migration, &datamodel_migration) - .await?; - - let destructive_change_checker = connector.destructive_change_checker(); - - let warnings = destructive_change_checker.check(&database_migration).await?; - - match (warnings.has_warnings(), input.force) { - (false, _) | (true, None) | (true, Some(true)) => { - connector - .migration_applier() - .unapply(&migration_to_rollback, &database_migration) - .await?; - } - (true, Some(false)) => (), - } - - let new_active_migration = connector.migration_persistence().last().await?.map(|m| m.name); - - UnapplyMigrationOutput { - rolled_back: migration_to_rollback.name, - active: new_active_migration, - errors: Vec::new(), - warnings: warnings.warnings, - } - } - }; - - Ok(result) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = 
"camelCase")] -pub struct UnapplyMigrationInput { - pub force: Option, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct UnapplyMigrationOutput { - pub rolled_back: String, - pub active: Option, - pub errors: Vec, - pub warnings: Vec, -} From fea2ab0b5c23277b528d0f9ec41e10dd40898a82 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 16:18:57 +0100 Subject: [PATCH 08/29] remove initialize --- migration-engine/core/src/api.rs | 10 ------ migration-engine/core/src/api/rpc.rs | 4 --- migration-engine/core/src/commands.rs | 2 -- .../core/src/commands/initialize.rs | 32 ------------------- 4 files changed, 48 deletions(-) delete mode 100644 migration-engine/core/src/commands/initialize.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index d082c00553da..1bfe27481722 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -50,7 +50,6 @@ pub trait GenericApi: Send + Sync + 'static { input: &DiagnoseMigrationHistoryInput, ) -> CoreResult; async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; - async fn initialize(&self, input: &InitializeInput) -> CoreResult; async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, @@ -121,15 +120,6 @@ where .await } - async fn initialize(&self, input: &InitializeInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!( - "Initialize", - migrations_directory_path = input.migrations_directory_path.as_str() - )) - .await - } - async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index f52d2b656aeb..033606c76b0b 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -19,7 +19,6 @@ enum RpcCommand { DiagnoseMigrationHistory, EvaluateDataLoss, GetDatabaseVersion, - Initialize, 
ListMigrationDirectories, MarkMigrationApplied, MarkMigrationRolledBack, @@ -38,7 +37,6 @@ impl RpcCommand { RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", RpcCommand::EvaluateDataLoss => "evaluateDataLoss", RpcCommand::GetDatabaseVersion => "getDatabaseVersion", - RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", @@ -57,7 +55,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::DiagnoseMigrationHistory, RpcCommand::EvaluateDataLoss, RpcCommand::GetDatabaseVersion, - RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, RpcCommand::MarkMigrationRolledBack, @@ -123,7 +120,6 @@ impl RpcApi { } RpcCommand::EvaluateDataLoss => render(executor.evaluate_data_loss(¶ms.parse()?).await?), RpcCommand::GetDatabaseVersion => render(executor.version(&serde_json::Value::Null).await?), - RpcCommand::Initialize => render(executor.initialize(¶ms.parse()?).await?), RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(¶ms.parse()?).await?) 
} diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 4f7065f262be..8aa15b5888f0 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -11,7 +11,6 @@ mod debug_panic; mod diagnose_migration_history; mod evaluate_data_loss; mod get_database_version; -mod initialize; mod list_migration_directories; mod mark_migration_applied; mod mark_migration_rolled_back; @@ -30,7 +29,6 @@ pub use diagnose_migration_history::{ }; pub use evaluate_data_loss::*; pub use get_database_version::*; -pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; pub use mark_migration_rolled_back::{ diff --git a/migration-engine/core/src/commands/initialize.rs b/migration-engine/core/src/commands/initialize.rs deleted file mode 100644 index 5e6d44768203..000000000000 --- a/migration-engine/core/src/commands/initialize.rs +++ /dev/null @@ -1,32 +0,0 @@ -use super::MigrationCommand; -use crate::{migration_engine::MigrationEngine, CoreResult}; -use serde::Deserialize; - -/// Input to the `Initialize` command. -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct InitializeInput { - /// Path to the migrations directory. - pub migrations_directory_path: String, -} - -/// Output of the `Initialize` command. -pub type InitializeOutput = (); - -/// Initialize the migrations directory and the migrations table. 
-pub struct InitializeCommand; - -#[async_trait::async_trait] -impl<'a> MigrationCommand for InitializeCommand { - type Input = InitializeInput; - - type Output = InitializeOutput; - - async fn execute(_input: &Self::Input, _engine: &MigrationEngine) -> CoreResult - where - C: migration_connector::MigrationConnector, - D: migration_connector::DatabaseMigrationMarker + Send + Sync + 'static, - { - todo!("initialize command") - } -} From c1a4242afa58114209986ccdbd572b5daefc492e Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 16:45:53 +0100 Subject: [PATCH 09/29] start removing old migration persistence --- .../connectors/migration-connector/src/lib.rs | 18 -- .../src/migration_applier.rs | 105 ------- .../src/migration_persistence.rs | 268 ---------------- .../sql-migration-connector/src/lib.rs | 7 - .../src/sql_migration_persistence.rs | 296 ------------------ .../src/sql_schema_differ.rs | 6 +- .../src/sql/barrel_migration_executor.rs | 10 +- .../migration-engine-tests/src/test_api.rs | 29 +- .../tests/migration_persistence/mod.rs | 172 ---------- .../tests/migration_tests.rs | 3 +- 10 files changed, 7 insertions(+), 907 deletions(-) delete mode 100644 migration-engine/connectors/migration-connector/src/migration_applier.rs delete mode 100644 migration-engine/connectors/migration-connector/src/migration_persistence.rs delete mode 100644 migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs delete mode 100644 migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs diff --git a/migration-engine/connectors/migration-connector/src/lib.rs b/migration-engine/connectors/migration-connector/src/lib.rs index ab0f315d2c41..d124e633675f 100644 --- a/migration-engine/connectors/migration-connector/src/lib.rs +++ b/migration-engine/connectors/migration-connector/src/lib.rs @@ -7,10 +7,6 @@ mod database_migration_step_applier; mod destructive_change_checker; mod error; mod imperative_migrations_persistence; 
-#[allow(missing_docs)] -mod migration_applier; -#[allow(missing_docs)] -mod migration_persistence; #[allow(missing_docs)] pub mod steps; @@ -24,8 +20,6 @@ pub use error::*; pub use imperative_migrations_persistence::{ ImperativeMigrationsPersistence, MigrationRecord, PersistenceNotInitializedError, Timestamp, }; -pub use migration_applier::*; -pub use migration_persistence::*; pub use migrations_directory::{create_migration_directory, list_migrations, ListMigrationsError, MigrationDirectory}; pub use steps::MigrationStep; @@ -64,9 +58,6 @@ pub trait MigrationConnector: Send + Sync + 'static { None } - /// See [MigrationPersistence](trait.MigrationPersistence.html). - fn migration_persistence(&self) -> &dyn MigrationPersistence; - /// See [ImperativeMigrationPersistence](trait.ImperativeMigrationPersistence.html). fn new_migration_persistence(&self) -> &dyn ImperativeMigrationsPersistence; @@ -78,15 +69,6 @@ pub trait MigrationConnector: Send + Sync + 'static { /// See [DestructiveChangeChecker](trait.DestructiveChangeChecker.html). fn destructive_change_checker(&self) -> &dyn DestructiveChangeChecker; - - /// See [MigrationStepApplier](trait.MigrationStepApplier.html). - fn migration_applier<'a>(&'a self) -> Box + Send + Sync + 'a> { - let applier = MigrationApplierImpl { - migration_persistence: self.migration_persistence(), - step_applier: self.database_migration_step_applier(), - }; - Box::new(applier) - } } /// Marker for the associated migration type for a connector. diff --git a/migration-engine/connectors/migration-connector/src/migration_applier.rs b/migration-engine/connectors/migration-connector/src/migration_applier.rs deleted file mode 100644 index 11aab23e2ea5..000000000000 --- a/migration-engine/connectors/migration-connector/src/migration_applier.rs +++ /dev/null @@ -1,105 +0,0 @@ -use crate::*; - -/// Apply and unapply migrations on the connector's database. 
-#[async_trait::async_trait] -pub trait MigrationApplier -where - T: Send + Sync, -{ - async fn apply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()>; - - async fn unapply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()>; -} - -pub struct MigrationApplierImpl<'a, T> -where - T: Send + Sync + 'static, -{ - pub migration_persistence: &'a dyn MigrationPersistence, - pub step_applier: &'a dyn DatabaseMigrationStepApplier, -} - -#[async_trait::async_trait] -impl<'a, T> MigrationApplier for MigrationApplierImpl<'a, T> -where - T: Send + Sync + 'static, -{ - async fn apply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()> { - assert_eq!(migration.status, MigrationStatus::Pending); // what other states are valid here? - let mut migration_updates = migration.update_params(); - migration_updates.status = MigrationStatus::MigrationInProgress; - self.migration_persistence.update(&migration_updates).await?; - - let apply_result = self.go_forward(&mut migration_updates, database_migration).await; - - match apply_result { - Ok(()) => { - migration_updates.mark_as_finished(); - self.migration_persistence.update(&migration_updates).await?; - Ok(()) - } - Err(err) => { - migration_updates.status = MigrationStatus::MigrationFailure; - migration_updates.errors = vec![format!("{:?}", err)]; - self.migration_persistence.update(&migration_updates).await?; - Err(err) - } - } - } - - async fn unapply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()> { - assert_eq!(migration.status, MigrationStatus::MigrationSuccess); // what other states are valid here? 
- let mut migration_updates = migration.update_params(); - migration_updates.status = MigrationStatus::RollingBack; - self.migration_persistence.update(&migration_updates).await?; - - let unapply_result = self.go_backward(&mut migration_updates, database_migration).await; - - match unapply_result { - Ok(()) => { - migration_updates.status = MigrationStatus::RollbackSuccess; - self.migration_persistence.update(&migration_updates).await?; - Ok(()) - } - Err(err) => { - migration_updates.status = MigrationStatus::RollbackFailure; - migration_updates.errors = vec![format!("{:?}", err)]; - self.migration_persistence.update(&migration_updates).await?; - Err(err) - } - } - } -} - -impl<'a, T> MigrationApplierImpl<'a, T> -where - T: Send + Sync, -{ - async fn go_forward( - &self, - migration_updates: &mut MigrationUpdateParams, - database_migration: &T, - ) -> ConnectorResult<()> { - let mut step = 0; - while self.step_applier.apply_step(&database_migration, step).await? { - step += 1; - migration_updates.applied += 1; - self.migration_persistence.update(&migration_updates).await?; - } - Ok(()) - } - - async fn go_backward( - &self, - migration_updates: &mut MigrationUpdateParams, - database_migration: &T, - ) -> ConnectorResult<()> { - let mut step = 0; - while self.step_applier.apply_step(&database_migration, step).await? 
{ - step += 1; - migration_updates.rolled_back += 1; - self.migration_persistence.update(&migration_updates).await?; - } - Ok(()) - } -} diff --git a/migration-engine/connectors/migration-connector/src/migration_persistence.rs b/migration-engine/connectors/migration-connector/src/migration_persistence.rs deleted file mode 100644 index 40f8ef4e15ca..000000000000 --- a/migration-engine/connectors/migration-connector/src/migration_persistence.rs +++ /dev/null @@ -1,268 +0,0 @@ -use crate::{error::ConnectorError, steps::*, ConnectorResult}; -use chrono::{DateTime, Utc}; -use datamodel::{ast::SchemaAst, Datamodel}; -use serde::Serialize; -use std::str::FromStr; - -/// This trait is implemented by each connector. It provides a generic API to store and retrieve [Migration](struct.Migration.html) records. -#[async_trait::async_trait] -pub trait MigrationPersistence: Send + Sync { - /// Initialize migration persistence state. E.g. create the migrations table in an SQL database. - async fn init(&self) -> Result<(), ConnectorError>; - - /// Drop all persisted state. - async fn reset(&self) -> Result<(), ConnectorError>; - - async fn last_non_watch_applied_migration(&self) -> Result, ConnectorError> { - let migration = - self.load_all().await?.into_iter().rev().find(|migration| { - !migration.is_watch_migration() && migration.status == MigrationStatus::MigrationSuccess - }); - - Ok(migration) - } - - async fn last_non_watch_migration(&self) -> Result, ConnectorError> { - let mut all_migrations = self.load_all().await?; - all_migrations.reverse(); - let migration = all_migrations.into_iter().find(|m| !m.is_watch_migration()); - - Ok(migration) - } - - /// Returns the last successful Migration. - async fn last(&self) -> Result, ConnectorError> { - Ok(self.last_two_migrations().await?.0) - } - - /// Returns the last two successful migrations, for rollback purposes. The tuple will be - /// interpreted as (last_migration, second_to_last_migration). 
- async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)>; - - /// Fetch a migration by name. - async fn by_name(&self, name: &str) -> Result, ConnectorError>; - - /// This powers the listMigrations command. - async fn load_all(&self) -> Result, ConnectorError>; - - /// Write the migration to the Migration table. - async fn create(&self, migration: Migration) -> Result; - - /// Used by the MigrationApplier to write the progress of a [Migration](struct.Migration.html) - /// into the database. - async fn update(&self, params: &MigrationUpdateParams) -> Result<(), ConnectorError>; - - /// Returns whether the migration with the provided migration id has already been successfully applied. - /// - /// The default impl will load all migrations and scan for the provided migration id. Implementors are encouraged to implement this more efficiently. - async fn migration_is_already_applied(&self, migration_id: &str) -> Result { - let migrations = self.load_all().await?; - - let already_applied = migrations - .iter() - .any(|migration| migration.status == MigrationStatus::MigrationSuccess && migration.name == migration_id); - - Ok(already_applied) - } -} - -/// The representation of a migration as persisted through [MigrationPersistence](trait.MigrationPersistence.html). -#[derive(Debug, Clone, PartialEq)] -pub struct Migration { - /// The migration id. - pub name: String, - pub revision: usize, - pub status: MigrationStatus, - pub applied: usize, - pub rolled_back: usize, - /// The _target_ Prisma schema. - pub datamodel_string: String, - /// The schema migration steps to apply to get to the target Prisma schema. - pub datamodel_steps: Vec, - pub database_migration: serde_json::Value, - pub errors: Vec, - pub started_at: DateTime, - pub finished_at: Option>, -} - -/// Updates to be made to a persisted [Migration](struct.Migration.html). 
-#[derive(Debug, Clone)] -pub struct MigrationUpdateParams { - pub name: String, - pub new_name: String, - pub revision: usize, - pub status: MigrationStatus, - pub applied: usize, - pub rolled_back: usize, - pub errors: Vec, - pub finished_at: Option>, -} - -impl MigrationUpdateParams { - pub fn mark_as_finished(&mut self) { - self.status = MigrationStatus::MigrationSuccess; - self.finished_at = Some(Migration::timestamp_without_nanos()); - } -} - -pub trait IsWatchMigration { - fn is_watch_migration(&self) -> bool; -} - -pub struct NewMigration { - pub name: String, - pub datamodel_string: String, - pub datamodel_steps: Vec, - pub database_migration: serde_json::Value, -} - -impl Migration { - pub fn new(params: NewMigration) -> Migration { - let NewMigration { - name, - datamodel_string, - datamodel_steps, - database_migration, - } = params; - - Migration { - name, - revision: 0, - status: MigrationStatus::Pending, - datamodel_string, - datamodel_steps, - applied: 0, - rolled_back: 0, - database_migration, - errors: Vec::new(), - started_at: Self::timestamp_without_nanos(), - finished_at: None, - } - } - - pub fn update_params(&self) -> MigrationUpdateParams { - MigrationUpdateParams { - name: self.name.clone(), - new_name: self.name.clone(), - revision: self.revision, - status: self.status, - applied: self.applied, - rolled_back: self.rolled_back, - errors: self.errors.clone(), - finished_at: self.finished_at, - } - } - - // SQLite does not store nano precision. Therefore we cut it so we can assert equality in our tests. 
- pub fn timestamp_without_nanos() -> DateTime { - let timestamp = Utc::now().timestamp_millis(); - let nsecs = ((timestamp % 1000) * 1_000_000) as u32; - let secs = (timestamp / 1000) as i64; - let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); - let datetime: DateTime = DateTime::from_utc(naive, Utc); - datetime - } - - pub fn parse_datamodel(&self) -> Result { - datamodel::parse_datamodel_and_ignore_datasource_urls(&self.datamodel_string) - .map(|d| d.subject) - .map_err(|err| err.to_pretty_string("schema.prisma", &self.datamodel_string)) - } - - pub fn parse_schema_ast(&self) -> Result { - datamodel::parse_schema_ast(&self.datamodel_string) - .map_err(|err| err.to_pretty_string("schema.prisma", &self.datamodel_string)) - } -} - -impl IsWatchMigration for Migration { - fn is_watch_migration(&self) -> bool { - self.name.starts_with("watch") - } -} - -#[derive(Debug, Serialize, PartialEq, Clone, Copy)] -pub enum MigrationStatus { - Pending, - MigrationInProgress, - MigrationSuccess, - MigrationFailure, - RollingBack, - RollbackSuccess, - RollbackFailure, -} - -impl MigrationStatus { - pub fn code(&self) -> &str { - match self { - MigrationStatus::Pending => "Pending", - MigrationStatus::MigrationInProgress => "MigrationInProgress", - MigrationStatus::MigrationSuccess => "MigrationSuccess", - MigrationStatus::MigrationFailure => "MigrationFailure", - MigrationStatus::RollingBack => "RollingBack", - MigrationStatus::RollbackSuccess => "RollbackSuccess", - MigrationStatus::RollbackFailure => "RollbackFailure", - } - } - - pub fn is_success(&self) -> bool { - matches!(self, MigrationStatus::MigrationSuccess) - } - - pub fn is_pending(&self) -> bool { - matches!(self, MigrationStatus::Pending) - } -} - -impl FromStr for MigrationStatus { - type Err = String; - - fn from_str(s: &str) -> Result { - let status = match s { - "Pending" => MigrationStatus::Pending, - "MigrationInProgress" => MigrationStatus::MigrationInProgress, - "MigrationSuccess" => 
MigrationStatus::MigrationSuccess, - "MigrationFailure" => MigrationStatus::MigrationFailure, - "RollingBack" => MigrationStatus::RollingBack, - "RollbackSuccess" => MigrationStatus::RollbackSuccess, - "RollbackFailure" => MigrationStatus::RollbackFailure, - _ => return Err(format!("MigrationStatus {:?} is not known", s)), - }; - - Ok(status) - } -} - -/// A no-op implementor of [MigrationPersistence](trait.MigrationPersistence.html). -pub struct EmptyMigrationPersistence {} - -#[async_trait::async_trait] -impl MigrationPersistence for EmptyMigrationPersistence { - async fn init(&self) -> Result<(), ConnectorError> { - Ok(()) - } - - async fn reset(&self) -> Result<(), ConnectorError> { - Ok(()) - } - - async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)> { - Ok((None, None)) - } - - async fn by_name(&self, _name: &str) -> Result, ConnectorError> { - Ok(None) - } - - async fn load_all(&self) -> Result, ConnectorError> { - Ok(Vec::new()) - } - - async fn create(&self, _migration: Migration) -> Result { - unimplemented!("Not allowed on a EmptyMigrationPersistence") - } - - async fn update(&self, _params: &MigrationUpdateParams) -> Result<(), ConnectorError> { - unimplemented!("Not allowed on a EmptyMigrationPersistence") - } -} diff --git a/migration-engine/connectors/sql-migration-connector/src/lib.rs b/migration-engine/connectors/sql-migration-connector/src/lib.rs index 177d87cf1edf..969c9440c01c 100644 --- a/migration-engine/connectors/sql-migration-connector/src/lib.rs +++ b/migration-engine/connectors/sql-migration-connector/src/lib.rs @@ -12,13 +12,10 @@ mod sql_database_step_applier; mod sql_destructive_change_checker; mod sql_imperative_migration_persistence; mod sql_migration; -mod sql_migration_persistence; mod sql_renderer; mod sql_schema_calculator; mod sql_schema_differ; - pub use sql_migration::SqlMigration; -pub use sql_migration_persistence::MIGRATION_TABLE_NAME; use connection_wrapper::Connection; use datamodel::Datamodel; @@ 
-125,10 +122,6 @@ impl MigrationConnector for SqlMigrationConnector { self.flavour.check_database_version_compatibility(datamodel) } - fn migration_persistence(&self) -> &dyn MigrationPersistence { - self - } - fn database_migration_inferrer(&self) -> &dyn DatabaseMigrationInferrer { self } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs b/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs deleted file mode 100644 index b0b323ed4eb6..000000000000 --- a/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs +++ /dev/null @@ -1,296 +0,0 @@ -use crate::{connection_wrapper::Connection, SqlMigrationConnector}; -use barrel::types; -use chrono::*; -use migration_connector::*; -use quaint::{ast::*, connector::ResultSet, prelude::SqlFamily}; -use std::convert::TryFrom; - -#[async_trait::async_trait] -impl MigrationPersistence for SqlMigrationConnector { - async fn init(&self) -> Result<(), ConnectorError> { - let schema_name = self.conn().connection_info().schema_name(); - - let sql_str = match self.flavour.sql_family() { - SqlFamily::Sqlite => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table_if_not_exists(MIGRATION_TABLE_NAME, migration_table_setup_sqlite); - m.make_from(barrel::SqlVariant::Sqlite) - } - SqlFamily::Postgres => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table(MIGRATION_TABLE_NAME, migration_table_setup_postgres); - m.schema(schema_name).make_from(barrel::SqlVariant::Pg) - } - SqlFamily::Mysql => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table(MIGRATION_TABLE_NAME, migration_table_setup_mysql); - m.make_from(barrel::SqlVariant::Mysql) - } - SqlFamily::Mssql => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table_if_not_exists(MIGRATION_TABLE_NAME, migration_table_setup_mssql); - m.make_from(barrel::SqlVariant::Mssql) - } - }; - - 
self.conn().raw_cmd(&sql_str).await.ok(); - - Ok(()) - } - - async fn reset(&self) -> ConnectorResult<()> { - use quaint::ast::Delete; - - self.conn() - .query(Delete::from_table(( - self.conn().connection_info().schema_name(), - MIGRATION_TABLE_NAME, - ))) - .await - .ok(); - - Ok(()) - } - - async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)> { - last_applied_migrations(self.conn(), self.table()).await - } - - async fn load_all(&self) -> ConnectorResult> { - let query = Select::from_table(self.table()).order_by(REVISION_COLUMN.ascend()); - let result_set = self.conn().query(query).await?; - - Ok(parse_rows_new(result_set)) - } - - async fn by_name(&self, name: &str) -> ConnectorResult> { - let conditions = NAME_COLUMN.equals(name); - let query = Select::from_table(self.table()) - .so_that(conditions) - .order_by(REVISION_COLUMN.descend()); - let result_set = self.conn().query(query).await?; - - Ok(parse_rows_new(result_set).into_iter().next()) - } - - async fn create(&self, migration: Migration) -> Result { - let mut cloned = migration.clone(); - let model_steps_json = serde_json::to_string(&migration.datamodel_steps).unwrap(); - let database_migration_json = serde_json::to_string(&migration.database_migration).unwrap(); - let errors_json = serde_json::to_string(&migration.errors).unwrap(); - - let insert = Insert::single_into(self.table()) - .value(DATAMODEL_COLUMN, migration.datamodel_string) - .value(NAME_COLUMN, migration.name) - .value(STATUS_COLUMN, migration.status.code()) - .value(APPLIED_COLUMN, migration.applied) - .value(ROLLED_BACK_COLUMN, migration.rolled_back) - .value(DATAMODEL_STEPS_COLUMN, model_steps_json) - .value(DATABASE_MIGRATION_COLUMN, database_migration_json) - .value(ERRORS_COLUMN, errors_json) - .value(STARTED_AT_COLUMN, self.convert_datetime(migration.started_at)) - .value(FINISHED_AT_COLUMN, Option::>::None); - - match self.flavour.sql_family() { - SqlFamily::Sqlite | SqlFamily::Mysql => { - let result_set = 
self.conn().query(insert).await.unwrap(); - let id = result_set.last_insert_id().unwrap(); - - cloned.revision = usize::try_from(id).unwrap(); - } - SqlFamily::Postgres | SqlFamily::Mssql => { - let returning_insert = Insert::from(insert).returning(&["revision"]); - let result_set = self.conn().query(returning_insert).await.unwrap(); - - if let Some(row) = result_set.into_iter().next() { - cloned.revision = row["revision"].as_i64().unwrap() as usize; - } - } - } - - Ok(cloned) - } - - async fn update(&self, params: &MigrationUpdateParams) -> Result<(), ConnectorError> { - let finished_at_value = match params.finished_at { - Some(x) => self.convert_datetime(x), - None => Value::from(Option::>::None), - }; - let errors_json = serde_json::to_string(¶ms.errors).unwrap(); - let query = Update::table(self.table()) - .set(NAME_COLUMN, params.new_name.clone()) - .set(STATUS_COLUMN, params.status.code()) - .set(APPLIED_COLUMN, params.applied) - .set(ROLLED_BACK_COLUMN, params.rolled_back) - .set(ERRORS_COLUMN, errors_json) - .set(FINISHED_AT_COLUMN, finished_at_value) - .so_that( - NAME_COLUMN - .equals(params.name.clone()) - .and(REVISION_COLUMN.equals(params.revision)), - ); - - self.conn().query(query).await?; - - Ok(()) - } -} - -/// Returns the last 2 applied migrations, or a shorter vec in absence of applied migrations. 
-async fn last_applied_migrations( - conn: &Connection, - table: Table<'_>, -) -> ConnectorResult<(Option, Option)> { - let conditions = STATUS_COLUMN.equals(MigrationStatus::MigrationSuccess.code()); - let query = Select::from_table(table) - .so_that(conditions) - .order_by(REVISION_COLUMN.descend()) - .limit(2); - - let result_set = conn.query(query).await?; - let mut rows = parse_rows_new(result_set).into_iter(); - let last = rows.next(); - let second_to_last = rows.next(); - Ok((last, second_to_last)) -} - -fn migration_table_setup_sqlite(t: &mut barrel::Table) { - migration_table_setup(t, types::text(), types::custom("DATETIME"), types::custom("TEXT")); -} - -fn migration_table_setup_postgres(t: &mut barrel::Table) { - migration_table_setup(t, types::text(), types::custom("timestamp(3)"), types::custom("TEXT")); -} - -fn migration_table_setup_mysql(t: &mut barrel::Table) { - migration_table_setup( - t, - types::text(), - types::custom("datetime(3)"), - types::custom("LONGTEXT"), - ); -} - -fn migration_table_setup_mssql(t: &mut barrel::Table) { - migration_table_setup( - t, - types::custom("nvarchar(max)"), - types::custom("datetime2"), - types::custom("nvarchar(max)"), - ); -} - -fn migration_table_setup( - t: &mut barrel::Table, - text_type: barrel::types::Type, - datetime_type: barrel::types::Type, - unlimited_text_type: barrel::types::Type, -) { - t.add_column(REVISION_COLUMN, types::primary()); - t.add_column(NAME_COLUMN, text_type.clone()); - t.add_column(DATAMODEL_COLUMN, unlimited_text_type.clone()); - t.add_column(STATUS_COLUMN, text_type); - t.add_column(APPLIED_COLUMN, types::integer()); - t.add_column(ROLLED_BACK_COLUMN, types::integer()); - t.add_column(DATAMODEL_STEPS_COLUMN, unlimited_text_type.clone()); - t.add_column(DATABASE_MIGRATION_COLUMN, unlimited_text_type.clone()); - t.add_column(ERRORS_COLUMN, unlimited_text_type); - t.add_column(STARTED_AT_COLUMN, datetime_type.clone()); - t.add_column(FINISHED_AT_COLUMN, 
datetime_type.nullable(true)); -} - -impl SqlMigrationConnector { - fn table(&self) -> Table<'_> { - match self.flavour.sql_family() { - SqlFamily::Sqlite => { - // sqlite case. Otherwise quaint produces invalid SQL - MIGRATION_TABLE_NAME.to_string().into() - } - _ => ( - self.conn().connection_info().schema_name().to_string(), - MIGRATION_TABLE_NAME.to_string(), - ) - .into(), - } - } - - fn convert_datetime(&self, datetime: DateTime) -> Value<'_> { - match self.flavour.sql_family() { - SqlFamily::Sqlite => Value::integer(datetime.timestamp_millis()), - SqlFamily::Postgres => Value::datetime(datetime), - SqlFamily::Mysql => Value::datetime(datetime), - SqlFamily::Mssql => Value::datetime(datetime), - } - } -} - -fn convert_parameterized_date_value(db_value: &Value<'_>) -> DateTime { - match db_value { - Value::Integer(Some(x)) => timestamp_to_datetime(*x), - Value::DateTime(Some(x)) => *x, - Value::Date(Some(date)) => DateTime::from_utc(date.and_hms(0, 0, 0), Utc), - x => unimplemented!("Got unsupported value {:?} in date conversion", x), - } -} - -fn timestamp_to_datetime(timestamp: i64) -> DateTime { - let nsecs = ((timestamp % 1000) * 1_000_000) as u32; - let secs = (timestamp / 1000) as i64; - let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); - let datetime: DateTime = DateTime::from_utc(naive, Utc); - - datetime -} - -fn parse_rows_new(result_set: ResultSet) -> Vec { - result_set - .into_iter() - .map(|row| { - let datamodel_string: String = row[DATAMODEL_COLUMN].to_string().unwrap(); - let datamodel_steps_json: String = row[DATAMODEL_STEPS_COLUMN].to_string().unwrap(); - - let database_migration_string: String = row[DATABASE_MIGRATION_COLUMN].to_string().unwrap(); - let errors_json: String = row[ERRORS_COLUMN].to_string().unwrap(); - - let finished_at = match &row[FINISHED_AT_COLUMN] { - v if v.is_null() => None, - x => Some(convert_parameterized_date_value(x)), - }; - - let datamodel_steps = - 
serde_json::from_str(&datamodel_steps_json).expect("Error parsing the migration steps"); - - let database_migration_json = - serde_json::from_str(&database_migration_string).expect("Error parsing the database migration steps"); - let errors: Vec = serde_json::from_str(&errors_json).unwrap(); - - Migration { - name: row[NAME_COLUMN].to_string().unwrap(), - revision: row[REVISION_COLUMN].as_i64().unwrap() as usize, - datamodel_string, - status: row[STATUS_COLUMN].to_string().unwrap().parse().unwrap(), - applied: row[APPLIED_COLUMN].as_i64().unwrap() as usize, - rolled_back: row[ROLLED_BACK_COLUMN].as_i64().unwrap() as usize, - datamodel_steps, - database_migration: database_migration_json, - errors, - started_at: convert_parameterized_date_value(&row[STARTED_AT_COLUMN]), - finished_at, - } - }) - .collect() -} - -/// The name of the migrations table. -pub static MIGRATION_TABLE_NAME: &str = "_Migration"; -static NAME_COLUMN: &str = "name"; -static REVISION_COLUMN: &str = "revision"; -static DATAMODEL_COLUMN: &str = "datamodel"; -static STATUS_COLUMN: &str = "status"; -static APPLIED_COLUMN: &str = "applied"; -static ROLLED_BACK_COLUMN: &str = "rolled_back"; -static DATAMODEL_STEPS_COLUMN: &str = "datamodel_steps"; -static DATABASE_MIGRATION_COLUMN: &str = "database_migration"; -static ERRORS_COLUMN: &str = "errors"; -static STARTED_AT_COLUMN: &str = "started_at"; -static FINISHED_AT_COLUMN: &str = "finished_at"; diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs index e61de347667f..89449dc39b8b 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs @@ -13,7 +13,7 @@ use crate::{ self, AddColumn, AddForeignKey, AlterColumn, AlterEnum, AlterTable, CreateEnum, CreateIndex, CreateTable, DropColumn, DropEnum, DropForeignKey, DropIndex, 
DropTable, RedefineTable, SqlMigrationStep, TableChange, }, - SqlFlavour, SqlSchema, MIGRATION_TABLE_NAME, + SqlFlavour, SqlSchema, }; use column::ColumnTypeChange; use enums::EnumDiffer; @@ -496,9 +496,7 @@ impl<'schema> SqlSchemaDiffer<'schema> { } fn table_is_ignored(&self, table_name: &str) -> bool { - table_name == MIGRATION_TABLE_NAME - || table_name == "_prisma_migrations" - || self.flavour.table_should_be_ignored(&table_name) + table_name == "_prisma_migrations" || self.flavour.table_should_be_ignored(&table_name) } fn enum_pairs(&self) -> impl Iterator> { diff --git a/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs b/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs index 2954cb897646..d4e75ca9f785 100644 --- a/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs +++ b/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs @@ -1,6 +1,5 @@ use crate::sql::TestApi; use quaint::prelude::Queryable; -use sql_migration_connector::MIGRATION_TABLE_NAME; use sql_schema_describer::SqlSchema; pub struct BarrelMigrationExecutor<'a> { @@ -26,14 +25,7 @@ impl BarrelMigrationExecutor<'_> { let full_sql = migration.make_from(self.sql_variant); self.api.database().raw_cmd(&full_sql).await.unwrap(); - let mut result = self.api.describe_database().await.expect("Description failed"); - - // The presence of the _Migration table makes assertions harder. Therefore remove it. 
- result.tables = result - .tables - .into_iter() - .filter(|t| t.name != MIGRATION_TABLE_NAME) - .collect(); + let result = self.api.describe_database().await.expect("Description failed"); Ok(result) } diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 8cd64c26c8b6..85a80c97e3e7 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -24,7 +24,7 @@ use super::{ }; use crate::{connectors::Tags, test_api::list_migration_directories::ListMigrationDirectories, AssertionResult}; use enumflags2::BitFlags; -use migration_connector::{ImperativeMigrationsPersistence, MigrationConnector, MigrationPersistence, MigrationRecord}; +use migration_connector::{ImperativeMigrationsPersistence, MigrationRecord}; use migration_core::{ api::{GenericApi, MigrationApi}, commands::ApplyScriptInput, @@ -33,7 +33,7 @@ use quaint::{ prelude::{ConnectionInfo, Queryable, SqlFamily}, single::Quaint, }; -use sql_migration_connector::{SqlMigration, SqlMigrationConnector, MIGRATION_TABLE_NAME}; +use sql_migration_connector::{SqlMigration, SqlMigrationConnector}; use sql_schema_describer::*; use tempfile::TempDir; use test_setup::*; @@ -71,14 +71,6 @@ impl TestApi { self.tags.contains(Tags::Mariadb) } - pub async fn migration_persistence(&self) -> &dyn MigrationPersistence { - let persistence = self.api.connector().migration_persistence(); - - persistence.init().await.unwrap(); - - persistence - } - pub fn imperative_migration_persistence<'a>(&'a self) -> &(dyn ImperativeMigrationsPersistence + 'a) { self.api.connector() } @@ -186,22 +178,7 @@ impl TestApi { } pub async fn describe_database(&self) -> Result { - let mut result = self.api.connector().describe_schema().await?; - - // the presence of the _Migration table makes assertions harder. Therefore remove it from the result. 
- result.tables = result - .tables - .into_iter() - .filter(|t| t.name != MIGRATION_TABLE_NAME) - .collect(); - - // Also the sequences of the _Migration table - result.sequences = result - .sequences - .into_iter() - .filter(|seq| !seq.name.contains("_Migration")) - .collect(); - + let result = self.api.connector().describe_schema().await?; Ok(result) } diff --git a/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs b/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs deleted file mode 100644 index 2fdffdf2ed49..000000000000 --- a/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs +++ /dev/null @@ -1,172 +0,0 @@ -mod imperative_migration_persistence_tests; - -use migration_connector::{steps::CreateEnum, *}; -use migration_engine_tests::*; -use pretty_assertions::assert_eq; -use quaint::prelude::SqlFamily; - -fn empty_migration(name: String) -> Migration { - Migration { - name, - revision: 0, - status: MigrationStatus::Pending, - datamodel_string: String::new(), - datamodel_steps: Vec::new(), - applied: 0, - rolled_back: 0, - database_migration: serde_json::json!({}), - errors: Vec::new(), - started_at: Migration::timestamp_without_nanos(), - finished_at: None, - } -} - -#[test_each_connector] -async fn last_should_return_none_if_there_is_no_migration(api: &TestApi) { - let persistence = api.migration_persistence().await; - let result = persistence.last().await.unwrap(); - assert_eq!(result.is_some(), false); -} - -#[test_each_connector] -async fn last_must_return_none_if_there_is_no_successful_migration(api: &TestApi) -> TestResult { - let persistence = api.migration_persistence().await; - persistence.create(empty_migration("my_migration".to_string())).await?; - let loaded = persistence.last().await?; - assert_eq!(loaded, None); - - Ok(()) -} - -#[test_each_connector] -async fn load_all_should_return_empty_if_there_is_no_migration(api: &TestApi) { - let persistence = 
api.migration_persistence().await; - let result = persistence.load_all().await.unwrap(); - assert_eq!(result.is_empty(), true); -} - -#[test_each_connector] -async fn load_all_must_return_all_created_migrations(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration1 = persistence - .create(empty_migration("migration_1".to_string())) - .await - .unwrap(); - let migration2 = persistence - .create(empty_migration("migration_2".to_string())) - .await - .unwrap(); - let migration3 = persistence - .create(empty_migration("migration_3".to_string())) - .await - .unwrap(); - - let mut result = persistence.load_all().await.unwrap(); - if matches!(api.sql_family(), SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - result[0].started_at = migration1.started_at; - result[1].started_at = migration2.started_at; - result[2].started_at = migration3.started_at; - } - assert_eq!(result, vec![migration1, migration2, migration3]) -} - -#[test_each_connector] -async fn create_should_allow_to_create_a_new_migration(api: &TestApi) { - let dm = r#" - model Test { - id String @id @default(cuid()) - } - "#; - - let persistence = api.migration_persistence().await; - let mut migration = empty_migration("my_migration".to_string()); - migration.status = MigrationStatus::MigrationSuccess; - migration.datamodel_string = dm.to_owned(); - migration.datamodel_steps = vec![MigrationStep::CreateEnum(CreateEnum { - r#enum: "MyEnum".to_string(), - values: vec!["A".to_string(), "B".to_string()], - })]; - migration.errors = vec!["error1".to_string(), "error2".to_string()]; - - let result = persistence.create(migration.clone()).await.unwrap(); - migration.revision = result.revision; // copy over the generated revision so that the assertion can work.` - - assert_eq!(result, migration); - let mut loaded = persistence.last().await.unwrap().unwrap(); - - if matches!(api.sql_family(), 
SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - loaded.started_at = migration.started_at; - } - - assert_eq!(loaded, migration); -} - -#[test_each_connector] -async fn create_should_increment_revisions(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration1 = persistence - .create(empty_migration("migration_1".to_string())) - .await - .unwrap(); - let migration2 = persistence - .create(empty_migration("migration_2".to_string())) - .await - .unwrap(); - assert_eq!(migration1.revision + 1, migration2.revision); -} - -#[test_each_connector] -async fn update_must_work(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration = persistence - .create(empty_migration("my_migration".to_string())) - .await - .unwrap(); - - let mut params = migration.update_params(); - params.status = MigrationStatus::MigrationSuccess; - params.applied = 10; - params.rolled_back = 11; - params.errors = vec!["err1".to_string(), "err2".to_string()]; - params.finished_at = Some(Migration::timestamp_without_nanos()); - params.new_name = "my_new_migration_name".to_string(); - - persistence.update(¶ms).await.unwrap(); - - let loaded = persistence.last().await.unwrap().unwrap(); - assert_eq!(loaded.status, params.status); - assert_eq!(loaded.applied, params.applied); - assert_eq!(loaded.rolled_back, params.rolled_back); - assert_eq!(loaded.errors, params.errors); - if !matches!(api.sql_family(), SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - assert_eq!(loaded.finished_at, params.finished_at); - } - assert_eq!(loaded.name, params.new_name); -} - -#[test_each_connector] -async fn migration_is_already_applied_must_work(api: &TestApi) -> TestResult { - let persistence = api.migration_persistence().await; - - let mut migration_1 = 
empty_migration("migration_1".to_string()); - migration_1.status = MigrationStatus::MigrationSuccess; - - persistence.create(migration_1).await?; - - let mut migration_2 = empty_migration("migration_2".to_string()); - migration_2.status = MigrationStatus::MigrationFailure; - - persistence.create(migration_2).await?; - - assert!(persistence.migration_is_already_applied("migration_1").await?); - assert!(!persistence.migration_is_already_applied("migration_2").await?); - assert!(!persistence.migration_is_already_applied("another_migration").await?); - - Ok(()) -} diff --git a/migration-engine/migration-engine-tests/tests/migration_tests.rs b/migration-engine/migration-engine-tests/tests/migration_tests.rs index e3187d9b6a86..7fe4187126d8 100644 --- a/migration-engine/migration-engine-tests/tests/migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migration_tests.rs @@ -7,7 +7,6 @@ mod existing_data; mod existing_databases; mod initialization; mod list_migration_directories; -mod migration_persistence; mod migrations; mod reset; mod schema_push; @@ -2435,7 +2434,7 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { api.schema_push(dm1).send().await?.assert_green()?; // Drop the existing migrations. 
- api.migration_persistence().await.reset().await?; + api.reset().send().await?; let dm2 = r#" datasource db { From a8838ec24338a841354c4f8e5ad2afa786b9ec07 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Sun, 20 Dec 2020 19:48:11 +0100 Subject: [PATCH 10/29] start removing migrationsteps serialization --- .../migration-connector/src/steps.rs | 4 +- .../migration-connector/tests/steps_tests.rs | 784 +++++++++--------- .../core/rpc_examples/applyMigration.json | 179 ---- .../core/rpc_examples/dmmfToDml.json | 9 - .../rpc_examples/inferMigrationSteps.json | 11 - .../core/rpc_examples/listMigrations.json | 1 - .../core/rpc_examples/migrationProgress.json | 9 - migration-engine/core/src/commands.rs | 16 - migration-engine/core/src/lib.rs | 2 +- .../src/command_helpers.rs | 21 - .../migration-engine-tests/src/lib.rs | 2 - .../migration-engine-tests/src/sql.rs | 1 - 12 files changed, 395 insertions(+), 644 deletions(-) delete mode 100644 migration-engine/core/rpc_examples/applyMigration.json delete mode 100644 migration-engine/core/rpc_examples/dmmfToDml.json delete mode 100644 migration-engine/core/rpc_examples/inferMigrationSteps.json delete mode 100644 migration-engine/core/rpc_examples/listMigrations.json delete mode 100644 migration-engine/core/rpc_examples/migrationProgress.json delete mode 100644 migration-engine/migration-engine-tests/src/command_helpers.rs diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index 43807a0cd9fb..e206cfa613d1 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -4,8 +4,8 @@ use datamodel::ast; use serde::{Deserialize, Serialize}; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +// #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +// #[serde(tag = "tag", deny_unknown_fields)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), diff --git a/migration-engine/connectors/migration-connector/tests/steps_tests.rs b/migration-engine/connectors/migration-connector/tests/steps_tests.rs index 9e3ce175ea35..5614b7cf4348 100644 --- a/migration-engine/connectors/migration-connector/tests/steps_tests.rs +++ b/migration-engine/connectors/migration-connector/tests/steps_tests.rs @@ -1,392 +1,392 @@ -#![allow(non_snake_case)] - -use migration_connector::steps::*; - -#[test] -fn full_CreateModel_must_work() { - let json = r#"{"tag":"CreateModel","model":"Blog"}"#; - let expected_struct = MigrationStep::CreateModel(CreateModel { - model: "Blog".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateModel_must_work() { - let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - model: "Blog".to_string(), - new_name: None, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_UpdateModel_must_work() { - let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - model: "Blog".to_string(), - new_name: Some("MyBlog".to_string()), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteModel_must_work() { - let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; - let expected_struct = MigrationStep::DeleteModel(DeleteModel { - model: "Blog".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_CreateField_must_work() { - let json = r#" - { - "tag":"CreateField", - "model":"Blog", - "field":"title", - "type":"String", - "arity":"Required" - } - "#; - let 
expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - field: "title".to_string(), - tpe: "String".to_owned(), - arity: FieldArity::Required, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_CreateField_must_work() { - let json = r#"{ - "tag":"CreateField", - "model": "Blog", - "field": "title", - "type": "String", - "arity": "Optional" - }"#; - let expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - field: "title".to_string(), - tpe: "String".to_owned(), - arity: FieldArity::Optional, - }); - - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateField_must_work() { - let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - field: "title".to_string(), - new_name: None, - tpe: None, - arity: None, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_UpdateField_must_work() { - let json = r#" - { - "tag": "UpdateField", - "model": "Blog", - "field": "title", - "newName": "MyBlog", - "type": "String", - "arity": "Optional" - } - "#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - field: "title".to_string(), - new_name: Some("MyBlog".to_string()), - tpe: Some("String".to_owned()), - arity: Some(FieldArity::Optional), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteField_must_work() { - let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; - let expected_struct = MigrationStep::DeleteField(DeleteField { - model: "Blog".to_string(), - field: "title".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn CreateEnum_must_work() { - let json = r#" - { - "tag": "CreateEnum", - "enum": "BlogCategory", - "values": ["Politics","Tech"] - } - "#; - let expected_struct = 
MigrationStep::CreateEnum(CreateEnum { - r#enum: "BlogCategory".to_string(), - values: vec!["Politics".to_string(), "Tech".to_string()], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateEnum_must_work() { - let json = r#" - { - "tag": "UpdateEnum", - "enum": "BlogCategory" - } - "#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - r#enum: "BlogCategory".to_string(), - new_name: None, - created_values: vec![], - deleted_values: vec![], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_Update_Enum_must_work() { - let json = r#" - { - "tag": "UpdateEnum", - "enum": "BlogCategory", - "newName": "MyBlogCategory", - "createdValues": ["Tech"], - "deletedValues": ["Nology"] - } - "#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - r#enum: "BlogCategory".to_string(), - new_name: Some("MyBlogCategory".to_string()), - created_values: vec!["Tech".to_string()], - deleted_values: vec!["Nology".to_string()], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteEnum_must_work() { - let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; - let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { - r#enum: "BlogCategory".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn CreateDirective_must_work() { - let json = r#" - { - "tag": "CreateDirective", - "location": { - "directive": "map", - "path": { - "tag": "Model", - "model": "Blog" - } - } - } - "#; - - let expected_step = MigrationStep::CreateDirective(CreateDirective { - location: DirectiveLocation { - path: DirectivePath::Model { - model: "Blog".to_owned(), - arguments: None, - }, - directive: "map".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn minimal_DeleteDirective_must_work() { - let json = r#" - { - "tag": "DeleteDirective", - "location": { - "path": { - "tag": "Field", - "model": "Blog", - "field": "title" - }, - 
"directive": "map" - } - } - "#; - - let expected_step = MigrationStep::DeleteDirective(DeleteDirective { - location: DirectiveLocation { - path: DirectivePath::Field { - model: "Blog".to_owned(), - field: "title".to_owned(), - }, - directive: "map".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn full_DeleteDirective_must_work() { - let json = r#" - { - "tag": "DeleteDirective", - "location": { - "path": { - "tag": "Model", - "model": "Blog", - "arguments": [ - { - "name": "", - "value": "[name, age]" - } - ] - }, - "directive": "unique" - } - } - "#; - - let expected_step = MigrationStep::DeleteDirective(DeleteDirective { - location: DirectiveLocation { - path: DirectivePath::Model { - model: "Blog".to_owned(), - arguments: Some(vec![Argument { - name: "".to_owned(), - value: MigrationExpression("[name, age]".to_owned()), - }]), - }, - directive: "unique".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn UpdateArgument_must_work() { - let json = r#" - { - "tag": "UpdateArgument", - "location": { - "tag": "Directive", - "path": { - "tag": "Model", - "model": "CatMood" - }, - "directive": "map" - }, - "argument": "name", - "newValue": "cat_mood" - } - "#; - - let expected_step = MigrationStep::UpdateArgument(UpdateArgument { - location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Model { - model: "CatMood".to_owned(), - arguments: None, - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - new_value: MigrationExpression("cat_mood".to_owned()), - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn CreateArgument_must_work() { - let json = r#" - { - "tag": "CreateArgument", - "location": { - "tag": "Directive", - "directive": "map", - "path": { - "enum": "CatMood", - "tag": "Enum" - } - }, - "argument": "name", - "value": "cat_mood" - } - "#; - - let expected_step = MigrationStep::CreateArgument(CreateArgument { - 
location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Enum { - r#enum: "CatMood".to_owned(), - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - value: MigrationExpression("cat_mood".to_owned()), - }); - - println!("{}", serde_json::to_value(&expected_step).unwrap()); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn DeleteArgument_must_work() { - let json = r#" - { - "tag": "DeleteArgument", - "location": { - "tag": "Directive", - "path": { - "tag":"Enum", - "enum": "CatMood" - }, - "directive":"map" - }, - "argument": "name" - } - "#; - - let expected_step = MigrationStep::DeleteArgument(DeleteArgument { - location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Enum { - r#enum: "CatMood".to_owned(), - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - }); - - assert_symmetric_serde(json, expected_step); -} - -fn assert_symmetric_serde(json: &str, expected: MigrationStep) { - let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); - let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); - let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); - assert_eq!( - deserialized, expected, - "The provided json could not be deserialized into the expected struct." - ); - assert_eq!( - serialized_again, serde_value, - "Reserializing did not produce the original json input." 
- ); -} +// #![allow(non_snake_case)] +// +// use migration_connector::steps::*; +// +// #[test] +// fn full_CreateModel_must_work() { +// let json = r#"{"tag":"CreateModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::CreateModel(CreateModel { +// model: "Blog".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateModel_must_work() { +// let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::UpdateModel(UpdateModel { +// model: "Blog".to_string(), +// new_name: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_UpdateModel_must_work() { +// let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; +// let expected_struct = MigrationStep::UpdateModel(UpdateModel { +// model: "Blog".to_string(), +// new_name: Some("MyBlog".to_string()), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteModel_must_work() { +// let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::DeleteModel(DeleteModel { +// model: "Blog".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_CreateField_must_work() { +// let json = r#" +// { +// "tag":"CreateField", +// "model":"Blog", +// "field":"title", +// "type":"String", +// "arity":"Required" +// } +// "#; +// let expected_struct = MigrationStep::CreateField(CreateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// tpe: "String".to_owned(), +// arity: FieldArity::Required, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_CreateField_must_work() { +// let json = r#"{ +// "tag":"CreateField", +// "model": "Blog", +// "field": "title", +// "type": "String", +// "arity": "Optional" +// }"#; +// let expected_struct = MigrationStep::CreateField(CreateField { +// model: 
"Blog".to_string(), +// field: "title".to_string(), +// tpe: "String".to_owned(), +// arity: FieldArity::Optional, +// }); +// +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateField_must_work() { +// let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; +// let expected_struct = MigrationStep::UpdateField(UpdateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// new_name: None, +// tpe: None, +// arity: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_UpdateField_must_work() { +// let json = r#" +// { +// "tag": "UpdateField", +// "model": "Blog", +// "field": "title", +// "newName": "MyBlog", +// "type": "String", +// "arity": "Optional" +// } +// "#; +// let expected_struct = MigrationStep::UpdateField(UpdateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// new_name: Some("MyBlog".to_string()), +// tpe: Some("String".to_owned()), +// arity: Some(FieldArity::Optional), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteField_must_work() { +// let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; +// let expected_struct = MigrationStep::DeleteField(DeleteField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn CreateEnum_must_work() { +// let json = r#" +// { +// "tag": "CreateEnum", +// "enum": "BlogCategory", +// "values": ["Politics","Tech"] +// } +// "#; +// let expected_struct = MigrationStep::CreateEnum(CreateEnum { +// r#enum: "BlogCategory".to_string(), +// values: vec!["Politics".to_string(), "Tech".to_string()], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateEnum_must_work() { +// let json = r#" +// { +// "tag": "UpdateEnum", +// "enum": "BlogCategory" +// } +// "#; +// let 
expected_struct = MigrationStep::UpdateEnum(UpdateEnum { +// r#enum: "BlogCategory".to_string(), +// new_name: None, +// created_values: vec![], +// deleted_values: vec![], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_Update_Enum_must_work() { +// let json = r#" +// { +// "tag": "UpdateEnum", +// "enum": "BlogCategory", +// "newName": "MyBlogCategory", +// "createdValues": ["Tech"], +// "deletedValues": ["Nology"] +// } +// "#; +// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { +// r#enum: "BlogCategory".to_string(), +// new_name: Some("MyBlogCategory".to_string()), +// created_values: vec!["Tech".to_string()], +// deleted_values: vec!["Nology".to_string()], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteEnum_must_work() { +// let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; +// let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { +// r#enum: "BlogCategory".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn CreateDirective_must_work() { +// let json = r#" +// { +// "tag": "CreateDirective", +// "location": { +// "directive": "map", +// "path": { +// "tag": "Model", +// "model": "Blog" +// } +// } +// } +// "#; +// +// let expected_step = MigrationStep::CreateDirective(CreateDirective { +// location: DirectiveLocation { +// path: DirectivePath::Model { +// model: "Blog".to_owned(), +// arguments: None, +// }, +// directive: "map".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn minimal_DeleteDirective_must_work() { +// let json = r#" +// { +// "tag": "DeleteDirective", +// "location": { +// "path": { +// "tag": "Field", +// "model": "Blog", +// "field": "title" +// }, +// "directive": "map" +// } +// } +// "#; +// +// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { +// location: DirectiveLocation { +// path: 
DirectivePath::Field { +// model: "Blog".to_owned(), +// field: "title".to_owned(), +// }, +// directive: "map".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn full_DeleteDirective_must_work() { +// let json = r#" +// { +// "tag": "DeleteDirective", +// "location": { +// "path": { +// "tag": "Model", +// "model": "Blog", +// "arguments": [ +// { +// "name": "", +// "value": "[name, age]" +// } +// ] +// }, +// "directive": "unique" +// } +// } +// "#; +// +// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { +// location: DirectiveLocation { +// path: DirectivePath::Model { +// model: "Blog".to_owned(), +// arguments: Some(vec![Argument { +// name: "".to_owned(), +// value: MigrationExpression("[name, age]".to_owned()), +// }]), +// }, +// directive: "unique".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn UpdateArgument_must_work() { +// let json = r#" +// { +// "tag": "UpdateArgument", +// "location": { +// "tag": "Directive", +// "path": { +// "tag": "Model", +// "model": "CatMood" +// }, +// "directive": "map" +// }, +// "argument": "name", +// "newValue": "cat_mood" +// } +// "#; +// +// let expected_step = MigrationStep::UpdateArgument(UpdateArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Model { +// model: "CatMood".to_owned(), +// arguments: None, +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// new_value: MigrationExpression("cat_mood".to_owned()), +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn CreateArgument_must_work() { +// let json = r#" +// { +// "tag": "CreateArgument", +// "location": { +// "tag": "Directive", +// "directive": "map", +// "path": { +// "enum": "CatMood", +// "tag": "Enum" +// } +// }, +// "argument": "name", +// "value": "cat_mood" +// } +// "#; +// +// let 
expected_step = MigrationStep::CreateArgument(CreateArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Enum { +// r#enum: "CatMood".to_owned(), +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// value: MigrationExpression("cat_mood".to_owned()), +// }); +// +// println!("{}", serde_json::to_value(&expected_step).unwrap()); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn DeleteArgument_must_work() { +// let json = r#" +// { +// "tag": "DeleteArgument", +// "location": { +// "tag": "Directive", +// "path": { +// "tag":"Enum", +// "enum": "CatMood" +// }, +// "directive":"map" +// }, +// "argument": "name" +// } +// "#; +// +// let expected_step = MigrationStep::DeleteArgument(DeleteArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Enum { +// r#enum: "CatMood".to_owned(), +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// fn assert_symmetric_serde(json: &str, expected: MigrationStep) { +// let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); +// let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); +// let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); +// assert_eq!( +// deserialized, expected, +// "The provided json could not be deserialized into the expected struct." +// ); +// assert_eq!( +// serialized_again, serde_value, +// "Reserializing did not produce the original json input." 
+// ); +// } diff --git a/migration-engine/core/rpc_examples/applyMigration.json b/migration-engine/core/rpc_examples/applyMigration.json deleted file mode 100644 index 289ece0507fa..000000000000 --- a/migration-engine/core/rpc_examples/applyMigration.json +++ /dev/null @@ -1,179 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "applyMigration", - "params": { - "projectInfo": "the-project-id", - "migrationId": "", - "steps": [ - { - "stepType": "CreateModel", - "name": "Blog", - "embedded": false - }, - { - "stepType": "CreateModel", - "name": "Author", - "embedded": false - }, - { - "stepType": "CreateModel", - "name": "Post", - "embedded": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "name", - "type": { - "Base": "String" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "viewCount", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "posts", - "type": { - "Relation": { - "to": "Post", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "authors", - "type": { - "Relation": { - "to": "Author", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Author", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Author", - "name": "name", - "type": { - "Base": "String" - }, - "arity": "optional", - "isUnique": false - }, 
- { - "stepType": "CreateField", - "model": "Author", - "name": "authors", - "type": { - "Relation": { - "to": "Blog", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "title", - "type": { - "Base": "String" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "tags", - "type": { - "Base": "String" - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "blog", - "type": { - "Relation": { - "to": "Blog", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "required", - "isUnique": false - } - ], - "force": false - } -} \ No newline at end of file diff --git a/migration-engine/core/rpc_examples/dmmfToDml.json b/migration-engine/core/rpc_examples/dmmfToDml.json deleted file mode 100644 index e50e39c5a10d..000000000000 --- a/migration-engine/core/rpc_examples/dmmfToDml.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "convertDmmfToDml", - "params": { - "projectInfo": "the-project-id", - "dmmf": "yada yada" - } -} \ No newline at end of file diff --git a/migration-engine/core/rpc_examples/inferMigrationSteps.json b/migration-engine/core/rpc_examples/inferMigrationSteps.json deleted file mode 100644 index 766c85ca3f82..000000000000 --- a/migration-engine/core/rpc_examples/inferMigrationSteps.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "inferMigrationSteps", - "params": { - "projectInfo": "the-project-id", - "migrationId": "the-migration_id", - "assumeToBeApplied": [], - "dataModel": "model Blog {\n id Int @id\n name String\n viewCount 
Int\n posts Post[]\n authors Author[]\n}\n\nmodel Author {\n id Int @id\n name String?\n authors Blog[]\n}\n\nmodel Post {\n id Int @id\n title String\n tags String[]\n blog Blog\n}" - } -} diff --git a/migration-engine/core/rpc_examples/listMigrations.json b/migration-engine/core/rpc_examples/listMigrations.json deleted file mode 100644 index 5ed7e96b58eb..000000000000 --- a/migration-engine/core/rpc_examples/listMigrations.json +++ /dev/null @@ -1 +0,0 @@ -{"id": 1, "jsonrpc": "2.0", "method": "listMigrations", "params": {"projectInfo": "the-project-id"}} diff --git a/migration-engine/core/rpc_examples/migrationProgress.json b/migration-engine/core/rpc_examples/migrationProgress.json deleted file mode 100644 index 4799f4a5d697..000000000000 --- a/migration-engine/core/rpc_examples/migrationProgress.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "migrationProgress", - "params": { - "projectInfo": "the-project-id", - "migrationId": "the-migration-id" - } -} \ No newline at end of file diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 8aa15b5888f0..9f03c479430d 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -37,19 +37,3 @@ pub use mark_migration_rolled_back::{ pub use plan_migration::{PlanMigrationCommand, PlanMigrationInput, PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; - -use migration_connector::{MigrationStep, MigrationWarning, PrettyDatabaseMigrationStep, UnexecutableMigration}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[allow(missing_docs)] -pub struct MigrationStepsResultOutput { - pub datamodel: String, - pub datamodel_steps: Vec, - pub database_steps: Vec, - pub warnings: Vec, - pub errors: [(); 0], - pub general_errors: [(); 0], - pub unexecutable_migrations: Vec, -} 
diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index 0902a30b78ce..79eedb13c6cd 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::SchemaPushInput; pub use core_error::{CoreError, CoreResult}; pub use gate_keeper::GateKeeper; diff --git a/migration-engine/migration-engine-tests/src/command_helpers.rs b/migration-engine/migration-engine-tests/src/command_helpers.rs deleted file mode 100644 index 8c98250139de..000000000000 --- a/migration-engine/migration-engine-tests/src/command_helpers.rs +++ /dev/null @@ -1,21 +0,0 @@ -use migration_core::commands::*; -use sql_schema_describer::*; - -#[derive(Debug)] -pub struct InferAndApplyOutput { - pub sql_schema: SqlSchema, - pub migration_output: MigrationStepsResultOutput, -} - -pub trait MigrationStepsResultOutputExt { - fn describe_steps(&self) -> Vec<&String>; -} - -impl MigrationStepsResultOutputExt for MigrationStepsResultOutput { - fn describe_steps(&self) -> Vec<&String> { - self.database_steps - .iter() - .map(|step| step.step.as_object().unwrap().keys().next().unwrap()) - .collect() - } -} diff --git a/migration-engine/migration-engine-tests/src/lib.rs b/migration-engine/migration-engine-tests/src/lib.rs index eb158c20210a..1d4f51046586 100644 --- a/migration-engine/migration-engine-tests/src/lib.rs +++ b/migration-engine/migration-engine-tests/src/lib.rs @@ -2,14 +2,12 @@ #![deny(unsafe_code)] mod assertions; -mod command_helpers; mod misc_helpers; pub mod sql; mod step_helpers; mod test_api; pub use assertions::*; -pub use command_helpers::*; pub use misc_helpers::*; pub use step_helpers::*; pub use test_api::*; diff --git a/migration-engine/migration-engine-tests/src/sql.rs b/migration-engine/migration-engine-tests/src/sql.rs index 2adfe58047db..075455c9f8c8 100644 --- 
a/migration-engine/migration-engine-tests/src/sql.rs +++ b/migration-engine/migration-engine-tests/src/sql.rs @@ -3,7 +3,6 @@ pub(crate) mod barrel_migration_executor; mod quaint_result_set_ext; pub use super::assertions::*; -pub use super::command_helpers::*; pub use super::misc_helpers::*; pub use super::step_helpers::*; pub use super::test_api::*; From d0b0c1a2c5c05801a22f9827daabe04e673a60b7 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Sun, 20 Dec 2020 20:05:35 +0100 Subject: [PATCH 11/29] remove steps serialization --- .../migration-connector/src/steps.rs | 144 ++----- .../migration-connector/tests/steps_tests.rs | 392 ------------------ 2 files changed, 27 insertions(+), 509 deletions(-) delete mode 100644 migration-engine/connectors/migration-connector/tests/steps_tests.rs diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index e206cfa613d1..97a8cee75d40 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -1,11 +1,9 @@ //! Datamodel migration steps. use datamodel::ast; -use serde::{Deserialize, Serialize}; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-// #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -// #[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), @@ -28,18 +26,14 @@ pub enum MigrationStep { DeleteSource(DeleteSource), } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Hash, Eq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct CreateModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Hash, Eq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct UpdateModel { pub model: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, } @@ -49,39 +43,25 @@ impl UpdateModel { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateField { pub model: String, - pub field: String, - - #[serde(rename = "type")] pub tpe: String, - pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateField { pub model: String, - pub field: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, - - #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub tpe: Option, - - #[serde(skip_serializing_if = "Option::is_none")] pub arity: Option, } @@ -91,32 +71,23 @@ impl UpdateField { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] 
pub struct DeleteField { pub model: String, pub field: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateEnum { pub r#enum: String, pub values: Vec, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateEnum { pub r#enum: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, - - #[serde(skip_serializing_if = "Vec::is_empty", default = "Vec::new")] pub created_values: Vec, - - #[serde(skip_serializing_if = "Vec::is_empty", default = "Vec::new")] pub deleted_values: Vec, } @@ -126,26 +97,22 @@ impl UpdateEnum { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteEnum { pub r#enum: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct Argument { pub name: String, pub value: MigrationExpression, @@ -176,15 +143,13 @@ impl Into for &Argument { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum ArgumentLocation { Directive(DirectiveLocation), Source(SourceLocation), } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 
-#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DirectiveLocation { pub path: DirectivePath, pub directive: String, @@ -219,8 +184,7 @@ impl DirectiveLocation { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct SourceLocation { pub source: String, } @@ -231,8 +195,7 @@ impl SourceLocation { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum DirectivePath { Field { model: String, @@ -240,7 +203,6 @@ pub enum DirectivePath { }, Model { model: String, - #[serde(skip_serializing_if = "Option::is_none")] arguments: Option>, }, Enum { @@ -251,7 +213,6 @@ pub enum DirectivePath { value: String, }, TypeAlias { - #[serde(rename = "typeAlias")] type_alias: String, }, } @@ -275,30 +236,27 @@ impl DirectivePath { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateArgument { pub location: ArgumentLocation, pub argument: String, pub value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteArgument { pub location: ArgumentLocation, pub argument: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateArgument { pub location: ArgumentLocation, pub argument: String, pub new_value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq)] pub struct MigrationExpression(pub String); impl MigrationExpression { @@ -311,21 +269,16 @@ impl MigrationExpression { } } 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateTypeAlias { pub type_alias: String, - pub r#type: String, pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateTypeAlias { pub type_alias: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub r#type: Option, } @@ -335,26 +288,22 @@ impl UpdateTypeAlias { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteTypeAlias { pub type_alias: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateSource { pub source: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteSource { pub source: String, } -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] +#[derive(Debug, Copy, Clone, PartialEq)] pub enum FieldArity { Required, Optional, @@ -392,42 +341,3 @@ impl Into for &FieldArity { } } } - -#[cfg(test)] -mod tests { - use super::*; - use serde_json::json; - - #[test] - fn directive_location_serialization_gives_expected_json_shape() { - let create_directive = CreateDirective { - location: DirectiveLocation { - path: DirectivePath::Field { - model: "Cat".to_owned(), - field: "owner".to_owned(), - }, - directive: "status".to_owned(), - }, - }; - - let serialized_step = serde_json::to_value(&create_directive).unwrap(); - - let expected_json = json!({ - "location": { - "path": { - "tag": "Field", - "model": "Cat", - "field": "owner", - }, - 
"directive": "status" - } - }); - - println!("{}\n{}", serialized_step, expected_json); - - assert_eq!(serialized_step, expected_json); - - let deserialized_step: CreateDirective = serde_json::from_value(expected_json).unwrap(); - assert_eq!(create_directive, deserialized_step); - } -} diff --git a/migration-engine/connectors/migration-connector/tests/steps_tests.rs b/migration-engine/connectors/migration-connector/tests/steps_tests.rs deleted file mode 100644 index 5614b7cf4348..000000000000 --- a/migration-engine/connectors/migration-connector/tests/steps_tests.rs +++ /dev/null @@ -1,392 +0,0 @@ -// #![allow(non_snake_case)] -// -// use migration_connector::steps::*; -// -// #[test] -// fn full_CreateModel_must_work() { -// let json = r#"{"tag":"CreateModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::CreateModel(CreateModel { -// model: "Blog".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateModel_must_work() { -// let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::UpdateModel(UpdateModel { -// model: "Blog".to_string(), -// new_name: None, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_UpdateModel_must_work() { -// let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; -// let expected_struct = MigrationStep::UpdateModel(UpdateModel { -// model: "Blog".to_string(), -// new_name: Some("MyBlog".to_string()), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteModel_must_work() { -// let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::DeleteModel(DeleteModel { -// model: "Blog".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_CreateField_must_work() { -// let json = r#" -// { -// "tag":"CreateField", -// "model":"Blog", -// 
"field":"title", -// "type":"String", -// "arity":"Required" -// } -// "#; -// let expected_struct = MigrationStep::CreateField(CreateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// tpe: "String".to_owned(), -// arity: FieldArity::Required, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_CreateField_must_work() { -// let json = r#"{ -// "tag":"CreateField", -// "model": "Blog", -// "field": "title", -// "type": "String", -// "arity": "Optional" -// }"#; -// let expected_struct = MigrationStep::CreateField(CreateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// tpe: "String".to_owned(), -// arity: FieldArity::Optional, -// }); -// -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateField_must_work() { -// let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; -// let expected_struct = MigrationStep::UpdateField(UpdateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// new_name: None, -// tpe: None, -// arity: None, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_UpdateField_must_work() { -// let json = r#" -// { -// "tag": "UpdateField", -// "model": "Blog", -// "field": "title", -// "newName": "MyBlog", -// "type": "String", -// "arity": "Optional" -// } -// "#; -// let expected_struct = MigrationStep::UpdateField(UpdateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// new_name: Some("MyBlog".to_string()), -// tpe: Some("String".to_owned()), -// arity: Some(FieldArity::Optional), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteField_must_work() { -// let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; -// let expected_struct = MigrationStep::DeleteField(DeleteField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// }); -// 
assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn CreateEnum_must_work() { -// let json = r#" -// { -// "tag": "CreateEnum", -// "enum": "BlogCategory", -// "values": ["Politics","Tech"] -// } -// "#; -// let expected_struct = MigrationStep::CreateEnum(CreateEnum { -// r#enum: "BlogCategory".to_string(), -// values: vec!["Politics".to_string(), "Tech".to_string()], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateEnum_must_work() { -// let json = r#" -// { -// "tag": "UpdateEnum", -// "enum": "BlogCategory" -// } -// "#; -// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { -// r#enum: "BlogCategory".to_string(), -// new_name: None, -// created_values: vec![], -// deleted_values: vec![], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_Update_Enum_must_work() { -// let json = r#" -// { -// "tag": "UpdateEnum", -// "enum": "BlogCategory", -// "newName": "MyBlogCategory", -// "createdValues": ["Tech"], -// "deletedValues": ["Nology"] -// } -// "#; -// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { -// r#enum: "BlogCategory".to_string(), -// new_name: Some("MyBlogCategory".to_string()), -// created_values: vec!["Tech".to_string()], -// deleted_values: vec!["Nology".to_string()], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteEnum_must_work() { -// let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; -// let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { -// r#enum: "BlogCategory".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn CreateDirective_must_work() { -// let json = r#" -// { -// "tag": "CreateDirective", -// "location": { -// "directive": "map", -// "path": { -// "tag": "Model", -// "model": "Blog" -// } -// } -// } -// "#; -// -// let expected_step = MigrationStep::CreateDirective(CreateDirective { 
-// location: DirectiveLocation { -// path: DirectivePath::Model { -// model: "Blog".to_owned(), -// arguments: None, -// }, -// directive: "map".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn minimal_DeleteDirective_must_work() { -// let json = r#" -// { -// "tag": "DeleteDirective", -// "location": { -// "path": { -// "tag": "Field", -// "model": "Blog", -// "field": "title" -// }, -// "directive": "map" -// } -// } -// "#; -// -// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { -// location: DirectiveLocation { -// path: DirectivePath::Field { -// model: "Blog".to_owned(), -// field: "title".to_owned(), -// }, -// directive: "map".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn full_DeleteDirective_must_work() { -// let json = r#" -// { -// "tag": "DeleteDirective", -// "location": { -// "path": { -// "tag": "Model", -// "model": "Blog", -// "arguments": [ -// { -// "name": "", -// "value": "[name, age]" -// } -// ] -// }, -// "directive": "unique" -// } -// } -// "#; -// -// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { -// location: DirectiveLocation { -// path: DirectivePath::Model { -// model: "Blog".to_owned(), -// arguments: Some(vec![Argument { -// name: "".to_owned(), -// value: MigrationExpression("[name, age]".to_owned()), -// }]), -// }, -// directive: "unique".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn UpdateArgument_must_work() { -// let json = r#" -// { -// "tag": "UpdateArgument", -// "location": { -// "tag": "Directive", -// "path": { -// "tag": "Model", -// "model": "CatMood" -// }, -// "directive": "map" -// }, -// "argument": "name", -// "newValue": "cat_mood" -// } -// "#; -// -// let expected_step = MigrationStep::UpdateArgument(UpdateArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// 
path: DirectivePath::Model { -// model: "CatMood".to_owned(), -// arguments: None, -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// new_value: MigrationExpression("cat_mood".to_owned()), -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn CreateArgument_must_work() { -// let json = r#" -// { -// "tag": "CreateArgument", -// "location": { -// "tag": "Directive", -// "directive": "map", -// "path": { -// "enum": "CatMood", -// "tag": "Enum" -// } -// }, -// "argument": "name", -// "value": "cat_mood" -// } -// "#; -// -// let expected_step = MigrationStep::CreateArgument(CreateArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// path: DirectivePath::Enum { -// r#enum: "CatMood".to_owned(), -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// value: MigrationExpression("cat_mood".to_owned()), -// }); -// -// println!("{}", serde_json::to_value(&expected_step).unwrap()); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn DeleteArgument_must_work() { -// let json = r#" -// { -// "tag": "DeleteArgument", -// "location": { -// "tag": "Directive", -// "path": { -// "tag":"Enum", -// "enum": "CatMood" -// }, -// "directive":"map" -// }, -// "argument": "name" -// } -// "#; -// -// let expected_step = MigrationStep::DeleteArgument(DeleteArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// path: DirectivePath::Enum { -// r#enum: "CatMood".to_owned(), -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// fn assert_symmetric_serde(json: &str, expected: MigrationStep) { -// let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); -// let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); -// let 
serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); -// assert_eq!( -// deserialized, expected, -// "The provided json could not be deserialized into the expected struct." -// ); -// assert_eq!( -// serialized_again, serde_value, -// "Reserializing did not produce the original json input." -// ); -// } From e2ab7907179abef7d6b0aa4cf17ab6f92f94a3f5 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Mon, 21 Dec 2020 09:13:02 +0100 Subject: [PATCH 12/29] remove partialeq --- .../migration-connector/src/steps.rs | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index 97a8cee75d40..2c98206782b2 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -3,7 +3,7 @@ use datamodel::ast; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), @@ -26,12 +26,12 @@ pub enum MigrationStep { DeleteSource(DeleteSource), } -#[derive(Debug, Clone, PartialEq, Hash, Eq)] +#[derive(Debug, Clone)] pub struct CreateModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Hash, Eq)] +#[derive(Debug, Clone)] pub struct UpdateModel { pub model: String, pub new_name: Option, @@ -43,12 +43,12 @@ impl UpdateModel { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteModel { pub model: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateField { pub model: String, pub field: String, @@ -56,7 +56,7 @@ pub struct CreateField { pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateField { pub model: String, pub field: String, @@ -71,19 +71,19 @@ impl UpdateField { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteField { pub model: String, pub field: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateEnum { pub r#enum: String, pub values: Vec, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateEnum { pub r#enum: String, pub new_name: Option, @@ -97,17 +97,17 @@ impl UpdateEnum { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteEnum { pub r#enum: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteDirective { pub location: DirectiveLocation, } @@ -143,13 +143,13 @@ impl Into for &Argument { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum ArgumentLocation { Directive(DirectiveLocation), Source(SourceLocation), } -#[derive(Debug, Clone, PartialEq)] 
+#[derive(Debug, Clone)] pub struct DirectiveLocation { pub path: DirectivePath, pub directive: String, @@ -184,7 +184,7 @@ impl DirectiveLocation { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct SourceLocation { pub source: String, } @@ -195,7 +195,7 @@ impl SourceLocation { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum DirectivePath { Field { model: String, @@ -236,20 +236,20 @@ impl DirectivePath { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateArgument { pub location: ArgumentLocation, pub argument: String, pub value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteArgument { pub location: ArgumentLocation, pub argument: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateArgument { pub location: ArgumentLocation, pub argument: String, @@ -269,14 +269,14 @@ impl MigrationExpression { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateTypeAlias { pub type_alias: String, pub r#type: String, pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateTypeAlias { pub type_alias: String, pub r#type: Option, @@ -288,22 +288,22 @@ impl UpdateTypeAlias { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteTypeAlias { pub type_alias: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateSource { pub source: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteSource { pub source: String, } -#[derive(Debug, Copy, Clone, PartialEq)] +#[derive(Debug, Copy, Clone)] pub enum FieldArity { Required, Optional, From 1a6f6264e8be4edb30615e718059f8919bebbf5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= Date: Mon, 21 Dec 2020 09:57:32 +0100 Subject: [PATCH 13/29] Ignore postgis tables when checking for initialized 
schemas ...in migration persistence. --- .../sql_imperative_migration_persistence.rs | 6 +++- .../tests/apply_migrations/mod.rs | 34 +++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_imperative_migration_persistence.rs b/migration-engine/connectors/sql-migration-connector/src/sql_imperative_migration_persistence.rs index ebac68b60949..18424f081833 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_imperative_migration_persistence.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_imperative_migration_persistence.rs @@ -24,7 +24,11 @@ impl ImperativeMigrationsPersistence for SqlMigrationConnector { return Ok(()); } - if !schema.is_empty() { + if !schema.is_empty() + && schema + .table_walkers() + .any(|t| !self.flavour().table_should_be_ignored(t.name())) + { return Err(ConnectorError::user_facing_error( user_facing_errors::migration_engine::DatabaseSchemaNotEmpty { database_name: self.connection.connection_info().database_location(), diff --git a/migration-engine/migration-engine-tests/tests/apply_migrations/mod.rs b/migration-engine/migration-engine-tests/tests/apply_migrations/mod.rs index b5253664ad3b..c97a94a59e07 100644 --- a/migration-engine/migration-engine-tests/tests/apply_migrations/mod.rs +++ b/migration-engine/migration-engine-tests/tests/apply_migrations/mod.rs @@ -205,3 +205,37 @@ async fn migrations_should_fail_on_an_uninitialized_nonempty_database(api: &Test Ok(()) } + +// Reference for the tables created by PostGIS: https://postgis.net/docs/manual-1.4/ch04.html#id418599 +#[test_each_connector(tags("postgres"))] +async fn migrations_should_succeed_on_an_uninitialized_nonempty_database_with_postgis_tables( + api: &TestApi, +) -> TestResult { + let dm = r#" + model Cat { + id Int @id + name String + } + "#; + + let create_spatial_ref_sys_table = "CREATE TABLE IF NOT EXISTS \"spatial_ref_sys\" ( id SERIAL PRIMARY KEY )"; + 
// The capitalized Geometry is intentional here, because we want the matching to be case-insensitive. + let create_geometry_columns_table = "CREATE TABLE IF NOT EXISTS \"Geometry_columns\" ( id SERIAL PRIMARY KEY )"; + + api.database().raw_cmd(create_spatial_ref_sys_table).await?; + api.database().raw_cmd(create_geometry_columns_table).await?; + + let directory = api.create_migrations_directory()?; + + api.create_migration("01-init", dm, &directory) + .send() + .await? + .assert_migration_directories_count(1)?; + + api.apply_migrations(&directory) + .send() + .await? + .assert_applied_migrations(&["01-init"])?; + + Ok(()) +} From 82e51ced39c3171b8cb74ccee31bd2be836f583e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= Date: Mon, 21 Dec 2020 10:51:46 +0100 Subject: [PATCH 14/29] Fix sqlite test --- migration-engine/core/src/commands/reset.rs | 2 ++ .../migration-engine-tests/tests/migration_tests.rs | 9 +++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/migration-engine/core/src/commands/reset.rs b/migration-engine/core/src/commands/reset.rs index 3ace246a1714..70f973b781d5 100644 --- a/migration-engine/core/src/commands/reset.rs +++ b/migration-engine/core/src/commands/reset.rs @@ -15,6 +15,8 @@ impl<'a> MigrationCommand for ResetCommand { C: MigrationConnector, D: DatabaseMigrationMarker + 'static, { + tracing::debug!("Resetting the database."); + engine.connector().reset().await?; Ok(()) diff --git a/migration-engine/migration-engine-tests/tests/migration_tests.rs b/migration-engine/migration-engine-tests/tests/migration_tests.rs index 7fe4187126d8..7eeb2eef2693 100644 --- a/migration-engine/migration-engine-tests/tests/migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migration_tests.rs @@ -2433,9 +2433,6 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { api.schema_push(dm1).send().await?.assert_green()?; - // Drop the existing migrations. 
- api.reset().send().await?; - let dm2 = r#" datasource db { provider = "sqlite" @@ -2448,7 +2445,11 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { } "#; - api.schema_push(dm2).send().await?.assert_green()?; + api.schema_push(dm2) + .migration_id(Some("mig2")) + .send() + .await? + .assert_green()?; Ok(()) } From 6a44439e2e03ff4eca9a571f55600db88a6a7933 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= Date: Wed, 23 Dec 2020 10:08:52 +0100 Subject: [PATCH 15/29] Update windows CI toolchain to 1.48.0 The hope is that the random LLVM failures will stop, https://github.com/rust-lang/rust/issues/72470 being fixed. --- .github/workflows/build-windows.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 339870363f66..716c1caa4773 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -23,7 +23,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: - toolchain: 1.46.0 + toolchain: 1.48.0 default: true - uses: actions/cache@v2 From ab41692cafce3c1ba5fd0b53d07d4a770b28676d Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:16:02 +0100 Subject: [PATCH 16/29] remove apply migration --- migration-engine/core/src/api.rs | 10 - migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 2 - .../core/src/commands/apply_migration.rs | 205 ------------------ migration-engine/core/src/lib.rs | 2 +- .../migration-engine-tests/src/test_api.rs | 27 +-- 6 files changed, 3 insertions(+), 247 deletions(-) delete mode 100644 migration-engine/core/src/commands/apply_migration.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 8f6887e5e614..175cd29accfa 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -41,7 +41,6 @@ where #[async_trait::async_trait] pub trait GenericApi: Send + Sync + 'static { async fn 
version(&self, input: &serde_json::Value) -> CoreResult; - async fn apply_migration(&self, input: &ApplyMigrationInput) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; async fn calculate_database_steps( @@ -88,15 +87,6 @@ where .await } - async fn apply_migration(&self, input: &ApplyMigrationInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!( - "ApplyMigration", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("ApplyMigrations")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 0e264d275a15..06af057c6b1a 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -14,7 +14,6 @@ pub struct RpcApi { #[derive(Debug, Clone, Copy, PartialEq)] enum RpcCommand { - ApplyMigration, ApplyMigrations, ApplyScript, CalculateDatabaseSteps, @@ -40,7 +39,6 @@ enum RpcCommand { impl RpcCommand { fn name(&self) -> &'static str { match self { - RpcCommand::ApplyMigration => "applyMigration", RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", RpcCommand::CalculateDatabaseSteps => "calculateDatabaseSteps", @@ -66,7 +64,6 @@ impl RpcCommand { } const AVAILABLE_COMMANDS: &[RpcCommand] = &[ - RpcCommand::ApplyMigration, RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, RpcCommand::CalculateDatabaseSteps, @@ -141,7 +138,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(¶ms.parse()?).await?), RpcCommand::CreateMigration => render(executor.create_migration(¶ms.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::ApplyMigration => render(executor.apply_migration(¶ms.parse()?).await?), 
RpcCommand::CalculateDatabaseSteps => render(executor.calculate_database_steps(¶ms.parse()?).await?), RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(¶ms.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 9f5def98ea73..af5827bbf6c4 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -4,7 +4,6 @@ //! module. #[allow(missing_docs)] -mod apply_migration; mod apply_migrations; mod apply_script; #[allow(missing_docs)] @@ -33,7 +32,6 @@ mod schema_push; #[allow(missing_docs)] mod unapply_migration; -pub use apply_migration::*; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; pub use calculate_database_steps::*; diff --git a/migration-engine/core/src/commands/apply_migration.rs b/migration-engine/core/src/commands/apply_migration.rs deleted file mode 100644 index 71ab17f8a7fc..000000000000 --- a/migration-engine/core/src/commands/apply_migration.rs +++ /dev/null @@ -1,205 +0,0 @@ -use super::MigrationStepsResultOutput; -use crate::{commands::command::*, CoreError}; -use crate::{migration_engine::MigrationEngine, CoreResult}; -use datamodel::{ast::SchemaAst, Datamodel}; -use migration_connector::*; -use serde::Deserialize; - -pub struct ApplyMigrationCommand<'a> { - input: &'a ApplyMigrationInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for ApplyMigrationCommand<'a> { - type Input = ApplyMigrationInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let cmd = ApplyMigrationCommand { input }; - tracing::debug!("{:?}", cmd.input); - - let connector = engine.connector(); - let 
migration_persistence = connector.migration_persistence(); - migration_persistence.init().await?; - - match migration_persistence.last().await? { - Some(ref last_migration) if last_migration.is_watch_migration() && !cmd.input.is_watch_migration() => { - cmd.handle_transition_out_of_watch_mode(&engine).await - } - _ => cmd.handle_normal_migration(&engine).await, - } - } -} - -impl<'a> ApplyMigrationCommand<'a> { - async fn handle_transition_out_of_watch_mode( - &self, - engine: &MigrationEngine, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - - let migration_persistence = connector.migration_persistence(); - - let last_migration = migration_persistence.last().await?; - let current_datamodel = last_migration - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let last_non_watch_datamodel = migration_persistence - .last_non_watch_migration() - .await? - .map(|m| m.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(&last_non_watch_datamodel, self.input.steps.as_slice())?; - - self.handle_migration(&engine, current_datamodel, next_datamodel_ast) - .await - } - - async fn handle_normal_migration( - &self, - engine: &MigrationEngine, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - let migration_persistence = connector.migration_persistence(); - - if migration_persistence - .migration_is_already_applied(&self.input.migration_id) - .await? 
- { - return Err(CoreError::Input(anyhow::anyhow!( - "Invariant violation: the migration with id `{migration_id}` has already been applied.", - migration_id = self.input.migration_id - ))); - } - - let last_migration = migration_persistence.last().await?; - let current_datamodel_ast = last_migration - .as_ref() - .map(|migration| migration.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let current_datamodel = last_migration - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(&current_datamodel_ast, self.input.steps.as_slice())?; - - self.handle_migration(&engine, current_datamodel, next_datamodel_ast) - .await - } - - async fn handle_migration( - &self, - engine: &MigrationEngine, - current_datamodel: Datamodel, - next_schema_ast: SchemaAst, - ) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let connector = engine.connector(); - let next_datamodel = - datamodel::lift_ast_to_datamodel(&next_schema_ast).map_err(CoreError::ProducedBadDatamodel)?; - let migration_persistence = connector.migration_persistence(); - - let database_migration = connector - .database_migration_inferrer() - .infer(&current_datamodel, &next_datamodel.subject, &self.input.steps) - .await?; - - let database_steps_json_pretty = connector - .database_migration_step_applier() - .render_steps_pretty(&database_migration)?; - - tracing::trace!(?database_steps_json_pretty); - - let database_migration_json = serde_json::Value::Null; - - let migration = Migration::new(NewMigration { - name: self.input.migration_id.clone(), - datamodel_steps: self.input.steps.clone(), - datamodel_string: datamodel::render_schema_ast_to_string(&next_schema_ast), - database_migration: database_migration_json, - }); - - let diagnostics = connector - 
.destructive_change_checker() - .check(&database_migration) - .await?; - - match ( - !diagnostics.unexecutable_migrations.is_empty(), - diagnostics.has_warnings(), - self.input.force.unwrap_or(false), - ) { - (true, _, _) => { - tracing::info!("There are unexecutable migration steps, the migration will not be applied.") - } - // We have no warnings, or the force flag is passed. - (_, false, _) | (_, true, true) => { - tracing::debug!("Applying the migration"); - let saved_migration = migration_persistence.create(migration).await?; - - connector - .migration_applier() - .apply(&saved_migration, &database_migration) - .await?; - - tracing::debug!("Migration applied"); - } - // We have warnings, but no force flag was passed. - (_, true, false) => tracing::info!("The force flag was not passed, the migration will not be applied."), - } - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = diagnostics; - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_datamodel_to_string(&next_datamodel.subject), - datamodel_steps: self.input.steps.clone(), - database_steps: database_steps_json_pretty, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ApplyMigrationInput { - pub migration_id: String, - pub steps: Vec, - pub force: Option, -} - -impl IsWatchMigration for ApplyMigrationInput { - fn is_watch_migration(&self) -> bool { - self.migration_id.starts_with("watch") - } -} diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index 88d505cd4a92..eb96b6698aad 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{ApplyMigrationInput, InferMigrationStepsInput, MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::{InferMigrationStepsInput, 
MigrationStepsResultOutput, SchemaPushInput}; pub use core_error::{CoreError, CoreResult}; use enumflags2::BitFlags; pub use gate_keeper::GateKeeper; diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 34c4a2441620..3ea7420d64e5 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -23,7 +23,6 @@ use super::{ assertions::SchemaAssertion, misc_helpers::{mysql_migration_connector, postgres_migration_connector, sqlite_migration_connector}, sql::barrel_migration_executor::BarrelMigrationExecutor, - InferAndApplyOutput, }; use crate::{connectors::Tags, test_api::list_migration_directories::ListMigrationDirectories, AssertionResult}; use enumflags2::BitFlags; @@ -32,9 +31,10 @@ use migration_connector::{ ImperativeMigrationsPersistence, MigrationConnector, MigrationFeature, MigrationPersistence, MigrationRecord, MigrationStep, }; + use migration_core::{ api::{GenericApi, MigrationApi}, - commands::{ApplyMigrationInput, ApplyScriptInput}, + commands::ApplyScriptInput, }; use quaint::{ prelude::{ConnectionInfo, Queryable, SqlFamily}, @@ -121,29 +121,6 @@ impl TestApi { Ok(tempfile::tempdir()?) 
} - pub async fn apply_migration(&self, steps: Vec, migration_id: &str) -> InferAndApplyOutput { - let input = ApplyMigrationInput { - migration_id: migration_id.into(), - steps, - force: None, - }; - - let migration_output = self.api.apply_migration(&input).await.expect("ApplyMigration failed"); - - assert!( - migration_output.general_errors.is_empty(), - format!( - "ApplyMigration returned unexpected errors: {:?}", - migration_output.general_errors - ) - ); - - InferAndApplyOutput { - sql_schema: self.describe_database().await.unwrap(), - migration_output, - } - } - pub fn apply_migrations<'a>(&'a self, migrations_directory: &'a TempDir) -> ApplyMigrations<'a> { ApplyMigrations::new(&self.api, migrations_directory) } From 3241b8be9c478273aa5e0b3c5f5579d1c5d3c0d9 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:37:25 +0100 Subject: [PATCH 17/29] remove listmigrations --- migration-engine/core/src/api.rs | 7 --- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 4 -- .../core/src/commands/list_migrations.rs | 57 ------------------- 4 files changed, 72 deletions(-) delete mode 100644 migration-engine/core/src/commands/list_migrations.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 175cd29accfa..d80211a85f3e 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -57,7 +57,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult; async fn initialize(&self, input: &InitializeInput) -> CoreResult; - async fn list_migrations(&self, input: &serde_json::Value) -> CoreResult>; async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, @@ -163,12 +162,6 @@ where .await } - async fn list_migrations(&self, input: &serde_json::Value) -> CoreResult> { - 
self.handle_command::(input) - .instrument(tracing::info_span!("ListMigrations")) - .await - } - - async fn list_migration_directories( - &self, - input: &ListMigrationDirectoriesInput, diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 06af057c6b1a..42e7e38703ce 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -26,7 +26,6 @@ enum RpcCommand { InferMigrationSteps, Initialize, ListMigrationDirectories, - ListMigrations, MarkMigrationApplied, MarkMigrationRolledBack, MigrationProgress, @@ -51,7 +50,6 @@ impl RpcCommand { RpcCommand::InferMigrationSteps => "inferMigrationSteps", RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", - RpcCommand::ListMigrations => "listMigrations", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", RpcCommand::MigrationProgress => "migrationProgress", @@ -76,7 +74,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::InferMigrationSteps, RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, - RpcCommand::ListMigrations, RpcCommand::MarkMigrationApplied, RpcCommand::MigrationProgress, RpcCommand::MarkMigrationRolledBack, @@ -150,7 +147,6 @@ impl RpcApi { RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(&params.parse()?).await?) 
} - RpcCommand::ListMigrations => render(executor.list_migrations(&serde_json::Value::Null).await?), RpcCommand::MarkMigrationApplied => render(executor.mark_migration_applied(&params.parse()?).await?), RpcCommand::MarkMigrationRolledBack => render(executor.mark_migration_rolled_back(&params.parse()?).await?), RpcCommand::MigrationProgress => render(executor.migration_progress(&params.parse()?).await?), diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index af5827bbf6c4..237cf5aca815 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -3,7 +3,6 @@ //! The commands exposed by the migration engine core are defined in this //! module. -#[allow(missing_docs)] mod apply_migrations; mod apply_script; #[allow(missing_docs)] @@ -20,8 +19,6 @@ mod get_database_version; mod infer_migration_steps; mod initialize; mod list_migration_directories; -#[allow(missing_docs)] -mod list_migrations; mod mark_migration_applied; mod mark_migration_rolled_back; #[allow(missing_docs)] @@ -48,7 +45,6 @@ pub use get_database_version::*; pub use infer_migration_steps::*; pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; -pub use list_migrations::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; pub use mark_migration_rolled_back::{ MarkMigrationRolledBackCommand, MarkMigrationRolledBackInput, MarkMigrationRolledBackOutput, diff --git a/migration-engine/core/src/commands/list_migrations.rs b/migration-engine/core/src/commands/list_migrations.rs deleted file mode 100644 index 781039ea953f..000000000000 --- a/migration-engine/core/src/commands/list_migrations.rs +++ /dev/null @@ -1,57 +0,0 @@ -use crate::migration_engine::MigrationEngine; -use crate::{commands::command::*, CoreResult}; -use migration_connector::steps::*; -use migration_connector::*; -use serde::Serialize; - -pub struct 
ListMigrationsCommand; - -#[async_trait::async_trait] -impl<'a> MigrationCommand for ListMigrationsCommand { - type Input = serde_json::Value; - type Output = Vec; - - async fn execute(_input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let migration_persistence = engine.connector().migration_persistence(); - migration_persistence.init().await?; - - let migrations: Self::Output = migration_persistence - .load_all() - .await? - .into_iter() - .map(convert_migration_to_list_migration_steps_output) - .collect(); - - tracing::info!( - "Returning {migrations_count} migrations ({pending_count} pending).", - migrations_count = migrations.len(), - pending_count = migrations.iter().filter(|mig| mig.status.is_pending()).count(), - ); - - Ok(migrations) - } -} - -pub fn convert_migration_to_list_migration_steps_output(migration: Migration) -> ListMigrationsOutput { - ListMigrationsOutput { - id: migration.name, - datamodel_steps: migration.datamodel_steps, - database_steps: vec![], - status: migration.status, - datamodel: migration.datamodel_string, - } -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ListMigrationsOutput { - pub id: String, - pub datamodel_steps: Vec, - pub database_steps: Vec, - pub status: MigrationStatus, - pub datamodel: String, -} From 6b0efadacc8afd5b2505481a0b58b969e6d809a2 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:43:42 +0100 Subject: [PATCH 18/29] remove calculatedatabasesteps --- migration-engine/core/src/api.rs | 13 --- migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../src/commands/calculate_database_steps.rs | 107 ------------------ 4 files changed, 127 deletions(-) delete mode 100644 migration-engine/core/src/commands/calculate_database_steps.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 
d80211a85f3e..a640759dbf6f 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -43,10 +43,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn version(&self, input: &serde_json::Value) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; - async fn calculate_database_steps( - &self, - input: &CalculateDatabaseStepsInput, - ) -> CoreResult; async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult; async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult; async fn debug_panic(&self, input: &()) -> CoreResult<()>; @@ -98,15 +94,6 @@ where .await } - async fn calculate_database_steps( - &self, - input: &CalculateDatabaseStepsInput, - ) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!("CalculateDatabaseSteps")) - .await - } - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("CalculateDatamodel")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 42e7e38703ce..8e4f80383205 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -16,7 +16,6 @@ pub struct RpcApi { enum RpcCommand { ApplyMigrations, ApplyScript, - CalculateDatabaseSteps, CalculateDatamodel, CreateMigration, DebugPanic, @@ -40,7 +39,6 @@ impl RpcCommand { match self { RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", - RpcCommand::CalculateDatabaseSteps => "calculateDatabaseSteps", RpcCommand::CalculateDatamodel => "calculateDatamodel", RpcCommand::CreateMigration => "createMigration", RpcCommand::DebugPanic => "debugPanic", @@ -64,7 +62,6 @@ impl RpcCommand { const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, - 
RpcCommand::CalculateDatabaseSteps, RpcCommand::CalculateDatamodel, RpcCommand::CreateMigration, RpcCommand::DebugPanic, @@ -135,7 +132,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(&params.parse()?).await?), RpcCommand::CreateMigration => render(executor.create_migration(&params.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::CalculateDatabaseSteps => render(executor.calculate_database_steps(&params.parse()?).await?), RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(&params.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { render(executor.diagnose_migration_history(&params.parse()?).await?) diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 237cf5aca815..c83907325386 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -6,8 +6,6 @@ mod apply_migrations; mod apply_script; #[allow(missing_docs)] -mod calculate_database_steps; -#[allow(missing_docs)] mod calculate_datamodel; mod command; mod create_migration; @@ -31,7 +29,6 @@ mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; -pub use calculate_database_steps::*; pub use calculate_datamodel::*; pub use command::MigrationCommand; pub use create_migration::{CreateMigrationCommand, CreateMigrationInput, CreateMigrationOutput}; diff --git a/migration-engine/core/src/commands/calculate_database_steps.rs b/migration-engine/core/src/commands/calculate_database_steps.rs deleted file mode 100644 index 5df129be3b8b..000000000000 --- a/migration-engine/core/src/commands/calculate_database_steps.rs +++ /dev/null @@ -1,107 +0,0 @@ -//! The CalculateDatabaseSteps RPC method. -//! -//! Its purpose is to infer the database steps for a given migration without reference to a target -//! 
prisma schema/datamodel, based on the datamodel migration steps and previous already applied -//! migrations. - -use super::MigrationStepsResultOutput; -use crate::{commands::command::MigrationCommand, migration_engine::MigrationEngine, CoreError, CoreResult}; -use datamodel::ast::SchemaAst; -use migration_connector::{DatabaseMigrationMarker, DestructiveChangeDiagnostics, MigrationConnector, MigrationStep}; -use serde::Deserialize; - -pub struct CalculateDatabaseStepsCommand<'a> { - input: &'a CalculateDatabaseStepsInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for CalculateDatabaseStepsCommand<'a> { - type Input = CalculateDatabaseStepsInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Send + Sync + 'static, - { - let cmd = CalculateDatabaseStepsCommand { input }; - tracing::debug!(command_input = ?cmd.input); - - let connector = engine.connector(); - - let steps_to_apply = &cmd.input.steps_to_apply; - let assume_to_be_applied = cmd.applicable_steps(); - - let assumed_datamodel_ast = engine - .datamodel_calculator() - .infer(&SchemaAst::empty(), &assume_to_be_applied)?; - let assumed_datamodel = - datamodel::lift_ast_to_datamodel(&assumed_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let next_datamodel_ast = engine - .datamodel_calculator() - .infer(&assumed_datamodel_ast, &steps_to_apply)?; - let next_datamodel = - datamodel::lift_ast_to_datamodel(&next_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let database_migration = connector - .database_migration_inferrer() - .infer(&assumed_datamodel.subject, &next_datamodel.subject, &steps_to_apply) - .await?; - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = connector - .destructive_change_checker() - .check(&database_migration) - .await?; - - let database_steps_json = connector - 
.database_migration_step_applier() - .render_steps_pretty(&database_migration)?; - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_schema_ast_to_string(&next_datamodel_ast), - datamodel_steps: steps_to_apply.to_vec(), - database_steps: database_steps_json, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -impl CalculateDatabaseStepsCommand<'_> { - /// Returns assume_to_be_applied from the input, with the exception of the steps from - /// steps_to_apply that may have been sent by mistake. - fn applicable_steps(&self) -> &[MigrationStep] { - match self.input.assume_to_be_applied.as_ref() { - Some(all_steps) => { - let steps_to_apply = &self.input.steps_to_apply; - - if steps_to_apply.len() >= all_steps.len() { - return all_steps; - } - - let start_idx = all_steps.len() - (steps_to_apply.len()); - let sliced = &all_steps[start_idx..]; - - if sliced == steps_to_apply.as_slice() { - return &all_steps[..start_idx]; - } - - all_steps - } - None => &[], - } - } -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatabaseStepsInput { - pub steps_to_apply: Vec, - pub assume_to_be_applied: Option>, -} From dcd26aae7248ca6f36f30f084361effdb68372dd Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:47:08 +0100 Subject: [PATCH 19/29] remove calculateDatamodel --- migration-engine/core/src/api.rs | 7 ---- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 3 -- .../core/src/commands/calculate_datamodel.rs | 41 ------------------- 4 files changed, 55 deletions(-) delete mode 100644 migration-engine/core/src/commands/calculate_datamodel.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index a640759dbf6f..d1e69ec4c746 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -43,7 +43,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn version(&self, input: 
&serde_json::Value) -> CoreResult; async fn apply_migrations(&self, input: &ApplyMigrationsInput) -> CoreResult; async fn apply_script(&self, input: &ApplyScriptInput) -> CoreResult; - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult; async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult; async fn debug_panic(&self, input: &()) -> CoreResult<()>; async fn diagnose_migration_history( @@ -94,12 +93,6 @@ where .await } - async fn calculate_datamodel(&self, input: &CalculateDatamodelInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!("CalculateDatamodel")) - .await - } - async fn create_migration(&self, input: &CreateMigrationInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!( diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 8e4f80383205..299ef3be3298 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -16,7 +16,6 @@ enum RpcCommand { ApplyMigrations, ApplyScript, - CalculateDatamodel, CreateMigration, DebugPanic, DiagnoseMigrationHistory, @@ -39,7 +38,6 @@ impl RpcCommand { RpcCommand::ApplyMigrations => "applyMigrations", RpcCommand::ApplyScript => "applyScript", - RpcCommand::CalculateDatamodel => "calculateDatamodel", RpcCommand::CreateMigration => "createMigration", RpcCommand::DebugPanic => "debugPanic", RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", @@ -62,7 +60,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::ApplyMigrations, RpcCommand::ApplyScript, - RpcCommand::CalculateDatamodel, RpcCommand::CreateMigration, RpcCommand::DebugPanic, RpcCommand::DiagnoseMigrationHistory, @@ -132,7 +129,6 @@ impl RpcApi { RpcCommand::ApplyMigrations => render(executor.apply_migrations(&params.parse()?).await?), RpcCommand::CreateMigration => 
render(executor.create_migration(&params.parse()?).await?), RpcCommand::DebugPanic => render(executor.debug_panic(&()).await?), - RpcCommand::CalculateDatamodel => render(executor.calculate_datamodel(&params.parse()?).await?), RpcCommand::DiagnoseMigrationHistory => { render(executor.diagnose_migration_history(&params.parse()?).await?) } diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index c83907325386..8d0cf4301680 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -5,8 +5,6 @@ mod apply_migrations; mod apply_script; -#[allow(missing_docs)] -mod calculate_datamodel; mod command; mod create_migration; mod debug_panic; @@ -29,7 +27,6 @@ mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; -pub use calculate_datamodel::*; pub use command::MigrationCommand; pub use create_migration::{CreateMigrationCommand, CreateMigrationInput, CreateMigrationOutput}; pub use debug_panic::DebugPanicCommand; diff --git a/migration-engine/core/src/commands/calculate_datamodel.rs b/migration-engine/core/src/commands/calculate_datamodel.rs deleted file mode 100644 index b55db030a87b..000000000000 --- a/migration-engine/core/src/commands/calculate_datamodel.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::migration_engine::MigrationEngine; -use crate::{commands::command::*, CoreResult}; -use datamodel::ast::SchemaAst; -use migration_connector::*; -use serde::{Deserialize, Serialize}; -use tracing::debug; - -pub struct CalculateDatamodelCommand; - -#[async_trait::async_trait] -impl MigrationCommand for CalculateDatamodelCommand { - type Input = CalculateDatamodelInput; - type Output = CalculateDatamodelOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - 
debug!("{:?}", input); - - let base_datamodel = SchemaAst::empty(); - let datamodel = engine.datamodel_calculator().infer(&base_datamodel, &input.steps)?; - - Ok(CalculateDatamodelOutput { - datamodel: datamodel::render_schema_ast_to_string(&datamodel), - }) - } -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatamodelInput { - pub steps: Vec, -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CalculateDatamodelOutput { - pub datamodel: String, -} From fc809918489867cb3a2a1e063d8a52ab5f3e8b63 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:50:34 +0100 Subject: [PATCH 20/29] remove inferMigrationSteps --- migration-engine/core/src/api.rs | 10 - migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../src/commands/infer_migration_steps.rs | 185 ------------------ migration-engine/core/src/lib.rs | 2 +- 5 files changed, 1 insertion(+), 203 deletions(-) delete mode 100644 migration-engine/core/src/commands/infer_migration_steps.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index d1e69ec4c746..ab25303bdd49 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -50,7 +50,6 @@ pub trait GenericApi: Send + Sync + 'static { input: &DiagnoseMigrationHistoryInput, ) -> CoreResult; async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; - async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult; async fn initialize(&self, input: &InitializeInput) -> CoreResult; async fn list_migration_directories( &self, @@ -124,15 +123,6 @@ where .await } - async fn infer_migration_steps(&self, input: &InferMigrationStepsInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!( - "InferMigrationSteps", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn initialize(&self, input: 
&InitializeInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!( diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 299ef3be3298..40cb93d19873 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -21,7 +21,6 @@ enum RpcCommand { DiagnoseMigrationHistory, EvaluateDataLoss, GetDatabaseVersion, - InferMigrationSteps, Initialize, ListMigrationDirectories, MarkMigrationApplied, @@ -43,7 +42,6 @@ impl RpcCommand { RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", RpcCommand::EvaluateDataLoss => "evaluateDataLoss", RpcCommand::GetDatabaseVersion => "getDatabaseVersion", - RpcCommand::InferMigrationSteps => "inferMigrationSteps", RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", @@ -65,7 +63,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::DiagnoseMigrationHistory, RpcCommand::EvaluateDataLoss, RpcCommand::GetDatabaseVersion, - RpcCommand::InferMigrationSteps, RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, @@ -134,7 +131,6 @@ impl RpcApi { } RpcCommand::EvaluateDataLoss => render(executor.evaluate_data_loss(&params.parse()?).await?), RpcCommand::GetDatabaseVersion => render(executor.version(&serde_json::Value::Null).await?), - RpcCommand::InferMigrationSteps => render(executor.infer_migration_steps(&params.parse()?).await?), RpcCommand::Initialize => render(executor.initialize(&params.parse()?).await?), RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(&params.parse()?).await?) 
diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 8d0cf4301680..7af4e3d0ba5c 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -11,8 +11,6 @@ mod debug_panic; mod diagnose_migration_history; mod evaluate_data_loss; mod get_database_version; -#[allow(missing_docs)] -mod infer_migration_steps; mod initialize; mod list_migration_directories; mod mark_migration_applied; @@ -36,7 +34,6 @@ pub use diagnose_migration_history::{ }; pub use evaluate_data_loss::*; pub use get_database_version::*; -pub use infer_migration_steps::*; pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; diff --git a/migration-engine/core/src/commands/infer_migration_steps.rs b/migration-engine/core/src/commands/infer_migration_steps.rs deleted file mode 100644 index e74502d5be79..000000000000 --- a/migration-engine/core/src/commands/infer_migration_steps.rs +++ /dev/null @@ -1,185 +0,0 @@ -//! The InferMigrationSteps RPC method. 
- -use super::MigrationStepsResultOutput; -use crate::{commands::command::*, migration_engine::MigrationEngine, *}; -use datamodel::ast::{parser::parse_schema, SchemaAst}; -use migration_connector::*; -use serde::Deserialize; -use tracing::debug; - -pub struct InferMigrationStepsCommand<'a> { - input: &'a InferMigrationStepsInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for InferMigrationStepsCommand<'a> { - type Input = InferMigrationStepsInput; - type Output = MigrationStepsResultOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + Sync + Send + 'static, - { - let cmd = InferMigrationStepsCommand { input }; - debug!(?cmd.input); - - let connector = engine.connector(); - let migration_persistence = connector.migration_persistence(); - migration_persistence.init().await?; - let database_migration_inferrer = connector.database_migration_inferrer(); - - let assume_to_be_applied = cmd.assume_to_be_applied(); - - cmd.validate_assumed_migrations_are_not_applied(migration_persistence) - .await?; - - let last_migration = migration_persistence.last().await?; - let current_datamodel_ast = if let Some(migration) = last_migration.as_ref() { - migration - .parse_schema_ast() - .map_err(CoreError::InvalidPersistedDatamodel)? 
- } else { - SchemaAst::empty() - }; - let assumed_datamodel_ast = engine - .datamodel_calculator() - .infer(&current_datamodel_ast, assume_to_be_applied.as_slice())?; - let assumed_datamodel = - datamodel::lift_ast_to_datamodel(&assumed_datamodel_ast).map_err(CoreError::ProducedBadDatamodel)?; - - let next_datamodel = parse_datamodel(&cmd.input.datamodel)?; - if let Some(err) = connector.check_database_version_compatibility(&next_datamodel) { - return Err(ConnectorError::user_facing_error(err).into()); - }; - - let next_datamodel_ast = parse_schema(&cmd.input.datamodel) - .map_err(|err| CoreError::Input(anyhow::anyhow!("{}", err.to_pretty_string("", &cmd.input.datamodel))))?; - - let model_migration_steps = engine - .datamodel_migration_steps_inferrer() - .infer(&assumed_datamodel_ast, &next_datamodel_ast); - - let database_migration = database_migration_inferrer - .infer(&assumed_datamodel.subject, &next_datamodel, &model_migration_steps) - .await?; - - let DestructiveChangeDiagnostics { - warnings, - unexecutable_migrations, - } = connector - .destructive_change_checker() - .check(&database_migration) - .await?; - - let (returned_datamodel_steps, returned_database_migration) = - if !cmd.input.is_watch_migration() && last_migration.map(|mig| mig.is_watch_migration()).unwrap_or(false) { - // Transition out of watch mode - let last_non_watch_applied_migration = migration_persistence.last_non_watch_applied_migration().await?; - let last_non_watch_datamodel_ast = last_non_watch_applied_migration - .as_ref() - .map(|m| m.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let last_non_watch_datamodel = last_non_watch_applied_migration - .map(|m| m.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let datamodel_steps = engine - .datamodel_migration_steps_inferrer() - .infer(&last_non_watch_datamodel_ast, &next_datamodel_ast); - - // The database 
migration since the last non-watch migration, so we can render all the steps applied - // in watch mode to the migrations folder. - let full_database_migration = database_migration_inferrer.infer_from_datamodels( - &last_non_watch_datamodel, - &next_datamodel, - &datamodel_steps, - )?; - - (datamodel_steps, full_database_migration) - } else { - (model_migration_steps, database_migration) - }; - - let database_steps = connector - .database_migration_step_applier() - .render_steps_pretty(&returned_database_migration)?; - - debug!(?returned_datamodel_steps); - - Ok(MigrationStepsResultOutput { - datamodel: datamodel::render_datamodel_to_string(&next_datamodel), - datamodel_steps: returned_datamodel_steps, - database_steps, - errors: [], - warnings, - general_errors: [], - unexecutable_migrations, - }) - } -} - -impl InferMigrationStepsCommand<'_> { - fn assume_to_be_applied(&self) -> Vec { - self.input - .assume_to_be_applied - .clone() - .or_else(|| { - self.input.assume_applied_migrations.as_ref().map(|migrations| { - migrations - .iter() - .flat_map(|migration| migration.datamodel_steps.clone().into_iter()) - .collect() - }) - }) - .unwrap_or_else(Vec::new) - } - - async fn validate_assumed_migrations_are_not_applied( - &self, - migration_persistence: &dyn MigrationPersistence, - ) -> CoreResult<()> { - if let Some(migrations) = self.input.assume_applied_migrations.as_ref() { - for migration in migrations { - if migration_persistence - .migration_is_already_applied(&migration.migration_id) - .await? - { - return Err(CoreError::Generic(anyhow::anyhow!( - "Input is invalid. Migration {} is already applied.", - migration.migration_id - ))); - } - } - } - - Ok(()) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct InferMigrationStepsInput { - pub migration_id: String, - #[serde(alias = "dataModel")] - pub datamodel: String, - /// Migration steps from migrations that have been inferred but not applied yet. 
- /// - /// These steps must be provided and correct for migration inferrence to work. - pub assume_to_be_applied: Option>, - pub assume_applied_migrations: Option>, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct AppliedMigration { - pub migration_id: String, - pub datamodel_steps: Vec, -} - -impl IsWatchMigration for InferMigrationStepsInput { - fn is_watch_migration(&self) -> bool { - self.migration_id.starts_with("watch") - } -} diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index eb96b6698aad..d2d6ee6796c5 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{InferMigrationStepsInput, MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::{MigrationStepsResultOutput, SchemaPushInput}; pub use core_error::{CoreError, CoreResult}; use enumflags2::BitFlags; pub use gate_keeper::GateKeeper; From c6eb860b0f25057f88a6b3b6ad61c0c3ee2e5fad Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:54:11 +0100 Subject: [PATCH 21/29] remove migrationProgress --- migration-engine/core/src/api.rs | 10 --- migration-engine/core/src/api/rpc.rs | 4 -- migration-engine/core/src/commands.rs | 3 - .../core/src/commands/migration_progress.rs | 61 ------------------- 4 files changed, 78 deletions(-) delete mode 100644 migration-engine/core/src/commands/migration_progress.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index ab25303bdd49..4b69e7add8ac 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -61,7 +61,6 @@ pub trait GenericApi: Send + Sync + 'static { &self, input: &MarkMigrationRolledBackInput, ) -> CoreResult; - async fn migration_progress(&self, input: &MigrationProgressInput) -> CoreResult; async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult; async fn 
reset(&self, input: &()) -> CoreResult<()>; async fn schema_push(&self, input: &SchemaPushInput) -> CoreResult; @@ -165,15 +164,6 @@ where .await } - async fn migration_progress(&self, input: &MigrationProgressInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!( - "MigrationProgress", - migration_id = input.migration_id.as_str() - )) - .await - } - async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult { self.handle_command::(input) .instrument(tracing::info_span!("PlanMigration")) diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 40cb93d19873..084210763c7d 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -25,7 +25,6 @@ enum RpcCommand { ListMigrationDirectories, MarkMigrationApplied, MarkMigrationRolledBack, - MigrationProgress, PlanMigration, Reset, SchemaPush, @@ -46,7 +45,6 @@ impl RpcCommand { RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", - RpcCommand::MigrationProgress => "migrationProgress", RpcCommand::PlanMigration => "planMigration", RpcCommand::Reset => "reset", RpcCommand::SchemaPush => "schemaPush", @@ -66,7 +64,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, - RpcCommand::MigrationProgress, RpcCommand::MarkMigrationRolledBack, RpcCommand::PlanMigration, RpcCommand::Reset, @@ -137,7 +134,6 @@ impl RpcApi { } RpcCommand::MarkMigrationApplied => render(executor.mark_migration_applied(¶ms.parse()?).await?), RpcCommand::MarkMigrationRolledBack => render(executor.mark_migration_rolled_back(¶ms.parse()?).await?), - RpcCommand::MigrationProgress => render(executor.migration_progress(¶ms.parse()?).await?), RpcCommand::PlanMigration => 
render(executor.plan_migration(¶ms.parse()?).await?), RpcCommand::Reset => render(executor.reset(&()).await?), RpcCommand::SchemaPush => render(executor.schema_push(¶ms.parse()?).await?), diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 7af4e3d0ba5c..7f9555a8d630 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -15,8 +15,6 @@ mod initialize; mod list_migration_directories; mod mark_migration_applied; mod mark_migration_rolled_back; -#[allow(missing_docs)] -mod migration_progress; mod plan_migration; mod reset; mod schema_push; @@ -40,7 +38,6 @@ pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppli pub use mark_migration_rolled_back::{ MarkMigrationRolledBackCommand, MarkMigrationRolledBackInput, MarkMigrationRolledBackOutput, }; -pub use migration_progress::*; pub use plan_migration::{PlanMigrationCommand, PlanMigrationInput, PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; diff --git a/migration-engine/core/src/commands/migration_progress.rs b/migration-engine/core/src/commands/migration_progress.rs deleted file mode 100644 index 3fd8dbf2362f..000000000000 --- a/migration-engine/core/src/commands/migration_progress.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::{commands::command::*, migration_engine::MigrationEngine, CoreError, CoreResult}; -use chrono::{DateTime, Utc}; -use migration_connector::*; -use serde::{Deserialize, Serialize}; - -pub struct MigrationProgressCommand; - -#[async_trait::async_trait] -impl MigrationCommand for MigrationProgressCommand { - type Input = MigrationProgressInput; - type Output = MigrationProgressOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - let migration_persistence = engine.connector().migration_persistence(); 
- migration_persistence.init().await?; - - let migration = migration_persistence - .by_name(&input.migration_id) - .await? - .ok_or_else(|| { - let error = anyhow::anyhow!( - "Could not load migration from database. Migration name was: {}", - &input.migration_id - ); - - CoreError::Input(error) - })?; - - Ok(MigrationProgressOutput { - status: migration.status, - steps: migration.datamodel_steps.len(), - applied: migration.applied, - rolled_back: migration.rolled_back, - errors: migration.errors, - started_at: migration.started_at, - finished_at: migration.finished_at, - }) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MigrationProgressInput { - pub migration_id: String, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct MigrationProgressOutput { - status: MigrationStatus, - steps: usize, - applied: usize, - rolled_back: usize, - errors: Vec, - started_at: DateTime, - finished_at: Option>, -} From 44920d05f4ab9d77e1b11b1f07456148b8b7447d Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 15:57:54 +0100 Subject: [PATCH 22/29] remove unapplyMigrations --- migration-engine/core/src/api.rs | 7 -- migration-engine/core/src/api/rpc.rs | 4 - migration-engine/core/src/commands.rs | 3 - .../core/src/commands/unapply_migration.rs | 104 ------------------ 4 files changed, 118 deletions(-) delete mode 100644 migration-engine/core/src/commands/unapply_migration.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index 4b69e7add8ac..d082c00553da 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -64,7 +64,6 @@ pub trait GenericApi: Send + Sync + 'static { async fn plan_migration(&self, input: &PlanMigrationInput) -> CoreResult; async fn reset(&self, input: &()) -> CoreResult<()>; async fn schema_push(&self, input: &SchemaPushInput) -> CoreResult; - async fn unapply_migration(&self, input: &UnapplyMigrationInput) -> 
CoreResult; } #[async_trait::async_trait] @@ -181,10 +180,4 @@ where .instrument(tracing::info_span!("SchemaPush")) .await } - - async fn unapply_migration(&self, input: &UnapplyMigrationInput) -> CoreResult { - self.handle_command::>(input) - .instrument(tracing::info_span!("UnapplyMigration")) - .await - } } diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 084210763c7d..2551db340cc0 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -28,7 +28,6 @@ enum RpcCommand { PlanMigration, Reset, SchemaPush, - UnapplyMigration, } impl RpcCommand { @@ -48,7 +47,6 @@ impl RpcCommand { RpcCommand::PlanMigration => "planMigration", RpcCommand::Reset => "reset", RpcCommand::SchemaPush => "schemaPush", - RpcCommand::UnapplyMigration => "unapplyMigration", } } } @@ -68,7 +66,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::PlanMigration, RpcCommand::Reset, RpcCommand::SchemaPush, - RpcCommand::UnapplyMigration, ]; impl RpcApi { @@ -137,7 +134,6 @@ impl RpcApi { RpcCommand::PlanMigration => render(executor.plan_migration(¶ms.parse()?).await?), RpcCommand::Reset => render(executor.reset(&()).await?), RpcCommand::SchemaPush => render(executor.schema_push(¶ms.parse()?).await?), - RpcCommand::UnapplyMigration => render(executor.unapply_migration(¶ms.parse()?).await?), }) } } diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 7f9555a8d630..4f7065f262be 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -18,8 +18,6 @@ mod mark_migration_rolled_back; mod plan_migration; mod reset; mod schema_push; -#[allow(missing_docs)] -mod unapply_migration; pub use apply_migrations::{ApplyMigrationsCommand, ApplyMigrationsInput, ApplyMigrationsOutput}; pub use apply_script::{ApplyScriptCommand, ApplyScriptInput, ApplyScriptOutput}; @@ -41,7 +39,6 @@ pub use mark_migration_rolled_back::{ pub use 
plan_migration::{PlanMigrationCommand, PlanMigrationInput, PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; -pub use unapply_migration::*; use migration_connector::{MigrationStep, MigrationWarning, PrettyDatabaseMigrationStep, UnexecutableMigration}; use serde::{Deserialize, Serialize}; diff --git a/migration-engine/core/src/commands/unapply_migration.rs b/migration-engine/core/src/commands/unapply_migration.rs deleted file mode 100644 index 9cb1cb2fc757..000000000000 --- a/migration-engine/core/src/commands/unapply_migration.rs +++ /dev/null @@ -1,104 +0,0 @@ -use crate::{commands::command::*, CoreResult}; -use crate::{migration_engine::MigrationEngine, CoreError}; -use datamodel::{ast::SchemaAst, Datamodel}; -use migration_connector::*; -use serde::{Deserialize, Serialize}; - -pub struct UnapplyMigrationCommand<'a> { - input: &'a UnapplyMigrationInput, -} - -#[async_trait::async_trait] -impl<'a> MigrationCommand for UnapplyMigrationCommand<'a> { - type Input = UnapplyMigrationInput; - type Output = UnapplyMigrationOutput; - - async fn execute(input: &Self::Input, engine: &MigrationEngine) -> CoreResult - where - C: MigrationConnector, - D: DatabaseMigrationMarker + 'static, - { - let cmd = UnapplyMigrationCommand { input }; - tracing::debug!("{:?}", cmd.input); - let connector = engine.connector(); - let persistence = connector.migration_persistence(); - persistence.init().await?; - - let result = match persistence.last_two_migrations().await? 
{ - (None, _) => UnapplyMigrationOutput { - rolled_back: "not-applicable".to_string(), - active: None, - errors: vec!["There is no last migration that can be rolled back.".to_string()], - warnings: Vec::new(), - }, - (Some(migration_to_rollback), second_to_last) => { - let schema_ast_before_last_migration = second_to_last - .as_ref() - .map(|migration| migration.parse_schema_ast()) - .unwrap_or_else(|| Ok(SchemaAst::empty())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - let schema_before_last_migration = second_to_last - .as_ref() - .map(|migration| migration.parse_datamodel()) - .unwrap_or_else(|| Ok(Datamodel::new())) - .map_err(CoreError::InvalidPersistedDatamodel)?; - - let last_schema_ast = migration_to_rollback - .parse_schema_ast() - .map_err(CoreError::InvalidPersistedDatamodel)?; - let last_schema = migration_to_rollback - .parse_datamodel() - .map_err(CoreError::InvalidPersistedDatamodel)?; - - // Generate backwards datamodel steps. - let datamodel_migration = - crate::migration::datamodel_differ::diff(&last_schema_ast, &schema_ast_before_last_migration); - - let database_migration = connector - .database_migration_inferrer() - .infer(&last_schema, &schema_before_last_migration, &datamodel_migration) - .await?; - - let destructive_change_checker = connector.destructive_change_checker(); - - let warnings = destructive_change_checker.check(&database_migration).await?; - - match (warnings.has_warnings(), input.force) { - (false, _) | (true, None) | (true, Some(true)) => { - connector - .migration_applier() - .unapply(&migration_to_rollback, &database_migration) - .await?; - } - (true, Some(false)) => (), - } - - let new_active_migration = connector.migration_persistence().last().await?.map(|m| m.name); - - UnapplyMigrationOutput { - rolled_back: migration_to_rollback.name, - active: new_active_migration, - errors: Vec::new(), - warnings: warnings.warnings, - } - } - }; - - Ok(result) - } -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = 
"camelCase")] -pub struct UnapplyMigrationInput { - pub force: Option, -} - -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct UnapplyMigrationOutput { - pub rolled_back: String, - pub active: Option, - pub errors: Vec, - pub warnings: Vec, -} From 7b75ae3331522a6aa568688d9f946aa8d2f37fc8 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 16:18:57 +0100 Subject: [PATCH 23/29] remove initialize --- migration-engine/core/src/api.rs | 10 ------ migration-engine/core/src/api/rpc.rs | 4 --- migration-engine/core/src/commands.rs | 2 -- .../core/src/commands/initialize.rs | 32 ------------------- 4 files changed, 48 deletions(-) delete mode 100644 migration-engine/core/src/commands/initialize.rs diff --git a/migration-engine/core/src/api.rs b/migration-engine/core/src/api.rs index d082c00553da..1bfe27481722 100644 --- a/migration-engine/core/src/api.rs +++ b/migration-engine/core/src/api.rs @@ -50,7 +50,6 @@ pub trait GenericApi: Send + Sync + 'static { input: &DiagnoseMigrationHistoryInput, ) -> CoreResult; async fn evaluate_data_loss(&self, input: &EvaluateDataLossInput) -> CoreResult; - async fn initialize(&self, input: &InitializeInput) -> CoreResult; async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, @@ -121,15 +120,6 @@ where .await } - async fn initialize(&self, input: &InitializeInput) -> CoreResult { - self.handle_command::(input) - .instrument(tracing::info_span!( - "Initialize", - migrations_directory_path = input.migrations_directory_path.as_str() - )) - .await - } - async fn list_migration_directories( &self, input: &ListMigrationDirectoriesInput, diff --git a/migration-engine/core/src/api/rpc.rs b/migration-engine/core/src/api/rpc.rs index 2551db340cc0..2f42b09f1245 100644 --- a/migration-engine/core/src/api/rpc.rs +++ b/migration-engine/core/src/api/rpc.rs @@ -21,7 +21,6 @@ enum RpcCommand { DiagnoseMigrationHistory, EvaluateDataLoss, GetDatabaseVersion, - Initialize, 
ListMigrationDirectories, MarkMigrationApplied, MarkMigrationRolledBack, @@ -40,7 +39,6 @@ impl RpcCommand { RpcCommand::DiagnoseMigrationHistory => "diagnoseMigrationHistory", RpcCommand::EvaluateDataLoss => "evaluateDataLoss", RpcCommand::GetDatabaseVersion => "getDatabaseVersion", - RpcCommand::Initialize => "initialize", RpcCommand::ListMigrationDirectories => "listMigrationDirectories", RpcCommand::MarkMigrationApplied => "markMigrationApplied", RpcCommand::MarkMigrationRolledBack => "markMigrationRolledBack", @@ -59,7 +57,6 @@ const AVAILABLE_COMMANDS: &[RpcCommand] = &[ RpcCommand::DiagnoseMigrationHistory, RpcCommand::EvaluateDataLoss, RpcCommand::GetDatabaseVersion, - RpcCommand::Initialize, RpcCommand::ListMigrationDirectories, RpcCommand::MarkMigrationApplied, RpcCommand::MarkMigrationRolledBack, @@ -125,7 +122,6 @@ impl RpcApi { } RpcCommand::EvaluateDataLoss => render(executor.evaluate_data_loss(¶ms.parse()?).await?), RpcCommand::GetDatabaseVersion => render(executor.version(&serde_json::Value::Null).await?), - RpcCommand::Initialize => render(executor.initialize(¶ms.parse()?).await?), RpcCommand::ListMigrationDirectories => { render(executor.list_migration_directories(¶ms.parse()?).await?) 
} diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 4f7065f262be..8aa15b5888f0 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -11,7 +11,6 @@ mod debug_panic; mod diagnose_migration_history; mod evaluate_data_loss; mod get_database_version; -mod initialize; mod list_migration_directories; mod mark_migration_applied; mod mark_migration_rolled_back; @@ -30,7 +29,6 @@ pub use diagnose_migration_history::{ }; pub use evaluate_data_loss::*; pub use get_database_version::*; -pub use initialize::{InitializeCommand, InitializeInput, InitializeOutput}; pub use list_migration_directories::*; pub use mark_migration_applied::{MarkMigrationAppliedCommand, MarkMigrationAppliedInput, MarkMigrationAppliedOutput}; pub use mark_migration_rolled_back::{ diff --git a/migration-engine/core/src/commands/initialize.rs b/migration-engine/core/src/commands/initialize.rs deleted file mode 100644 index 5e6d44768203..000000000000 --- a/migration-engine/core/src/commands/initialize.rs +++ /dev/null @@ -1,32 +0,0 @@ -use super::MigrationCommand; -use crate::{migration_engine::MigrationEngine, CoreResult}; -use serde::Deserialize; - -/// Input to the `Initialize` command. -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct InitializeInput { - /// Path to the migrations directory. - pub migrations_directory_path: String, -} - -/// Output of the `Initialize` command. -pub type InitializeOutput = (); - -/// Initialize the migrations directory and the migrations table. 
-pub struct InitializeCommand; - -#[async_trait::async_trait] -impl<'a> MigrationCommand for InitializeCommand { - type Input = InitializeInput; - - type Output = InitializeOutput; - - async fn execute(_input: &Self::Input, _engine: &MigrationEngine) -> CoreResult - where - C: migration_connector::MigrationConnector, - D: migration_connector::DatabaseMigrationMarker + Send + Sync + 'static, - { - todo!("initialize command") - } -} From 1b5efcb80a5f3d886dcbcbfd84e3b91d9d6c5a74 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Fri, 18 Dec 2020 16:45:53 +0100 Subject: [PATCH 24/29] start removing old migration persistence --- .../connectors/migration-connector/src/lib.rs | 18 -- .../src/migration_applier.rs | 105 ------- .../src/migration_persistence.rs | 268 ---------------- .../sql-migration-connector/src/lib.rs | 7 - .../src/sql_migration_persistence.rs | 296 ------------------ .../src/sql_schema_differ.rs | 6 +- .../src/sql/barrel_migration_executor.rs | 10 +- .../migration-engine-tests/src/test_api.rs | 32 +- .../tests/migration_persistence/mod.rs | 172 ---------- .../tests/migration_tests.rs | 3 +- 10 files changed, 7 insertions(+), 910 deletions(-) delete mode 100644 migration-engine/connectors/migration-connector/src/migration_applier.rs delete mode 100644 migration-engine/connectors/migration-connector/src/migration_persistence.rs delete mode 100644 migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs delete mode 100644 migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs diff --git a/migration-engine/connectors/migration-connector/src/lib.rs b/migration-engine/connectors/migration-connector/src/lib.rs index 8b775101ec38..aa9d89bffbdc 100644 --- a/migration-engine/connectors/migration-connector/src/lib.rs +++ b/migration-engine/connectors/migration-connector/src/lib.rs @@ -8,10 +8,6 @@ mod destructive_change_checker; mod error; pub mod features; mod imperative_migrations_persistence; 
-#[allow(missing_docs)] -mod migration_applier; -#[allow(missing_docs)] -mod migration_persistence; #[allow(missing_docs)] pub mod steps; @@ -26,8 +22,6 @@ pub use features::MigrationFeature; pub use imperative_migrations_persistence::{ ImperativeMigrationsPersistence, MigrationRecord, PersistenceNotInitializedError, Timestamp, }; -pub use migration_applier::*; -pub use migration_persistence::*; pub use migrations_directory::{create_migration_directory, list_migrations, ListMigrationsError, MigrationDirectory}; pub use steps::MigrationStep; @@ -66,9 +60,6 @@ pub trait MigrationConnector: Send + Sync + 'static { None } - /// See [MigrationPersistence](trait.MigrationPersistence.html). - fn migration_persistence(&self) -> &dyn MigrationPersistence; - /// See [ImperativeMigrationPersistence](trait.ImperativeMigrationPersistence.html). fn new_migration_persistence(&self) -> &dyn ImperativeMigrationsPersistence; @@ -80,15 +71,6 @@ pub trait MigrationConnector: Send + Sync + 'static { /// See [DestructiveChangeChecker](trait.DestructiveChangeChecker.html). fn destructive_change_checker(&self) -> &dyn DestructiveChangeChecker; - - /// See [MigrationStepApplier](trait.MigrationStepApplier.html). - fn migration_applier<'a>(&'a self) -> Box + Send + Sync + 'a> { - let applier = MigrationApplierImpl { - migration_persistence: self.migration_persistence(), - step_applier: self.database_migration_step_applier(), - }; - Box::new(applier) - } } /// Marker for the associated migration type for a connector. diff --git a/migration-engine/connectors/migration-connector/src/migration_applier.rs b/migration-engine/connectors/migration-connector/src/migration_applier.rs deleted file mode 100644 index 11aab23e2ea5..000000000000 --- a/migration-engine/connectors/migration-connector/src/migration_applier.rs +++ /dev/null @@ -1,105 +0,0 @@ -use crate::*; - -/// Apply and unapply migrations on the connector's database. 
-#[async_trait::async_trait] -pub trait MigrationApplier -where - T: Send + Sync, -{ - async fn apply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()>; - - async fn unapply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()>; -} - -pub struct MigrationApplierImpl<'a, T> -where - T: Send + Sync + 'static, -{ - pub migration_persistence: &'a dyn MigrationPersistence, - pub step_applier: &'a dyn DatabaseMigrationStepApplier, -} - -#[async_trait::async_trait] -impl<'a, T> MigrationApplier for MigrationApplierImpl<'a, T> -where - T: Send + Sync + 'static, -{ - async fn apply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()> { - assert_eq!(migration.status, MigrationStatus::Pending); // what other states are valid here? - let mut migration_updates = migration.update_params(); - migration_updates.status = MigrationStatus::MigrationInProgress; - self.migration_persistence.update(&migration_updates).await?; - - let apply_result = self.go_forward(&mut migration_updates, database_migration).await; - - match apply_result { - Ok(()) => { - migration_updates.mark_as_finished(); - self.migration_persistence.update(&migration_updates).await?; - Ok(()) - } - Err(err) => { - migration_updates.status = MigrationStatus::MigrationFailure; - migration_updates.errors = vec![format!("{:?}", err)]; - self.migration_persistence.update(&migration_updates).await?; - Err(err) - } - } - } - - async fn unapply(&self, migration: &Migration, database_migration: &T) -> ConnectorResult<()> { - assert_eq!(migration.status, MigrationStatus::MigrationSuccess); // what other states are valid here? 
- let mut migration_updates = migration.update_params(); - migration_updates.status = MigrationStatus::RollingBack; - self.migration_persistence.update(&migration_updates).await?; - - let unapply_result = self.go_backward(&mut migration_updates, database_migration).await; - - match unapply_result { - Ok(()) => { - migration_updates.status = MigrationStatus::RollbackSuccess; - self.migration_persistence.update(&migration_updates).await?; - Ok(()) - } - Err(err) => { - migration_updates.status = MigrationStatus::RollbackFailure; - migration_updates.errors = vec![format!("{:?}", err)]; - self.migration_persistence.update(&migration_updates).await?; - Err(err) - } - } - } -} - -impl<'a, T> MigrationApplierImpl<'a, T> -where - T: Send + Sync, -{ - async fn go_forward( - &self, - migration_updates: &mut MigrationUpdateParams, - database_migration: &T, - ) -> ConnectorResult<()> { - let mut step = 0; - while self.step_applier.apply_step(&database_migration, step).await? { - step += 1; - migration_updates.applied += 1; - self.migration_persistence.update(&migration_updates).await?; - } - Ok(()) - } - - async fn go_backward( - &self, - migration_updates: &mut MigrationUpdateParams, - database_migration: &T, - ) -> ConnectorResult<()> { - let mut step = 0; - while self.step_applier.apply_step(&database_migration, step).await? 
{ - step += 1; - migration_updates.rolled_back += 1; - self.migration_persistence.update(&migration_updates).await?; - } - Ok(()) - } -} diff --git a/migration-engine/connectors/migration-connector/src/migration_persistence.rs b/migration-engine/connectors/migration-connector/src/migration_persistence.rs deleted file mode 100644 index 40f8ef4e15ca..000000000000 --- a/migration-engine/connectors/migration-connector/src/migration_persistence.rs +++ /dev/null @@ -1,268 +0,0 @@ -use crate::{error::ConnectorError, steps::*, ConnectorResult}; -use chrono::{DateTime, Utc}; -use datamodel::{ast::SchemaAst, Datamodel}; -use serde::Serialize; -use std::str::FromStr; - -/// This trait is implemented by each connector. It provides a generic API to store and retrieve [Migration](struct.Migration.html) records. -#[async_trait::async_trait] -pub trait MigrationPersistence: Send + Sync { - /// Initialize migration persistence state. E.g. create the migrations table in an SQL database. - async fn init(&self) -> Result<(), ConnectorError>; - - /// Drop all persisted state. - async fn reset(&self) -> Result<(), ConnectorError>; - - async fn last_non_watch_applied_migration(&self) -> Result, ConnectorError> { - let migration = - self.load_all().await?.into_iter().rev().find(|migration| { - !migration.is_watch_migration() && migration.status == MigrationStatus::MigrationSuccess - }); - - Ok(migration) - } - - async fn last_non_watch_migration(&self) -> Result, ConnectorError> { - let mut all_migrations = self.load_all().await?; - all_migrations.reverse(); - let migration = all_migrations.into_iter().find(|m| !m.is_watch_migration()); - - Ok(migration) - } - - /// Returns the last successful Migration. - async fn last(&self) -> Result, ConnectorError> { - Ok(self.last_two_migrations().await?.0) - } - - /// Returns the last two successful migrations, for rollback purposes. The tuple will be - /// interpreted as (last_migration, second_to_last_migration). 
- async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)>; - - /// Fetch a migration by name. - async fn by_name(&self, name: &str) -> Result, ConnectorError>; - - /// This powers the listMigrations command. - async fn load_all(&self) -> Result, ConnectorError>; - - /// Write the migration to the Migration table. - async fn create(&self, migration: Migration) -> Result; - - /// Used by the MigrationApplier to write the progress of a [Migration](struct.Migration.html) - /// into the database. - async fn update(&self, params: &MigrationUpdateParams) -> Result<(), ConnectorError>; - - /// Returns whether the migration with the provided migration id has already been successfully applied. - /// - /// The default impl will load all migrations and scan for the provided migration id. Implementors are encouraged to implement this more efficiently. - async fn migration_is_already_applied(&self, migration_id: &str) -> Result { - let migrations = self.load_all().await?; - - let already_applied = migrations - .iter() - .any(|migration| migration.status == MigrationStatus::MigrationSuccess && migration.name == migration_id); - - Ok(already_applied) - } -} - -/// The representation of a migration as persisted through [MigrationPersistence](trait.MigrationPersistence.html). -#[derive(Debug, Clone, PartialEq)] -pub struct Migration { - /// The migration id. - pub name: String, - pub revision: usize, - pub status: MigrationStatus, - pub applied: usize, - pub rolled_back: usize, - /// The _target_ Prisma schema. - pub datamodel_string: String, - /// The schema migration steps to apply to get to the target Prisma schema. - pub datamodel_steps: Vec, - pub database_migration: serde_json::Value, - pub errors: Vec, - pub started_at: DateTime, - pub finished_at: Option>, -} - -/// Updates to be made to a persisted [Migration](struct.Migration.html). 
-#[derive(Debug, Clone)] -pub struct MigrationUpdateParams { - pub name: String, - pub new_name: String, - pub revision: usize, - pub status: MigrationStatus, - pub applied: usize, - pub rolled_back: usize, - pub errors: Vec, - pub finished_at: Option>, -} - -impl MigrationUpdateParams { - pub fn mark_as_finished(&mut self) { - self.status = MigrationStatus::MigrationSuccess; - self.finished_at = Some(Migration::timestamp_without_nanos()); - } -} - -pub trait IsWatchMigration { - fn is_watch_migration(&self) -> bool; -} - -pub struct NewMigration { - pub name: String, - pub datamodel_string: String, - pub datamodel_steps: Vec, - pub database_migration: serde_json::Value, -} - -impl Migration { - pub fn new(params: NewMigration) -> Migration { - let NewMigration { - name, - datamodel_string, - datamodel_steps, - database_migration, - } = params; - - Migration { - name, - revision: 0, - status: MigrationStatus::Pending, - datamodel_string, - datamodel_steps, - applied: 0, - rolled_back: 0, - database_migration, - errors: Vec::new(), - started_at: Self::timestamp_without_nanos(), - finished_at: None, - } - } - - pub fn update_params(&self) -> MigrationUpdateParams { - MigrationUpdateParams { - name: self.name.clone(), - new_name: self.name.clone(), - revision: self.revision, - status: self.status, - applied: self.applied, - rolled_back: self.rolled_back, - errors: self.errors.clone(), - finished_at: self.finished_at, - } - } - - // SQLite does not store nano precision. Therefore we cut it so we can assert equality in our tests. 
- pub fn timestamp_without_nanos() -> DateTime { - let timestamp = Utc::now().timestamp_millis(); - let nsecs = ((timestamp % 1000) * 1_000_000) as u32; - let secs = (timestamp / 1000) as i64; - let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); - let datetime: DateTime = DateTime::from_utc(naive, Utc); - datetime - } - - pub fn parse_datamodel(&self) -> Result { - datamodel::parse_datamodel_and_ignore_datasource_urls(&self.datamodel_string) - .map(|d| d.subject) - .map_err(|err| err.to_pretty_string("schema.prisma", &self.datamodel_string)) - } - - pub fn parse_schema_ast(&self) -> Result { - datamodel::parse_schema_ast(&self.datamodel_string) - .map_err(|err| err.to_pretty_string("schema.prisma", &self.datamodel_string)) - } -} - -impl IsWatchMigration for Migration { - fn is_watch_migration(&self) -> bool { - self.name.starts_with("watch") - } -} - -#[derive(Debug, Serialize, PartialEq, Clone, Copy)] -pub enum MigrationStatus { - Pending, - MigrationInProgress, - MigrationSuccess, - MigrationFailure, - RollingBack, - RollbackSuccess, - RollbackFailure, -} - -impl MigrationStatus { - pub fn code(&self) -> &str { - match self { - MigrationStatus::Pending => "Pending", - MigrationStatus::MigrationInProgress => "MigrationInProgress", - MigrationStatus::MigrationSuccess => "MigrationSuccess", - MigrationStatus::MigrationFailure => "MigrationFailure", - MigrationStatus::RollingBack => "RollingBack", - MigrationStatus::RollbackSuccess => "RollbackSuccess", - MigrationStatus::RollbackFailure => "RollbackFailure", - } - } - - pub fn is_success(&self) -> bool { - matches!(self, MigrationStatus::MigrationSuccess) - } - - pub fn is_pending(&self) -> bool { - matches!(self, MigrationStatus::Pending) - } -} - -impl FromStr for MigrationStatus { - type Err = String; - - fn from_str(s: &str) -> Result { - let status = match s { - "Pending" => MigrationStatus::Pending, - "MigrationInProgress" => MigrationStatus::MigrationInProgress, - "MigrationSuccess" => 
MigrationStatus::MigrationSuccess, - "MigrationFailure" => MigrationStatus::MigrationFailure, - "RollingBack" => MigrationStatus::RollingBack, - "RollbackSuccess" => MigrationStatus::RollbackSuccess, - "RollbackFailure" => MigrationStatus::RollbackFailure, - _ => return Err(format!("MigrationStatus {:?} is not known", s)), - }; - - Ok(status) - } -} - -/// A no-op implementor of [MigrationPersistence](trait.MigrationPersistence.html). -pub struct EmptyMigrationPersistence {} - -#[async_trait::async_trait] -impl MigrationPersistence for EmptyMigrationPersistence { - async fn init(&self) -> Result<(), ConnectorError> { - Ok(()) - } - - async fn reset(&self) -> Result<(), ConnectorError> { - Ok(()) - } - - async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)> { - Ok((None, None)) - } - - async fn by_name(&self, _name: &str) -> Result, ConnectorError> { - Ok(None) - } - - async fn load_all(&self) -> Result, ConnectorError> { - Ok(Vec::new()) - } - - async fn create(&self, _migration: Migration) -> Result { - unimplemented!("Not allowed on a EmptyMigrationPersistence") - } - - async fn update(&self, _params: &MigrationUpdateParams) -> Result<(), ConnectorError> { - unimplemented!("Not allowed on a EmptyMigrationPersistence") - } -} diff --git a/migration-engine/connectors/sql-migration-connector/src/lib.rs b/migration-engine/connectors/sql-migration-connector/src/lib.rs index 8183a6233d4f..9726e4bd54ee 100644 --- a/migration-engine/connectors/sql-migration-connector/src/lib.rs +++ b/migration-engine/connectors/sql-migration-connector/src/lib.rs @@ -12,13 +12,10 @@ mod sql_database_step_applier; mod sql_destructive_change_checker; mod sql_imperative_migration_persistence; mod sql_migration; -mod sql_migration_persistence; mod sql_renderer; mod sql_schema_calculator; mod sql_schema_differ; - pub use sql_migration::SqlMigration; -pub use sql_migration_persistence::MIGRATION_TABLE_NAME; use connection_wrapper::Connection; use datamodel::Datamodel; @@ 
-131,10 +128,6 @@ impl MigrationConnector for SqlMigrationConnector { self.flavour.check_database_version_compatibility(datamodel) } - fn migration_persistence(&self) -> &dyn MigrationPersistence { - self - } - fn database_migration_inferrer(&self) -> &dyn DatabaseMigrationInferrer { self } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs b/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs deleted file mode 100644 index b0b323ed4eb6..000000000000 --- a/migration-engine/connectors/sql-migration-connector/src/sql_migration_persistence.rs +++ /dev/null @@ -1,296 +0,0 @@ -use crate::{connection_wrapper::Connection, SqlMigrationConnector}; -use barrel::types; -use chrono::*; -use migration_connector::*; -use quaint::{ast::*, connector::ResultSet, prelude::SqlFamily}; -use std::convert::TryFrom; - -#[async_trait::async_trait] -impl MigrationPersistence for SqlMigrationConnector { - async fn init(&self) -> Result<(), ConnectorError> { - let schema_name = self.conn().connection_info().schema_name(); - - let sql_str = match self.flavour.sql_family() { - SqlFamily::Sqlite => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table_if_not_exists(MIGRATION_TABLE_NAME, migration_table_setup_sqlite); - m.make_from(barrel::SqlVariant::Sqlite) - } - SqlFamily::Postgres => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table(MIGRATION_TABLE_NAME, migration_table_setup_postgres); - m.schema(schema_name).make_from(barrel::SqlVariant::Pg) - } - SqlFamily::Mysql => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table(MIGRATION_TABLE_NAME, migration_table_setup_mysql); - m.make_from(barrel::SqlVariant::Mysql) - } - SqlFamily::Mssql => { - let mut m = barrel::Migration::new().schema(schema_name); - m.create_table_if_not_exists(MIGRATION_TABLE_NAME, migration_table_setup_mssql); - m.make_from(barrel::SqlVariant::Mssql) - } - }; - - 
self.conn().raw_cmd(&sql_str).await.ok(); - - Ok(()) - } - - async fn reset(&self) -> ConnectorResult<()> { - use quaint::ast::Delete; - - self.conn() - .query(Delete::from_table(( - self.conn().connection_info().schema_name(), - MIGRATION_TABLE_NAME, - ))) - .await - .ok(); - - Ok(()) - } - - async fn last_two_migrations(&self) -> ConnectorResult<(Option, Option)> { - last_applied_migrations(self.conn(), self.table()).await - } - - async fn load_all(&self) -> ConnectorResult> { - let query = Select::from_table(self.table()).order_by(REVISION_COLUMN.ascend()); - let result_set = self.conn().query(query).await?; - - Ok(parse_rows_new(result_set)) - } - - async fn by_name(&self, name: &str) -> ConnectorResult> { - let conditions = NAME_COLUMN.equals(name); - let query = Select::from_table(self.table()) - .so_that(conditions) - .order_by(REVISION_COLUMN.descend()); - let result_set = self.conn().query(query).await?; - - Ok(parse_rows_new(result_set).into_iter().next()) - } - - async fn create(&self, migration: Migration) -> Result { - let mut cloned = migration.clone(); - let model_steps_json = serde_json::to_string(&migration.datamodel_steps).unwrap(); - let database_migration_json = serde_json::to_string(&migration.database_migration).unwrap(); - let errors_json = serde_json::to_string(&migration.errors).unwrap(); - - let insert = Insert::single_into(self.table()) - .value(DATAMODEL_COLUMN, migration.datamodel_string) - .value(NAME_COLUMN, migration.name) - .value(STATUS_COLUMN, migration.status.code()) - .value(APPLIED_COLUMN, migration.applied) - .value(ROLLED_BACK_COLUMN, migration.rolled_back) - .value(DATAMODEL_STEPS_COLUMN, model_steps_json) - .value(DATABASE_MIGRATION_COLUMN, database_migration_json) - .value(ERRORS_COLUMN, errors_json) - .value(STARTED_AT_COLUMN, self.convert_datetime(migration.started_at)) - .value(FINISHED_AT_COLUMN, Option::>::None); - - match self.flavour.sql_family() { - SqlFamily::Sqlite | SqlFamily::Mysql => { - let result_set = 
self.conn().query(insert).await.unwrap(); - let id = result_set.last_insert_id().unwrap(); - - cloned.revision = usize::try_from(id).unwrap(); - } - SqlFamily::Postgres | SqlFamily::Mssql => { - let returning_insert = Insert::from(insert).returning(&["revision"]); - let result_set = self.conn().query(returning_insert).await.unwrap(); - - if let Some(row) = result_set.into_iter().next() { - cloned.revision = row["revision"].as_i64().unwrap() as usize; - } - } - } - - Ok(cloned) - } - - async fn update(&self, params: &MigrationUpdateParams) -> Result<(), ConnectorError> { - let finished_at_value = match params.finished_at { - Some(x) => self.convert_datetime(x), - None => Value::from(Option::>::None), - }; - let errors_json = serde_json::to_string(¶ms.errors).unwrap(); - let query = Update::table(self.table()) - .set(NAME_COLUMN, params.new_name.clone()) - .set(STATUS_COLUMN, params.status.code()) - .set(APPLIED_COLUMN, params.applied) - .set(ROLLED_BACK_COLUMN, params.rolled_back) - .set(ERRORS_COLUMN, errors_json) - .set(FINISHED_AT_COLUMN, finished_at_value) - .so_that( - NAME_COLUMN - .equals(params.name.clone()) - .and(REVISION_COLUMN.equals(params.revision)), - ); - - self.conn().query(query).await?; - - Ok(()) - } -} - -/// Returns the last 2 applied migrations, or a shorter vec in absence of applied migrations. 
-async fn last_applied_migrations( - conn: &Connection, - table: Table<'_>, -) -> ConnectorResult<(Option, Option)> { - let conditions = STATUS_COLUMN.equals(MigrationStatus::MigrationSuccess.code()); - let query = Select::from_table(table) - .so_that(conditions) - .order_by(REVISION_COLUMN.descend()) - .limit(2); - - let result_set = conn.query(query).await?; - let mut rows = parse_rows_new(result_set).into_iter(); - let last = rows.next(); - let second_to_last = rows.next(); - Ok((last, second_to_last)) -} - -fn migration_table_setup_sqlite(t: &mut barrel::Table) { - migration_table_setup(t, types::text(), types::custom("DATETIME"), types::custom("TEXT")); -} - -fn migration_table_setup_postgres(t: &mut barrel::Table) { - migration_table_setup(t, types::text(), types::custom("timestamp(3)"), types::custom("TEXT")); -} - -fn migration_table_setup_mysql(t: &mut barrel::Table) { - migration_table_setup( - t, - types::text(), - types::custom("datetime(3)"), - types::custom("LONGTEXT"), - ); -} - -fn migration_table_setup_mssql(t: &mut barrel::Table) { - migration_table_setup( - t, - types::custom("nvarchar(max)"), - types::custom("datetime2"), - types::custom("nvarchar(max)"), - ); -} - -fn migration_table_setup( - t: &mut barrel::Table, - text_type: barrel::types::Type, - datetime_type: barrel::types::Type, - unlimited_text_type: barrel::types::Type, -) { - t.add_column(REVISION_COLUMN, types::primary()); - t.add_column(NAME_COLUMN, text_type.clone()); - t.add_column(DATAMODEL_COLUMN, unlimited_text_type.clone()); - t.add_column(STATUS_COLUMN, text_type); - t.add_column(APPLIED_COLUMN, types::integer()); - t.add_column(ROLLED_BACK_COLUMN, types::integer()); - t.add_column(DATAMODEL_STEPS_COLUMN, unlimited_text_type.clone()); - t.add_column(DATABASE_MIGRATION_COLUMN, unlimited_text_type.clone()); - t.add_column(ERRORS_COLUMN, unlimited_text_type); - t.add_column(STARTED_AT_COLUMN, datetime_type.clone()); - t.add_column(FINISHED_AT_COLUMN, 
datetime_type.nullable(true)); -} - -impl SqlMigrationConnector { - fn table(&self) -> Table<'_> { - match self.flavour.sql_family() { - SqlFamily::Sqlite => { - // sqlite case. Otherwise quaint produces invalid SQL - MIGRATION_TABLE_NAME.to_string().into() - } - _ => ( - self.conn().connection_info().schema_name().to_string(), - MIGRATION_TABLE_NAME.to_string(), - ) - .into(), - } - } - - fn convert_datetime(&self, datetime: DateTime) -> Value<'_> { - match self.flavour.sql_family() { - SqlFamily::Sqlite => Value::integer(datetime.timestamp_millis()), - SqlFamily::Postgres => Value::datetime(datetime), - SqlFamily::Mysql => Value::datetime(datetime), - SqlFamily::Mssql => Value::datetime(datetime), - } - } -} - -fn convert_parameterized_date_value(db_value: &Value<'_>) -> DateTime { - match db_value { - Value::Integer(Some(x)) => timestamp_to_datetime(*x), - Value::DateTime(Some(x)) => *x, - Value::Date(Some(date)) => DateTime::from_utc(date.and_hms(0, 0, 0), Utc), - x => unimplemented!("Got unsupported value {:?} in date conversion", x), - } -} - -fn timestamp_to_datetime(timestamp: i64) -> DateTime { - let nsecs = ((timestamp % 1000) * 1_000_000) as u32; - let secs = (timestamp / 1000) as i64; - let naive = chrono::NaiveDateTime::from_timestamp(secs, nsecs); - let datetime: DateTime = DateTime::from_utc(naive, Utc); - - datetime -} - -fn parse_rows_new(result_set: ResultSet) -> Vec { - result_set - .into_iter() - .map(|row| { - let datamodel_string: String = row[DATAMODEL_COLUMN].to_string().unwrap(); - let datamodel_steps_json: String = row[DATAMODEL_STEPS_COLUMN].to_string().unwrap(); - - let database_migration_string: String = row[DATABASE_MIGRATION_COLUMN].to_string().unwrap(); - let errors_json: String = row[ERRORS_COLUMN].to_string().unwrap(); - - let finished_at = match &row[FINISHED_AT_COLUMN] { - v if v.is_null() => None, - x => Some(convert_parameterized_date_value(x)), - }; - - let datamodel_steps = - 
serde_json::from_str(&datamodel_steps_json).expect("Error parsing the migration steps"); - - let database_migration_json = - serde_json::from_str(&database_migration_string).expect("Error parsing the database migration steps"); - let errors: Vec = serde_json::from_str(&errors_json).unwrap(); - - Migration { - name: row[NAME_COLUMN].to_string().unwrap(), - revision: row[REVISION_COLUMN].as_i64().unwrap() as usize, - datamodel_string, - status: row[STATUS_COLUMN].to_string().unwrap().parse().unwrap(), - applied: row[APPLIED_COLUMN].as_i64().unwrap() as usize, - rolled_back: row[ROLLED_BACK_COLUMN].as_i64().unwrap() as usize, - datamodel_steps, - database_migration: database_migration_json, - errors, - started_at: convert_parameterized_date_value(&row[STARTED_AT_COLUMN]), - finished_at, - } - }) - .collect() -} - -/// The name of the migrations table. -pub static MIGRATION_TABLE_NAME: &str = "_Migration"; -static NAME_COLUMN: &str = "name"; -static REVISION_COLUMN: &str = "revision"; -static DATAMODEL_COLUMN: &str = "datamodel"; -static STATUS_COLUMN: &str = "status"; -static APPLIED_COLUMN: &str = "applied"; -static ROLLED_BACK_COLUMN: &str = "rolled_back"; -static DATAMODEL_STEPS_COLUMN: &str = "datamodel_steps"; -static DATABASE_MIGRATION_COLUMN: &str = "database_migration"; -static ERRORS_COLUMN: &str = "errors"; -static STARTED_AT_COLUMN: &str = "started_at"; -static FINISHED_AT_COLUMN: &str = "finished_at"; diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs index e61de347667f..89449dc39b8b 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs @@ -13,7 +13,7 @@ use crate::{ self, AddColumn, AddForeignKey, AlterColumn, AlterEnum, AlterTable, CreateEnum, CreateIndex, CreateTable, DropColumn, DropEnum, DropForeignKey, DropIndex, 
DropTable, RedefineTable, SqlMigrationStep, TableChange, }, - SqlFlavour, SqlSchema, MIGRATION_TABLE_NAME, + SqlFlavour, SqlSchema, }; use column::ColumnTypeChange; use enums::EnumDiffer; @@ -496,9 +496,7 @@ impl<'schema> SqlSchemaDiffer<'schema> { } fn table_is_ignored(&self, table_name: &str) -> bool { - table_name == MIGRATION_TABLE_NAME - || table_name == "_prisma_migrations" - || self.flavour.table_should_be_ignored(&table_name) + table_name == "_prisma_migrations" || self.flavour.table_should_be_ignored(&table_name) } fn enum_pairs(&self) -> impl Iterator> { diff --git a/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs b/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs index 2954cb897646..d4e75ca9f785 100644 --- a/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs +++ b/migration-engine/migration-engine-tests/src/sql/barrel_migration_executor.rs @@ -1,6 +1,5 @@ use crate::sql::TestApi; use quaint::prelude::Queryable; -use sql_migration_connector::MIGRATION_TABLE_NAME; use sql_schema_describer::SqlSchema; pub struct BarrelMigrationExecutor<'a> { @@ -26,14 +25,7 @@ impl BarrelMigrationExecutor<'_> { let full_sql = migration.make_from(self.sql_variant); self.api.database().raw_cmd(&full_sql).await.unwrap(); - let mut result = self.api.describe_database().await.expect("Description failed"); - - // The presence of the _Migration table makes assertions harder. Therefore remove it. 
- result.tables = result - .tables - .into_iter() - .filter(|t| t.name != MIGRATION_TABLE_NAME) - .collect(); + let result = self.api.describe_database().await.expect("Description failed"); Ok(result) } diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 3ea7420d64e5..c2f162c5c875 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -27,10 +27,7 @@ use super::{ use crate::{connectors::Tags, test_api::list_migration_directories::ListMigrationDirectories, AssertionResult}; use enumflags2::BitFlags; use indoc::formatdoc; -use migration_connector::{ - ImperativeMigrationsPersistence, MigrationConnector, MigrationFeature, MigrationPersistence, MigrationRecord, - MigrationStep, -}; +use migration_connector::{ImperativeMigrationsPersistence, MigrationFeature, MigrationRecord}; use migration_core::{ api::{GenericApi, MigrationApi}, @@ -40,7 +37,7 @@ use quaint::{ prelude::{ConnectionInfo, Queryable, SqlFamily}, single::Quaint, }; -use sql_migration_connector::{SqlMigration, SqlMigrationConnector, MIGRATION_TABLE_NAME}; +use sql_migration_connector::{SqlMigration, SqlMigrationConnector}; use sql_schema_describer::*; use tempfile::TempDir; use test_setup::*; @@ -78,14 +75,6 @@ impl TestApi { self.tags.contains(Tags::Mariadb) } - pub async fn migration_persistence(&self) -> &dyn MigrationPersistence { - let persistence = self.api.connector().migration_persistence(); - - persistence.init().await.unwrap(); - - persistence - } - pub fn imperative_migration_persistence<'a>(&'a self) -> &(dyn ImperativeMigrationsPersistence + 'a) { self.api.connector() } @@ -193,22 +182,7 @@ impl TestApi { } pub async fn describe_database(&self) -> Result { - let mut result = self.api.connector().describe_schema().await?; - - // the presence of the _Migration table makes assertions harder. Therefore remove it from the result. 
- result.tables = result - .tables - .into_iter() - .filter(|t| t.name != MIGRATION_TABLE_NAME) - .collect(); - - // Also the sequences of the _Migration table - result.sequences = result - .sequences - .into_iter() - .filter(|seq| !seq.name.contains("_Migration")) - .collect(); - + let result = self.api.connector().describe_schema().await?; Ok(result) } diff --git a/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs b/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs deleted file mode 100644 index 2fdffdf2ed49..000000000000 --- a/migration-engine/migration-engine-tests/tests/migration_persistence/mod.rs +++ /dev/null @@ -1,172 +0,0 @@ -mod imperative_migration_persistence_tests; - -use migration_connector::{steps::CreateEnum, *}; -use migration_engine_tests::*; -use pretty_assertions::assert_eq; -use quaint::prelude::SqlFamily; - -fn empty_migration(name: String) -> Migration { - Migration { - name, - revision: 0, - status: MigrationStatus::Pending, - datamodel_string: String::new(), - datamodel_steps: Vec::new(), - applied: 0, - rolled_back: 0, - database_migration: serde_json::json!({}), - errors: Vec::new(), - started_at: Migration::timestamp_without_nanos(), - finished_at: None, - } -} - -#[test_each_connector] -async fn last_should_return_none_if_there_is_no_migration(api: &TestApi) { - let persistence = api.migration_persistence().await; - let result = persistence.last().await.unwrap(); - assert_eq!(result.is_some(), false); -} - -#[test_each_connector] -async fn last_must_return_none_if_there_is_no_successful_migration(api: &TestApi) -> TestResult { - let persistence = api.migration_persistence().await; - persistence.create(empty_migration("my_migration".to_string())).await?; - let loaded = persistence.last().await?; - assert_eq!(loaded, None); - - Ok(()) -} - -#[test_each_connector] -async fn load_all_should_return_empty_if_there_is_no_migration(api: &TestApi) { - let persistence = 
api.migration_persistence().await; - let result = persistence.load_all().await.unwrap(); - assert_eq!(result.is_empty(), true); -} - -#[test_each_connector] -async fn load_all_must_return_all_created_migrations(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration1 = persistence - .create(empty_migration("migration_1".to_string())) - .await - .unwrap(); - let migration2 = persistence - .create(empty_migration("migration_2".to_string())) - .await - .unwrap(); - let migration3 = persistence - .create(empty_migration("migration_3".to_string())) - .await - .unwrap(); - - let mut result = persistence.load_all().await.unwrap(); - if matches!(api.sql_family(), SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - result[0].started_at = migration1.started_at; - result[1].started_at = migration2.started_at; - result[2].started_at = migration3.started_at; - } - assert_eq!(result, vec![migration1, migration2, migration3]) -} - -#[test_each_connector] -async fn create_should_allow_to_create_a_new_migration(api: &TestApi) { - let dm = r#" - model Test { - id String @id @default(cuid()) - } - "#; - - let persistence = api.migration_persistence().await; - let mut migration = empty_migration("my_migration".to_string()); - migration.status = MigrationStatus::MigrationSuccess; - migration.datamodel_string = dm.to_owned(); - migration.datamodel_steps = vec![MigrationStep::CreateEnum(CreateEnum { - r#enum: "MyEnum".to_string(), - values: vec!["A".to_string(), "B".to_string()], - })]; - migration.errors = vec!["error1".to_string(), "error2".to_string()]; - - let result = persistence.create(migration.clone()).await.unwrap(); - migration.revision = result.revision; // copy over the generated revision so that the assertion can work.` - - assert_eq!(result, migration); - let mut loaded = persistence.last().await.unwrap().unwrap(); - - if matches!(api.sql_family(), 
SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - loaded.started_at = migration.started_at; - } - - assert_eq!(loaded, migration); -} - -#[test_each_connector] -async fn create_should_increment_revisions(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration1 = persistence - .create(empty_migration("migration_1".to_string())) - .await - .unwrap(); - let migration2 = persistence - .create(empty_migration("migration_2".to_string())) - .await - .unwrap(); - assert_eq!(migration1.revision + 1, migration2.revision); -} - -#[test_each_connector] -async fn update_must_work(api: &TestApi) { - let persistence = api.migration_persistence().await; - let migration = persistence - .create(empty_migration("my_migration".to_string())) - .await - .unwrap(); - - let mut params = migration.update_params(); - params.status = MigrationStatus::MigrationSuccess; - params.applied = 10; - params.rolled_back = 11; - params.errors = vec!["err1".to_string(), "err2".to_string()]; - params.finished_at = Some(Migration::timestamp_without_nanos()); - params.new_name = "my_new_migration_name".to_string(); - - persistence.update(¶ms).await.unwrap(); - - let loaded = persistence.last().await.unwrap().unwrap(); - assert_eq!(loaded.status, params.status); - assert_eq!(loaded.applied, params.applied); - assert_eq!(loaded.rolled_back, params.rolled_back); - assert_eq!(loaded.errors, params.errors); - if !matches!(api.sql_family(), SqlFamily::Mysql | SqlFamily::Sqlite) { - // TODO: mysql currently loses milli seconds on loading, and sqlite is - // the wrong column type - assert_eq!(loaded.finished_at, params.finished_at); - } - assert_eq!(loaded.name, params.new_name); -} - -#[test_each_connector] -async fn migration_is_already_applied_must_work(api: &TestApi) -> TestResult { - let persistence = api.migration_persistence().await; - - let mut migration_1 = 
empty_migration("migration_1".to_string()); - migration_1.status = MigrationStatus::MigrationSuccess; - - persistence.create(migration_1).await?; - - let mut migration_2 = empty_migration("migration_2".to_string()); - migration_2.status = MigrationStatus::MigrationFailure; - - persistence.create(migration_2).await?; - - assert!(persistence.migration_is_already_applied("migration_1").await?); - assert!(!persistence.migration_is_already_applied("migration_2").await?); - assert!(!persistence.migration_is_already_applied("another_migration").await?); - - Ok(()) -} diff --git a/migration-engine/migration-engine-tests/tests/migration_tests.rs b/migration-engine/migration-engine-tests/tests/migration_tests.rs index 9b38a3967bda..1daef5c0210b 100644 --- a/migration-engine/migration-engine-tests/tests/migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migration_tests.rs @@ -7,7 +7,6 @@ mod existing_data; mod existing_databases; mod initialization; mod list_migration_directories; -mod migration_persistence; mod migrations; mod native_types; mod reset; @@ -2436,7 +2435,7 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { api.schema_push(dm1).send().await?.assert_green()?; // Drop the existing migrations. 
- api.migration_persistence().await.reset().await?; + api.reset().send().await?; let dm2 = r#" datasource db { From f1f9cd978be460f44bda7be7089f26123f66910d Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Sun, 20 Dec 2020 19:48:11 +0100 Subject: [PATCH 25/29] start removing migrationsteps serialization --- .../migration-connector/src/steps.rs | 4 +- .../migration-connector/tests/steps_tests.rs | 784 +++++++++--------- .../core/rpc_examples/applyMigration.json | 179 ---- .../core/rpc_examples/dmmfToDml.json | 9 - .../rpc_examples/inferMigrationSteps.json | 11 - .../core/rpc_examples/listMigrations.json | 1 - .../core/rpc_examples/migrationProgress.json | 9 - migration-engine/core/src/commands.rs | 16 - migration-engine/core/src/lib.rs | 2 +- .../src/command_helpers.rs | 21 - .../migration-engine-tests/src/lib.rs | 3 +- .../migration-engine-tests/src/sql.rs | 1 - 12 files changed, 396 insertions(+), 644 deletions(-) delete mode 100644 migration-engine/core/rpc_examples/applyMigration.json delete mode 100644 migration-engine/core/rpc_examples/dmmfToDml.json delete mode 100644 migration-engine/core/rpc_examples/inferMigrationSteps.json delete mode 100644 migration-engine/core/rpc_examples/listMigrations.json delete mode 100644 migration-engine/core/rpc_examples/migrationProgress.json delete mode 100644 migration-engine/migration-engine-tests/src/command_helpers.rs diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index 43807a0cd9fb..e206cfa613d1 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -4,8 +4,8 @@ use datamodel::ast; use serde::{Deserialize, Serialize}; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +// #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +// #[serde(tag = "tag", deny_unknown_fields)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), diff --git a/migration-engine/connectors/migration-connector/tests/steps_tests.rs b/migration-engine/connectors/migration-connector/tests/steps_tests.rs index 9e3ce175ea35..5614b7cf4348 100644 --- a/migration-engine/connectors/migration-connector/tests/steps_tests.rs +++ b/migration-engine/connectors/migration-connector/tests/steps_tests.rs @@ -1,392 +1,392 @@ -#![allow(non_snake_case)] - -use migration_connector::steps::*; - -#[test] -fn full_CreateModel_must_work() { - let json = r#"{"tag":"CreateModel","model":"Blog"}"#; - let expected_struct = MigrationStep::CreateModel(CreateModel { - model: "Blog".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateModel_must_work() { - let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - model: "Blog".to_string(), - new_name: None, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_UpdateModel_must_work() { - let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; - let expected_struct = MigrationStep::UpdateModel(UpdateModel { - model: "Blog".to_string(), - new_name: Some("MyBlog".to_string()), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteModel_must_work() { - let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; - let expected_struct = MigrationStep::DeleteModel(DeleteModel { - model: "Blog".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_CreateField_must_work() { - let json = r#" - { - "tag":"CreateField", - "model":"Blog", - "field":"title", - "type":"String", - "arity":"Required" - } - "#; - let 
expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - field: "title".to_string(), - tpe: "String".to_owned(), - arity: FieldArity::Required, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_CreateField_must_work() { - let json = r#"{ - "tag":"CreateField", - "model": "Blog", - "field": "title", - "type": "String", - "arity": "Optional" - }"#; - let expected_struct = MigrationStep::CreateField(CreateField { - model: "Blog".to_string(), - field: "title".to_string(), - tpe: "String".to_owned(), - arity: FieldArity::Optional, - }); - - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateField_must_work() { - let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - field: "title".to_string(), - new_name: None, - tpe: None, - arity: None, - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_UpdateField_must_work() { - let json = r#" - { - "tag": "UpdateField", - "model": "Blog", - "field": "title", - "newName": "MyBlog", - "type": "String", - "arity": "Optional" - } - "#; - let expected_struct = MigrationStep::UpdateField(UpdateField { - model: "Blog".to_string(), - field: "title".to_string(), - new_name: Some("MyBlog".to_string()), - tpe: Some("String".to_owned()), - arity: Some(FieldArity::Optional), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteField_must_work() { - let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; - let expected_struct = MigrationStep::DeleteField(DeleteField { - model: "Blog".to_string(), - field: "title".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn CreateEnum_must_work() { - let json = r#" - { - "tag": "CreateEnum", - "enum": "BlogCategory", - "values": ["Politics","Tech"] - } - "#; - let expected_struct = 
MigrationStep::CreateEnum(CreateEnum { - r#enum: "BlogCategory".to_string(), - values: vec!["Politics".to_string(), "Tech".to_string()], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn minimal_UpdateEnum_must_work() { - let json = r#" - { - "tag": "UpdateEnum", - "enum": "BlogCategory" - } - "#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - r#enum: "BlogCategory".to_string(), - new_name: None, - created_values: vec![], - deleted_values: vec![], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn full_Update_Enum_must_work() { - let json = r#" - { - "tag": "UpdateEnum", - "enum": "BlogCategory", - "newName": "MyBlogCategory", - "createdValues": ["Tech"], - "deletedValues": ["Nology"] - } - "#; - let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { - r#enum: "BlogCategory".to_string(), - new_name: Some("MyBlogCategory".to_string()), - created_values: vec!["Tech".to_string()], - deleted_values: vec!["Nology".to_string()], - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn DeleteEnum_must_work() { - let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; - let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { - r#enum: "BlogCategory".to_string(), - }); - assert_symmetric_serde(json, expected_struct); -} - -#[test] -fn CreateDirective_must_work() { - let json = r#" - { - "tag": "CreateDirective", - "location": { - "directive": "map", - "path": { - "tag": "Model", - "model": "Blog" - } - } - } - "#; - - let expected_step = MigrationStep::CreateDirective(CreateDirective { - location: DirectiveLocation { - path: DirectivePath::Model { - model: "Blog".to_owned(), - arguments: None, - }, - directive: "map".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn minimal_DeleteDirective_must_work() { - let json = r#" - { - "tag": "DeleteDirective", - "location": { - "path": { - "tag": "Field", - "model": "Blog", - "field": "title" - }, - 
"directive": "map" - } - } - "#; - - let expected_step = MigrationStep::DeleteDirective(DeleteDirective { - location: DirectiveLocation { - path: DirectivePath::Field { - model: "Blog".to_owned(), - field: "title".to_owned(), - }, - directive: "map".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn full_DeleteDirective_must_work() { - let json = r#" - { - "tag": "DeleteDirective", - "location": { - "path": { - "tag": "Model", - "model": "Blog", - "arguments": [ - { - "name": "", - "value": "[name, age]" - } - ] - }, - "directive": "unique" - } - } - "#; - - let expected_step = MigrationStep::DeleteDirective(DeleteDirective { - location: DirectiveLocation { - path: DirectivePath::Model { - model: "Blog".to_owned(), - arguments: Some(vec![Argument { - name: "".to_owned(), - value: MigrationExpression("[name, age]".to_owned()), - }]), - }, - directive: "unique".to_owned(), - }, - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn UpdateArgument_must_work() { - let json = r#" - { - "tag": "UpdateArgument", - "location": { - "tag": "Directive", - "path": { - "tag": "Model", - "model": "CatMood" - }, - "directive": "map" - }, - "argument": "name", - "newValue": "cat_mood" - } - "#; - - let expected_step = MigrationStep::UpdateArgument(UpdateArgument { - location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Model { - model: "CatMood".to_owned(), - arguments: None, - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - new_value: MigrationExpression("cat_mood".to_owned()), - }); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn CreateArgument_must_work() { - let json = r#" - { - "tag": "CreateArgument", - "location": { - "tag": "Directive", - "directive": "map", - "path": { - "enum": "CatMood", - "tag": "Enum" - } - }, - "argument": "name", - "value": "cat_mood" - } - "#; - - let expected_step = MigrationStep::CreateArgument(CreateArgument { - 
location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Enum { - r#enum: "CatMood".to_owned(), - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - value: MigrationExpression("cat_mood".to_owned()), - }); - - println!("{}", serde_json::to_value(&expected_step).unwrap()); - - assert_symmetric_serde(json, expected_step); -} - -#[test] -fn DeleteArgument_must_work() { - let json = r#" - { - "tag": "DeleteArgument", - "location": { - "tag": "Directive", - "path": { - "tag":"Enum", - "enum": "CatMood" - }, - "directive":"map" - }, - "argument": "name" - } - "#; - - let expected_step = MigrationStep::DeleteArgument(DeleteArgument { - location: ArgumentLocation::Directive(DirectiveLocation { - path: DirectivePath::Enum { - r#enum: "CatMood".to_owned(), - }, - directive: "map".to_owned(), - }), - argument: "name".to_owned(), - }); - - assert_symmetric_serde(json, expected_step); -} - -fn assert_symmetric_serde(json: &str, expected: MigrationStep) { - let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); - let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); - let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); - assert_eq!( - deserialized, expected, - "The provided json could not be deserialized into the expected struct." - ); - assert_eq!( - serialized_again, serde_value, - "Reserializing did not produce the original json input." 
- ); -} +// #![allow(non_snake_case)] +// +// use migration_connector::steps::*; +// +// #[test] +// fn full_CreateModel_must_work() { +// let json = r#"{"tag":"CreateModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::CreateModel(CreateModel { +// model: "Blog".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateModel_must_work() { +// let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::UpdateModel(UpdateModel { +// model: "Blog".to_string(), +// new_name: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_UpdateModel_must_work() { +// let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; +// let expected_struct = MigrationStep::UpdateModel(UpdateModel { +// model: "Blog".to_string(), +// new_name: Some("MyBlog".to_string()), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteModel_must_work() { +// let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; +// let expected_struct = MigrationStep::DeleteModel(DeleteModel { +// model: "Blog".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_CreateField_must_work() { +// let json = r#" +// { +// "tag":"CreateField", +// "model":"Blog", +// "field":"title", +// "type":"String", +// "arity":"Required" +// } +// "#; +// let expected_struct = MigrationStep::CreateField(CreateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// tpe: "String".to_owned(), +// arity: FieldArity::Required, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_CreateField_must_work() { +// let json = r#"{ +// "tag":"CreateField", +// "model": "Blog", +// "field": "title", +// "type": "String", +// "arity": "Optional" +// }"#; +// let expected_struct = MigrationStep::CreateField(CreateField { +// model: 
"Blog".to_string(), +// field: "title".to_string(), +// tpe: "String".to_owned(), +// arity: FieldArity::Optional, +// }); +// +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateField_must_work() { +// let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; +// let expected_struct = MigrationStep::UpdateField(UpdateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// new_name: None, +// tpe: None, +// arity: None, +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_UpdateField_must_work() { +// let json = r#" +// { +// "tag": "UpdateField", +// "model": "Blog", +// "field": "title", +// "newName": "MyBlog", +// "type": "String", +// "arity": "Optional" +// } +// "#; +// let expected_struct = MigrationStep::UpdateField(UpdateField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// new_name: Some("MyBlog".to_string()), +// tpe: Some("String".to_owned()), +// arity: Some(FieldArity::Optional), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteField_must_work() { +// let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; +// let expected_struct = MigrationStep::DeleteField(DeleteField { +// model: "Blog".to_string(), +// field: "title".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn CreateEnum_must_work() { +// let json = r#" +// { +// "tag": "CreateEnum", +// "enum": "BlogCategory", +// "values": ["Politics","Tech"] +// } +// "#; +// let expected_struct = MigrationStep::CreateEnum(CreateEnum { +// r#enum: "BlogCategory".to_string(), +// values: vec!["Politics".to_string(), "Tech".to_string()], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn minimal_UpdateEnum_must_work() { +// let json = r#" +// { +// "tag": "UpdateEnum", +// "enum": "BlogCategory" +// } +// "#; +// let 
expected_struct = MigrationStep::UpdateEnum(UpdateEnum { +// r#enum: "BlogCategory".to_string(), +// new_name: None, +// created_values: vec![], +// deleted_values: vec![], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn full_Update_Enum_must_work() { +// let json = r#" +// { +// "tag": "UpdateEnum", +// "enum": "BlogCategory", +// "newName": "MyBlogCategory", +// "createdValues": ["Tech"], +// "deletedValues": ["Nology"] +// } +// "#; +// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { +// r#enum: "BlogCategory".to_string(), +// new_name: Some("MyBlogCategory".to_string()), +// created_values: vec!["Tech".to_string()], +// deleted_values: vec!["Nology".to_string()], +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn DeleteEnum_must_work() { +// let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; +// let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { +// r#enum: "BlogCategory".to_string(), +// }); +// assert_symmetric_serde(json, expected_struct); +// } +// +// #[test] +// fn CreateDirective_must_work() { +// let json = r#" +// { +// "tag": "CreateDirective", +// "location": { +// "directive": "map", +// "path": { +// "tag": "Model", +// "model": "Blog" +// } +// } +// } +// "#; +// +// let expected_step = MigrationStep::CreateDirective(CreateDirective { +// location: DirectiveLocation { +// path: DirectivePath::Model { +// model: "Blog".to_owned(), +// arguments: None, +// }, +// directive: "map".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn minimal_DeleteDirective_must_work() { +// let json = r#" +// { +// "tag": "DeleteDirective", +// "location": { +// "path": { +// "tag": "Field", +// "model": "Blog", +// "field": "title" +// }, +// "directive": "map" +// } +// } +// "#; +// +// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { +// location: DirectiveLocation { +// path: 
DirectivePath::Field { +// model: "Blog".to_owned(), +// field: "title".to_owned(), +// }, +// directive: "map".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn full_DeleteDirective_must_work() { +// let json = r#" +// { +// "tag": "DeleteDirective", +// "location": { +// "path": { +// "tag": "Model", +// "model": "Blog", +// "arguments": [ +// { +// "name": "", +// "value": "[name, age]" +// } +// ] +// }, +// "directive": "unique" +// } +// } +// "#; +// +// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { +// location: DirectiveLocation { +// path: DirectivePath::Model { +// model: "Blog".to_owned(), +// arguments: Some(vec![Argument { +// name: "".to_owned(), +// value: MigrationExpression("[name, age]".to_owned()), +// }]), +// }, +// directive: "unique".to_owned(), +// }, +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn UpdateArgument_must_work() { +// let json = r#" +// { +// "tag": "UpdateArgument", +// "location": { +// "tag": "Directive", +// "path": { +// "tag": "Model", +// "model": "CatMood" +// }, +// "directive": "map" +// }, +// "argument": "name", +// "newValue": "cat_mood" +// } +// "#; +// +// let expected_step = MigrationStep::UpdateArgument(UpdateArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Model { +// model: "CatMood".to_owned(), +// arguments: None, +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// new_value: MigrationExpression("cat_mood".to_owned()), +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn CreateArgument_must_work() { +// let json = r#" +// { +// "tag": "CreateArgument", +// "location": { +// "tag": "Directive", +// "directive": "map", +// "path": { +// "enum": "CatMood", +// "tag": "Enum" +// } +// }, +// "argument": "name", +// "value": "cat_mood" +// } +// "#; +// +// let 
expected_step = MigrationStep::CreateArgument(CreateArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Enum { +// r#enum: "CatMood".to_owned(), +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// value: MigrationExpression("cat_mood".to_owned()), +// }); +// +// println!("{}", serde_json::to_value(&expected_step).unwrap()); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// #[test] +// fn DeleteArgument_must_work() { +// let json = r#" +// { +// "tag": "DeleteArgument", +// "location": { +// "tag": "Directive", +// "path": { +// "tag":"Enum", +// "enum": "CatMood" +// }, +// "directive":"map" +// }, +// "argument": "name" +// } +// "#; +// +// let expected_step = MigrationStep::DeleteArgument(DeleteArgument { +// location: ArgumentLocation::Directive(DirectiveLocation { +// path: DirectivePath::Enum { +// r#enum: "CatMood".to_owned(), +// }, +// directive: "map".to_owned(), +// }), +// argument: "name".to_owned(), +// }); +// +// assert_symmetric_serde(json, expected_step); +// } +// +// fn assert_symmetric_serde(json: &str, expected: MigrationStep) { +// let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); +// let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); +// let serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); +// assert_eq!( +// deserialized, expected, +// "The provided json could not be deserialized into the expected struct." +// ); +// assert_eq!( +// serialized_again, serde_value, +// "Reserializing did not produce the original json input." 
+// ); +// } diff --git a/migration-engine/core/rpc_examples/applyMigration.json b/migration-engine/core/rpc_examples/applyMigration.json deleted file mode 100644 index 289ece0507fa..000000000000 --- a/migration-engine/core/rpc_examples/applyMigration.json +++ /dev/null @@ -1,179 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "applyMigration", - "params": { - "projectInfo": "the-project-id", - "migrationId": "", - "steps": [ - { - "stepType": "CreateModel", - "name": "Blog", - "embedded": false - }, - { - "stepType": "CreateModel", - "name": "Author", - "embedded": false - }, - { - "stepType": "CreateModel", - "name": "Post", - "embedded": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "name", - "type": { - "Base": "String" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "viewCount", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "posts", - "type": { - "Relation": { - "to": "Post", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Blog", - "name": "authors", - "type": { - "Relation": { - "to": "Author", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Author", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Author", - "name": "name", - "type": { - "Base": "String" - }, - "arity": "optional", - "isUnique": false - }, 
- { - "stepType": "CreateField", - "model": "Author", - "name": "authors", - "type": { - "Relation": { - "to": "Blog", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "id", - "type": { - "Base": "Int" - }, - "arity": "required", - "isUnique": false, - "id": { - "strategy": "Auto", - "sequence": null - } - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "title", - "type": { - "Base": "String" - }, - "arity": "required", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "tags", - "type": { - "Base": "String" - }, - "arity": "list", - "isUnique": false - }, - { - "stepType": "CreateField", - "model": "Post", - "name": "blog", - "type": { - "Relation": { - "to": "Blog", - "to_field": null, - "name": null, - "on_delete": "None" - } - }, - "arity": "required", - "isUnique": false - } - ], - "force": false - } -} \ No newline at end of file diff --git a/migration-engine/core/rpc_examples/dmmfToDml.json b/migration-engine/core/rpc_examples/dmmfToDml.json deleted file mode 100644 index e50e39c5a10d..000000000000 --- a/migration-engine/core/rpc_examples/dmmfToDml.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "convertDmmfToDml", - "params": { - "projectInfo": "the-project-id", - "dmmf": "yada yada" - } -} \ No newline at end of file diff --git a/migration-engine/core/rpc_examples/inferMigrationSteps.json b/migration-engine/core/rpc_examples/inferMigrationSteps.json deleted file mode 100644 index 766c85ca3f82..000000000000 --- a/migration-engine/core/rpc_examples/inferMigrationSteps.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "inferMigrationSteps", - "params": { - "projectInfo": "the-project-id", - "migrationId": "the-migration_id", - "assumeToBeApplied": [], - "dataModel": "model Blog {\n id Int @id\n name String\n viewCount 
Int\n posts Post[]\n authors Author[]\n}\n\nmodel Author {\n id Int @id\n name String?\n authors Blog[]\n}\n\nmodel Post {\n id Int @id\n title String\n tags String[]\n blog Blog\n}" - } -} diff --git a/migration-engine/core/rpc_examples/listMigrations.json b/migration-engine/core/rpc_examples/listMigrations.json deleted file mode 100644 index 5ed7e96b58eb..000000000000 --- a/migration-engine/core/rpc_examples/listMigrations.json +++ /dev/null @@ -1 +0,0 @@ -{"id": 1, "jsonrpc": "2.0", "method": "listMigrations", "params": {"projectInfo": "the-project-id"}} diff --git a/migration-engine/core/rpc_examples/migrationProgress.json b/migration-engine/core/rpc_examples/migrationProgress.json deleted file mode 100644 index 4799f4a5d697..000000000000 --- a/migration-engine/core/rpc_examples/migrationProgress.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": 1, - "jsonrpc": "2.0", - "method": "migrationProgress", - "params": { - "projectInfo": "the-project-id", - "migrationId": "the-migration-id" - } -} \ No newline at end of file diff --git a/migration-engine/core/src/commands.rs b/migration-engine/core/src/commands.rs index 8aa15b5888f0..9f03c479430d 100644 --- a/migration-engine/core/src/commands.rs +++ b/migration-engine/core/src/commands.rs @@ -37,19 +37,3 @@ pub use mark_migration_rolled_back::{ pub use plan_migration::{PlanMigrationCommand, PlanMigrationInput, PlanMigrationOutput}; pub use reset::ResetCommand; pub use schema_push::{SchemaPushCommand, SchemaPushInput, SchemaPushOutput}; - -use migration_connector::{MigrationStep, MigrationWarning, PrettyDatabaseMigrationStep, UnexecutableMigration}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[allow(missing_docs)] -pub struct MigrationStepsResultOutput { - pub datamodel: String, - pub datamodel_steps: Vec, - pub database_steps: Vec, - pub warnings: Vec, - pub errors: [(); 0], - pub general_errors: [(); 0], - pub unexecutable_migrations: Vec, -} 
diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index d2d6ee6796c5..107526b2150a 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -15,7 +15,7 @@ mod gate_keeper; use anyhow::anyhow; pub use api::GenericApi; -pub use commands::{MigrationStepsResultOutput, SchemaPushInput}; +pub use commands::SchemaPushInput; pub use core_error::{CoreError, CoreResult}; use enumflags2::BitFlags; pub use gate_keeper::GateKeeper; diff --git a/migration-engine/migration-engine-tests/src/command_helpers.rs b/migration-engine/migration-engine-tests/src/command_helpers.rs deleted file mode 100644 index 8c98250139de..000000000000 --- a/migration-engine/migration-engine-tests/src/command_helpers.rs +++ /dev/null @@ -1,21 +0,0 @@ -use migration_core::commands::*; -use sql_schema_describer::*; - -#[derive(Debug)] -pub struct InferAndApplyOutput { - pub sql_schema: SqlSchema, - pub migration_output: MigrationStepsResultOutput, -} - -pub trait MigrationStepsResultOutputExt { - fn describe_steps(&self) -> Vec<&String>; -} - -impl MigrationStepsResultOutputExt for MigrationStepsResultOutput { - fn describe_steps(&self) -> Vec<&String> { - self.database_steps - .iter() - .map(|step| step.step.as_object().unwrap().keys().next().unwrap()) - .collect() - } -} diff --git a/migration-engine/migration-engine-tests/src/lib.rs b/migration-engine/migration-engine-tests/src/lib.rs index 235c8f326985..929e3439921b 100644 --- a/migration-engine/migration-engine-tests/src/lib.rs +++ b/migration-engine/migration-engine-tests/src/lib.rs @@ -2,14 +2,13 @@ #![deny(unsafe_code)] mod assertions; -mod command_helpers; mod misc_helpers; pub mod sql; mod test_api; pub use assertions::*; -pub use command_helpers::*; pub use misc_helpers::TestResult; +pub use misc_helpers::*; pub use test_api::*; pub use test_macros::test_each_connector; pub use test_setup::*; diff --git a/migration-engine/migration-engine-tests/src/sql.rs 
b/migration-engine/migration-engine-tests/src/sql.rs index 9e4676437869..d21c93149ba8 100644 --- a/migration-engine/migration-engine-tests/src/sql.rs +++ b/migration-engine/migration-engine-tests/src/sql.rs @@ -3,7 +3,6 @@ pub(crate) mod barrel_migration_executor; mod quaint_result_set_ext; pub use super::assertions::*; -pub use super::command_helpers::*; pub use super::misc_helpers::*; pub use super::test_api::*; pub use quaint_result_set_ext::*; From e870c84550f56c8779a2b88e3c6dfb74bc73f62c Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Sun, 20 Dec 2020 20:05:35 +0100 Subject: [PATCH 26/29] remove steps serialization --- .../migration-connector/src/steps.rs | 144 ++----- .../migration-connector/tests/steps_tests.rs | 392 ------------------ 2 files changed, 27 insertions(+), 509 deletions(-) delete mode 100644 migration-engine/connectors/migration-connector/tests/steps_tests.rs diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index e206cfa613d1..97a8cee75d40 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -1,11 +1,9 @@ //! Datamodel migration steps. use datamodel::ast; -use serde::{Deserialize, Serialize}; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-// #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -// #[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), @@ -28,18 +26,14 @@ pub enum MigrationStep { DeleteSource(DeleteSource), } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Hash, Eq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct CreateModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Hash, Eq)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct UpdateModel { pub model: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, } @@ -49,39 +43,25 @@ impl UpdateModel { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateField { pub model: String, - pub field: String, - - #[serde(rename = "type")] pub tpe: String, - pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateField { pub model: String, - pub field: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, - - #[serde(rename = "type", skip_serializing_if = "Option::is_none")] pub tpe: Option, - - #[serde(skip_serializing_if = "Option::is_none")] pub arity: Option, } @@ -91,32 +71,23 @@ impl UpdateField { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] 
pub struct DeleteField { pub model: String, pub field: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateEnum { pub r#enum: String, pub values: Vec, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateEnum { pub r#enum: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub new_name: Option, - - #[serde(skip_serializing_if = "Vec::is_empty", default = "Vec::new")] pub created_values: Vec, - - #[serde(skip_serializing_if = "Vec::is_empty", default = "Vec::new")] pub deleted_values: Vec, } @@ -126,26 +97,22 @@ impl UpdateEnum { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteEnum { pub r#enum: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct Argument { pub name: String, pub value: MigrationExpression, @@ -176,15 +143,13 @@ impl Into for &Argument { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum ArgumentLocation { Directive(DirectiveLocation), Source(SourceLocation), } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 
-#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DirectiveLocation { pub path: DirectivePath, pub directive: String, @@ -219,8 +184,7 @@ impl DirectiveLocation { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct SourceLocation { pub source: String, } @@ -231,8 +195,7 @@ impl SourceLocation { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(tag = "tag", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub enum DirectivePath { Field { model: String, @@ -240,7 +203,6 @@ pub enum DirectivePath { }, Model { model: String, - #[serde(skip_serializing_if = "Option::is_none")] arguments: Option>, }, Enum { @@ -251,7 +213,6 @@ pub enum DirectivePath { value: String, }, TypeAlias { - #[serde(rename = "typeAlias")] type_alias: String, }, } @@ -275,30 +236,27 @@ impl DirectivePath { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateArgument { pub location: ArgumentLocation, pub argument: String, pub value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteArgument { pub location: ArgumentLocation, pub argument: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateArgument { pub location: ArgumentLocation, pub argument: String, pub new_value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq)] pub struct MigrationExpression(pub String); impl MigrationExpression { @@ -311,21 +269,16 @@ impl MigrationExpression { } } 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateTypeAlias { pub type_alias: String, - pub r#type: String, pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct UpdateTypeAlias { pub type_alias: String, - - #[serde(skip_serializing_if = "Option::is_none")] pub r#type: Option, } @@ -335,26 +288,22 @@ impl UpdateTypeAlias { } } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteTypeAlias { pub type_alias: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct CreateSource { pub source: String, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug, Clone, PartialEq)] pub struct DeleteSource { pub source: String, } -#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] +#[derive(Debug, Copy, Clone, PartialEq)] pub enum FieldArity { Required, Optional, @@ -392,42 +341,3 @@ impl Into for &FieldArity { } } } - -#[cfg(test)] -mod tests { - use super::*; - use serde_json::json; - - #[test] - fn directive_location_serialization_gives_expected_json_shape() { - let create_directive = CreateDirective { - location: DirectiveLocation { - path: DirectivePath::Field { - model: "Cat".to_owned(), - field: "owner".to_owned(), - }, - directive: "status".to_owned(), - }, - }; - - let serialized_step = serde_json::to_value(&create_directive).unwrap(); - - let expected_json = json!({ - "location": { - "path": { - "tag": "Field", - "model": "Cat", - "field": "owner", - }, - 
"directive": "status" - } - }); - - println!("{}\n{}", serialized_step, expected_json); - - assert_eq!(serialized_step, expected_json); - - let deserialized_step: CreateDirective = serde_json::from_value(expected_json).unwrap(); - assert_eq!(create_directive, deserialized_step); - } -} diff --git a/migration-engine/connectors/migration-connector/tests/steps_tests.rs b/migration-engine/connectors/migration-connector/tests/steps_tests.rs deleted file mode 100644 index 5614b7cf4348..000000000000 --- a/migration-engine/connectors/migration-connector/tests/steps_tests.rs +++ /dev/null @@ -1,392 +0,0 @@ -// #![allow(non_snake_case)] -// -// use migration_connector::steps::*; -// -// #[test] -// fn full_CreateModel_must_work() { -// let json = r#"{"tag":"CreateModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::CreateModel(CreateModel { -// model: "Blog".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateModel_must_work() { -// let json = r#"{"tag":"UpdateModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::UpdateModel(UpdateModel { -// model: "Blog".to_string(), -// new_name: None, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_UpdateModel_must_work() { -// let json = r#"{"tag":"UpdateModel","model":"Blog","newName":"MyBlog"}"#; -// let expected_struct = MigrationStep::UpdateModel(UpdateModel { -// model: "Blog".to_string(), -// new_name: Some("MyBlog".to_string()), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteModel_must_work() { -// let json = r#"{"tag":"DeleteModel","model":"Blog"}"#; -// let expected_struct = MigrationStep::DeleteModel(DeleteModel { -// model: "Blog".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_CreateField_must_work() { -// let json = r#" -// { -// "tag":"CreateField", -// "model":"Blog", -// 
"field":"title", -// "type":"String", -// "arity":"Required" -// } -// "#; -// let expected_struct = MigrationStep::CreateField(CreateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// tpe: "String".to_owned(), -// arity: FieldArity::Required, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_CreateField_must_work() { -// let json = r#"{ -// "tag":"CreateField", -// "model": "Blog", -// "field": "title", -// "type": "String", -// "arity": "Optional" -// }"#; -// let expected_struct = MigrationStep::CreateField(CreateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// tpe: "String".to_owned(), -// arity: FieldArity::Optional, -// }); -// -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateField_must_work() { -// let json = r#"{"tag":"UpdateField","model":"Blog","field":"title"}"#; -// let expected_struct = MigrationStep::UpdateField(UpdateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// new_name: None, -// tpe: None, -// arity: None, -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_UpdateField_must_work() { -// let json = r#" -// { -// "tag": "UpdateField", -// "model": "Blog", -// "field": "title", -// "newName": "MyBlog", -// "type": "String", -// "arity": "Optional" -// } -// "#; -// let expected_struct = MigrationStep::UpdateField(UpdateField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// new_name: Some("MyBlog".to_string()), -// tpe: Some("String".to_owned()), -// arity: Some(FieldArity::Optional), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteField_must_work() { -// let json = r#"{"tag":"DeleteField","model":"Blog","field":"title"}"#; -// let expected_struct = MigrationStep::DeleteField(DeleteField { -// model: "Blog".to_string(), -// field: "title".to_string(), -// }); -// 
assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn CreateEnum_must_work() { -// let json = r#" -// { -// "tag": "CreateEnum", -// "enum": "BlogCategory", -// "values": ["Politics","Tech"] -// } -// "#; -// let expected_struct = MigrationStep::CreateEnum(CreateEnum { -// r#enum: "BlogCategory".to_string(), -// values: vec!["Politics".to_string(), "Tech".to_string()], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn minimal_UpdateEnum_must_work() { -// let json = r#" -// { -// "tag": "UpdateEnum", -// "enum": "BlogCategory" -// } -// "#; -// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { -// r#enum: "BlogCategory".to_string(), -// new_name: None, -// created_values: vec![], -// deleted_values: vec![], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn full_Update_Enum_must_work() { -// let json = r#" -// { -// "tag": "UpdateEnum", -// "enum": "BlogCategory", -// "newName": "MyBlogCategory", -// "createdValues": ["Tech"], -// "deletedValues": ["Nology"] -// } -// "#; -// let expected_struct = MigrationStep::UpdateEnum(UpdateEnum { -// r#enum: "BlogCategory".to_string(), -// new_name: Some("MyBlogCategory".to_string()), -// created_values: vec!["Tech".to_string()], -// deleted_values: vec!["Nology".to_string()], -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn DeleteEnum_must_work() { -// let json = r#"{"tag":"DeleteEnum","enum":"BlogCategory"}"#; -// let expected_struct = MigrationStep::DeleteEnum(DeleteEnum { -// r#enum: "BlogCategory".to_string(), -// }); -// assert_symmetric_serde(json, expected_struct); -// } -// -// #[test] -// fn CreateDirective_must_work() { -// let json = r#" -// { -// "tag": "CreateDirective", -// "location": { -// "directive": "map", -// "path": { -// "tag": "Model", -// "model": "Blog" -// } -// } -// } -// "#; -// -// let expected_step = MigrationStep::CreateDirective(CreateDirective { 
-// location: DirectiveLocation { -// path: DirectivePath::Model { -// model: "Blog".to_owned(), -// arguments: None, -// }, -// directive: "map".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn minimal_DeleteDirective_must_work() { -// let json = r#" -// { -// "tag": "DeleteDirective", -// "location": { -// "path": { -// "tag": "Field", -// "model": "Blog", -// "field": "title" -// }, -// "directive": "map" -// } -// } -// "#; -// -// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { -// location: DirectiveLocation { -// path: DirectivePath::Field { -// model: "Blog".to_owned(), -// field: "title".to_owned(), -// }, -// directive: "map".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn full_DeleteDirective_must_work() { -// let json = r#" -// { -// "tag": "DeleteDirective", -// "location": { -// "path": { -// "tag": "Model", -// "model": "Blog", -// "arguments": [ -// { -// "name": "", -// "value": "[name, age]" -// } -// ] -// }, -// "directive": "unique" -// } -// } -// "#; -// -// let expected_step = MigrationStep::DeleteDirective(DeleteDirective { -// location: DirectiveLocation { -// path: DirectivePath::Model { -// model: "Blog".to_owned(), -// arguments: Some(vec![Argument { -// name: "".to_owned(), -// value: MigrationExpression("[name, age]".to_owned()), -// }]), -// }, -// directive: "unique".to_owned(), -// }, -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn UpdateArgument_must_work() { -// let json = r#" -// { -// "tag": "UpdateArgument", -// "location": { -// "tag": "Directive", -// "path": { -// "tag": "Model", -// "model": "CatMood" -// }, -// "directive": "map" -// }, -// "argument": "name", -// "newValue": "cat_mood" -// } -// "#; -// -// let expected_step = MigrationStep::UpdateArgument(UpdateArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// 
path: DirectivePath::Model { -// model: "CatMood".to_owned(), -// arguments: None, -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// new_value: MigrationExpression("cat_mood".to_owned()), -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn CreateArgument_must_work() { -// let json = r#" -// { -// "tag": "CreateArgument", -// "location": { -// "tag": "Directive", -// "directive": "map", -// "path": { -// "enum": "CatMood", -// "tag": "Enum" -// } -// }, -// "argument": "name", -// "value": "cat_mood" -// } -// "#; -// -// let expected_step = MigrationStep::CreateArgument(CreateArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// path: DirectivePath::Enum { -// r#enum: "CatMood".to_owned(), -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// value: MigrationExpression("cat_mood".to_owned()), -// }); -// -// println!("{}", serde_json::to_value(&expected_step).unwrap()); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// #[test] -// fn DeleteArgument_must_work() { -// let json = r#" -// { -// "tag": "DeleteArgument", -// "location": { -// "tag": "Directive", -// "path": { -// "tag":"Enum", -// "enum": "CatMood" -// }, -// "directive":"map" -// }, -// "argument": "name" -// } -// "#; -// -// let expected_step = MigrationStep::DeleteArgument(DeleteArgument { -// location: ArgumentLocation::Directive(DirectiveLocation { -// path: DirectivePath::Enum { -// r#enum: "CatMood".to_owned(), -// }, -// directive: "map".to_owned(), -// }), -// argument: "name".to_owned(), -// }); -// -// assert_symmetric_serde(json, expected_step); -// } -// -// fn assert_symmetric_serde(json: &str, expected: MigrationStep) { -// let serde_value: serde_json::Value = serde_json::from_str(&json).expect("The provided input was invalid json."); -// let deserialized: MigrationStep = serde_json::from_str(&json).expect("Deserialization failed."); -// let 
serialized_again = serde_json::to_value(&deserialized).expect("Serialization failed"); -// assert_eq!( -// deserialized, expected, -// "The provided json could not be deserialized into the expected struct." -// ); -// assert_eq!( -// serialized_again, serde_value, -// "Reserializing did not produce the original json input." -// ); -// } From 10ad555942e648d7410d6e39a8e00dc6edb67892 Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Mon, 21 Dec 2020 09:13:02 +0100 Subject: [PATCH 27/29] remove partialeq --- .../migration-connector/src/steps.rs | 50 +++++++++---------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/migration-engine/connectors/migration-connector/src/steps.rs b/migration-engine/connectors/migration-connector/src/steps.rs index 97a8cee75d40..2c98206782b2 100644 --- a/migration-engine/connectors/migration-connector/src/steps.rs +++ b/migration-engine/connectors/migration-connector/src/steps.rs @@ -3,7 +3,7 @@ use datamodel::ast; /// An atomic change to a [Datamodel AST](datamodel/ast/struct.Datamodel.html). 
-#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum MigrationStep { CreateModel(CreateModel), UpdateModel(UpdateModel), @@ -26,12 +26,12 @@ pub enum MigrationStep { DeleteSource(DeleteSource), } -#[derive(Debug, Clone, PartialEq, Hash, Eq)] +#[derive(Debug, Clone)] pub struct CreateModel { pub model: String, } -#[derive(Debug, Clone, PartialEq, Hash, Eq)] +#[derive(Debug, Clone)] pub struct UpdateModel { pub model: String, pub new_name: Option, @@ -43,12 +43,12 @@ impl UpdateModel { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteModel { pub model: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateField { pub model: String, pub field: String, @@ -56,7 +56,7 @@ pub struct CreateField { pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateField { pub model: String, pub field: String, @@ -71,19 +71,19 @@ impl UpdateField { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteField { pub model: String, pub field: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateEnum { pub r#enum: String, pub values: Vec, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateEnum { pub r#enum: String, pub new_name: Option, @@ -97,17 +97,17 @@ impl UpdateEnum { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteEnum { pub r#enum: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateDirective { pub location: DirectiveLocation, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteDirective { pub location: DirectiveLocation, } @@ -143,13 +143,13 @@ impl Into for &Argument { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum ArgumentLocation { Directive(DirectiveLocation), Source(SourceLocation), } -#[derive(Debug, Clone, PartialEq)] 
+#[derive(Debug, Clone)] pub struct DirectiveLocation { pub path: DirectivePath, pub directive: String, @@ -184,7 +184,7 @@ impl DirectiveLocation { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct SourceLocation { pub source: String, } @@ -195,7 +195,7 @@ impl SourceLocation { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub enum DirectivePath { Field { model: String, @@ -236,20 +236,20 @@ impl DirectivePath { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateArgument { pub location: ArgumentLocation, pub argument: String, pub value: MigrationExpression, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteArgument { pub location: ArgumentLocation, pub argument: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateArgument { pub location: ArgumentLocation, pub argument: String, @@ -269,14 +269,14 @@ impl MigrationExpression { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateTypeAlias { pub type_alias: String, pub r#type: String, pub arity: FieldArity, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct UpdateTypeAlias { pub type_alias: String, pub r#type: Option, @@ -288,22 +288,22 @@ impl UpdateTypeAlias { } } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteTypeAlias { pub type_alias: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct CreateSource { pub source: String, } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone)] pub struct DeleteSource { pub source: String, } -#[derive(Debug, Copy, Clone, PartialEq)] +#[derive(Debug, Copy, Clone)] pub enum FieldArity { Required, Optional, From 71ca76c60471fa30c798722b93e7ed73d729e722 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= Date: Mon, 21 Dec 2020 10:51:46 +0100 Subject: [PATCH 28/29] Fix sqlite test --- 
migration-engine/core/src/commands/reset.rs | 2 ++ .../migration-engine-tests/tests/migration_tests.rs | 9 +++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/migration-engine/core/src/commands/reset.rs b/migration-engine/core/src/commands/reset.rs index 3ace246a1714..70f973b781d5 100644 --- a/migration-engine/core/src/commands/reset.rs +++ b/migration-engine/core/src/commands/reset.rs @@ -15,6 +15,8 @@ impl<'a> MigrationCommand for ResetCommand { C: MigrationConnector, D: DatabaseMigrationMarker + 'static, { + tracing::debug!("Resetting the database."); + engine.connector().reset().await?; Ok(()) diff --git a/migration-engine/migration-engine-tests/tests/migration_tests.rs b/migration-engine/migration-engine-tests/tests/migration_tests.rs index 1daef5c0210b..c597edf03881 100644 --- a/migration-engine/migration-engine-tests/tests/migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migration_tests.rs @@ -2434,9 +2434,6 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { api.schema_push(dm1).send().await?.assert_green()?; - // Drop the existing migrations. - api.reset().send().await?; - let dm2 = r#" datasource db { provider = "sqlite" @@ -2449,7 +2446,11 @@ async fn switching_databases_must_work(api: &TestApi) -> TestResult { } "#; - api.schema_push(dm2).send().await?.assert_green()?; + api.schema_push(dm2) + .migration_id(Some("mig2")) + .send() + .await? 
+ .assert_green()?; Ok(()) } From 210e13f636839717305a7b489aa40f502755a70b Mon Sep 17 00:00:00 2001 From: Matthias Oertel Date: Wed, 23 Dec 2020 10:48:40 +0100 Subject: [PATCH 29/29] merge fix --- migration-engine/migration-engine-tests/src/test_api.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 565a37ca2c7d..81c72b1ec1a3 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -29,11 +29,6 @@ use enumflags2::BitFlags; use migration_connector::{ImperativeMigrationsPersistence, MigrationFeature, MigrationRecord}; use indoc::formatdoc; -<<<<<<< HEAD -use migration_connector::{ImperativeMigrationsPersistence, MigrationFeature, MigrationRecord}; - -======= ->>>>>>> 4fc193ad56716e0251e8e9a8b307a56f494ab16c use migration_core::{ api::{GenericApi, MigrationApi}, commands::ApplyScriptInput,