diff --git a/refinery/tests/mysql.rs b/refinery/tests/mysql.rs index c08488c3..564ecbfd 100644 --- a/refinery/tests/mysql.rs +++ b/refinery/tests/mysql.rs @@ -8,9 +8,9 @@ mod mysql { use chrono::Local; use predicates::str::contains; use refinery::{ - config::{migrate_from_config, Config, ConfigDbType}, + config::{Config, ConfigDbType}, error::Kind, - Migrate, Migration, Target, + Migrate, Migration, Runner, Target, }; use refinery_core::mysql; use std::process::Command; @@ -571,7 +571,7 @@ mod mysql { #[test] fn migrates_from_config() { run_test(|| { - let config = Config::new(ConfigDbType::Mysql) + let mut config = Config::new(ConfigDbType::Mysql) .set_db_name("refinery_test") .set_db_user("refinery") .set_db_pass("root") @@ -579,7 +579,104 @@ mod mysql { .set_db_port("3306"); let migrations = get_migrations(); - migrate_from_config(&config, false, true, true, &migrations).unwrap(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migrations = runner.get_applied_migrations(&mut config).unwrap(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + } + + #[test] + fn migrate_from_config_report_contains_migrations() { + run_test(|| { + let mut config = Config::new(ConfigDbType::Mysql) + .set_db_name("refinery_test") + .set_db_user("refinery") + .set_db_pass("root") + .set_db_host("localhost") + .set_db_port("3306"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + let report = runner.run(&mut config).unwrap(); + + let applied_migrations = report.applied_migrations(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), 
applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + } + + #[test] + fn migrate_from_config_report_returns_last_applied_migration() { + run_test(|| { + let mut config = Config::new(ConfigDbType::Mysql) + .set_db_name("refinery_test") + .set_db_user("refinery") + .set_db_pass("root") + .set_db_host("localhost") + .set_db_port("3306"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migration = runner + .get_last_applied_migration(&mut config) + .unwrap() + .unwrap(); + assert_eq!(5, applied_migration.version()); + + assert_eq!(migrations[4].version(), applied_migration.version()); + assert_eq!(migrations[4].name(), applied_migration.name()); + assert_eq!(migrations[4].checksum(), applied_migration.checksum()); }) } diff --git a/refinery/tests/mysql_async.rs b/refinery/tests/mysql_async.rs index dd90d3b2..9914a506 100644 --- a/refinery/tests/mysql_async.rs +++ b/refinery/tests/mysql_async.rs @@ -7,9 +7,9 @@ mod mysql_async { use chrono::Local; use futures::FutureExt; use refinery::{ - config::{migrate_from_config_async, Config, ConfigDbType}, + config::{Config, ConfigDbType}, error::Kind, - AsyncMigrate, Migration, Target, + AsyncMigrate, Migration, Runner, Target, }; use refinery_core::mysql_async::prelude::Queryable; use refinery_core::{mysql_async, tokio}; @@ -641,7 +641,7 @@ mod mysql_async { #[tokio::test] async fn migrates_from_config() { run_test(async { - let config = Config::new(ConfigDbType::Mysql) + let mut config = Config::new(ConfigDbType::Mysql) .set_db_name("refinery_test") .set_db_user("refinery") .set_db_pass("root") @@ -649,9 +649,110 @@ mod mysql_async { .set_db_port("3306"); let migrations = get_migrations(); - migrate_from_config_async(&config, false, true, true, &migrations) + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run_async(&mut config).await.unwrap(); + + let applied_migrations = runner + .get_applied_migrations_async(&mut config) .await .unwrap(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + 
.await; + } + + #[tokio::test] + async fn migrate_from_config_report_contains_migrations() { + run_test(async { + let mut config = Config::new(ConfigDbType::Mysql) + .set_db_name("refinery_test") + .set_db_user("refinery") + .set_db_pass("root") + .set_db_host("localhost") + .set_db_port("3306"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + let report = runner.run_async(&mut config).await.unwrap(); + + let applied_migrations = report.applied_migrations(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + .await; + } + + #[tokio::test] + async fn migrate_from_config_report_returns_last_applied_migration() { + run_test(async { + let mut config = Config::new(ConfigDbType::Mysql) + .set_db_name("refinery_test") + .set_db_user("refinery") + .set_db_pass("root") + .set_db_host("localhost") + .set_db_port("3306"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run_async(&mut config).await.unwrap(); + + let applied_migration = runner + .get_last_applied_migration_async(&mut config) + .await + .unwrap() + .unwrap(); + assert_eq!(5, applied_migration.version()); + + assert_eq!(migrations[4].version(), applied_migration.version()); + assert_eq!(migrations[4].name(), applied_migration.name()); + assert_eq!(migrations[4].checksum(), applied_migration.checksum()); }) .await; } diff --git a/refinery/tests/postgres.rs b/refinery/tests/postgres.rs index c049718c..babfcd8c 100644 --- a/refinery/tests/postgres.rs +++ b/refinery/tests/postgres.rs @@ -8,9 +8,9 @@ mod postgres { use chrono::Local; use predicates::str::contains; use refinery::{ - config::{migrate_from_config, Config, ConfigDbType}, + config::{Config, ConfigDbType}, error::Kind, - Migrate, Migration, Target, + Migrate, Migration, Runner, Target, }; use refinery_core::postgres::{Client, NoTls}; use std::process::Command; @@ -556,14 +556,109 @@ mod postgres { #[test] fn migrates_from_config() { run_test(|| { - let config = Config::new(ConfigDbType::Postgres) + let mut config = Config::new(ConfigDbType::Postgres) .set_db_name("postgres") .set_db_user("postgres") .set_db_host("localhost") .set_db_port("5432"); let migrations = get_migrations(); - migrate_from_config(&config, false, true, true, &migrations).unwrap(); + let runner = Runner::new(&migrations) + .set_grouped(false) + 
.set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migrations = runner.get_applied_migrations(&mut config).unwrap(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + } + + #[test] + fn migrate_from_config_report_contains_migrations() { + run_test(|| { + let mut config = Config::new(ConfigDbType::Postgres) + .set_db_name("postgres") + .set_db_user("postgres") + .set_db_host("localhost") + .set_db_port("5432"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + let report = runner.run(&mut config).unwrap(); + + let applied_migrations = report.applied_migrations(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + } + + #[test] + fn migrate_from_config_report_returns_last_applied_migration() { + run_test(|| { + let mut config = Config::new(ConfigDbType::Postgres) + .set_db_name("postgres") + .set_db_user("postgres") + .set_db_host("localhost") + .set_db_port("5432"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migration = runner + .get_last_applied_migration(&mut config) + .unwrap() + .unwrap(); + assert_eq!(5, applied_migration.version()); + + assert_eq!(migrations[4].version(), 
applied_migration.version()); + assert_eq!(migrations[4].name(), applied_migration.name()); + assert_eq!(migrations[4].checksum(), applied_migration.checksum()); }) } diff --git a/refinery/tests/rusqlite.rs b/refinery/tests/rusqlite.rs index eb60ab1d..9dfecdd0 100644 --- a/refinery/tests/rusqlite.rs +++ b/refinery/tests/rusqlite.rs @@ -8,9 +8,9 @@ mod rusqlite { use chrono::Local; use predicates::str::contains; use refinery::{ - config::{migrate_from_config, Config, ConfigDbType}, + config::{Config, ConfigDbType}, error::Kind, - Migrate, Migration, Target, + Migrate, Migration, Runner, Target, }; use refinery_core::rusqlite::{Connection, OptionalExtension, NO_PARAMS}; use std::fs::{self, File}; @@ -496,10 +496,95 @@ mod rusqlite { #[test] fn migrates_from_config() { let db = tempfile::NamedTempFile::new_in(".").unwrap(); - let config = Config::new(ConfigDbType::Sqlite).set_db_path(db.path().to_str().unwrap()); + let mut config = Config::new(ConfigDbType::Sqlite).set_db_path(db.path().to_str().unwrap()); let migrations = get_migrations(); - migrate_from_config(&config, false, true, true, &migrations).unwrap(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migrations = runner.get_applied_migrations(&mut config).unwrap(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + } + + #[test] + fn migrate_from_config_report_contains_migrations() { + let db = tempfile::NamedTempFile::new_in(".").unwrap(); + let mut config = Config::new(ConfigDbType::Sqlite).set_db_path(db.path().to_str().unwrap()); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + let report = runner.run(&mut config).unwrap(); + + let applied_migrations = report.applied_migrations(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), 
applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + } + + #[test] + fn migrate_from_config_report_returns_last_applied_migration() { + let db = tempfile::NamedTempFile::new_in(".").unwrap(); + let mut config = Config::new(ConfigDbType::Sqlite).set_db_path(db.path().to_str().unwrap()); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run(&mut config).unwrap(); + + let applied_migration = runner + .get_last_applied_migration(&mut config) + .unwrap() + .unwrap(); + assert_eq!(5, applied_migration.version()); + + assert_eq!(migrations[4].version(), applied_migration.version()); + assert_eq!(migrations[4].name(), applied_migration.name()); + assert_eq!(migrations[4].checksum(), applied_migration.checksum()); } #[test] diff --git a/refinery/tests/tokio_postgres.rs b/refinery/tests/tokio_postgres.rs index bdc773ce..0555e0a8 100644 --- a/refinery/tests/tokio_postgres.rs +++ b/refinery/tests/tokio_postgres.rs @@ -7,9 +7,9 @@ mod tokio_postgres { use chrono::Local; use futures::FutureExt; use refinery::{ - config::{migrate_from_config_async, Config, ConfigDbType}, + config::{Config, ConfigDbType}, error::Kind, - AsyncMigrate, Migration, Target, + AsyncMigrate, Migration, Runner, Target, }; use refinery_core::tokio_postgres::NoTls; use refinery_core::{tokio, tokio_postgres}; @@ -768,16 +768,115 @@ mod tokio_postgres { #[tokio::test] async fn migrates_from_config() { run_test(async { - let config = Config::new(ConfigDbType::Postgres) + let mut config = Config::new(ConfigDbType::Postgres) .set_db_name("postgres") .set_db_user("postgres") .set_db_host("localhost") .set_db_port("5432"); let migrations = get_migrations(); - migrate_from_config_async(&config, false, true, true, &migrations) + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run_async(&mut config).await.unwrap(); + + let applied_migrations = runner + .get_applied_migrations_async(&mut config) .await .unwrap(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + 
assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + .await; + } + + #[tokio::test] + async fn migrate_from_config_report_contains_migrations() { + run_test(async { + let mut config = Config::new(ConfigDbType::Postgres) + .set_db_name("postgres") + .set_db_user("postgres") + .set_db_host("localhost") + .set_db_port("5432"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + let report = runner.run_async(&mut config).await.unwrap(); + + let applied_migrations = report.applied_migrations(); + assert_eq!(5, applied_migrations.len()); + + assert_eq!(migrations[0].version(), applied_migrations[0].version()); + assert_eq!(migrations[1].version(), applied_migrations[1].version()); + assert_eq!(migrations[2].version(), applied_migrations[2].version()); + assert_eq!(migrations[3].version(), applied_migrations[3].version()); + assert_eq!(migrations[4].version(), applied_migrations[4].version()); + + assert_eq!(migrations[0].name(), migrations[0].name()); + assert_eq!(migrations[1].name(), applied_migrations[1].name()); + assert_eq!(migrations[2].name(), applied_migrations[2].name()); + assert_eq!(migrations[3].name(), applied_migrations[3].name()); + assert_eq!(migrations[4].name(), applied_migrations[4].name()); + + assert_eq!(migrations[0].checksum(), applied_migrations[0].checksum()); + assert_eq!(migrations[1].checksum(), applied_migrations[1].checksum()); + assert_eq!(migrations[2].checksum(), applied_migrations[2].checksum()); + assert_eq!(migrations[3].checksum(), applied_migrations[3].checksum()); + assert_eq!(migrations[4].checksum(), applied_migrations[4].checksum()); + }) + .await; + } + + #[tokio::test] + async fn migrate_from_config_report_returns_last_applied_migration() { + run_test(async { + let mut config = Config::new(ConfigDbType::Postgres) + .set_db_name("postgres") + .set_db_user("postgres") + .set_db_host("localhost") + .set_db_port("5432"); + + let migrations = get_migrations(); + let runner = Runner::new(&migrations) + .set_grouped(false) + .set_abort_divergent(true) + .set_abort_missing(true); + + runner.run_async(&mut config).await.unwrap(); + + let applied_migration = runner + .get_last_applied_migration_async(&mut config) + .await + .unwrap() + .unwrap(); + assert_eq!(5, applied_migration.version()); + + assert_eq!(migrations[4].version(), applied_migration.version()); + assert_eq!(migrations[4].name(), applied_migration.name()); + assert_eq!(migrations[4].checksum(), applied_migration.checksum()); }) .await; } diff --git a/refinery_cli/src/migrate.rs b/refinery_cli/src/migrate.rs index 180c1f97..b8cab87e 100644 --- a/refinery_cli/src/migrate.rs +++ b/refinery_cli/src/migrate.rs @@ -2,10 +2,7 @@ use std::path::Path; use anyhow::{Context, Result}; use clap::ArgMatches; -use refinery_core::{ - config::{migrate_from_config, Config}, - find_migration_files, Migration, MigrationType, -}; +use refinery_core::{config::Config, find_migration_files, Migration, MigrationType, Runner}; pub fn handle_migration_command(args: &ArgMatches) -> Result<()> { //safe to call unwrap as we specified default values @@ -49,8 +46,12 @@ fn run_files_migrations( .with_context(|| format!("could not read migration file name {}", path.display()))?; migrations.push(migration); } - let config = + let mut config = Config::from_file_location(config_location).context("could not parse the config 
file")?; - migrate_from_config(&config, grouped, divergent, missing, &migrations)?; + Runner::new(&migrations) + .set_grouped(grouped) + .set_abort_divergent(divergent) + .set_abort_missing(missing) + .run(&mut config)?; Ok(()) } diff --git a/refinery_core/Cargo.toml b/refinery_core/Cargo.toml index 84949d22..6d9397b5 100644 --- a/refinery_core/Cargo.toml +++ b/refinery_core/Cargo.toml @@ -29,7 +29,7 @@ rusqlite = {version = "0.23", optional = true} postgres = {version = "0.17", optional = true} mysql = {version = "17", optional = true} tokio-postgres = { version = "0.5", optional = true } -mysql_async = { version = "0.21", optional = true } +mysql_async = { version = "0.23", optional = true } tokio = { version = "0.2", features = ["full"], optional = true } diff --git a/refinery_core/src/config.rs b/refinery_core/src/config.rs index 91ba11d4..3357398e 100644 --- a/refinery_core/src/config.rs +++ b/refinery_core/src/config.rs @@ -1,5 +1,5 @@ -use crate::error::{Kind, WrapMigrationError}; -use crate::{Error, Migration, Runner}; +use crate::error::Kind; +use crate::Error; use serde::{Deserialize, Serialize}; use std::fs; @@ -85,6 +85,11 @@ impl Config { Ok(config) } + #[cfg(feature = "rusqlite")] + pub(crate) fn db_path(&self) -> Option<&Path> { + self.main.db_path.as_deref() + } + pub fn get_db_type(&self) -> ConfigDbType { self.main.db_type } @@ -161,7 +166,7 @@ struct Main { feature = "tokio-postgres", feature = "mysql_async" ))] -fn build_db_url(name: &str, config: &Config) -> String { +pub(crate) fn build_db_url(name: &str, config: &Config) -> String { let mut url: String = name.to_string() + "://"; if let Some(user) = &config.main.db_user { @@ -186,111 +191,6 @@ fn build_db_url(name: &str, config: &Config) -> String { url } -/// migrates from a given config file location -/// use this function if you prefer to generate a config file either from refinery_cli or by hand, -/// and migrate without having to pass a database Connection -/// # Panics -/// -/// This function panics if refinery was not built with database driver support for the target database, -/// eg trying to migrate a PostgresSQL without feature postgres enabled. -#[cfg(any(feature = "mysql", feature = "rusqlite", feature = "postgres",))] -pub fn migrate_from_config( - config: &Config, - grouped: bool, - divergent: bool, - missing: bool, - migrations: &[Migration], -) -> Result<(), Error> { - match config.main.db_type { - ConfigDbType::Mysql => { - cfg_if::cfg_if! { - if #[cfg(feature = "mysql")] { - let url = build_db_url("mysql", &config); - let mut connection = mysql::Conn::new(&url).migration_err("could not connect to database", None)?; - Runner::new(migrations).set_grouped(grouped).set_abort_divergent(divergent).set_abort_missing(missing).run(&mut connection)?; - } else { - panic!("tried to migrate from config for a mysql database, but feature mysql not enabled!"); - } - } - } - ConfigDbType::Sqlite => { - cfg_if::cfg_if! 
{ - if #[cfg(feature = "rusqlite")] { - //may have been checked earlier on config parsing, even if not let it fail with a Rusqlite db file not found error - let path = config.main.db_path.clone().unwrap_or_default(); - let mut connection = rusqlite::Connection::open_with_flags(path, rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE).migration_err("could not open database", None)?; - Runner::new(migrations).set_grouped(grouped).set_abort_divergent(divergent).set_abort_missing(missing).run(&mut connection)?; - } else { - panic!("tried to migrate from config for a sqlite database, but feature rusqlite not enabled!"); - } - } - } - ConfigDbType::Postgres => { - cfg_if::cfg_if! { - if #[cfg(feature = "postgres")] { - let path = build_db_url("postgresql", &config); - let mut connection = postgres::Client::connect(path.as_str(), postgres::NoTls).migration_err("could not connect to database", None)?; - Runner::new(migrations).set_grouped(grouped).set_abort_divergent(divergent).set_abort_missing(missing).run(&mut connection)?; - } else { - panic!("tried to migrate from config for a postgresql database, but feature postgres not enabled!"); - } - } - } - } - Ok(()) -} - -/// migrates from a given config file location -/// use this function if you prefer to generate a config file either from refinery_cli or by hand, -/// and migrate without having to pass a database Connection -/// # Panics -/// -/// This function panics if refinery was not built with database driver support for the target database, -/// eg trying to migrate a PostgresSQL without feature postgres enabled. -#[cfg(any(feature = "mysql_async", feature = "tokio-postgres"))] -pub async fn migrate_from_config_async( - config: &Config, - grouped: bool, - divergent: bool, - missing: bool, - migrations: &[Migration], -) -> Result<(), Error> { - match config.main.db_type { - ConfigDbType::Mysql => { - cfg_if::cfg_if! { - if #[cfg(feature = "mysql_async")] { - let url = build_db_url("mysql", &config); - let mut pool = mysql_async::Pool::from_url(&url).migration_err("could not connect to the database", None)?; - Runner::new(migrations).set_grouped(grouped).set_abort_divergent(divergent).set_abort_missing(missing).run_async(&mut pool).await?; - } else { - panic!("tried to migrate async from config for a mysql database, but feature mysql_async not enabled!"); - } - } - } - ConfigDbType::Sqlite => { - panic!("tried to migrate async from config for a sqlite database, but this feature is not implemented yet"); - } - ConfigDbType::Postgres => { - cfg_if::cfg_if! 
{
-                if #[cfg(all(feature = "tokio-postgres", feature = "tokio"))] {
-                    let path = build_db_url("postgresql", &config);
-                    let (mut client, connection ) = tokio_postgres::connect(path.as_str(), tokio_postgres::NoTls).await.migration_err("could not connect to database", None)?;
-                    tokio::spawn(async move {
-                        if let Err(e) = connection.await {
-                            eprintln!("connection error: {}", e);
-                        }
-                    });
-
-                    Runner::new(migrations).set_grouped(grouped).set_abort_divergent(divergent).set_abort_missing(missing).run_async(&mut client).await?;
-                } else {
-                    panic!("tried to migrate async from config for a postgresql database, but either tokio or tokio-postgres was not enabled!");
-                }
-            }
-        }
-    }
-    Ok(())
-}
-
 #[cfg(test)]
 mod tests {
     use super::{build_db_url, Config, Error, Kind};
diff --git a/refinery_core/src/drivers/config.rs b/refinery_core/src/drivers/config.rs
new file mode 100644
index 00000000..0ab6cc81
--- /dev/null
+++ b/refinery_core/src/drivers/config.rs
@@ -0,0 +1,218 @@
+#[cfg(any(
+    feature = "mysql",
+    feature = "postgres",
+    feature = "tokio-postgres",
+    feature = "mysql_async"
+))]
+use crate::config::build_db_url;
+use crate::config::{Config, ConfigDbType};
+use crate::error::WrapMigrationError;
+use crate::traits::r#async::{AsyncQuery, AsyncTransaction};
+use crate::traits::sync::{Query, Transaction};
+use crate::traits::{GET_APPLIED_MIGRATIONS_QUERY, GET_LAST_APPLIED_MIGRATION_QUERY};
+use crate::{Error, Migration, Report, Target};
+use async_trait::async_trait;
+use std::convert::Infallible;
+
+// we impl all the dependent traits as noop's and then override the methods that call them on Migrate and AsyncMigrate
+impl Transaction for Config {
+    type Error = Infallible;
+
+    fn execute(&mut self, _queries: &[&str]) -> Result<usize, Self::Error> {
+        Ok(0)
+    }
+}
+
+impl Query<Vec<Migration>> for Config {
+    fn query(&mut self, _query: &str) -> Result<Vec<Migration>, Self::Error> {
+        Ok(Vec::new())
+    }
+}
+
+#[async_trait]
+impl AsyncTransaction for Config {
+    type Error = Infallible;
+
+    async fn execute(&mut self, _queries: &[&str]) -> Result<usize, Self::Error> {
+        Ok(0)
+    }
+}
+
+#[async_trait]
+impl AsyncQuery<Vec<Migration>> for Config {
+    async fn query(
+        &mut self,
+        _query: &str,
+    ) -> Result<Vec<Migration>, <Self as AsyncTransaction>::Error> {
+        Ok(Vec::new())
+    }
+}
+// this is written as macro so that we don't have to deal with type signatures
+#[cfg(any(feature = "mysql", feature = "postgres", feature = "rusqlite"))]
+macro_rules! with_connection {
+    ($config:ident, $op: expr) => {
+        match $config.get_db_type() {
+            ConfigDbType::Mysql => {
+                cfg_if::cfg_if! {
+                    if #[cfg(feature = "mysql")] {
+                        let url = build_db_url("mysql", &$config);
+                        let conn = mysql::Conn::new(&url).migration_err("could not connect to database", None)?;
+                        $op(conn)
+                    } else {
+                        panic!("tried to migrate from config for a mysql database, but feature mysql not enabled!");
+                    }
+                }
+            }
+            ConfigDbType::Sqlite => {
+                cfg_if::cfg_if! {
+                    if #[cfg(feature = "rusqlite")] {
+                        //may have been checked earlier on config parsing, even if not let it fail with a Rusqlite db file not found error
+                        let path = $config.db_path().map(|p| p.to_path_buf()).unwrap_or_default();
+                        let conn = rusqlite::Connection::open_with_flags(path, rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE).migration_err("could not open database", None)?;
+                        $op(conn)
+                    } else {
+                        panic!("tried to migrate from config for a sqlite database, but feature rusqlite not enabled!");
+                    }
+                }
+            }
+            ConfigDbType::Postgres => {
+                cfg_if::cfg_if! {
+                    if #[cfg(feature = "postgres")] {
+                        let path = build_db_url("postgresql", &$config);
+                        let conn = postgres::Client::connect(path.as_str(), postgres::NoTls).migration_err("could not connect to database", None)?;
+                        $op(conn)
+                    } else {
+                        panic!("tried to migrate from config for a postgresql database, but feature postgres not enabled!");
+                    }
+                }
+            }
+        };
+    }
+}
+
+#[cfg(any(feature = "tokio-postgres", feature = "mysql_async"))]
+macro_rules! with_connection_async {
+    ($config: ident, $op: expr) => {
+        match $config.get_db_type() {
+            ConfigDbType::Mysql => {
+                cfg_if::cfg_if! {
+                    if #[cfg(feature = "mysql_async")] {
+                        let url = build_db_url("mysql", $config);
+                        let pool = mysql_async::Pool::from_url(&url).migration_err("could not connect to the database", None)?;
+                        $op(pool).await
+                    } else {
+                        panic!("tried to migrate async from config for a mysql database, but feature mysql_async not enabled!");
+                    }
+                }
+            }
+            ConfigDbType::Sqlite => {
+                panic!("tried to migrate async from config for a sqlite database, but this feature is not implemented yet");
+            }
+            ConfigDbType::Postgres => {
+                cfg_if::cfg_if! {
+                    if #[cfg(all(feature = "tokio-postgres", feature = "tokio"))] {
+                        let path = build_db_url("postgresql", $config);
+                        let (client, connection ) = tokio_postgres::connect(path.as_str(), tokio_postgres::NoTls).await.migration_err("could not connect to database", None)?;
+                        tokio::spawn(async move {
+                            if let Err(e) = connection.await {
+                                eprintln!("connection error: {}", e);
+                            }
+                        });
+                        $op(client).await
+                    } else {
+                        panic!("tried to migrate async from config for a postgresql database, but either tokio or tokio-postgres was not enabled!");
+                    }
+                }
+            }
+        }
+    }
+}
+
+// rewrite all the default methods as we overrode Transaction and Query
+#[cfg(any(feature = "mysql", feature = "postgres", feature = "rusqlite"))]
+impl crate::Migrate for Config {
+    fn get_last_applied_migration(&mut self) -> Result<Option<Migration>, Error> {
+        with_connection!(self, |mut conn| {
+            let mut migrations: Vec<Migration> =
+                Query::query(&mut conn, GET_LAST_APPLIED_MIGRATION_QUERY)
+                    .migration_err("error getting last applied migration", None)?;
+
+            Ok(migrations.pop())
+        })
+    }
+
+    fn get_applied_migrations(&mut self) -> Result<Vec<Migration>, Error> {
+        with_connection!(self, |mut conn| {
+            let migrations: Vec<Migration> = Query::query(&mut conn, GET_APPLIED_MIGRATIONS_QUERY)
+                .migration_err("error getting applied migrations", None)?;
+
+            Ok(migrations)
+        })
+    }
+
+    fn migrate(
+        &mut self,
+        migrations: &[Migration],
+        abort_divergent: bool,
+        abort_missing: bool,
+        grouped: bool,
+        target: Target,
+    ) -> Result<Report, Error> {
+        with_connection!(self, |mut conn| {
+            crate::Migrate::migrate(
+                &mut conn,
+                migrations,
+                abort_divergent,
+                abort_missing,
+                grouped,
+                target,
+            )
+        })
+    }
+}
+
+#[cfg(any(feature = "mysql_async", feature = "tokio-postgres",))]
+#[async_trait]
+impl crate::AsyncMigrate for Config {
+    async fn get_last_applied_migration(&mut self) -> Result<Option<Migration>, Error> {
+        with_connection_async!(self, move |mut conn| async move {
+            let mut migrations: Vec<Migration> =
+                AsyncQuery::query(&mut conn, GET_LAST_APPLIED_MIGRATION_QUERY)
+                    .await
+                    .migration_err("error getting last applied migration", None)?;
+
+            Ok(migrations.pop())
+        })
+    }
+
+    async fn get_applied_migrations(&mut self) -> Result<Vec<Migration>, Error> {
+        with_connection_async!(self, move |mut conn| async move {
+            let migrations: Vec<Migration> =
+                AsyncQuery::query(&mut conn, GET_APPLIED_MIGRATIONS_QUERY)
+                    .await
+                    .migration_err("error getting last applied migration", None)?;
+            Ok(migrations)
+        })
+    }
+
+    async fn migrate(
+        &mut self,
+        migrations: &[Migration],
+        abort_divergent: bool,
+        abort_missing: bool,
+        grouped: bool,
+        target: Target,
+    ) -> Result<Report, Error> {
+        with_connection_async!(self, move |mut conn| async move {
+            crate::AsyncMigrate::migrate(
+                &mut conn,
+                migrations,
+                abort_divergent,
+                abort_missing,
+                grouped,
+                target,
+            )
+            .await
+        })
+    }
+}
diff --git a/refinery_core/src/drivers/mod.rs b/refinery_core/src/drivers/mod.rs
index e89a4bac..0f7d9ed0 100644
--- a/refinery_core/src/drivers/mod.rs
+++ b/refinery_core/src/drivers/mod.rs
@@ -12,3 +12,5 @@ pub mod postgres;
 
 #[cfg(feature = "mysql")]
 pub mod mysql;
+
+mod config;
diff --git a/refinery_core/src/drivers/mysql.rs b/refinery_core/src/drivers/mysql.rs
index 96579c16..d08e6e62 100644
--- a/refinery_core/src/drivers/mysql.rs
+++ b/refinery_core/src/drivers/mysql.rs
@@ -1,4 +1,4 @@
-use crate::traits::sync::{Query, Transaction};
+use crate::traits::sync::{Migrate, Query, Transaction};
 use crate::Migration;
 use chrono::{DateTime, Local};
 use mysql::{
@@ -84,3 +84,6 @@ impl Query<Vec<Migration>> for PooledConn {
         Ok(applied)
     }
 }
+
+impl Migrate for Conn {}
+impl Migrate for PooledConn {}
diff --git a/refinery_core/src/drivers/mysql_async.rs b/refinery_core/src/drivers/mysql_async.rs
index 8bff3459..cdea9cf8 100644
--- a/refinery_core/src/drivers/mysql_async.rs
+++ b/refinery_core/src/drivers/mysql_async.rs
@@ -1,4 +1,4 @@
-use crate::traits::r#async::{AsyncQuery, AsyncTransaction};
+use crate::traits::r#async::{AsyncMigrate, AsyncQuery, AsyncTransaction};
 use crate::Migration;
 use async_trait::async_trait;
 use chrono::{DateTime, Local};
@@ -71,3 +71,5 @@ impl AsyncQuery<Vec<Migration>> for Pool {
         Ok(applied)
     }
 }
+
+impl AsyncMigrate for Pool {}
diff --git a/refinery_core/src/drivers/postgres.rs b/refinery_core/src/drivers/postgres.rs
index 28b413ec..10c57903 100644
--- a/refinery_core/src/drivers/postgres.rs
+++ b/refinery_core/src/drivers/postgres.rs
@@ -1,4 +1,4 @@
-use crate::traits::sync::{Query, Transaction};
+use crate::traits::sync::{Migrate, Query, Transaction};
 use crate::Migration;
 use chrono::{DateTime, Local};
 use postgres::{Client as PgClient, Error as PgError, Transaction as PgTransaction};
@@ -52,3 +52,5 @@ impl Query<Vec<Migration>> for PgClient {
         Ok(applied)
     }
 }
+
+impl Migrate for PgClient {}
diff --git a/refinery_core/src/drivers/rusqlite.rs b/refinery_core/src/drivers/rusqlite.rs
index 5fc668d7..3bf0125f 100644
--- a/refinery_core/src/drivers/rusqlite.rs
+++ b/refinery_core/src/drivers/rusqlite.rs
@@ -1,4 +1,4 @@
-use crate::traits::sync::{Query, Transaction};
+use crate::traits::sync::{Migrate, Query, Transaction};
 use crate::Migration;
 use chrono::{DateTime, Local};
 use rusqlite::{Connection as RqlConnection, Error as RqlError, NO_PARAMS};
@@ -52,3 +52,5 @@ impl Query<Vec<Migration>> for RqlConnection {
         Ok(applied)
     }
 }
+
+impl Migrate for RqlConnection {}
diff --git a/refinery_core/src/drivers/tokio_postgres.rs b/refinery_core/src/drivers/tokio_postgres.rs
index a81065cf..8a6ecf5f 100644
--- a/refinery_core/src/drivers/tokio_postgres.rs
+++ b/refinery_core/src/drivers/tokio_postgres.rs
@@ -1,4 +1,4 @@
-use crate::traits::r#async::{AsyncQuery, AsyncTransaction};
+use crate::traits::r#async::{AsyncMigrate, AsyncQuery, AsyncTransaction};
 use crate::Migration;
 use async_trait::async_trait;
 use chrono::{DateTime, Local};
@@ -59,3 +59,5 @@ impl AsyncQuery<Vec<Migration>> for Client {
         Ok(applied)
     }
 }
+
+impl AsyncMigrate for Client {}
diff --git a/refinery_core/src/runner.rs b/refinery_core/src/runner.rs
index 2a99c0a6..cc8dc73d 100644
--- a/refinery_core/src/runner.rs
+++ b/refinery_core/src/runner.rs
@@ -219,6 +219,7 @@ pub struct Runner {
 }
 
 impl Runner {
+    /// instantiate a new Runner
     pub fn new(migrations: &[Migration]) -> Runner {
         Runner {
             grouped: false,
diff --git a/refinery_core/src/traits/async.rs b/refinery_core/src/traits/async.rs
index f1bca97a..010fd41e 100644
--- a/refinery_core/src/traits/async.rs
+++ b/refinery_core/src/traits/async.rs
@@ -160,5 +160,3 @@ where
         }
     }
 }
-
-impl<T> AsyncMigrate for T where T: AsyncQuery<Vec<Migration>> {}
diff --git a/refinery_core/src/traits/sync.rs b/refinery_core/src/traits/sync.rs
index e92b9be0..842fad29 100644
--- a/refinery_core/src/traits/sync.rs
+++ b/refinery_core/src/traits/sync.rs
@@ -142,5 +142,3 @@ where
         }
     }
 }
-
-impl<T: Query<Vec<Migration>>> Migrate for T {}
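For reference, a minimal sketch of the call path this patch enables, with the rusqlite feature on: Config now implements Migrate itself, so a Runner can be driven straight from a config value instead of a live connection. The function name and database path below are illustrative, and the migrations slice is assumed to come from the same kind of helper the tests' get_migrations() uses.

use refinery::{
    config::{Config, ConfigDbType},
    Migration, Runner,
};

fn migrate_from_settings(migrations: &[Migration]) -> Result<(), Box<dyn std::error::Error>> {
    // The sqlite branch opens the file with SQLITE_OPEN_READ_WRITE only,
    // so the database file must already exist at this path.
    let mut config = Config::new(ConfigDbType::Sqlite).set_db_path("refinery_test.db");

    let runner = Runner::new(migrations)
        .set_grouped(false)
        .set_abort_divergent(true)
        .set_abort_missing(true);

    // run() accepts the Config directly and returns a Report listing the
    // migrations applied in this invocation.
    let report = runner.run(&mut config)?;
    for migration in report.applied_migrations() {
        println!("applied {} (v{})", migration.name(), migration.version());
    }

    // The same Config value can be queried afterwards through the Migrate methods.
    if let Some(last) = runner.get_last_applied_migration(&mut config)? {
        println!("schema is at version {}", last.version());
    }

    Ok(())
}

The async drivers follow the same shape, swapping in runner.run_async(&mut config).await and runner.get_last_applied_migration_async(&mut config).await, as the mysql_async and tokio_postgres tests above exercise.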