remove unused test utils #22464

Merged (11 commits) on Feb 7, 2023
Changes from all commits
15 changes: 0 additions & 15 deletions .github/workflows/gradle.yml
@@ -714,21 +714,6 @@ jobs:
update-comment: true
debug-mode: false

- name: Integration test
uses: Wandalen/wretry.action@master
with:
command: SUB_BUILD=PLATFORM ./gradlew newIntegrationTest
attempt_limit: 3
attempt_delay: 5000 # in ms

- name: Slow integration test
if: contains(github.ref, 'bump-version') || contains(github.ref, 'master')
uses: Wandalen/wretry.action@master
with:
command: SUB_BUILD=PLATFORM ./gradlew slowIntegrationTest
attempt_limit: 3
attempt_delay: 5000 # in ms

- name: Test if Seed spec is updated
uses: Wandalen/wretry.action@master
with:
PostgresDestinationTest.java
@@ -10,7 +10,12 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.io.IOs;
import io.airbyte.commons.json.Jsons;
import io.airbyte.commons.string.Strings;
import io.airbyte.db.factory.DataSourceFactory;
import io.airbyte.db.factory.DatabaseDriver;
import io.airbyte.db.jdbc.DefaultJdbcDatabase;
import io.airbyte.db.jdbc.JdbcDatabase;
import io.airbyte.db.jdbc.JdbcUtils;
import io.airbyte.integrations.base.AirbyteMessageConsumer;
@@ -31,12 +36,14 @@
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.sql.DataSource;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.PostgreSQLContainer;
import org.testcontainers.utility.MountableFile;

public class PostgresDestinationTest {

@@ -109,7 +116,7 @@ static void init() {

@BeforeEach
void setup() {
config = PostgreSQLContainerHelper.createDatabaseWithRandomNameAndGetPostgresConfig(PSQL_DB);
config = createDatabaseWithRandomNameAndGetPostgresConfig(PSQL_DB);
}

@AfterAll
@@ -208,7 +215,7 @@ public void testCheckIncorrectDataBaseFailure() {

@Test
public void testUserHasNoPermissionToDataBase() throws Exception {
final JdbcDatabase database = PostgreSQLContainerHelper.getJdbcDatabaseFromConfig(PostgreSQLContainerHelper.getDataSourceFromConfig(config));
final JdbcDatabase database = getJdbcDatabaseFromConfig(getDataSourceFromConfig(config));

database.execute(connection -> connection.createStatement()
.execute(String.format("create user %s with password '%s';", USERNAME, PASSWORD)));
@@ -249,7 +256,7 @@ void sanityTest() throws Exception {
.withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.of(SCHEMA_NAME + "." + STREAM_NAME, 10)))));
consumer.close();

final JdbcDatabase database = PostgreSQLContainerHelper.getJdbcDatabaseFromConfig(PostgreSQLContainerHelper.getDataSourceFromConfig(config));
final JdbcDatabase database = getJdbcDatabaseFromConfig(getDataSourceFromConfig(config));

final List<JsonNode> actualRecords = database.bufferedResultSetQuery(
connection -> connection.createStatement().executeQuery("SELECT * FROM public._airbyte_raw_id_and_name;"),
@@ -273,4 +280,40 @@ private List<AirbyteMessage> getNRecords(final int n) {
.collect(Collectors.toList());
}

private JdbcDatabase getJdbcDatabaseFromConfig(final DataSource dataSource) {
return new DefaultJdbcDatabase(dataSource, JdbcUtils.getDefaultSourceOperations());
}

private JsonNode createDatabaseWithRandomNameAndGetPostgresConfig(final PostgreSQLContainer<?> psqlDb) {
final var dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase();
final var initScriptName = "init_" + dbName.concat(".sql");
final var tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";");

PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), psqlDb);
return getDestinationConfig(psqlDb, dbName);
}

private JsonNode getDestinationConfig(final PostgreSQLContainer<?> psqlDb, final String dbName) {
return Jsons.jsonNode(ImmutableMap.builder()
.put(JdbcUtils.HOST_KEY, psqlDb.getHost())
.put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort())
.put(JdbcUtils.DATABASE_KEY, dbName)
.put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername())
.put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword())
.put(JdbcUtils.SCHEMA_KEY, "public")
.put(JdbcUtils.SSL_KEY, false)
.build());
}

private DataSource getDataSourceFromConfig(final JsonNode config) {
return DataSourceFactory.create(
config.get(JdbcUtils.USERNAME_KEY).asText(),
config.get(JdbcUtils.PASSWORD_KEY).asText(),
DatabaseDriver.POSTGRESQL.getDriverClassName(),
String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(),
config.get(JdbcUtils.HOST_KEY).asText(),
config.get(JdbcUtils.PORT_KEY).asInt(),
config.get(JdbcUtils.DATABASE_KEY).asText()));
}

}
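
The inlined helpers above still depend on the one shared utility that survives this PR, PostgreSQLContainerHelper.runSqlScript, whose body is not shown in this diff. As a rough sketch of what such a utility presumably does (copy the mounted init script into the Testcontainers Postgres instance and run it with psql; the real helper in the repository may differ):

```java
import org.testcontainers.containers.PostgreSQLContainer;
import org.testcontainers.utility.MountableFile;

// Assumed sketch only: the actual PostgreSQLContainerHelper lives elsewhere in the repo
// and is not part of this PR; it may be implemented differently.
final class PostgreSQLContainerHelperSketch {

  static void runSqlScript(final MountableFile file, final PostgreSQLContainer<?> db) {
    try {
      final String path = "/etc/" + java.util.UUID.randomUUID() + ".sql";
      // Copy the generated "CREATE DATABASE ..." script into the running container,
      db.copyFileToContainer(file, path);
      // then execute it with psql against the container's default database.
      db.execInContainer("psql", "-d", db.getDatabaseName(), "-U", db.getUsername(), "-a", "-f", path);
    } catch (final Exception e) {
      throw new RuntimeException(e);
    }
  }

}
```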

This file was deleted.

This file was deleted.

@@ -2,7 +2,7 @@
* Copyright (c) 2022 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.test.airbyte_test_container;
package io.airbyte.test.container;

import com.google.common.collect.Maps;
import io.airbyte.api.client.AirbyteApiClient;
@@ -141,7 +141,6 @@ private void serviceLogConsumer(final DockerComposeContainer<?> composeContainer
/**
* Exposes logs generated by docker containers in docker compose temporal test container.
*
*
* @param service - name of docker container from which log is emitted.
* @param customConsumer - each line output by the service in docker compose will be passed ot the
* consumer. if null do nothing.
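
For context on the javadoc above: attaching a per-service log consumer to a Testcontainers DockerComposeContainer typically looks like the snippet below. This is an illustrative sketch, not the harness's actual code; the service name and compose file path are placeholders.

```java
import java.io.File;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.DockerComposeContainer;
import org.testcontainers.containers.output.OutputFrame;

// Illustrative only: shows the Testcontainers API the javadoc above is describing.
final class ComposeLogConsumerExample {

  private static final Logger LOGGER = LoggerFactory.getLogger(ComposeLogConsumerExample.class);

  static DockerComposeContainer<?> create() {
    final Consumer<OutputFrame> consumer =
        frame -> LOGGER.info("server: {}", frame.getUtf8String());

    // Every line emitted by the "server" service is handed to the consumer;
    // supplying no consumer simply means the output is dropped.
    return new DockerComposeContainer<>(new File("docker-compose.yaml"))
        .withLogConsumer("server", consumer);
  }

}
```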
@@ -68,7 +68,7 @@
import io.airbyte.commons.util.MoreProperties;
import io.airbyte.db.Database;
import io.airbyte.db.jdbc.JdbcUtils;
import io.airbyte.test.airbyte_test_container.AirbyteTestContainer;
import io.airbyte.test.container.AirbyteTestContainer;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.temporal.client.WorkflowClient;
@@ -413,17 +413,18 @@ public Set<SchemaTableNamePair> listAllTables(final Database database) throws SQ

private Set<SchemaTableNamePair> addAirbyteGeneratedTables(final boolean withScdTable, final Set<SchemaTableNamePair> sourceTables) {
return sourceTables.stream().flatMap(x -> {
final String cleanedNameStream = x.tableName.replace(".", "_");
final String cleanedNameStream = x.tableName().replace(".", "_");
final List<SchemaTableNamePair> explodedStreamNames = new ArrayList<>(List.of(
new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName,
new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(),
String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream)),
new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream))));
new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(), String.format("%s%s", OUTPUT_STREAM_PREFIX, cleanedNameStream))));
if (withScdTable) {
explodedStreamNames
.add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName,
.add(new SchemaTableNamePair("_airbyte_" + OUTPUT_NAMESPACE_PREFIX + x.schemaName(),
String.format("%s%s_stg", OUTPUT_STREAM_PREFIX, cleanedNameStream)));
explodedStreamNames
.add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName, String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream)));
.add(new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + x.schemaName(),
String.format("%s%s_scd", OUTPUT_STREAM_PREFIX, cleanedNameStream)));
}
return explodedStreamNames.stream();
}).collect(Collectors.toSet());
@@ -605,8 +606,8 @@ public List<JsonNode> retrieveRawDestinationRecords(final SchemaTableNamePair pa
final Database destination = getDestinationDatabase();
final Set<SchemaTableNamePair> namePairs = listAllTables(destination);

final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName.replace(".", "_"));
final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName, rawStreamName);
final String rawStreamName = String.format("_airbyte_raw_%s%s", OUTPUT_STREAM_PREFIX, pair.tableName().replace(".", "_"));
final SchemaTableNamePair rawTablePair = new SchemaTableNamePair(OUTPUT_NAMESPACE_PREFIX + pair.schemaName(), rawStreamName);
assertTrue(namePairs.contains(rawTablePair), "can't find a non-normalized version (raw) of " + rawTablePair.getFullyQualifiedTableName());

return retrieveDestinationRecords(destination, rawTablePair.getFullyQualifiedTableName());
Expand Down Expand Up @@ -766,15 +767,15 @@ private void clearSourceDbData() throws SQLException {
final Database database = getSourceDatabase();
final Set<SchemaTableNamePair> pairs = listAllTables(database);
for (final SchemaTableNamePair pair : pairs) {
database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName, pair.tableName)));
database.query(context -> context.execute(String.format("DROP TABLE %s.%s", pair.schemaName(), pair.tableName())));
}
}

private void clearDestinationDbData() throws SQLException {
final Database database = getDestinationDatabase();
final Set<SchemaTableNamePair> pairs = listAllTables(database);
for (final SchemaTableNamePair pair : pairs) {
database.query(context -> context.execute(String.format("DROP TABLE %s.%s CASCADE", pair.schemaName, pair.tableName)));
database.query(context -> context.execute(String.format("DROP TABLE %s.%s CASCADE", pair.schemaName(), pair.tableName())));
}
}

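
The switch in this file from field access (pair.schemaName) to accessor calls (pair.schemaName()) suggests SchemaTableNamePair is now a Java record. Its definition is not part of this diff; a minimal sketch consistent with how it is used here, with the body of getFullyQualifiedTableName assumed:

```java
// Assumed shape only, inferred from the call sites above; the real type may carry more.
public record SchemaTableNamePair(String schemaName, String tableName) {

  // Used by the harness when asserting that a raw table exists in the destination.
  public String getFullyQualifiedTableName() {
    return String.format("%s.%s", schemaName, tableName);
  }

}
```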

This file was deleted.

GKEPostgresConfig.java
@@ -15,12 +15,12 @@

/**
* This class is used to provide information related to the test databases for running the
* {@link AdvancedAcceptanceTests} on GKE. We launch 2 postgres databases in GKE as pods which act
* as source and destination and the tests run against them. In order to allow the test instance to
* connect to these databases we use port forwarding Refer
* {@link AirbyteAcceptanceTestHarness} on GKE. We launch 2 postgres databases in GKE as pods which
* act as source and destination and the tests run against them. In order to allow the test instance
* to connect to these databases we use port forwarding Refer
* tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh for more info
*/
public class GKEPostgresConfig {
class GKEPostgresConfig {

private static final String SOURCE_HOST = "postgres-source-svc";
private static final String DESTINATION_HOST = "postgres-destination-svc";
@@ -29,7 +29,7 @@ public class GKEPostgresConfig {
private static final String PASSWORD = "admin123";
private static final String DB = "postgresdb";

public static Map<Object, Object> dbConfig(final Type connectorType, final boolean hiddenPassword, final boolean withSchema) {
static Map<Object, Object> dbConfig(final Type connectorType, final boolean hiddenPassword, final boolean withSchema) {
final Map<Object, Object> dbConfig = new HashMap<>();
dbConfig.put(JdbcUtils.HOST_KEY, connectorType == Type.SOURCE ? SOURCE_HOST : DESTINATION_HOST);
dbConfig.put(JdbcUtils.PASSWORD_KEY, hiddenPassword ? "**********" : PASSWORD);
@@ -45,12 +45,12 @@ public static Map<Object, Object> dbConfig(final Type connectorType, final boole
return dbConfig;
}

public static Database getSourceDatabase() {
static Database getSourceDatabase() {
return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(),
"jdbc:postgresql://localhost:2000/postgresdb", SQLDialect.POSTGRES));
}

public static Database getDestinationDatabase() {
static Database getDestinationDatabase() {
return new Database(DSLContextFactory.create(USERNAME, PASSWORD, DatabaseDriver.POSTGRESQL.getDriverClassName(),
"jdbc:postgresql://localhost:4000/postgresdb", SQLDialect.POSTGRES));
}
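
As the javadoc notes, both GKE databases are reached through port forwarding on localhost (2000 for the source, 4000 for the destination). A hedged usage sketch of the now package-private helpers; this is not code from the PR, and a caller like this would have to live in the same package as GKEPostgresConfig:

```java
import io.airbyte.db.Database;

// Illustrative sketch only; class and method names here are placeholders.
final class GkePortForwardSmokeCheck {

  static void verifyForwardedPorts() throws Exception {
    // Both databases sit behind port-forward tunnels on localhost, as set up by
    // tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh.
    final Database source = GKEPostgresConfig.getSourceDatabase();           // jdbc:postgresql://localhost:2000/postgresdb
    final Database destination = GKEPostgresConfig.getDestinationDatabase(); // jdbc:postgresql://localhost:4000/postgresdb

    // A trivial query against each confirms the tunnels and credentials work.
    source.query(ctx -> ctx.fetch("SELECT 1"));
    destination.query(ctx -> ctx.fetch("SELECT 1"));
  }

}
```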