Testflows updates #873

Merged
merged 12 commits on Oct 16, 2024
4 changes: 2 additions & 2 deletions .github/workflows/testflows-sink-connector-kafka.yml
@@ -15,7 +15,7 @@ on:
output_format:
description: "Testflows output style."
type: string
default: nice-new-fails
default: new-fails
secrets:
DOCKERHUB_USERNAME:
required: false
@@ -46,11 +46,11 @@ on:
description: "Testflows output style."
type: choice
options:
- new-fails
- nice-new-fails
- brisk-new-fails
- plain-new-fails
- pnice-new-fails
- new-fails
- classic
- nice
- fails
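The identical change is applied to both workflow files: new-fails replaces nice-new-fails as the default TestFlows output style, and the new-fails entry moves to the top of the manual-dispatch choice list (the selected value is presumably forwarded to TestFlows' --output option when the regression is run).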
4 changes: 2 additions & 2 deletions .github/workflows/testflows-sink-connector-lightweight.yml
@@ -15,7 +15,7 @@ on:
output_format:
description: "Testflows output style."
type: string
default: nice-new-fails
default: new-fails
secrets:
DOCKERHUB_USERNAME:
required: false
@@ -46,11 +46,11 @@ on:
description: "Testflows output style."
type: choice
options:
- new-fails
- nice-new-fails
- brisk-new-fails
- plain-new-fails
- pnice-new-fails
- new-fails
- classic
- nice
- fails
17 changes: 9 additions & 8 deletions sink-connector-lightweight/tests/integration/helpers/cluster.py
@@ -596,14 +596,15 @@ def up(self, timeout=30 * 60):
"IMAGE_DEPENDENCY_PROXY", ""
)
self.environ["COMPOSE_HTTP_TIMEOUT"] = "300"
self.environ[
"CLICKHOUSE_TESTS_SERVER_BIN_PATH"
] = self.clickhouse_binary_path
self.environ[
"CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"
] = self.clickhouse_odbc_bridge_binary_path or os.path.join(
os.path.dirname(self.clickhouse_binary_path),
"clickhouse-odbc-bridge",
self.environ["CLICKHOUSE_TESTS_SERVER_BIN_PATH"] = (
self.clickhouse_binary_path
)
self.environ["CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"] = (
self.clickhouse_odbc_bridge_binary_path
or os.path.join(
os.path.dirname(self.clickhouse_binary_path),
"clickhouse-odbc-bridge",
)
)
self.environ["CLICKHOUSE_TESTS_DIR"] = self.configs_dir

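The cluster.py hunk is a formatting-only rewrap of the two environment-variable assignments (Black-style parenthesized right-hand sides); the fallback logic is unchanged. A minimal standalone sketch of that fallback, using hypothetical paths, shows what gets exported when no ODBC bridge path is supplied:

import os

# Hypothetical stand-ins for the cluster attributes used above.
clickhouse_binary_path = "/usr/bin/clickhouse"
clickhouse_odbc_bridge_binary_path = None  # not provided, so the fallback applies

environ = {}
environ["CLICKHOUSE_TESTS_SERVER_BIN_PATH"] = clickhouse_binary_path
environ["CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"] = (
    clickhouse_odbc_bridge_binary_path
    or os.path.join(
        os.path.dirname(clickhouse_binary_path), "clickhouse-odbc-bridge"
    )
)

print(environ["CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"])  # /usr/bin/clickhouse-odbc-bridge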
@@ -47,5 +47,5 @@
"database.serverTimezone": "UTC",
"clickhouse.datetime.timezone": "UTC",
"auto.create.tables": "true",
"ddl.retry": "true"
"ddl.retry": "true",
}
@@ -311,10 +311,10 @@ def regression(
run=load("tests.schema_only", "module"),
)
Feature(
run=load("tests.sink_cli_commands", "module"),
run=load("tests.multiple_databases", "module"),
)
Feature(
run=load("tests.multiple_databases", "module"),
run=load("tests.sink_cli_commands", "module"),
)


@@ -14,8 +14,6 @@
change_column,
modify_column,
drop_column,
add_primary_key,
drop_primary_key,
)


@@ -592,26 +590,6 @@ def drop_column_on_a_database(self, database):
check_column(table_name=table_name, database=database, column_name="")


@TestScenario
@Requirements(
RQ_SRS_030_ClickHouse_MySQLToClickHouseReplication_PrimaryKey_Simple("1.0")
)
def add_primary_key_on_a_database(self, database):
"""Check that the primary key is added to the table when we add a primary key on a database."""
table_name = f"table_{getuid()}"
column = "col1"

with Given("I create a table on multiple databases"):
create_table_and_insert_values(table_name=table_name, database_name=database)

with When("I add a primary key on the table"):
drop_primary_key(table_name=table_name, database=database)
add_primary_key(table_name=table_name, database=database, column_name=column)

with Then("I check that the primary key was added to the table"):
check_column(table_name=table_name, database=database, column_name=column)


@TestOutline
def check_different_database_names(self, database_map):
"""Check that the tables are replicated when we have source and destination databases with different names."""
@@ -755,7 +733,6 @@ def check_alters_on_different_databases(self):
change_column_on_a_database,
modify_column_on_a_database,
drop_column_on_a_database,
add_primary_key_on_a_database,
]

check_alters(
@@ -17,9 +17,15 @@ def drop_database(self, database_name=None, node=None):

@TestStep(Then)
def check_column(
self, table_name, column_name, node=None, column_type=None, database=None
self,
table_name,
column_name,
node=None,
column_type=None,
database=None,
is_primary_key=False,
):
"""Check if column exists in ClickHouse table."""
"""Check if column exists in ClickHouse table and optionally verify if it is the primary key."""

if database is None:
database = "test"
@@ -51,6 +57,14 @@ def check_column(

assert column.output.strip() == expected_output, error()

if is_primary_key:
primary_key = node.query(
f"SELECT is_in_primary_key FROM system.columns WHERE database = '{database}' AND table = '{table_name}' AND name = '{column_name}' LIMIT 1 FORMAT TabSeparated"
)
assert primary_key.output.strip() == "1", error(
f"Column {column_name} is not a primary key"
)
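The new is_primary_key flag reads is_in_primary_key from ClickHouse's system.columns; that column is a 0/1 value and node.query returns text, so the assertion compares the TabSeparated output against the string "1". This appears to supersede the primary-key assertion from the removed add_primary_key_on_a_database scenario. A hypothetical call site (table and column names made up) would look like:

with Then("I check that the column is part of the primary key"):
    check_column(
        table_name=table_name,
        database=database,
        column_name="col1",
        is_primary_key=True,
    )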


@TestStep(Given)
def create_clickhouse_database(self, name=None, node=None):
2 changes: 1 addition & 1 deletion sink-connector/tests/integration/tests/deduplication.py
@@ -45,7 +45,7 @@ def deduplication(
statement="count(*)",
clickhouse_table=clickhouse_table,
with_final=True,
timeout=50,
timeout=140,
)


2 changes: 1 addition & 1 deletion sink-connector/tests/integration/tests/partition_limits.py
@@ -58,7 +58,7 @@ def partition_limits(

retry(
clickhouse.query,
timeout=50,
timeout=140,
delay=1,
)(
f"SELECT count() FROM test.{table_name} FINAL where _sign !=-1 FORMAT CSV",
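For context, TestFlows' retry wraps a callable and keeps re-invoking it until it succeeds or the timeout elapses, sleeping delay seconds between attempts; raising these timeouts from 50 to 140 seconds (here, in deduplication.py above, and in the complex_check_creation_and_select default below) simply allows more polling attempts before a check fails. A rough, simplified sketch of the idea, not the actual TestFlows implementation:

import time

def retry(func, timeout=140, delay=1):
    """Keep calling func until it succeeds or `timeout` seconds elapse."""
    def wrapper(*args, **kwargs):
        deadline = time.monotonic() + timeout
        while True:
            try:
                return func(*args, **kwargs)
            except Exception:
                if time.monotonic() >= deadline:
                    raise
                time.sleep(delay)
    return wrapper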
2 changes: 1 addition & 1 deletion sink-connector/tests/integration/tests/steps/sql.py
@@ -293,7 +293,7 @@ def complex_check_creation_and_select(
table_name,
clickhouse_table,
statement,
timeout=50,
timeout=140,
message=1,
clickhouse_node=None,
database_name=None,