From 9747be594ef785593b84d5bbf6ebe0e4d4c3296c Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Tue, 14 Jan 2025 17:49:45 +0300
Subject: [PATCH 1/6] Multiprocess scenario test

---
 ydb/tests/olap/scenario/conftest.py           |  3 +
 .../scenario/helpers/scenario_tests_helper.py | 17 ++---
 ydb/tests/olap/scenario/multitest.sh          |  2 +
 ydb/tests/olap/scenario/test.mk               |  6 ++
 ydb/tests/olap/scenario/test_insert.py        | 72 ++++++++++++-------
 5 files changed, 68 insertions(+), 32 deletions(-)
 create mode 100755 ydb/tests/olap/scenario/multitest.sh
 create mode 100644 ydb/tests/olap/scenario/test.mk

diff --git a/ydb/tests/olap/scenario/conftest.py b/ydb/tests/olap/scenario/conftest.py
index 7cfe72e81e02..797335b750ef 100644
--- a/ydb/tests/olap/scenario/conftest.py
+++ b/ydb/tests/olap/scenario/conftest.py
@@ -77,6 +77,8 @@ def teardown_class(cls):
         cls._ydb_instance.stop()

     def test(self, ctx: TestContext):
+        test_path = ctx.test + get_external_param("table_suffix", "")
+        ScenarioTestHelper(None).remove_path(test_path, ctx.suite)
         start_time = time.time()
         try:
             ctx.executable(self, ctx)
@@ -103,6 +105,7 @@ def test(self, ctx: TestContext):
             allure_test_description(ctx.suite, ctx.test, start_time=start_time, end_time=time.time())
             raise
         allure_test_description(ctx.suite, ctx.test, start_time=start_time, end_time=time.time())
+        ScenarioTestHelper(None).remove_path(test_path, ctx.suite)


 def pytest_generate_tests(metafunc):

diff --git a/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py b/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
index f5d9f5a9fcba..db386f400fbb 100644
--- a/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
+++ b/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
@@ -10,6 +10,7 @@
 from abc import abstractmethod, ABC
 from typing import Set, List, Dict, Any, Callable, Optional
 from time import sleep
+from ydb.tests.olap.lib.utils import get_external_param


 class TestContext:
@@ -316,7 +317,7 @@ def _add_not_empty(p: str, dir: str):
         result = os.path.join('/', YdbCluster.ydb_database, YdbCluster.tables_path)
         if self.test_context is not None:
             result = _add_not_empty(result, self.test_context.suite)
-            result = _add_not_empty(result, self.test_context.test)
+            result = _add_not_empty(result, self.test_context.test) + get_external_param("table_suffix", "")
         result = _add_not_empty(result, path)
         return result

@@ -653,7 +654,7 @@ def describe_table(self, path: str, settings: ydb.DescribeTableSettings = None)
         )

     @allure.step('List path {path}')
-    def list_path(self, path: str) -> List[ydb.SchemeEntry]:
+    def list_path(self, path: str, folder: str) -> List[ydb.SchemeEntry]:
         """Recursively describe the path in the database under test.

         If the path is a directory or TableStore, then all subpaths are included in the description.
@@ -666,7 +667,7 @@ def list_path(self, path: str) -> List[ydb.SchemeEntry]:

         If the path does not exist, an empty list is returned.
         """
-        root_path = self.get_full_path('')
+        root_path = self.get_full_path(folder)
         try:
             self_descr = YdbCluster._describe_path_impl(os.path.join(root_path, path))
         except ydb.issues.SchemeError:
@@ -681,7 +682,7 @@ def list_path(self, path: str) -> List[ydb.SchemeEntry]:
         return self_descr

     @allure.step('Remove path {path}')
-    def remove_path(self, path: str) -> None:
+    def remove_path(self, path: str, folder: str = '') -> None:
         """Recursively delete a path in the tested database.

         If the path is a directory or TableStore, then all nested paths are removed.
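Note on the changes above: conftest.py and ScenarioTestHelper.get_full_path() now append an externally supplied table_suffix to the per-test directory, so every concurrently launched test process creates, fills and drops its own set of tables instead of colliding with the others on a shared path. The same suffix is read again in conftest.py to pre-clean and post-clean the suffixed path through remove_path(), which is why remove_path() and list_path() gain a folder argument. A minimal sketch of the path-building idea (a hypothetical helper, not code from this patch series):

    import os

    def build_test_path(database: str, suite: str, test: str, table_suffix: str, path: str = "") -> str:
        # Each parallel run works under <database>/<suite>/<test><table_suffix>/<path>
        root = os.path.join("/", database, suite, test + table_suffix)
        return os.path.join(root, path) if path else root

    # A process started with --test-param table_suffix=7 ends up under .../read_data_during_bulk_upsert7
    print(build_test_path("olap-testing/testdb", "TestInsert", "read_data_during_bulk_upsert", "7", "log0"))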
@@ -696,12 +697,12 @@ def remove_path(self, path: str) -> None:

         import ydb.tests.olap.scenario.helpers.drop_helper as dh

-        root_path = self.get_full_path('')
-        for e in self.list_path(path):
+        root_path = self.get_full_path(folder)
+        for e in self.list_path(path, folder):
             if e.is_any_table():
-                self.execute_scheme_query(dh.DropTable(e.name))
+                self.execute_scheme_query(dh.DropTable(os.path.join(folder, e.name)))
             elif e.is_column_store():
-                self.execute_scheme_query(dh.DropTableStore(e.name))
+                self.execute_scheme_query(dh.DropTableStore(os.path.join(folder, e.name)))
             elif e.is_directory():
                 self._run_with_expected_status(
                     lambda: YdbCluster.get_ydb_driver().scheme_client.remove_directory(os.path.join(root_path, e.name)),

diff --git a/ydb/tests/olap/scenario/multitest.sh b/ydb/tests/olap/scenario/multitest.sh
new file mode 100755
index 000000000000..fa603388f5b8
--- /dev/null
+++ b/ydb/tests/olap/scenario/multitest.sh
@@ -0,0 +1,2 @@
+#!/bin/sh -e
+make S3_ACCESS_KEY=$1 S3_SECRET_KEY=$2 -rkj -f test.mk all.test.dst && echo OK || echo Error

diff --git a/ydb/tests/olap/scenario/test.mk b/ydb/tests/olap/scenario/test.mk
new file mode 100644
index 000000000000..27bc210f556b
--- /dev/null
+++ b/ydb/tests/olap/scenario/test.mk
@@ -0,0 +1,6 @@
+suffixes:=$(shell jot 20)
+
+$(suffixes:=.test.dst): %.test.dst:
+	../../../../ya test --build=relwithdebinfo --test-disable-timeout --test-param ydb-endpoint="grpc://ydb-olap-testing-vla-0000.search.yandex.net:2135" --test-param ydb-db="/olap-testing/kikimr/testing/testdb" --test-param tables-path=scenario --test-param s3-endpoint=http://storage.yandexcloud.net --test-param s3-access-key=$(S3_ACCESS_KEY) --test-param s3-secret-key=$(S3_SECRET_KEY) --test-param s3-buckets=ydb-test-test,ydb-test-test-2 --test-param test-duration-seconds=2400 --test-param table_suffix=$* --test-param rows_count=100 --test-param batches_count=1000 --test-param reuse-tables=True --test-param keep-tables=True -F test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
+
+all.test.dst: $(suffixes:=.test.dst)
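multitest.sh and test.mk above implement the fan-out itself: make -rkj builds twenty independent <suffix>.test.dst targets in parallel, each one a ya test invocation carrying its own table_suffix. jot 20 prints the numbers 1 through 20 and is a BSD/macOS utility (seq 20 would be the GNU equivalent). Roughly the same fan-out expressed directly in Python, as an illustrative sketch only (the ya arguments are abbreviated and the relative path is the one test.mk assumes):

    import subprocess
    from concurrent.futures import ThreadPoolExecutor

    def run_one(suffix: int) -> int:
        # Rough equivalent of one "<suffix>.test.dst" target; most --test-param flags are omitted here.
        cmd = [
            "../../../../ya", "test", "--build=relwithdebinfo", "--test-disable-timeout",
            "--test-param", f"table_suffix={suffix}",
            "-F", "test_insert.py::TestInsert::test[read_data_during_bulk_upsert]",
        ]
        return subprocess.call(cmd)

    if __name__ == "__main__":
        with ThreadPoolExecutor(max_workers=20) as pool:
            codes = list(pool.map(run_one, range(1, 21)))
        print("OK" if all(code == 0 for code in codes) else "Error")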
diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index deee48050c23..f915aebd92a3 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -1,4 +1,6 @@
+import time
 from conftest import BaseTestSet
+from multiprocessing import Process
 from ydb.tests.olap.scenario.helpers import (
     ScenarioTestHelper,
     TestContext,
@@ -24,19 +26,26 @@ class TestInsert(BaseTestSet):
         .with_key_columns("key")
     )

-    def _loop_upsert(self, ctx: TestContext, data: list):
+    def _loop_upsert(self, ctx: TestContext, data: list, table: str):
         sth = ScenarioTestHelper(ctx)
+        table_name = "log" + table
         for batch in data:
-            sth.bulk_upsert_data("log", self.schema_log, batch)
+            sth.bulk_upsert_data(table_name, self.schema_log, batch)

-    def _loop_insert(self, ctx: TestContext, rows_count: int):
+    def _loop_insert(self, ctx: TestContext, rows_count: int, table: str):
         sth = ScenarioTestHelper(ctx)
-        log: str = sth.get_full_path("log")
-        cnt: str = sth.get_full_path("cnt")
+        log: str = sth.get_full_path("log" + table)
+        cnt: str = sth.get_full_path("cnt" + table)
         for i in range(rows_count):
-            sth.execute_query(
-                f'$cnt = SELECT CAST(COUNT(*) AS INT64) from `{log}`; INSERT INTO `{cnt}` (key, c) values({i}, $cnt)'
-            )
+            for j in range(10):
+                try:
+                    sth.execute_query(
+                        f'$cnt = SELECT CAST(COUNT(*) AS INT64) from `{log}`; INSERT INTO `{cnt}` (key, c) values({i}, $cnt)'
+                    )
+                    break
+                except:
+                    pass
+                time.sleep(1)

     def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         sth = ScenarioTestHelper(ctx)
@@ -45,12 +54,15 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         batches_count = int(get_external_param("batches_count", "10"))
         rows_count = int(get_external_param("rows_count", "1000"))
         inserts_count = int(get_external_param("inserts_count", "200"))
-        sth.execute_scheme_query(
-            CreateTable(cnt_table_name).with_schema(self.schema_cnt)
-        )
-        sth.execute_scheme_query(
-            CreateTable(log_table_name).with_schema(self.schema_log)
-        )
+        table_count = int(get_external_param("table_count", "10"))
+        for table in range(table_count):
+            sth.execute_scheme_query(
+                CreateTable(cnt_table_name + str(table)).with_schema(self.schema_cnt)
+            )
+        for table in range(table_count):
+            sth.execute_scheme_query(
+                CreateTable(log_table_name + str(table)).with_schema(self.schema_log)
+            )
         data: List = []
         for i in range(batches_count):
             batch: List[Dict[str, Any]] = []
@@ -58,28 +70,40 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
             batch.append({"key": j + rows_count * i})
         data.append(batch)

-        thread1 = TestThread(target=self._loop_upsert, args=[ctx, data])
-        thread2 = TestThread(target=self._loop_insert, args=[ctx, inserts_count])
+        thread1 = []
+        thread2 = []
+        for table in range(table_count):
+            thread1.append(TestThread(target=self._loop_upsert, args=[ctx, data, str(table)]))
+        for table in range(table_count):
+            thread2.append(TestThread(target=self._loop_insert, args=[ctx, inserts_count, str(table)]))
+
+        for thread in thread1:
+            thread.start()
+
+        for thread in thread2:
+            thread.start()

-        thread1.start()
-        thread2.start()
+        for thread in thread2:
+            thread.join()

-        thread2.join()
-        thread1.join()
+        for thread in thread1:
+            thread.join()

-        rows: int = sth.get_table_rows_count(cnt_table_name)
+        cnt_table_name0 = cnt_table_name + "0"
+        rows: int = sth.get_table_rows_count(cnt_table_name0)
         assert rows == inserts_count
         scan_result = sth.execute_scan_query(
-            f"SELECT key, c FROM `{sth.get_full_path(cnt_table_name)}` ORDER BY key"
+            f"SELECT key, c FROM `{sth.get_full_path(cnt_table_name0)}` ORDER BY key"
         )
         for i in range(rows):
             if scan_result.result_set.rows[i]["key"] != i:
                 assert False, f"{i} ?= {scan_result.result_set.rows[i]['key']}"

-        rows: int = sth.get_table_rows_count(log_table_name)
+        log_table_name0 = log_table_name + "0"
+        rows: int = sth.get_table_rows_count(log_table_name0)
         assert rows == rows_count * batches_count
         scan_result = sth.execute_scan_query(
-            f"SELECT key FROM `{sth.get_full_path(log_table_name)}` ORDER BY key"
+            f"SELECT key FROM `{sth.get_full_path(log_table_name0)}` ORDER BY key"
         )
         for i in range(rows):
             if scan_result.result_set.rows[i]["key"] != i:

From 7f91f0af5afa8b6454610ff0429ce4baaa27e2d6 Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Tue, 14 Jan 2025 18:25:57 +0300
Subject: [PATCH 2/6] Removed unused import

---
 ydb/tests/olap/scenario/test_insert.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index f915aebd92a3..5c3310cb6968 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -1,6 +1,5 @@
 import time
 from conftest import BaseTestSet
-from multiprocessing import Process
 from ydb.tests.olap.scenario.helpers import (
     ScenarioTestHelper,
     TestContext,
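The scenario in patch 1 drives its per-table upsert and insert workers through TestThread from helpers.thread_helper. Presumably that class is a thin threading.Thread wrapper that captures an exception raised in the worker and re-raises it from join(), so a failing worker fails the whole test instead of dying silently; that behaviour is an assumption here, not something shown by the diff. A minimal sketch of such a wrapper:

    import threading

    class ExceptionPropagatingThread(threading.Thread):
        """Thread that re-raises an exception from its target when join() is called."""

        def run(self):
            self._exc = None
            try:
                super().run()  # runs the target passed to the constructor
            except BaseException as e:
                self._exc = e

        def join(self, timeout=None):
            super().join(timeout)
            if getattr(self, "_exc", None) is not None:
                raise self._exc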
From 0e716e445ab5026e9dec8e0e8805a029b339c454 Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Tue, 14 Jan 2025 20:49:56 +0300
Subject: [PATCH 3/6] Default table_count is 1

---
 ydb/tests/olap/scenario/test.mk        | 2 +-
 ydb/tests/olap/scenario/test_insert.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/ydb/tests/olap/scenario/test.mk b/ydb/tests/olap/scenario/test.mk
index 27bc210f556b..75a60a063ceb 100644
--- a/ydb/tests/olap/scenario/test.mk
+++ b/ydb/tests/olap/scenario/test.mk
@@ -1,6 +1,6 @@
 suffixes:=$(shell jot 20)

 $(suffixes:=.test.dst): %.test.dst:
-	../../../../ya test --build=relwithdebinfo --test-disable-timeout --test-param ydb-endpoint="grpc://ydb-olap-testing-vla-0000.search.yandex.net:2135" --test-param ydb-db="/olap-testing/kikimr/testing/testdb" --test-param tables-path=scenario --test-param s3-endpoint=http://storage.yandexcloud.net --test-param s3-access-key=$(S3_ACCESS_KEY) --test-param s3-secret-key=$(S3_SECRET_KEY) --test-param s3-buckets=ydb-test-test,ydb-test-test-2 --test-param test-duration-seconds=2400 --test-param table_suffix=$* --test-param rows_count=100 --test-param batches_count=1000 --test-param reuse-tables=True --test-param keep-tables=True -F test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
+	../../../../ya test --build=relwithdebinfo --test-disable-timeout --test-param ydb-endpoint="grpc://ydb-olap-testing-vla-0000.search.yandex.net:2135" --test-param ydb-db="/olap-testing/kikimr/testing/testdb" --test-param tables-path=scenario --test-param s3-endpoint=http://storage.yandexcloud.net --test-param s3-access-key=$(S3_ACCESS_KEY) --test-param s3-secret-key=$(S3_SECRET_KEY) --test-param s3-buckets=ydb-test-test,ydb-test-test-2 --test-param test-duration-seconds=2400 --test-param table_suffix=$* --test-param rows_count=100 --test-param batches_count=1000 --test-param reuse-tables=True --test-param keep-tables=True --test-param table_count=10 -F test_insert.py::TestInsert::test[read_data_during_bulk_upsert]

 all.test.dst: $(suffixes:=.test.dst)

diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index 5c3310cb6968..fc210bae57be 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -53,7 +53,7 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         batches_count = int(get_external_param("batches_count", "10"))
         rows_count = int(get_external_param("rows_count", "1000"))
         inserts_count = int(get_external_param("inserts_count", "200"))
-        table_count = int(get_external_param("table_count", "10"))
+        table_count = int(get_external_param("table_count", "1"))

From 7f29176e1e962fd29f21d1a1b53bae654e6f5100 Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Wed, 15 Jan 2025 11:59:28 +0300
Subject: [PATCH 4/6] Removed bare exception

---
 ydb/tests/olap/scenario/test_insert.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index fc210bae57be..c868e6de0fd0 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -42,7 +42,7 @@ def _loop_insert(self, ctx: TestContext, rows_count: int, table: str):
                         f'$cnt = SELECT CAST(COUNT(*) AS INT64) from `{log}`; INSERT INTO `{cnt}` (key, c) values({i}, $cnt)'
                     )
                     break
-                except:
+                except Exception:
                     pass
                 time.sleep(1)
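Patch 4 only narrows the bare except: to except Exception:, which keeps the retry loop from swallowing KeyboardInterrupt and SystemExit; the surrounding code is still a hand-rolled bounded retry (up to 10 attempts with a one-second pause) that deliberately drops the final failure. The next patch replaces it with a retries argument on execute_query(). For reference, the same pattern as a small standalone helper that re-raises once the budget is exhausted (an illustrative sketch, not code from the patch series; sth and yql below stand in for the helper and query used in _loop_insert):

    import time

    def call_with_retries(fn, attempts=10, delay_seconds=1.0):
        """Call fn() until it succeeds or the attempt budget runs out, then re-raise the last error."""
        last_error = None
        for _ in range(attempts):
            try:
                return fn()
            except Exception as error:  # broad on purpose: any query failure triggers another attempt
                last_error = error
                time.sleep(delay_seconds)
        raise last_error

    # e.g. call_with_retries(lambda: sth.execute_query(yql), attempts=10)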
From a1cdb5454a636890194b3b8f1c1f2588d96d079d Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Thu, 16 Jan 2025 15:19:41 +0300
Subject: [PATCH 5/6] Fixed issues

---
 .../scenario/helpers/scenario_tests_helper.py |  4 +-
 ydb/tests/olap/scenario/multitest.sh          |  2 +-
 ydb/tests/olap/scenario/test.mk               |  2 +-
 ydb/tests/olap/scenario/test_insert.py        | 70 ++++++++++---------
 4 files changed, 40 insertions(+), 38 deletions(-)

diff --git a/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py b/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
index db386f400fbb..c2bb4717c24d 100644
--- a/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
+++ b/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py
@@ -464,7 +464,7 @@ def execute_scan_query(

     @allure.step('Execute query')
     def execute_query(
-        self, yql: str, expected_status: ydb.StatusCode | Set[ydb.StatusCode] = ydb.StatusCode.SUCCESS
+        self, yql: str, expected_status: ydb.StatusCode | Set[ydb.StatusCode] = ydb.StatusCode.SUCCESS, retries=0
     ):
         """Run a query on the tested database.

@@ -480,7 +480,7 @@ def execute_query(

         allure.attach(yql, 'request', allure.attachment_type.TEXT)
         with ydb.QuerySessionPool(YdbCluster.get_ydb_driver()) as pool:
-            self._run_with_expected_status(lambda: pool.execute_with_retries(yql), expected_status)
+            self._run_with_expected_status(lambda: pool.execute_with_retries(yql, None, ydb.RetrySettings(max_retries=retries)), expected_status)

     def drop_if_exist(self, names: List[str], operation) -> None:
         """Erase entities in the tested database, if it exists.

diff --git a/ydb/tests/olap/scenario/multitest.sh b/ydb/tests/olap/scenario/multitest.sh
index fa603388f5b8..3a43abd8fc63 100755
--- a/ydb/tests/olap/scenario/multitest.sh
+++ b/ydb/tests/olap/scenario/multitest.sh
@@ -1,2 +1,2 @@
 #!/bin/sh -e
-make S3_ACCESS_KEY=$1 S3_SECRET_KEY=$2 -rkj -f test.mk all.test.dst && echo OK || echo Error
+make S3_ACCESS_KEY=$1 S3_SECRET_KEY=$2 YDB_ENDPOINT=$3 YDB_DB=$4 -rkj -f test.mk all.test.dst && echo OK || echo Error

diff --git a/ydb/tests/olap/scenario/test.mk b/ydb/tests/olap/scenario/test.mk
index 75a60a063ceb..6395b93f3a79 100644
--- a/ydb/tests/olap/scenario/test.mk
+++ b/ydb/tests/olap/scenario/test.mk
@@ -1,6 +1,6 @@
 suffixes:=$(shell jot 20)

 $(suffixes:=.test.dst): %.test.dst:
-	../../../../ya test --build=relwithdebinfo --test-disable-timeout --test-param ydb-endpoint="grpc://ydb-olap-testing-vla-0000.search.yandex.net:2135" --test-param ydb-db="/olap-testing/kikimr/testing/testdb" --test-param tables-path=scenario --test-param s3-endpoint=http://storage.yandexcloud.net --test-param s3-access-key=$(S3_ACCESS_KEY) --test-param s3-secret-key=$(S3_SECRET_KEY) --test-param s3-buckets=ydb-test-test,ydb-test-test-2 --test-param test-duration-seconds=2400 --test-param table_suffix=$* --test-param rows_count=100 --test-param batches_count=1000 --test-param reuse-tables=True --test-param keep-tables=True -F test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
+	../../../../ya test --build=relwithdebinfo --test-disable-timeout --test-param ydb-endpoint=$(YDB_ENDPOINT) --test-param ydb-db=$(YDB_DB) --test-param tables-path=scenario --test-param s3-endpoint=http://storage.yandexcloud.net --test-param s3-access-key=$(S3_ACCESS_KEY) --test-param s3-secret-key=$(S3_SECRET_KEY) --test-param s3-buckets=ydb-test-test,ydb-test-test-2 --test-param test-duration-seconds=2400 --test-param table_suffix=$* --test-param rows_count=100 --test-param batches_count=1000 --test-param reuse-tables=True --test-param keep-tables=True --test-param tables_count=10 --test-param ignore_read_errors=True -F test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
 all.test.dst: $(suffixes:=.test.dst)

diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index c868e6de0fd0..1a6ecdfcb10d 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -8,7 +8,7 @@
 from helpers.thread_helper import TestThread
 from ydb import PrimitiveType
 from typing import List, Dict, Any
-from ydb.tests.olap.lib.utils import get_external_param
+from ydb.tests.olap.lib.utils import get_external_param, external_param_is_true


 class TestInsert(BaseTestSet):
@@ -31,20 +31,20 @@ def _loop_upsert(self, ctx: TestContext, data: list, table: str):
         for batch in data:
             sth.bulk_upsert_data(table_name, self.schema_log, batch)

-    def _loop_insert(self, ctx: TestContext, rows_count: int, table: str):
+    def _loop_insert(self, ctx: TestContext, rows_count: int, table: str, ignore_read_errors: bool):
         sth = ScenarioTestHelper(ctx)
         log: str = sth.get_full_path("log" + table)
         cnt: str = sth.get_full_path("cnt" + table)
         for i in range(rows_count):
-            for j in range(10):
-                try:
-                    sth.execute_query(
-                        f'$cnt = SELECT CAST(COUNT(*) AS INT64) from `{log}`; INSERT INTO `{cnt}` (key, c) values({i}, $cnt)'
-                    )
-                    break
-                except Exception:
+            try:
+                sth.execute_query(
+                    yql=f'$cnt = SELECT CAST(COUNT(*) AS INT64) from `{log}`; INSERT INTO `{cnt}` (key, c) values({i}, $cnt)', retries=10
+                )
+            except Exception:
+                if ignore_read_errors:
                     pass
-                time.sleep(1)
+                else:
+                    raise

     def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         sth = ScenarioTestHelper(ctx)
@@ -53,12 +53,13 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         batches_count = int(get_external_param("batches_count", "10"))
         rows_count = int(get_external_param("rows_count", "1000"))
         inserts_count = int(get_external_param("inserts_count", "200"))
-        table_count = int(get_external_param("table_count", "1"))
+        tables_count = int(get_external_param("tables_count", "1"))
+        ignore_read_errors = external_param_is_true("ignore_read_errors")
         for table in range(tables_count):
             sth.execute_scheme_query(
                 CreateTable(cnt_table_name + str(table)).with_schema(self.schema_cnt)
             )
         for table in range(tables_count):
             sth.execute_scheme_query(
                 CreateTable(log_table_name + str(table)).with_schema(self.schema_log)
             )
@@ -71,10 +72,10 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         thread1 = []
         thread2 = []
-        for table in range(table_count):
+        for table in range(tables_count):
             thread1.append(TestThread(target=self._loop_upsert, args=[ctx, data, str(table)]))
-        for table in range(table_count):
+        for table in range(tables_count):
             thread2.append(TestThread(target=self._loop_insert, args=[ctx, inserts_count, str(table), ignore_read_errors]))

         for thread in thread1:
             thread.start()
@@ -88,22 +89,23 @@ def scenario_read_data_during_bulk_upsert(self, ctx: TestContext):
         for thread in thread1:
             thread.join()

-        cnt_table_name0 = cnt_table_name + "0"
-        rows: int = sth.get_table_rows_count(cnt_table_name0)
-        assert rows == inserts_count
-        scan_result = sth.execute_scan_query(
-            f"SELECT key, c FROM `{sth.get_full_path(cnt_table_name0)}` ORDER BY key"
-        )
-        for i in range(rows):
-            if scan_result.result_set.rows[i]["key"] != i:
-                assert False, f"{i} ?= {scan_result.result_set.rows[i]['key']}"
+        for table in range(tables_count):
+            cnt_table_name0 = cnt_table_name + str(table)
+            rows: int = sth.get_table_rows_count(cnt_table_name0)
+            assert rows == inserts_count
+            scan_result = sth.execute_scan_query(
+                f"SELECT key, c FROM `{sth.get_full_path(cnt_table_name0)}` ORDER BY key"
+            )
+            for i in range(rows):
+                if scan_result.result_set.rows[i]["key"] != i:
+                    assert False, f"{i} ?= {scan_result.result_set.rows[i]['key']}"

-        log_table_name0 = log_table_name + "0"
-        rows: int = sth.get_table_rows_count(log_table_name0)
-        assert rows == rows_count * batches_count
-        scan_result = sth.execute_scan_query(
-            f"SELECT key FROM `{sth.get_full_path(log_table_name0)}` ORDER BY key"
-        )
-        for i in range(rows):
-            if scan_result.result_set.rows[i]["key"] != i:
-            assert False, f"{i} ?= {scan_result.result_set.rows[i]['key']}"
+            log_table_name0 = log_table_name + str(table)
+            rows: int = sth.get_table_rows_count(log_table_name0)
+            assert rows == rows_count * batches_count
+            scan_result = sth.execute_scan_query(
+                f"SELECT key FROM `{sth.get_full_path(log_table_name0)}` ORDER BY key"
+            )
+            for i in range(rows):
+                if scan_result.result_set.rows[i]["key"] != i:
+                    assert False, f"{i} ?= {scan_result.result_set.rows[i]['key']}"

From d148f1dad4a9786d05bbb792a9dcc8e109ad8213 Mon Sep 17 00:00:00 2001
From: Alexander Avdonkin
Date: Thu, 16 Jan 2025 15:44:07 +0300
Subject: [PATCH 6/6] Removed unused import

---
 ydb/tests/olap/scenario/test_insert.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ydb/tests/olap/scenario/test_insert.py b/ydb/tests/olap/scenario/test_insert.py
index 1a6ecdfcb10d..c29022a8b2b4 100644
--- a/ydb/tests/olap/scenario/test_insert.py
+++ b/ydb/tests/olap/scenario/test_insert.py
@@ -1,4 +1,3 @@
-import time
 from conftest import BaseTestSet
 from ydb.tests.olap.scenario.helpers import (
     ScenarioTestHelper,