From c691b3ff3c0aec5a3984f4623f5f725fbea478ea Mon Sep 17 00:00:00 2001
From: Evgeniy Zayats
Date: Thu, 26 Sep 2024 17:11:22 -0400
Subject: [PATCH] tests: refactor s3 compatibility tests naming for relevance to s3 gate

closes #707

Signed-off-by: Evgeniy Zayats
---
 pytest_tests/lib/helpers/s3_helper.py | 20 +-
 .../lib/s3/{s3_gate_base.py => s3_base.py} | 36 +--
 .../s3/{s3_gate_bucket.py => s3_bucket.py} | 0
 .../s3/{s3_gate_object.py => s3_object.py} | 2 +-
 .../s3_extensions/test_s3_gate_policy.py} | 118 ++-----
 .../s3_gate/test_s3_gate.py => s3/test_s3.py} | 194 +++++++-----
 .../{services/s3_gate => s3}/test_s3_ACL.py | 36 ++-
 .../s3_gate => s3}/test_s3_bucket.py | 62 ++--
 .../s3_gate => s3}/test_s3_locking.py | 63 ++--
 .../s3_gate => s3}/test_s3_multipart.py | 64 ++--
 .../s3_gate => s3}/test_s3_object.py | 288 +++++++++---------
 pytest_tests/tests/s3/test_s3_policy.py | 49 +++
 .../s3_gate => s3}/test_s3_tagging.py | 38 ++-
 .../s3_gate => s3}/test_s3_versioning.py | 38 ++-
 14 files changed, 501 insertions(+), 507 deletions(-)
 rename pytest_tests/lib/s3/{s3_gate_base.py => s3_base.py} (83%)
 rename pytest_tests/lib/s3/{s3_gate_bucket.py => s3_bucket.py} (100%)
 rename pytest_tests/lib/s3/{s3_gate_object.py => s3_object.py} (99%)
 rename pytest_tests/tests/{services/s3_gate/test_s3_policy.py => s3/s3_extensions/test_s3_gate_policy.py} (54%)
 rename pytest_tests/tests/{services/s3_gate/test_s3_gate.py => s3/test_s3.py} (71%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_ACL.py (63%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_bucket.py (73%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_locking.py (75%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_multipart.py (53%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_object.py (71%)
 create mode 100644 pytest_tests/tests/s3/test_s3_policy.py
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_tagging.py (71%)
 rename pytest_tests/tests/{services/s3_gate => s3}/test_s3_versioning.py (63%)

diff --git a/pytest_tests/lib/helpers/s3_helper.py b/pytest_tests/lib/helpers/s3_helper.py
index e36c27fcb..a7ecfc65c 100644
--- a/pytest_tests/lib/helpers/s3_helper.py
+++ b/pytest_tests/lib/helpers/s3_helper.py
@@ -6,7 +6,7 @@
 
 import allure
 from dateutil.parser import parse
-from s3 import s3_gate_bucket, s3_gate_object
+from s3 import s3_bucket, s3_object
 
 logger = logging.getLogger("NeoLogger")
 
@@ -77,7 +77,7 @@ def check_objects_in_bucket(
     s3_client, bucket, expected_objects: list, unexpected_objects: Optional[list] = None
 ) -> None:
     unexpected_objects = unexpected_objects or []
-    bucket_objects = s3_gate_object.list_objects_s3(s3_client, bucket)
+    bucket_objects = s3_object.list_objects_s3(s3_client, bucket)
     assert len(bucket_objects) == len(expected_objects), f"Expected {len(expected_objects)} objects in the bucket"
     for bucket_object in expected_objects:
         assert bucket_object in bucket_objects, f"Expected object {bucket_object} in objects list {bucket_objects}"
@@ -92,17 +92,17 @@ def check_objects_in_bucket(
 def try_to_get_objects_and_expect_error(s3_client, bucket: str, object_keys: list) -> None:
     for obj in object_keys:
         try:
-            s3_gate_object.get_object_s3(s3_client, bucket, obj)
+            s3_object.get_object_s3(s3_client, bucket, obj)
             raise AssertionError(f"Object {obj} found in bucket {bucket}")
         except Exception as err:
            assert "The specified key does not exist" in str(err), f"Expected error in exception {err}"
 
 
 @allure.step("Set versioning enable for bucket")
-def set_bucket_versioning(s3_client, bucket: str, status: s3_gate_bucket.VersioningStatus):
-    s3_gate_bucket.get_bucket_versioning_status(s3_client, bucket)
-    s3_gate_bucket.set_bucket_versioning(s3_client, bucket, status=status)
-    bucket_status = s3_gate_bucket.get_bucket_versioning_status(s3_client, bucket)
+def set_bucket_versioning(s3_client, bucket: str, status: s3_bucket.VersioningStatus):
+    s3_bucket.get_bucket_versioning_status(s3_client, bucket)
+    s3_bucket.set_bucket_versioning(s3_client, bucket, status=status)
+    bucket_status = s3_bucket.get_bucket_versioning_status(s3_client, bucket)
     assert bucket_status == status.value, f"Expected {bucket_status} status. Got {status.value}"
 
@@ -132,13 +132,13 @@ def check_tags_by_object(
     expected_tags: list,
     unexpected_tags: Optional[list] = None,
 ) -> None:
-    actual_tags = s3_gate_object.get_object_tagging(s3_client, bucket, key_name)
+    actual_tags = s3_object.get_object_tagging(s3_client, bucket, key_name)
     assert_tags(expected_tags=expected_tags, unexpected_tags=unexpected_tags, actual_tags=actual_tags)
 
 
 @allure.step("Expected all tags are presented in bucket")
 def check_tags_by_bucket(s3_client, bucket: str, expected_tags: list, unexpected_tags: Optional[list] = None) -> None:
-    actual_tags = s3_gate_bucket.get_bucket_tagging(s3_client, bucket)
+    actual_tags = s3_bucket.get_bucket_tagging(s3_client, bucket)
     assert_tags(expected_tags=expected_tags, unexpected_tags=unexpected_tags, actual_tags=actual_tags)
 
 
@@ -151,7 +151,7 @@ def assert_object_lock_mode(
     legal_hold_status: str = "OFF",
     retain_period: Optional[int] = None,
 ):
-    object_dict = s3_gate_object.get_object_s3(s3_client, bucket, file_name, full_output=True)
+    object_dict = s3_object.get_object_s3(s3_client, bucket, file_name, full_output=True)
     assert object_dict.get("ObjectLockMode") == object_lock_mode, f"Expected Object Lock Mode is {object_lock_mode}"
     assert (
         object_dict.get("ObjectLockLegalHoldStatus") == legal_hold_status
diff --git a/pytest_tests/lib/s3/s3_gate_base.py b/pytest_tests/lib/s3/s3_base.py
similarity index 83%
rename from pytest_tests/lib/s3/s3_gate_base.py
rename to pytest_tests/lib/s3/s3_base.py
index b5481fd9c..649b8f762 100644
--- a/pytest_tests/lib/s3/s3_gate_base.py
+++ b/pytest_tests/lib/s3/s3_base.py
@@ -22,7 +22,7 @@
 from neofs_testlib.shell import Shell
 from neofs_testlib.utils.wallet import get_last_public_key_from_wallet
 from pytest import FixtureRequest
-from s3 import s3_gate_bucket, s3_gate_object
+from s3 import s3_bucket, s3_object
 
 # Disable warnings on self-signed certificate which the
 # boto library produces on requests to S3-gate in dev-env
@@ -51,7 +51,7 @@ def _run_with_passwd(cmd: str, password: str) -> str:
     return cmd.decode()
 
 
-class TestNeofsS3GateBase(NeofsEnvTestBase):
+class TestNeofsS3Base(NeofsEnvTestBase):
     s3_client: Any = None  # noqa
 
     @pytest.fixture(scope="class", autouse=True)
@@ -83,47 +83,47 @@ def s3_client(  # noqa
             client = configure_cli_client(access_key_id, secret_access_key, f"https://{neofs_env.s3_gw.address}")
         else:
             client = configure_boto3_client(access_key_id, secret_access_key, f"https://{neofs_env.s3_gw.address}")
-        TestNeofsS3GateBase.s3_client = client
-        TestNeofsS3GateBase.wallet = wallet
+        TestNeofsS3Base.s3_client = client
+        TestNeofsS3Base.wallet = wallet
 
     @pytest.fixture
     @allure.title("Create/delete bucket")
     def bucket(self):
-        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
         yield bucket
         self.delete_all_object_in_bucket(bucket)
 
     @pytest.fixture
     @allure.title("Create two buckets")
     def two_buckets(self):
-        bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
-        bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        bucket_1 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        bucket_2 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
        yield bucket_1, bucket_2
         for bucket in [bucket_1, bucket_2]:
             self.delete_all_object_in_bucket(bucket)
 
     def delete_all_object_in_bucket(self, bucket):
-        versioning_status = s3_gate_bucket.get_bucket_versioning_status(self.s3_client, bucket)
-        if versioning_status == s3_gate_bucket.VersioningStatus.ENABLED.value:
+        versioning_status = s3_bucket.get_bucket_versioning_status(self.s3_client, bucket)
+        if versioning_status == s3_bucket.VersioningStatus.ENABLED.value:
             # From versioned bucket we should delete all versions and delete markers of all objects
-            objects_versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            objects_versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             if objects_versions:
-                s3_gate_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_versions)
-            objects_delete_markers = s3_gate_object.list_objects_delete_markers_s3(self.s3_client, bucket)
+                s3_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_versions)
+            objects_delete_markers = s3_object.list_objects_delete_markers_s3(self.s3_client, bucket)
             if objects_delete_markers:
-                s3_gate_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_delete_markers)
+                s3_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_delete_markers)
         else:
             # From non-versioned bucket it's sufficient to delete objects by key
-            objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+            objects = s3_object.list_objects_s3(self.s3_client, bucket)
             if objects:
-                s3_gate_object.delete_objects_s3(self.s3_client, bucket, objects)
-            objects_delete_markers = s3_gate_object.list_objects_delete_markers_s3(self.s3_client, bucket)
+                s3_object.delete_objects_s3(self.s3_client, bucket, objects)
+            objects_delete_markers = s3_object.list_objects_delete_markers_s3(self.s3_client, bucket)
             if objects_delete_markers:
-                s3_gate_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_delete_markers)
+                s3_object.delete_object_versions_s3_without_dm(self.s3_client, bucket, objects_delete_markers)
 
         # Delete the bucket itself
-        s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+        s3_bucket.delete_bucket_s3(self.s3_client, bucket)
 
 
 @allure.step("Init S3 Credentials")
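The base class and fixtures above are the seam every renamed test file goes through. For orientation, a minimal test written against the renamed API would look roughly like this — an illustrative sketch only, not part of the patch; `generate_file` and the `simple_object_size` fixture mirror helpers used by the tests further down in this diff:

```python
# Illustrative sketch, not part of this patch: a minimal test against the
# renamed s3_object module and TestNeofsS3Base, reusing the `bucket` fixture
# defined in s3_base.py above.
import allure
from helpers.file_helper import generate_file
from helpers.s3_helper import check_objects_in_bucket, object_key_from_file_path
from s3 import s3_object
from s3.s3_base import TestNeofsS3Base


class TestS3Example(TestNeofsS3Base):
    def test_put_and_list(self, bucket, simple_object_size):
        file_path = generate_file(simple_object_size)
        file_name = object_key_from_file_path(file_path)
        with allure.step("Put object and check it is listed"):
            s3_object.put_object_s3(self.s3_client, bucket, file_path)
            check_objects_in_bucket(self.s3_client, bucket, expected_objects=[file_name])
```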
diff --git a/pytest_tests/lib/s3/s3_gate_bucket.py b/pytest_tests/lib/s3/s3_bucket.py
similarity index 100%
rename from pytest_tests/lib/s3/s3_gate_bucket.py
rename to pytest_tests/lib/s3/s3_bucket.py
diff --git a/pytest_tests/lib/s3/s3_gate_object.py b/pytest_tests/lib/s3/s3_object.py
similarity index 99%
rename from pytest_tests/lib/s3/s3_gate_object.py
rename to pytest_tests/lib/s3/s3_object.py
index 8d7f0df90..2819b7aa9 100644
--- a/pytest_tests/lib/s3/s3_gate_object.py
+++ b/pytest_tests/lib/s3/s3_object.py
@@ -10,7 +10,7 @@
 from helpers.aws_cli_client import AwsCliClient
 from helpers.cli_helpers import log_command_execution
 from helpers.common import ASSETS_DIR
-from s3.s3_gate_bucket import S3_SYNC_WAIT_TIME
+from s3.s3_bucket import S3_SYNC_WAIT_TIME
 
 ##########################################################
 # Disabling warnings on self-signed certificate which the
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_policy.py b/pytest_tests/tests/s3/s3_extensions/test_s3_gate_policy.py
similarity index 54%
rename from pytest_tests/tests/services/s3_gate/test_s3_policy.py
rename to pytest_tests/tests/s3/s3_extensions/test_s3_gate_policy.py
index d982e4817..9453e71d1 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_policy.py
+++ b/pytest_tests/tests/s3/s3_extensions/test_s3_gate_policy.py
@@ -11,8 +11,8 @@
     set_bucket_versioning,
 )
 from helpers.utility import placement_policy_from_container
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -26,8 +26,7 @@ def pytest_generate_tests(metafunc):
     )
 
 
-@pytest.mark.s3_gate
-class TestS3GatePolicy(TestNeofsS3GateBase):
+class TestS3GatePolicy(TestNeofsS3Base):
     def check_container_policy(self, bucket_name: str, expected_policy: str):
         cid = search_container_by_name(self.wallet.path, bucket_name, shell=self.shell, endpoint=self.neofs_env.sn_rpc)
         container_info: str = get_container(
@@ -50,30 +49,30 @@ def test_s3_bucket_location(self, simple_object_size):
         file_name_2 = object_key_from_file_path(file_path_2)
 
         with allure.step("Create two buckets with different bucket configuration"):
-            bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="complex")
-            set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED)
-            bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-3")
-            set_bucket_versioning(self.s3_client, bucket_2, s3_gate_bucket.VersioningStatus.ENABLED)
-            list_buckets = s3_gate_bucket.list_buckets_s3(self.s3_client)
+            bucket_1 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="complex")
+            set_bucket_versioning(self.s3_client, bucket_1, s3_bucket.VersioningStatus.ENABLED)
+            bucket_2 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-3")
+            set_bucket_versioning(self.s3_client, bucket_2, s3_bucket.VersioningStatus.ENABLED)
+            list_buckets = s3_bucket.list_buckets_s3(self.s3_client)
             assert (
                 bucket_1 in list_buckets and bucket_2 in list_buckets
             ), f"Expected two buckets {bucket_1, bucket_2}, got {list_buckets}"
 
         # with allure.step("Check head buckets"):
-        head_1 = s3_gate_bucket.head_bucket(self.s3_client, bucket_1)
-        head_2 = s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
+        head_1 = s3_bucket.head_bucket(self.s3_client, bucket_1)
+        head_2 = s3_bucket.head_bucket(self.s3_client, bucket_2)
         assert head_1 == {} or head_1.get("HEAD") is None, "Expected head is empty"
         assert head_2 == {} or head_2.get("HEAD") is None, "Expected head is empty"
 
         with allure.step("Put objects into buckets"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path_1)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket_2, file_path_2)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket_1, file_path_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket_2, file_path_2)
             check_objects_in_bucket(self.s3_client, bucket_1, [file_name_1])
             check_objects_in_bucket(self.s3_client, bucket_2, [file_name_2])
 
         with allure.step("Check bucket location"):
-            bucket_loc_1 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_1)
-            bucket_loc_2 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_2)
+            bucket_loc_1 = s3_bucket.get_bucket_location(self.s3_client, bucket_1)
+            bucket_loc_2 = s3_bucket.get_bucket_location(self.s3_client, bucket_2)
             assert bucket_loc_1 == "complex"
             assert bucket_loc_2 == "rep-3"
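Both bucket-location tests in this file follow the same create-then-verify pattern against a named placement policy; a condensed sketch of that round trip, illustrative only and using just the calls visible in this diff:

```python
# Illustrative sketch, not part of this patch: the placement round trip the
# two bucket-location tests are built on.
from s3 import s3_bucket


def create_and_check_location(s3_client, policy_name: str) -> str:
    bucket = s3_bucket.create_bucket_s3(s3_client, bucket_configuration=policy_name)
    location = s3_bucket.get_bucket_location(s3_client, bucket)
    assert location == policy_name, f"Expected location {policy_name}, got {location}"
    return bucket
```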
@@ -111,29 +110,29 @@ def test_s3_bucket_location_from_config_file(self, simple_object_size):
         file_name_2 = object_key_from_file_path(file_path_2)
 
         with allure.step("Create two buckets with different bucket configuration"):
-            bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="select")
-            set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED)
-            bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-2")
-            set_bucket_versioning(self.s3_client, bucket_2, s3_gate_bucket.VersioningStatus.ENABLED)
-            list_buckets = s3_gate_bucket.list_buckets_s3(self.s3_client)
+            bucket_1 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="select")
+            set_bucket_versioning(self.s3_client, bucket_1, s3_bucket.VersioningStatus.ENABLED)
+            bucket_2 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-2")
+            set_bucket_versioning(self.s3_client, bucket_2, s3_bucket.VersioningStatus.ENABLED)
+            list_buckets = s3_bucket.list_buckets_s3(self.s3_client)
             assert (
                 bucket_1 in list_buckets and bucket_2 in list_buckets
             ), f"Expected two buckets {bucket_1, bucket_2}, got {list_buckets}"
 
-        head_1 = s3_gate_bucket.head_bucket(self.s3_client, bucket_1)
-        head_2 = s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
+        head_1 = s3_bucket.head_bucket(self.s3_client, bucket_1)
+        head_2 = s3_bucket.head_bucket(self.s3_client, bucket_2)
         assert head_1 == {} or head_1.get("HEAD") is None, "Expected head is empty"
         assert head_2 == {} or head_2.get("HEAD") is None, "Expected head is empty"
 
         with allure.step("Put objects into buckets"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path_1)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket_2, file_path_2)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket_1, file_path_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket_2, file_path_2)
             check_objects_in_bucket(self.s3_client, bucket_1, [file_name_1])
             check_objects_in_bucket(self.s3_client, bucket_2, [file_name_2])
 
         with allure.step("Check bucket location"):
-            bucket_loc_1 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_1)
-            bucket_loc_2 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_2)
+            bucket_loc_1 = s3_bucket.get_bucket_location(self.s3_client, bucket_1)
+            bucket_loc_2 = s3_bucket.get_bucket_location(self.s3_client, bucket_2)
             assert bucket_loc_1 == "select"
             assert bucket_loc_2 == "rep-2"
@@ -164,70 +163,3 @@ def test_s3_bucket_location_from_config_file(self, simple_object_size):
             nodes=self.neofs_env.storage_nodes,
         )
         assert copies_2 == 2
-
-    @allure.title("Test S3: bucket policy ")
-    def test_s3_bucket_policy(self):
-        with allure.step("Create bucket with default policy"):
-            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
-            set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
-
-        with allure.step("GetBucketPolicy"):
-            s3_gate_bucket.get_bucket_policy(self.s3_client, bucket)
-
-        with allure.step("Put new policy"):
-            custom_policy = f"file://{os.getcwd()}/pytest_tests/data/bucket_policy.json"
-            custom_policy = {
-                "Version": "2008-10-17",
-                "Id": "aaaa-bbbb-cccc-dddd",
-                "Statement": [
-                    {
-                        "Sid": "AddPerm",
-                        "Effect": "Allow",
-                        "Principal": {"AWS": "*"},
-                        "Action": ["s3:GetObject"],
-                        "Resource": [f"arn:aws:s3:::{bucket}/*"],
-                    }
-                ],
-            }
-
-            s3_gate_bucket.put_bucket_policy(self.s3_client, bucket, custom_policy)
-        with allure.step("GetBucketPolicy"):
-            policy_1 = s3_gate_bucket.get_bucket_policy(self.s3_client, bucket)
-            print(policy_1)
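The bucket-policy coverage removed here likely lives on in the new pytest_tests/tests/s3/test_s3_policy.py that this patch creates (its contents are not shown in this excerpt). The round trip itself is compact enough to summarize — an illustrative sketch, not the relocated test:

```python
# Illustrative sketch, not part of this patch: the put/get bucket-policy
# round trip the removed test exercised, against the renamed s3_bucket module.
import allure
from s3 import s3_bucket


def set_read_only_policy(s3_client, bucket: str) -> dict:
    policy = {
        "Version": "2008-10-17",
        "Statement": [
            {
                "Sid": "AddPerm",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": ["s3:GetObject"],
                "Resource": [f"arn:aws:s3:::{bucket}/*"],
            }
        ],
    }
    with allure.step("Put new policy and read it back"):
        s3_bucket.put_bucket_policy(s3_client, bucket, policy)
        return s3_bucket.get_bucket_policy(s3_client, bucket)
```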
"Resource": [f"arn:aws:s3:::{bucket}/*"], - } - ], - } - - s3_gate_bucket.put_bucket_policy(self.s3_client, bucket, custom_policy) - with allure.step("GetBucketPolicy"): - policy_1 = s3_gate_bucket.get_bucket_policy(self.s3_client, bucket) - print(policy_1) - - @allure.title("Test S3: bucket policy ") - def test_s3_cors(self): - with allure.step("Create bucket without cors"): - bucket = s3_gate_bucket.create_bucket_s3(self.s3_client) - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) - - with pytest.raises(Exception): - bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket) - - with allure.step("Put bucket cors"): - cors = { - "CORSRules": [ - { - "AllowedOrigins": ["http://www.example.com"], - "AllowedHeaders": ["*"], - "AllowedMethods": ["PUT", "POST", "DELETE"], - "MaxAgeSeconds": 3000, - "ExposeHeaders": ["x-amz-server-side-encryption"], - }, - { - "AllowedOrigins": ["*"], - "AllowedHeaders": ["Authorization"], - "AllowedMethods": ["GET"], - "MaxAgeSeconds": 3000, - }, - ] - } - s3_gate_bucket.put_bucket_cors(self.s3_client, bucket, cors) - bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket) - assert bucket_cors == cors.get("CORSRules"), f"Expected corsrules must be {cors.get('CORSRules')}" - - with allure.step("delete bucket cors"): - s3_gate_bucket.delete_bucket_cors(self.s3_client, bucket) - - with pytest.raises(Exception): - bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket) diff --git a/pytest_tests/tests/services/s3_gate/test_s3_gate.py b/pytest_tests/tests/s3/test_s3.py similarity index 71% rename from pytest_tests/tests/services/s3_gate/test_s3_gate.py rename to pytest_tests/tests/s3/test_s3.py index 1a56c0bb4..77a8507be 100644 --- a/pytest_tests/tests/services/s3_gate/test_s3_gate.py +++ b/pytest_tests/tests/s3/test_s3.py @@ -22,8 +22,8 @@ set_bucket_versioning, try_to_get_objects_and_expect_error, ) -from s3 import s3_gate_bucket, s3_gate_object -from s3.s3_gate_base import TestNeofsS3GateBase +from s3 import s3_bucket, s3_object +from s3.s3_base import TestNeofsS3Base logger = logging.getLogger("NeoLogger") @@ -33,9 +33,7 @@ def pytest_generate_tests(metafunc): @allure.link("https://github.com/nspcc-dev/neofs-s3-gw#neofs-s3-gateway", name="neofs-s3-gateway") -@pytest.mark.s3_gate -@pytest.mark.s3_gate_base -class TestS3Gate(TestNeofsS3GateBase): +class TestS3(TestNeofsS3Base): @allure.title("Test S3 Bucket API") def test_s3_buckets(self, simple_object_size): """ @@ -46,62 +44,62 @@ def test_s3_buckets(self, simple_object_size): file_name = self.object_key_from_file_path(file_path) with allure.step("Create buckets"): - bucket_1 = s3_gate_bucket.create_bucket_s3( + bucket_1 = s3_bucket.create_bucket_s3( self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1" ) - set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED) - bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1") + set_bucket_versioning(self.s3_client, bucket_1, s3_bucket.VersioningStatus.ENABLED) + bucket_2 = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1") with allure.step("Check buckets are presented in the system"): - buckets = s3_gate_bucket.list_buckets_s3(self.s3_client) + buckets = s3_bucket.list_buckets_s3(self.s3_client) assert bucket_1 in buckets, f"Expected bucket {bucket_1} is in the list" assert bucket_2 in buckets, f"Expected bucket {bucket_2} is in the list" with allure.step("Bucket must be 
empty"): for bucket in (bucket_1, bucket_2): - objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + objects_list = s3_object.list_objects_s3(self.s3_client, bucket) assert not objects_list, f"Expected empty bucket, got {objects_list}" with allure.step("Check buckets are visible with S3 API HeadBucket op"): - s3_gate_bucket.head_bucket(self.s3_client, bucket_1) - s3_gate_bucket.head_bucket(self.s3_client, bucket_2) + s3_bucket.head_bucket(self.s3_client, bucket_1) + s3_bucket.head_bucket(self.s3_client, bucket_2) with allure.step("Check we can put/list object with S3 commands"): - version_id = s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path) - s3_gate_object.head_object_s3(self.s3_client, bucket_1, file_name) + version_id = s3_object.put_object_s3(self.s3_client, bucket_1, file_path) + s3_object.head_object_s3(self.s3_client, bucket_1, file_name) - bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket_1) + bucket_objects = s3_object.list_objects_s3(self.s3_client, bucket_1) assert file_name in bucket_objects, f"Expected file {file_name} in objects list {bucket_objects}" with allure.step("Try to delete not empty bucket and expect error"): with pytest.raises(Exception, match=r".*The bucket you tried to delete is not empty.*"): - s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_1) + s3_bucket.delete_bucket_s3(self.s3_client, bucket_1) - s3_gate_bucket.head_bucket(self.s3_client, bucket_1) + s3_bucket.head_bucket(self.s3_client, bucket_1) with allure.step(f"Delete empty bucket {bucket_2}"): - s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_2) + s3_bucket.delete_bucket_s3(self.s3_client, bucket_2) self.tick_epochs_and_wait(1) with allure.step(f"Check bucket {bucket_2} deleted"): with pytest.raises(Exception, match=r".*Not Found.*"): - s3_gate_bucket.head_bucket(self.s3_client, bucket_2) + s3_bucket.head_bucket(self.s3_client, bucket_2) - buckets = s3_gate_bucket.list_buckets_s3(self.s3_client) + buckets = s3_bucket.list_buckets_s3(self.s3_client) assert bucket_1 in buckets, f"Expected bucket {bucket_1} is in the list" assert bucket_2 not in buckets, f"Expected bucket {bucket_2} is not in the list" with allure.step(f"Delete object from {bucket_1}"): - s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name, version_id) + s3_object.delete_object_s3(self.s3_client, bucket_1, file_name, version_id) check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=[]) with allure.step(f"Delete bucket {bucket_1}"): - s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_1) + s3_bucket.delete_bucket_s3(self.s3_client, bucket_1) self.tick_epochs_and_wait(1) with allure.step(f"Check bucket {bucket_1} deleted"): with pytest.raises(Exception, match=r".*Not Found.*"): - s3_gate_bucket.head_bucket(self.s3_client, bucket_1) + s3_bucket.head_bucket(self.s3_client, bucket_1) @allure.title("Test S3 Object API") @pytest.mark.parametrize("file_type", ["simple", "large"], ids=["Simple object", "Large object"]) @@ -116,18 +114,18 @@ def test_s3_api_object(self, file_type, two_buckets, simple_object_size, complex for bucket in (bucket_1, bucket_2): with allure.step("Bucket must be empty"): - objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + objects_list = s3_object.list_objects_s3(self.s3_client, bucket) assert not objects_list, f"Expected empty bucket, got {objects_list}" - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) - s3_gate_object.head_object_s3(self.s3_client, bucket, file_name) + 
s3_object.put_object_s3(self.s3_client, bucket, file_path) + s3_object.head_object_s3(self.s3_client, bucket, file_name) - bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) + bucket_objects = s3_object.list_objects_s3(self.s3_client, bucket) assert file_name in bucket_objects, f"Expected file {file_name} in objects list {bucket_objects}" with allure.step("Check object's attributes"): for attrs in (["ETag"], ["ObjectSize", "StorageClass"]): - s3_gate_object.get_object_attributes(self.s3_client, bucket, file_name, *attrs) + s3_object.get_object_attributes(self.s3_client, bucket, file_name, *attrs) @allure.title("Test S3 Sync directory") @pytest.mark.aws_cli_only @@ -145,12 +143,12 @@ def test_s3_sync_dir(self, bucket, simple_object_size): self.s3_client.sync(bucket_name=bucket, dir_path=os.path.dirname(file_path_1)) with allure.step("Check objects are synced"): - objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) + objects = s3_object.list_objects_s3(self.s3_client, bucket) with allure.step("Check these are the same objects"): assert set(key_to_path.keys()) == set(objects), f"Expected exact objects saved. Got {objects}" for obj_key in objects: - got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key) + got_object = s3_object.get_object_s3(self.s3_client, bucket, obj_key) assert get_file_hash(got_object) == get_file_hash( key_to_path.get(obj_key) ), "Expected hashes are the same" @@ -164,15 +162,15 @@ def test_s3_api_versioning(self, bucket, simple_object_size): version_2_content = "Version 2" file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content) obj_key = os.path.basename(file_name_simple) - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED) with allure.step("Put several versions of object into bucket"): - version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple) + version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_name_simple) generate_file_with_content(simple_object_size, file_path=file_name_simple, content=version_2_content) - version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple) + version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_simple) with allure.step("Check bucket shows all versions"): - versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket) + versions = s3_object.list_objects_versions_s3(self.s3_client, bucket) obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == obj_key} assert obj_versions == { version_id_1, @@ -181,7 +179,7 @@ def test_s3_api_versioning(self, bucket, simple_object_size): with allure.step("Show information about particular version"): for version_id in (version_id_1, version_id_2): - response = s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key, version_id=version_id) + response = s3_object.head_object_s3(self.s3_client, bucket, obj_key, version_id=version_id) assert "LastModified" in response, "Expected LastModified field" assert "ETag" in response, "Expected ETag field" assert response.get("VersionId") == version_id, f"Expected VersionId is {version_id}" @@ -189,38 +187,37 @@ def test_s3_api_versioning(self, bucket, simple_object_size): with allure.step("Check object's attributes"): for version_id in (version_id_1, version_id_2): - got_attrs = s3_gate_object.get_object_attributes( + 
got_attrs = s3_object.get_object_attributes( self.s3_client, bucket, obj_key, "ETag", version_id=version_id ) if got_attrs: assert got_attrs.get("VersionId") == version_id, f"Expected VersionId is {version_id}" with allure.step("Delete object and check it was deleted"): - response = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key) + response = s3_object.delete_object_s3(self.s3_client, bucket, obj_key) version_id_delete = response.get("VersionId") with pytest.raises(Exception, match=r".*Not Found.*"): - s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key) + s3_object.head_object_s3(self.s3_client, bucket, obj_key) with allure.step("Get content for all versions and check it is correct"): for version, content in ( (version_id_2, version_2_content), (version_id_1, version_1_content), ): - file_name = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key, version_id=version) + file_name = s3_object.get_object_s3(self.s3_client, bucket, obj_key, version_id=version) got_content = get_file_content(file_name) assert got_content == content, f"Expected object content is\n{content}\nGot\n{got_content}" with allure.step("Restore previous object version"): - s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_delete) + s3_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_delete) - file_name = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key) + file_name = s3_object.get_object_s3(self.s3_client, bucket, obj_key) got_content = get_file_content(file_name) assert ( got_content == version_2_content ), f"Expected object content is\n{version_2_content}\nGot\n{got_content}" - @pytest.mark.s3_gate_multipart @allure.title("Test S3 Object Multipart API") def test_s3_api_multipart(self, bucket, simple_object_size): """ @@ -233,37 +230,37 @@ def test_s3_api_multipart(self, bucket, simple_object_size): part_files = split_file(file_name_large, parts_count) parts = [] - uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket) assert not uploads, f"Expected there is no uploads in bucket {bucket}" with allure.step("Create and abort multipart upload"): - upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key) - uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key) + uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket) assert uploads, f"Expected there one upload in bucket {bucket}" assert uploads[0].get("Key") == object_key, f"Expected correct key {object_key} in upload {uploads}" assert uploads[0].get("UploadId") == upload_id, f"Expected correct UploadId {upload_id} in upload {uploads}" - s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id) - uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + s3_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id) + uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket) assert not uploads, f"Expected there is no uploads in bucket {bucket}" with allure.step("Create new multipart upload and upload several parts"): - upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key) + upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key) for part_id, file_path in 
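The versioning setup above recurs throughout the suite: enable versioning, write the same key twice, then address individual versions by ID. A condensed sketch of that setup, illustrative only and built from calls present in this diff:

```python
# Illustrative sketch, not part of this patch: enable versioning and produce
# two versions of one key, as the versioning test above does.
from helpers.file_helper import generate_file_with_content
from helpers.s3_helper import set_bucket_versioning
from s3 import s3_bucket, s3_object


def put_two_versions(s3_client, bucket: str, size: int) -> tuple:
    set_bucket_versioning(s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)
    file_path = generate_file_with_content(size, content="Version 1")
    version_id_1 = s3_object.put_object_s3(s3_client, bucket, file_path)
    # Overwrite the same key with new content to create a second version.
    generate_file_with_content(size, file_path=file_path, content="Version 2")
    version_id_2 = s3_object.put_object_s3(s3_client, bucket, file_path)
    return version_id_1, version_id_2
```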
 
-    @pytest.mark.s3_gate_multipart
     @allure.title("Test S3 Object Multipart API")
     def test_s3_api_multipart(self, bucket, simple_object_size):
         """
@@ -233,37 +230,37 @@ def test_s3_api_multipart(self, bucket, simple_object_size):
         part_files = split_file(file_name_large, parts_count)
         parts = []
 
-        uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+        uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
         assert not uploads, f"Expected there is no uploads in bucket {bucket}"
 
         with allure.step("Create and abort multipart upload"):
-            upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
             assert uploads, f"Expected there one upload in bucket {bucket}"
             assert uploads[0].get("Key") == object_key, f"Expected correct key {object_key} in upload {uploads}"
             assert uploads[0].get("UploadId") == upload_id, f"Expected correct UploadId {upload_id} in upload {uploads}"
 
-            s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            s3_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
             assert not uploads, f"Expected there is no uploads in bucket {bucket}"
 
         with allure.step("Create new multipart upload and upload several parts"):
-            upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
+            upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
             for part_id, file_path in enumerate(part_files, start=1):
-                etag = s3_gate_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, part_id, file_path)
+                etag = s3_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, part_id, file_path)
                 parts.append((part_id, etag))
 
         with allure.step("Check all parts are visible in bucket"):
-            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            got_parts = s3_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
             assert len(got_parts) == len(part_files), f"Expected {parts_count} parts, got\n{got_parts}"
 
-        s3_gate_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
+        s3_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
 
-        uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+        uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
         assert not uploads, f"Expected there is no uploads in bucket {bucket}"
 
         with allure.step("Check we can get whole object from bucket"):
-            got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
+            got_object = s3_object.get_object_s3(self.s3_client, bucket, object_key)
             assert get_file_hash(got_object) == get_file_hash(file_name_large)
 
         self.check_object_attributes(bucket, object_key, parts_count)
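The happy-path portion of the multipart flow above condenses to a handful of calls; a sketch, illustrative only — `split_file`'s import path is an assumption mirroring the file helpers these tests use:

```python
# Illustrative sketch, not part of this patch: create-upload-complete for a
# multipart object, mirroring the test above.
from helpers.file_helper import split_file  # assumed import path
from s3 import s3_object


def multipart_put(s3_client, bucket: str, object_key: str, file_path: str, parts_count: int = 4):
    upload_id = s3_object.create_multipart_upload_s3(s3_client, bucket, object_key)
    parts = []
    # Part numbers are 1-based; each part's ETag is needed to complete the upload.
    for part_id, part_path in enumerate(split_file(file_path, parts_count), start=1):
        etag = s3_object.upload_part_s3(s3_client, bucket, object_key, upload_id, part_id, part_path)
        parts.append((part_id, etag))
    s3_object.complete_multipart_upload_s3(s3_client, bucket, object_key, upload_id, parts)
```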
 
     @allure.title("Test S3 Bucket tagging API")
     def test_s3_api_bucket_tagging(self, bucket):
         """
@@ -275,10 +272,10 @@ def test_s3_api_bucket_tagging(self, bucket):
         """
         key_value_pair = [("some-key", "some-value"), ("some-key-2", "some-value-2")]
 
-        s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair)
+        s3_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair)
         check_tags_by_bucket(self.s3_client, bucket, key_value_pair)
 
-        s3_gate_bucket.delete_bucket_tagging(self.s3_client, bucket)
+        s3_bucket.delete_bucket_tagging(self.s3_client, bucket)
 
         with pytest.raises(Exception, match=NO_SUCH_TAGS_ERROR):
             check_tags_by_bucket(self.s3_client, bucket, [])
@@ -296,10 +293,10 @@ def test_s3_api_object_tagging(self, bucket, simple_object_size):
         file_name_simple = generate_file(simple_object_size)
         obj_key = self.object_key_from_file_path(file_name_simple)
 
-        s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
+        s3_object.put_object_s3(self.s3_client, bucket, file_name_simple)
 
         for tags in (key_value_pair_obj, key_value_pair_obj_new):
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, obj_key, tags)
+            s3_object.put_object_tagging(self.s3_client, bucket, obj_key, tags)
             check_tags_by_object(
                 self.s3_client,
                 bucket,
@@ -307,7 +304,7 @@ def test_s3_api_object_tagging(self, bucket, simple_object_size):
                 tags,
             )
 
-        s3_gate_object.delete_object_tagging(self.s3_client, bucket, obj_key)
+        s3_object.delete_object_tagging(self.s3_client, bucket, obj_key)
         check_tags_by_object(self.s3_client, bucket, obj_key, [])
 
     @allure.title("Test S3: Delete object & delete objects S3 API")
@@ -330,15 +327,15 @@ def test_s3_api_delete(self, two_buckets, simple_object_size, complex_object_siz
 
         for bucket in (bucket_1, bucket_2):
             with allure.step(f"Bucket {bucket} must be empty as it just created"):
-                objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket)
+                objects_list = s3_object.list_objects_s3_v2(self.s3_client, bucket)
                 assert not objects_list, f"Expected empty bucket, got {objects_list}"
 
             for file_path in file_paths:
-                s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+                s3_object.put_object_s3(self.s3_client, bucket, file_path)
                 put_objects.append(self.object_key_from_file_path(file_path))
 
             with allure.step(f"Check all objects put in bucket {bucket} successfully"):
-                bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket)
+                bucket_objects = s3_object.list_objects_s3_v2(self.s3_client, bucket)
                 assert set(put_objects) == set(
                     bucket_objects
                 ), f"Expected all objects {put_objects} in objects list {bucket_objects}"
@@ -346,10 +343,10 @@ def test_s3_api_delete(self, two_buckets, simple_object_size, complex_object_siz
         with allure.step("Delete some objects from bucket_1 one by one"):
             objects_to_delete_b1 = choices(put_objects, k=max_delete_objects)
             for obj in objects_to_delete_b1:
-                s3_gate_object.delete_object_s3(self.s3_client, bucket_1, obj)
+                s3_object.delete_object_s3(self.s3_client, bucket_1, obj)
 
         with allure.step("Check deleted objects are not visible in bucket bucket_1"):
-            bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_1)
+            bucket_objects = s3_object.list_objects_s3_v2(self.s3_client, bucket_1)
             assert set(put_objects).difference(set(objects_to_delete_b1)) == set(
                 bucket_objects
             ), f"Expected all objects {put_objects} in objects list {bucket_objects}"
@@ -357,10 +354,10 @@ def test_s3_api_delete(self, two_buckets, simple_object_size, complex_object_siz
 
         with allure.step("Delete some objects from bucket_2 at once"):
             objects_to_delete_b2 = choices(put_objects, k=max_delete_objects)
-            s3_gate_object.delete_objects_s3(self.s3_client, bucket_2, objects_to_delete_b2)
+            s3_object.delete_objects_s3(self.s3_client, bucket_2, objects_to_delete_b2)
 
         with allure.step("Check deleted objects are not visible in bucket bucket_2"):
-            objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_2)
+            objects_list = s3_object.list_objects_s3_v2(self.s3_client, bucket_2)
             assert set(put_objects).difference(set(objects_to_delete_b2)) == set(
                 objects_list
             ), f"Expected all objects {put_objects} in objects list {bucket_objects}"
@@ -378,25 +375,25 @@ def test_s3_copy_same_bucket(self, bucket, complex_object_size, simple_object_si
         bucket_objects = [file_name_simple, file_name_large]
 
         with allure.step("Bucket must be empty"):
-            objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+            objects_list = s3_object.list_objects_s3(self.s3_client, bucket)
             assert not objects_list, f"Expected empty bucket, got {objects_list}"
 
         with allure.step("Put objects into bucket"):
             for file_path in (file_path_simple, file_path_large):
-                s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+                s3_object.put_object_s3(self.s3_client, bucket, file_path)
 
         with allure.step("Copy one object into the same bucket"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
             bucket_objects.append(copy_obj_path)
 
         check_objects_in_bucket(self.s3_client, bucket, bucket_objects)
 
         with allure.step("Check copied object has the same content"):
-            got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path)
+            got_copied_file = s3_object.get_object_s3(self.s3_client, bucket, copy_obj_path)
             assert get_file_hash(file_path_simple) == get_file_hash(got_copied_file), "Hashes must be the same"
 
         with allure.step("Delete one object from bucket"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_simple)
+            s3_object.delete_object_s3(self.s3_client, bucket, file_name_simple)
             bucket_objects.remove(file_name_simple)
 
         check_objects_in_bucket(
@@ -421,42 +418,77 @@ def test_s3_copy_to_another_bucket(self, two_buckets, complex_object_size, simpl
 
         with allure.step("Buckets must be empty"):
             for bucket in (bucket_1, bucket_2):
-                objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+                objects_list = s3_object.list_objects_s3(self.s3_client, bucket)
                 assert not objects_list, f"Expected empty bucket, got {objects_list}"
 
         with allure.step("Put objects into one bucket"):
             for file_path in (file_path_simple, file_path_large):
-                s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path)
+                s3_object.put_object_s3(self.s3_client, bucket_1, file_path)
 
         with allure.step("Copy object from first bucket into second"):
-            copy_obj_path_b2 = s3_gate_object.copy_object_s3(
-                self.s3_client, bucket_1, file_name_large, bucket_dst=bucket_2
-            )
+            copy_obj_path_b2 = s3_object.copy_object_s3(self.s3_client, bucket_1, file_name_large, bucket_dst=bucket_2)
 
         check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
         check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])
 
         with allure.step("Check copied object has the same content"):
-            got_copied_file_b2 = s3_gate_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
+            got_copied_file_b2 = s3_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
             assert get_file_hash(file_path_large) == get_file_hash(got_copied_file_b2), "Hashes must be the same"
 
         with allure.step("Delete one object from first bucket"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)
+            s3_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)
             bucket_1_objects.remove(file_name_simple)
 
         check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
         check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])
 
         with allure.step("Delete one object from second bucket and check it is empty"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
+            s3_object.delete_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
             check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[])
 
+    @allure.title("Test S3: bucket cors")
+    def test_s3_cors(self):
+        with allure.step("Create bucket without cors"):
+            bucket = s3_bucket.create_bucket_s3(self.s3_client)
+            set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)
+
+        with pytest.raises(Exception):
+            bucket_cors = s3_bucket.get_bucket_cors(self.s3_client, bucket)
+
+        with allure.step("Put bucket cors"):
+            cors = {
+                "CORSRules": [
+                    {
+                        "AllowedOrigins": ["http://www.example.com"],
+                        "AllowedHeaders": ["*"],
+                        "AllowedMethods": ["PUT", "POST", "DELETE"],
+                        "MaxAgeSeconds": 3000,
+                        "ExposeHeaders": ["x-amz-server-side-encryption"],
+                    },
+                    {
+                        "AllowedOrigins": ["*"],
+                        "AllowedHeaders": ["Authorization"],
+                        "AllowedMethods": ["GET"],
+                        "MaxAgeSeconds": 3000,
+                    },
+                ]
+            }
+            s3_bucket.put_bucket_cors(self.s3_client, bucket, cors)
+            bucket_cors = s3_bucket.get_bucket_cors(self.s3_client, bucket)
+            assert bucket_cors == cors.get("CORSRules"), f"Expected corsrules must be {cors.get('CORSRules')}"
+
+        with allure.step("delete bucket cors"):
+            s3_bucket.delete_bucket_cors(self.s3_client, bucket)
+
+        with pytest.raises(Exception):
+            bucket_cors = s3_bucket.get_bucket_cors(self.s3_client, bucket)
+
     def check_object_attributes(self, bucket: str, object_key: str, parts_count: int):
         if not isinstance(self.s3_client, AwsCliClient):
             logger.warning("Attributes check is not supported for boto3 implementation")
             return
 
         with allure.step("Check object's attributes"):
-            obj_parts = s3_gate_object.get_object_attributes(
+            obj_parts = s3_object.get_object_attributes(
                 self.s3_client, bucket, object_key, "ObjectParts", get_full_resp=False
             )
             assert obj_parts.get("TotalPartsCount") == parts_count, f"Expected TotalPartsCount is {parts_count}"
@@ -464,7 +496,7 @@ def check_object_attributes(self, bucket: str, object_key: str, parts_count: int
 
         with allure.step("Check object's attribute max-parts"):
             max_parts = 2
-            obj_parts = s3_gate_object.get_object_attributes(
+            obj_parts = s3_object.get_object_attributes(
                 self.s3_client,
                 bucket,
                 object_key,
@@ -478,7 +510,7 @@ def check_object_attributes(self, bucket: str, object_key: str, parts_count: int
 
         with allure.step("Check object's attribute part-number-marker"):
             part_number_marker = 3
-            obj_parts = s3_gate_object.get_object_attributes(
+            obj_parts = s3_object.get_object_attributes(
                 self.s3_client,
                 bucket,
                 object_key,
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_ACL.py b/pytest_tests/tests/s3/test_s3_ACL.py
similarity index 63%
rename from pytest_tests/tests/services/s3_gate/test_s3_ACL.py
rename to pytest_tests/tests/s3/test_s3_ACL.py
index 45361c230..7061e4f90 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_ACL.py
+++ b/pytest_tests/tests/s3/test_s3_ACL.py
@@ -2,8 +2,8 @@
 import pytest
 from helpers.file_helper import generate_file
 from helpers.s3_helper import ACLType, object_key_from_file_path, verify_acls
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -11,9 +11,7 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
 
 
-@pytest.mark.acl
-@pytest.mark.s3_gate
-class TestS3GateACL(TestNeofsS3GateBase):
+class TestS3ACL(TestNeofsS3Base):
     @pytest.mark.sanity
     @allure.title("Test S3: Object ACL")
     def test_s3_object_ACL(self, bucket, simple_object_size):
@@ -21,56 +19,56 @@ def test_s3_object_ACL(self, bucket, simple_object_size):
         file_name = object_key_from_file_path(file_path)
 
         with allure.step("Put object into bucket, Check ACL is empty"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name)
             verify_acls(obj_acl, ACLType.PRIVATE)
 
         with allure.step("Put object ACL = public-read"):
             acl = "public-read"
-            s3_gate_object.put_object_acl_s3(self.s3_client, bucket, file_name, acl)
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_acl_s3(self.s3_client, bucket, file_name, acl)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name)
             verify_acls(obj_acl, ACLType.PUBLIC_READ)
 
         with allure.step("Put object ACL = private"):
             acl = "private"
-            s3_gate_object.put_object_acl_s3(self.s3_client, bucket, file_name, acl)
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_acl_s3(self.s3_client, bucket, file_name, acl)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name)
             verify_acls(obj_acl, ACLType.PRIVATE)
 
         with allure.step("Put object with grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers"):
-            s3_gate_object.put_object_acl_s3(
+            s3_object.put_object_acl_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 grant_read="uri=http://acs.amazonaws.com/groups/global/AllUsers",
             )
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name)
             verify_acls(obj_acl, ACLType.PUBLIC_READ)
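Each ACL case above is the same three-step probe: apply, read back, verify. Condensed, as an illustrative sketch using this suite's own helpers:

```python
# Illustrative sketch, not part of this patch: apply a canned ACL and verify
# the grants readback, as the object-ACL test above does per case.
from helpers.s3_helper import ACLType, verify_acls
from s3 import s3_object


def check_canned_acl(s3_client, bucket: str, file_name: str):
    s3_object.put_object_acl_s3(s3_client, bucket, file_name, "public-read")
    obj_acl = s3_object.get_object_acl_s3(s3_client, bucket, file_name)
    verify_acls(obj_acl, ACLType.PUBLIC_READ)
```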
 
     @allure.title("Test S3: Bucket ACL")
     def test_s3_bucket_ACL(self):
         with allure.step("Create bucket with ACL = public-read-write"):
             acl = "public-read-write"
-            bucket = s3_gate_bucket.create_bucket_s3(
+            bucket = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 acl=acl,
                 bucket_configuration="rep-1",
             )
-            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_acl = s3_bucket.get_bucket_acl(self.s3_client, bucket)
             verify_acls(bucket_acl, ACLType.PUBLIC_READ_WRITE)
 
         with allure.step("Change bucket ACL to private"):
             acl = "private"
-            s3_gate_bucket.put_bucket_acl_s3(self.s3_client, bucket, acl=acl)
-            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            s3_bucket.put_bucket_acl_s3(self.s3_client, bucket, acl=acl)
+            bucket_acl = s3_bucket.get_bucket_acl(self.s3_client, bucket)
             verify_acls(bucket_acl, ACLType.PRIVATE)
 
         with allure.step("Change bucket acl to --grant-write uri=http://acs.amazonaws.com/groups/global/AllUsers"):
-            s3_gate_bucket.put_bucket_acl_s3(
+            s3_bucket.put_bucket_acl_s3(
                 self.s3_client,
                 bucket,
                 grant_write="uri=http://acs.amazonaws.com/groups/global/AllUsers",
             )
-            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_acl = s3_bucket.get_bucket_acl(self.s3_client, bucket)
             verify_acls(bucket_acl, ACLType.PUBLIC_WRITE)
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_bucket.py b/pytest_tests/tests/s3/test_s3_bucket.py
similarity index 73%
rename from pytest_tests/tests/services/s3_gate/test_s3_bucket.py
rename to pytest_tests/tests/s3/test_s3_bucket.py
index 20e2fdee2..984792ee6 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_bucket.py
+++ b/pytest_tests/tests/s3/test_s3_bucket.py
@@ -10,8 +10,8 @@
     object_key_from_file_path,
     verify_acls,
 )
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -19,88 +19,84 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
 
 
-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_bucket
-class TestS3GateBucket(TestNeofsS3GateBase):
-    @pytest.mark.acl
+class TestS3Bucket(TestNeofsS3Base):
     @pytest.mark.sanity
     @allure.title("Test S3: Create Bucket with various ACL")
     def test_s3_create_bucket_with_ACL(self):
         with allure.step("Create bucket with ACL = private"):
             acl = "private"
-            bucket = s3_gate_bucket.create_bucket_s3(
+            bucket = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 acl=acl,
                 bucket_configuration="rep-1",
             )
-            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_acl = s3_bucket.get_bucket_acl(self.s3_client, bucket)
             verify_acls(bucket_acl, ACLType.PRIVATE)
 
         with allure.step("Create bucket with ACL = public-read"):
             acl = "public-read"
-            bucket_1 = s3_gate_bucket.create_bucket_s3(
+            bucket_1 = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 acl=acl,
                 bucket_configuration="rep-1",
             )
-            bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1)
+            bucket_acl_1 = s3_bucket.get_bucket_acl(self.s3_client, bucket_1)
             verify_acls(bucket_acl_1, ACLType.PUBLIC_READ)
 
         with allure.step("Create bucket with ACL = public-read-write"):
             acl = "public-read-write"
-            bucket_2 = s3_gate_bucket.create_bucket_s3(
+            bucket_2 = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 acl=acl,
                 bucket_configuration="rep-1",
             )
-            bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2)
+            bucket_acl_2 = s3_bucket.get_bucket_acl(self.s3_client, bucket_2)
             verify_acls(bucket_acl_2, ACLType.PUBLIC_READ_WRITE)
 
         with allure.step("Create bucket with ACL = authenticated-read"):
             acl = "authenticated-read"
-            bucket_3 = s3_gate_bucket.create_bucket_s3(
+            bucket_3 = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 acl=acl,
                 bucket_configuration="rep-1",
             )
-            bucket_acl_3 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_3)
+            bucket_acl_3 = s3_bucket.get_bucket_acl(self.s3_client, bucket_3)
             verify_acls(bucket_acl_3, ACLType.PUBLIC_READ)
 
-    @pytest.mark.acl
     @allure.title("Test S3: Create Bucket with different ACL by grand")
     def test_s3_create_bucket_with_grands(self):
         with allure.step("Create bucket with --grant-read"):
-            bucket = s3_gate_bucket.create_bucket_s3(
+            bucket = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 grant_read="uri=http://acs.amazonaws.com/groups/global/AllUsers",
                 bucket_configuration="rep-1",
             )
-            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_acl = s3_bucket.get_bucket_acl(self.s3_client, bucket)
             verify_acls(bucket_acl, ACLType.PUBLIC_READ)
 
         with allure.step("Create bucket with --grant-write"):
-            bucket_1 = s3_gate_bucket.create_bucket_s3(
+            bucket_1 = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 grant_write="uri=http://acs.amazonaws.com/groups/global/AllUsers",
                 bucket_configuration="rep-1",
            )
-            bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1)
+            bucket_acl_1 = s3_bucket.get_bucket_acl(self.s3_client, bucket_1)
             verify_acls(bucket_acl_1, ACLType.PUBLIC_WRITE)
 
         with allure.step("Create bucket with --grant-full-control"):
-            bucket_2 = s3_gate_bucket.create_bucket_s3(
+            bucket_2 = s3_bucket.create_bucket_s3(
                 self.s3_client,
                 object_lock_enabled_for_bucket=True,
                 grant_full_control="uri=http://acs.amazonaws.com/groups/global/AllUsers",
                 bucket_configuration="rep-1",
             )
-            bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2)
+            bucket_acl_2 = s3_bucket.get_bucket_acl(self.s3_client, bucket_2)
             verify_acls(bucket_acl_2, ACLType.PUBLIC_READ_WRITE)
 
     @allure.title("Test S3: create bucket with object lock")
@@ -109,14 +105,14 @@ def test_s3_bucket_object_lock(self, simple_object_size):
         file_name = object_key_from_file_path(file_path)
 
         with allure.step("Create bucket with --no-object-lock-enabled-for-bucket"):
-            bucket = s3_gate_bucket.create_bucket_s3(
+            bucket = s3_bucket.create_bucket_s3(
                 self.s3_client, object_lock_enabled_for_bucket=False, bucket_configuration="rep-1"
             )
             date_obj = datetime.utcnow() + timedelta(days=1)
             with pytest.raises(Exception, match=r".*Object Lock configuration does not exist for this bucket.*"):
                 # An error occurred (ObjectLockConfigurationNotFoundError) when calling the PutObject operation (reached max retries: 0):
                 # Object Lock configuration does not exist for this bucket
-                s3_gate_object.put_object_s3(
+                s3_object.put_object_s3(
                     self.s3_client,
                     bucket,
                     file_path,
@@ -124,11 +120,11 @@ def test_s3_bucket_object_lock(self, simple_object_size):
                     ObjectLockRetainUntilDate=date_obj.strftime("%Y-%m-%dT%H:%M:%S"),
                 )
         with allure.step("Create bucket with --object-lock-enabled-for-bucket"):
-            bucket_1 = s3_gate_bucket.create_bucket_s3(
+            bucket_1 = s3_bucket.create_bucket_s3(
                 self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
             )
             date_obj_1 = datetime.utcnow() + timedelta(days=1)
-            s3_gate_object.put_object_s3(
+            s3_object.put_object_s3(
                 self.s3_client,
                 bucket_1,
                 file_path,
@@ -144,23 +140,23 @@ def test_s3_delete_bucket(self, simple_object_size):
         file_name_1 = object_key_from_file_path(file_path_1)
         file_path_2 = generate_file(simple_object_size)
         file_name_2 = object_key_from_file_path(file_path_2)
-        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
 
         with allure.step("Put two objects into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1)
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_1)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_2)
             check_objects_in_bucket(self.s3_client, bucket, [file_name_1, file_name_2])
 
         with allure.step("Try to delete not empty bucket and expect error"):
             with pytest.raises(Exception, match=r".*The bucket you tried to delete is not empty.*"):
-                s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+                s3_bucket.delete_bucket_s3(self.s3_client, bucket)
 
         with allure.step("Delete all objects in bucket"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_1)
-            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_2)
+            s3_object.delete_object_s3(self.s3_client, bucket, file_name_1)
+            s3_object.delete_object_s3(self.s3_client, bucket, file_name_2)
             check_objects_in_bucket(self.s3_client, bucket, [])
 
         with allure.step("Delete empty bucket"):
-            s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+            s3_bucket.delete_bucket_s3(self.s3_client, bucket)
 
         with pytest.raises(Exception, match=r".*Not Found.*"):
-            s3_gate_bucket.head_bucket(self.s3_client, bucket)
+            s3_bucket.head_bucket(self.s3_client, bucket)
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_locking.py b/pytest_tests/tests/s3/test_s3_locking.py
similarity index 75%
rename from pytest_tests/tests/services/s3_gate/test_s3_locking.py
rename to pytest_tests/tests/s3/test_s3_locking.py
index 3eca416b7..793d93cb5 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_locking.py
+++ b/pytest_tests/tests/s3/test_s3_locking.py
@@ -9,8 +9,8 @@
     check_objects_in_bucket,
     object_key_from_file_path,
 )
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -18,24 +18,22 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
 
 
-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_locking
 @pytest.mark.parametrize("version_id", [None, "second"])
-class TestS3GateLocking(TestNeofsS3GateBase):
+class TestS3Locking(TestNeofsS3Base):
     @allure.title("Test S3: Checking the operation of retention period & legal lock on the object")
     def test_s3_object_locking(self, version_id, simple_object_size):
         file_path = generate_file(simple_object_size)
         file_name = object_key_from_file_path(file_path)
         retention_period = 2
 
-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
         )
 
         with allure.step("Put several versions of object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path)
             file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)
             check_objects_in_bucket(self.s3_client, bucket, [file_name])
             if version_id:
                 version_id = version_id_2
@@ -46,18 +44,18 @@ def test_s3_object_locking(self, version_id, simple_object_size):
                 "Mode": "COMPLIANCE",
                 "RetainUntilDate": date_obj,
             }
-            s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+            s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF")
 
         with allure.step(f"Put legal hold to object {file_name}"):
-            s3_gate_object.put_object_legal_hold(self.s3_client, bucket, file_name, "ON", version_id)
+            s3_object.put_object_legal_hold(self.s3_client, bucket, file_name, "ON", version_id)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "ON")
 
         with allure.step("Fail with deleting object with legal hold and retention period"):
             if version_id:
                 with pytest.raises(Exception):
                     # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied.
-                    s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+                    s3_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
 
         with allure.step("Check retention period is no longer set on the uploaded object"):
             time.sleep((retention_period + 1) * 60)
@@ -67,9 +65,9 @@ def test_s3_object_locking(self, version_id, simple_object_size):
             if version_id:
                 with pytest.raises(Exception):
                     # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied.
-                    s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+                    s3_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
             else:
-                s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+                s3_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
-                    s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+                    s3_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
             else:
-                s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+                s3_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)

     @allure.title("Test S3: Checking the impossibility to change the retention mode COMPLIANCE")
     def test_s3_mode_compliance(self, version_id, simple_object_size):
@@ -78,12 +76,12 @@ def test_s3_mode_compliance(self, version_id, simple_object_size):
         file_path = generate_file(simple_object_size)
         file_name = object_key_from_file_path(file_path)
         retention_period = 2
         retention_period_1 = 1

-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
         )

         with allure.step("Put object into bucket"):
-            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            obj_version = s3_object.put_object_s3(self.s3_client, bucket, file_path)
             if version_id:
                 version_id = obj_version
             check_objects_in_bucket(self.s3_client, bucket, [file_name])
@@ -94,7 +92,7 @@ def test_s3_mode_compliance(self, version_id, simple_object_size):
                 "Mode": "COMPLIANCE",
                 "RetainUntilDate": date_obj,
             }
-            s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+            s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF")

         with allure.step(f"Try to change retention period {retention_period_1}min to object {file_name}"):
@@ -104,7 +102,7 @@ def test_s3_mode_compliance(self, version_id, simple_object_size):
             date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
             retention = {
                 "Mode": "COMPLIANCE",
                 "RetainUntilDate": date_obj,
             }
             with pytest.raises(Exception):
-                s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+                s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)

     @allure.title("Test S3: Checking the ability to change retention mode GOVERNANCE")
     def test_s3_mode_governance(self, version_id, simple_object_size):
@@ -114,12 +112,12 @@ def test_s3_mode_governance(self, version_id, simple_object_size):
         file_path = generate_file(simple_object_size)
         file_name = object_key_from_file_path(file_path)
         retention_period = 3
         retention_period_1 = 2
         retention_period_2 = 5

-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
         )

         with allure.step("Put object into bucket"):
-            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            obj_version = s3_object.put_object_s3(self.s3_client, bucket, file_path)
             if version_id:
                 version_id = obj_version
             check_objects_in_bucket(self.s3_client, bucket, [file_name])
@@ -130,7 +128,7 @@ def test_s3_mode_governance(self, version_id, simple_object_size):
                 "Mode": "GOVERNANCE",
                 "RetainUntilDate": date_obj,
             }
-            s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+            s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF")

         with allure.step(f"Try to change retention period {retention_period_1}min to object {file_name}"):
@@ -140,7 +138,7 @@ def test_s3_mode_governance(self, version_id, simple_object_size):
             date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
             retention = {
                 "Mode": "GOVERNANCE",
                 "RetainUntilDate": date_obj,
             }
             with pytest.raises(Exception):
-                s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+                s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)

         with allure.step(f"Try to change retention period {retention_period_1}min to object {file_name}"):
             date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
@@ -149,7 +147,7 @@ def test_s3_mode_governance(self, version_id, simple_object_size):
                 "RetainUntilDate": date_obj,
             }
             with pytest.raises(Exception):
-                s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)
+                s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id)

         with allure.step(f"Put new retention period {retention_period_2}min to object {file_name}"):
             date_obj = datetime.utcnow() + timedelta(minutes=retention_period_2)
@@ -157,7 +155,7 @@ def test_s3_mode_governance(self, version_id, simple_object_size):
                 "Mode": "GOVERNANCE",
                 "RetainUntilDate": date_obj,
             }
-            s3_gate_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id, True)
+            s3_object.put_object_retention(self.s3_client, bucket, file_name, retention, version_id, True)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF")

     @allure.title("Test S3: Checking if an Object Cannot Be Locked")
@@ -165,45 +163,44 @@ def test_s3_legal_hold(self, version_id, simple_object_size):
         file_path = generate_file(simple_object_size)
         file_name = object_key_from_file_path(file_path)

-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=False, bucket_configuration="rep-1"
         )

         with allure.step("Put object into bucket"):
-            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            obj_version = s3_object.put_object_s3(self.s3_client, bucket, file_path)
             if version_id:
                 version_id = obj_version
             check_objects_in_bucket(self.s3_client, bucket, [file_name])

         with allure.step(f"Put legal hold to object {file_name}"):
             with pytest.raises(Exception):
-                s3_gate_object.put_object_legal_hold(self.s3_client, bucket, file_name, "ON", version_id)
+                s3_object.put_object_legal_hold(self.s3_client, bucket, file_name, "ON", version_id)


-@pytest.mark.s3_gate
-class TestS3GateLockingBucket(TestNeofsS3GateBase):
+class TestS3LockingBucket(TestNeofsS3Base):
     @allure.title("Test S3: Bucket Lock")
     def test_s3_bucket_lock(self, simple_object_size):
         file_path = generate_file(simple_object_size)
         file_name = object_key_from_file_path(file_path)
         configuration = {"Rule": {"DefaultRetention": {"Mode": "COMPLIANCE", "Days": 1}}}

-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
         )

         with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=False"):
-            s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)
+            s3_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)

         with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=True"):
             configuration["ObjectLockEnabled"] = "Enabled"
-            s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)
+            s3_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)

         with allure.step("GetObjectLockConfiguration"):
-            config = s3_gate_bucket.get_object_lock_configuration(self.s3_client, bucket)
+            config = s3_bucket.get_object_lock_configuration(self.s3_client, bucket)
             configuration["Rule"]["DefaultRetention"]["Years"] = 0
             assert config == configuration, f"Configurations must be equal {configuration}"

         with allure.step("Put object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path)
             assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", None, "OFF", 1)
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_multipart.py b/pytest_tests/tests/s3/test_s3_multipart.py
similarity index 53%
rename from pytest_tests/tests/services/s3_gate/test_s3_multipart.py
rename to pytest_tests/tests/s3/test_s3_multipart.py
index 6caec2526..4bb451a52 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_multipart.py
+++ b/pytest_tests/tests/s3/test_s3_multipart.py
@@ -6,8 +6,8 @@
     object_key_from_file_path,
     set_bucket_versioning,
 )
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base

 PART_SIZE = 5 * 1024 * 1024

@@ -17,13 +17,11 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)


-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_multipart
-class TestS3GateMultipart(TestNeofsS3GateBase):
+class TestS3Multipart(TestNeofsS3Base):
     @allure.title("Test S3 Object Multipart API")
     def test_s3_object_multipart(self):
-        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)
         parts_count = 5
         file_name_large = generate_file(PART_SIZE * parts_count)  # 5Mb - min part
         object_key = object_key_from_file_path(file_name_large)
@@ -31,33 +29,33 @@ def test_s3_object_multipart(self):
         parts = []

         with allure.step("Upload first part"):
-            upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
-            etag = s3_gate_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, 1, part_files[0])
+            upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            etag = s3_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, 1, part_files[0])
             parts.append((1, etag))
-            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            got_parts = s3_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
             assert len(got_parts) == 1, f"Expected {1} parts, got\n{got_parts}"

         with allure.step("Upload last parts"):
             for part_id, file_path in enumerate(part_files[1:], start=2):
-                etag = s3_gate_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, part_id, file_path)
+                etag = s3_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, part_id, file_path)
                 parts.append((part_id, etag))
-            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
-            s3_gate_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
+            got_parts = s3_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            s3_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
             assert len(got_parts) == len(part_files), f"Expected {parts_count} parts, got\n{got_parts}"

         with allure.step("Check upload list is empty"):
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
             assert not uploads, f"Expected there are no uploads in bucket {bucket}"

         with allure.step("Check we can get whole object from bucket"):
-            got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
+            got_object = s3_object.get_object_s3(self.s3_client, bucket, object_key)
             assert get_file_hash(got_object) == get_file_hash(file_name_large)

     @allure.title("Test S3 Multipart abort")
     def test_s3_abort_multipart(self):
-        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)
         parts_count = 5
         file_name_large = generate_file(PART_SIZE * parts_count)  # 5Mb - min part
         object_key = object_key_from_file_path(file_name_large)
@@ -65,22 +63,22 @@ def test_s3_abort_multipart(self):
         parts = []

         with allure.step("Upload first part"):
-            upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
-            etag = s3_gate_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, 1, part_files[0])
+            upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            etag = s3_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, 1, part_files[0])
             parts.append((1, etag))
-            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            got_parts = s3_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
             assert len(got_parts) == 1, f"Expected {1} parts, got\n{got_parts}"

         with allure.step("Abort multipart upload"):
-            s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            s3_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
             assert not uploads, f"Expected there are no uploads in bucket {bucket}"

     @allure.title("Test S3 Upload Part Copy")
     def test_s3_multipart_copy(self):
-        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        bucket = s3_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)
         parts_count = 3
         file_name_large = generate_file(PART_SIZE * parts_count)  # 5Mb - min part
         object_key = object_key_from_file_path(file_name_large)
@@ -90,25 +88,25 @@ def test_s3_multipart_copy(self):
         with allure.step(f"Put {parts_count} objects in bucket"):
             for part in part_files:
-                s3_gate_object.put_object_s3(self.s3_client, bucket, part)
+                s3_object.put_object_s3(self.s3_client, bucket, part)
                 objs.append(object_key_from_file_path(part))
             check_objects_in_bucket(self.s3_client, bucket, objs)

         with allure.step("Create multipart upload object"):
-            upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
-            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            upload_id = s3_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
+            uploads = s3_object.list_multipart_uploads_s3(self.s3_client, bucket)
             assert uploads, f"Expected there are uploads in bucket {bucket}"

         with allure.step("Start multipart upload"):
             for part_id, obj_key in enumerate(objs, start=1):
-                etag = s3_gate_object.upload_part_copy_s3(
+                etag = s3_object.upload_part_copy_s3(
                     self.s3_client, bucket, object_key, upload_id, part_id, f"{bucket}/{obj_key}"
                 )
                 parts.append((part_id, etag))
-            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
-            s3_gate_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
+            got_parts = s3_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            s3_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
             assert len(got_parts) == len(part_files), f"Expected {parts_count} parts, got\n{got_parts}"

         with allure.step("Check we can get whole object from bucket"):
-            got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
+            got_object = s3_object.get_object_s3(self.s3_client, bucket, object_key)
             assert get_file_hash(got_object) == get_file_hash(file_name_large)
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_object.py b/pytest_tests/tests/s3/test_s3_object.py
similarity index 71%
rename from pytest_tests/tests/services/s3_gate/test_s3_object.py
rename to pytest_tests/tests/s3/test_s3_object.py
index 07ac84d43..b72fb7bdd 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_object.py
+++ b/pytest_tests/tests/s3/test_s3_object.py
@@ -24,17 +24,15 @@
 )
 from neofs_testlib.env.env import NodeWallet
 from neofs_testlib.utils.wallet import init_wallet
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base


 def pytest_generate_tests(metafunc):
     parametrize_clients(metafunc)


-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_object
-class TestS3GateObject(TestNeofsS3GateBase):
+class TestS3Object(TestNeofsS3Base):
     @staticmethod
     def object_key_from_file_path(full_path: str) -> str:
         return os.path.basename(full_path)
@@ -48,38 +46,38 @@ def test_s3_copy_object(self, two_buckets, simple_object_size):
         bucket_1, bucket_2 = two_buckets

-        objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket_1)
+        objects_list = s3_object.list_objects_s3(self.s3_client, bucket_1)
         assert not objects_list, f"Expected empty bucket, got {objects_list}"

         with allure.step("Put object into one bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path)
+            s3_object.put_object_s3(self.s3_client, bucket_1, file_path)

         with allure.step("Copy one object into the same bucket"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket_1, file_name)
             bucket_1_objects.append(copy_obj_path)
             check_objects_in_bucket(self.s3_client, bucket_1, bucket_1_objects)

-        objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket_2)
+        objects_list = s3_object.list_objects_s3(self.s3_client, bucket_2)
         assert not objects_list, f"Expected empty bucket, got {objects_list}"

         with allure.step("Copy object from first bucket into second"):
-            copy_obj_path_b2 = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name, bucket_dst=bucket_2)
+            copy_obj_path_b2 = s3_object.copy_object_s3(self.s3_client, bucket_1, file_name, bucket_dst=bucket_2)
             check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
             check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])

         with allure.step("Check copied object has the same content"):
-            got_copied_file_b2 = s3_gate_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
+            got_copied_file_b2 = s3_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
             assert get_file_hash(file_path) == get_file_hash(got_copied_file_b2), "Hashes must be the same"

         with allure.step("Delete one object from first bucket"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name)
+            s3_object.delete_object_s3(self.s3_client, bucket_1, file_name)
             bucket_1_objects.remove(file_name)
             check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
             check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])

         with allure.step("Copy one object into the same bucket"):
             with pytest.raises(Exception):
-                s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name)
+                s3_object.copy_object_s3(self.s3_client, bucket_1, file_name)

     @allure.title("Test S3: Copy version of object")
     def test_s3_copy_version_object(self, two_buckets, simple_object_size):
@@ -88,50 +86,49 @@ def test_s3_copy_version_object(self, two_buckets, simple_object_size):
         version_1_content = "Version 1"
         file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

         bucket_1, bucket_2 = two_buckets
-        set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket_1, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_name_simple)
+            s3_object.put_object_s3(self.s3_client, bucket_1, file_name_simple)
             bucket_1_objects = [obj_key]
             check_objects_in_bucket(self.s3_client, bucket_1, [obj_key])

         with allure.step("Copy one object into the same bucket"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, obj_key)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket_1, obj_key)
             bucket_1_objects.append(copy_obj_path)
             check_objects_in_bucket(self.s3_client, bucket_1, bucket_1_objects)

-        set_bucket_versioning(self.s3_client, bucket_2, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket_2, s3_bucket.VersioningStatus.ENABLED)
         with allure.step("Copy object from first bucket into second"):
-            copy_obj_path_b2 = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, obj_key, bucket_dst=bucket_2)
+            copy_obj_path_b2 = s3_object.copy_object_s3(self.s3_client, bucket_1, obj_key, bucket_dst=bucket_2)
             check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
             check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])

         with allure.step("Delete one object from first bucket and check object in bucket"):
-            s3_gate_object.delete_object_s3(self.s3_client, bucket_1, obj_key)
+            s3_object.delete_object_s3(self.s3_client, bucket_1, obj_key)
             bucket_1_objects.remove(obj_key)
             check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)

         with allure.step("Copy one object into the same bucket"):
             with pytest.raises(Exception):
-                s3_gate_object.copy_object_s3(self.s3_client, bucket_1, obj_key)
+                s3_object.copy_object_s3(self.s3_client, bucket_1, obj_key)

-    @pytest.mark.acl
     @allure.title("Test S3: Checking copy with acl")
     def test_s3_copy_acl(self, bucket, simple_object_size):
         version_1_content = "Version 1"
         file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
+            s3_object.put_object_s3(self.s3_client, bucket, file_name_simple)
             check_objects_in_bucket(self.s3_client, bucket, [obj_key])

         with allure.step("Copy object and check acl attribute"):
             acl = "private"
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, obj_key, ACL=acl)
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, copy_obj_path)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket, obj_key, ACL=acl)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, copy_obj_path)
             verify_acls(obj_acl, ACLType.PRIVATE)

     @allure.title("Test S3: Copy object with metadata")
@@ -141,29 +138,29 @@ def test_s3_copy_metadate(self, bucket, simple_object_size):
         file_name = self.object_key_from_file_path(file_path)
         bucket_1_objects = [file_name]

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path, Metadata=object_metadata)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path, Metadata=object_metadata)
             bucket_1_objects = [file_name]
             check_objects_in_bucket(self.s3_client, bucket, bucket_1_objects)

         with allure.step("Copy one object"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket, file_name)
             bucket_1_objects.append(copy_obj_path)
             check_objects_in_bucket(self.s3_client, bucket, bucket_1_objects)
-            obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
+            obj_head = s3_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
             assert obj_head.get("Metadata") == object_metadata, f"Metadata must be {object_metadata}"

         with allure.step("Copy one object with metadata"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name, metadata_directive="COPY")
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket, file_name, metadata_directive="COPY")
             bucket_1_objects.append(copy_obj_path)
-            obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
+            obj_head = s3_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
             assert obj_head.get("Metadata") == object_metadata, f"Metadata must be {object_metadata}"

         with allure.step("Copy one object with new metadata"):
             object_metadata_1 = {f"{uuid.uuid4()}": f"{uuid.uuid4()}"}
-            copy_obj_path = s3_gate_object.copy_object_s3(
+            copy_obj_path = s3_object.copy_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
@@ -171,7 +168,7 @@ def test_s3_copy_metadate(self, bucket, simple_object_size):
                 metadata=object_metadata_1,
             )
             bucket_1_objects.append(copy_obj_path)
-            obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
+            obj_head = s3_object.head_object_s3(self.s3_client, bucket, copy_obj_path)
             assert obj_head.get("Metadata") == object_metadata_1, f"Metadata must be {object_metadata_1}"

     @allure.title("Test S3: Copy object with tagging")
@@ -181,27 +178,27 @@ def test_s3_copy_tagging(self, bucket, simple_object_size):
         file_name_simple = self.object_key_from_file_path(file_path)
         bucket_1_objects = [file_name_simple]

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name_simple, tags=object_tagging)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path)
+            s3_object.put_object_tagging(self.s3_client, bucket, file_name_simple, tags=object_tagging)
             bucket_1_objects = [file_name_simple]
             check_objects_in_bucket(self.s3_client, bucket, bucket_1_objects)

         with allure.step("Copy one object without tag"):
-            copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
-            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, copy_obj_path)
+            copy_obj_path = s3_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
+            got_tags = s3_object.get_object_tagging(self.s3_client, bucket, copy_obj_path)
             assert got_tags, f"Expected tags, got {got_tags}"
             expected_tags = [{"Key": key, "Value": value} for key, value in object_tagging]
             for tag in expected_tags:
                 assert tag in got_tags, f"Expected tag {tag} in {got_tags}"

         with allure.step("Copy one object with tag"):
-            copy_obj_path_1 = s3_gate_object.copy_object_s3(
+            copy_obj_path_1 = s3_object.copy_object_s3(
                 self.s3_client, bucket, file_name_simple, tagging_directive="COPY"
             )
-            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, copy_obj_path_1)
+            got_tags = s3_object.get_object_tagging(self.s3_client, bucket, copy_obj_path_1)
             assert got_tags, f"Expected tags, got {got_tags}"
             expected_tags = [{"Key": key, "Value": value} for key, value in object_tagging]
             for tag in expected_tags:
@@ -211,14 +208,14 @@ def test_s3_copy_tagging(self, bucket, simple_object_size):
             tag_key = "tag1"
             tag_value = uuid.uuid4()
             new_tag = f"{tag_key}={tag_value}"
-            copy_obj_path = s3_gate_object.copy_object_s3(
+            copy_obj_path = s3_object.copy_object_s3(
                 self.s3_client,
                 bucket,
                 file_name_simple,
                 tagging_directive="REPLACE",
                 tagging=new_tag,
             )
-            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, copy_obj_path)
+            got_tags = s3_object.get_object_tagging(self.s3_client, bucket, copy_obj_path)
             assert got_tags, f"Expected tags, got {got_tags}"
             expected_tags = [{"Key": tag_key, "Value": str(tag_value)}]
             for tag in expected_tags:
@@ -231,17 +228,17 @@ def test_s3_delete_versioning(self, bucket, complex_object_size, simple_object_s
         file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_name_simple)
             file_name_1 = generate_file_with_content(
                 simple_object_size, file_path=file_name_simple, content=version_2_content
             )
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Check bucket shows all versions"):
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == obj_key}
             assert obj_versions == {
                 version_id_1,
@@ -249,15 +246,15 @@ def test_s3_delete_versioning(self, bucket, complex_object_size, simple_object_s
             }, f"Expected object has versions: {version_id_1, version_id_2}"

         with allure.step("Delete 1 version of object"):
-            delete_obj = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_1)
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            delete_obj = s3_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_1)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == obj_key}
             assert obj_versions == {version_id_2}, f"Expected object has versions: {version_id_2}"
             assert "DeleteMarkers" not in delete_obj.keys(), "Delete markers should not be present"

         with allure.step("Delete second version of object"):
-            delete_obj = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_2)
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            delete_obj = s3_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_2)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == obj_key}
             assert not obj_versions, "Expected object not found"
             assert "DeleteMarkers" not in delete_obj.keys(), "Delete markers should not be present"
@@ -265,11 +262,11 @@ def test_s3_delete_versioning(self, bucket, complex_object_size, simple_object_s
         with allure.step("Put new object into bucket"):
             file_name_simple = generate_file(complex_object_size)
             obj_key = os.path.basename(file_name_simple)
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
+            s3_object.put_object_s3(self.s3_client, bucket, file_name_simple)

         with allure.step("Delete last object"):
-            delete_obj = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key)
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket, True)
+            delete_obj = s3_object.delete_object_s3(self.s3_client, bucket, obj_key)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket, True)
             assert versions.get("DeleteMarkers", None), "Expected delete marker"
             assert "DeleteMarker" in delete_obj.keys(), "Expected delete marker"

@@ -282,26 +279,26 @@ def test_s3_bulk_delete_versioning(self, bucket, simple_object_size):
         file_name_1 = generate_file_with_content(simple_object_size, content=version_1_content)
         obj_key = os.path.basename(file_name_1)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)
             file_name_2 = generate_file_with_content(
                 simple_object_size, file_path=file_name_1, content=version_2_content
             )
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_2)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_2)
             file_name_3 = generate_file_with_content(
                 simple_object_size, file_path=file_name_1, content=version_3_content
             )
-            version_id_3 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_3)
+            version_id_3 = s3_object.put_object_s3(self.s3_client, bucket, file_name_3)
             file_name_4 = generate_file_with_content(
                 simple_object_size, file_path=file_name_1, content=version_4_content
            )
-            version_id_4 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_4)
+            version_id_4 = s3_object.put_object_s3(self.s3_client, bucket, file_name_4)
             version_ids = {version_id_1, version_id_2, version_id_3, version_id_4}

         with allure.step("Check bucket shows all versions"):
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == obj_key}
             assert obj_versions == version_ids, f"Expected object has versions: {version_ids}"

@@ -309,10 +306,10 @@ def test_s3_bulk_delete_versioning(self, bucket, simple_object_size):
             version_to_delete_b1 = sample([version_id_1, version_id_2, version_id_3, version_id_4], k=2)
             version_to_save = list(set(version_ids) - set(version_to_delete_b1))
             for ver in version_to_delete_b1:
-                s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key, ver)
+                s3_object.delete_object_s3(self.s3_client, bucket, obj_key, ver)

         with allure.step("Check bucket shows all versions"):
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = [version.get("VersionId") for version in versions if version.get("Key") == obj_key]
             assert sorted(obj_versions) == sorted(version_to_save), f"Expected object has versions: {version_to_save}"

@@ -323,24 +320,24 @@ def test_s3_get_versioning(self, bucket, simple_object_size):
         file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_name_simple)
             file_name_1 = generate_file_with_content(
                 simple_object_size, file_path=file_name_simple, content=version_2_content
             )
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Get first version of object"):
-            object_1 = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key, version_id_1, full_output=True)
+            object_1 = s3_object.get_object_s3(self.s3_client, bucket, obj_key, version_id_1, full_output=True)
             assert object_1.get("VersionId") == version_id_1, f"Get object with version {version_id_1}"

         with allure.step("Get second version of object"):
-            object_2 = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key, version_id_2, full_output=True)
+            object_2 = s3_object.get_object_s3(self.s3_client, bucket, obj_key, version_id_2, full_output=True)
             assert object_2.get("VersionId") == version_id_2, f"Get object with version {version_id_2}"

         with allure.step("Get object"):
-            object_3 = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key, full_output=True)
+            object_3 = s3_object.get_object_s3(self.s3_client, bucket, obj_key, full_output=True)
             assert object_3.get("VersionId") == version_id_2, f"Get object with version {version_id_2}"

     @allure.title("Test S3: Get range")
@@ -348,28 +345,28 @@ def test_s3_get_range(self, bucket, complex_object_size: int, simple_object_size
         file_path = generate_file(complex_object_size)
         file_name = self.object_key_from_file_path(file_path)
         file_hash = get_file_hash(file_path)
-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_path)
             file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Get first version of object"):
-            object_1_part_1 = s3_gate_object.get_object_s3(
+            object_1_part_1 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 version_id_1,
                 range=[0, int(complex_object_size / 3)],
             )
-            object_1_part_2 = s3_gate_object.get_object_s3(
+            object_1_part_2 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 version_id_1,
                 range=[int(complex_object_size / 3) + 1, 2 * int(complex_object_size / 3)],
             )
-            object_1_part_3 = s3_gate_object.get_object_s3(
+            object_1_part_3 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
@@ -380,21 +377,21 @@ def test_s3_get_range(self, bucket, complex_object_size: int, simple_object_size
             assert get_file_hash(con_file) == file_hash, "Hashes must be the same"

         with allure.step("Get second version of object"):
-            object_2_part_1 = s3_gate_object.get_object_s3(
+            object_2_part_1 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 version_id_2,
                 range=[0, int(simple_object_size / 3)],
             )
-            object_2_part_2 = s3_gate_object.get_object_s3(
+            object_2_part_2 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 version_id_2,
                 range=[int(simple_object_size / 3) + 1, 2 * int(simple_object_size / 3)],
             )
-            object_2_part_3 = s3_gate_object.get_object_s3(
+            object_2_part_3 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
@@ -405,16 +402,16 @@ def test_s3_get_range(self, bucket, complex_object_size: int, simple_object_size
             assert get_file_hash(con_file_1) == get_file_hash(file_name_1), "Hashes must be the same"

         with allure.step("Get object"):
-            object_3_part_1 = s3_gate_object.get_object_s3(
+            object_3_part_1 = s3_object.get_object_s3(
                 self.s3_client, bucket, file_name, range=[0, int(simple_object_size / 3)]
             )
-            object_3_part_2 = s3_gate_object.get_object_s3(
+            object_3_part_2 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
                 range=[int(simple_object_size / 3) + 1, 2 * int(simple_object_size / 3)],
             )
-            object_3_part_3 = s3_gate_object.get_object_s3(
+            object_3_part_3 = s3_object.get_object_s3(
                 self.s3_client,
                 bucket,
                 file_name,
@@ -424,20 +421,19 @@ def test_s3_get_range(self, bucket, complex_object_size: int, simple_object_size
             assert get_file_hash(con_file) == get_file_hash(file_name_1), "Hashes must be the same"

     @allure.title("Test S3: Head object")
-    @pytest.mark.smoke
     def test_s3_head_object(self, bucket, complex_object_size, simple_object_size):
         object_metadata = {f"{uuid.uuid4()}": f"{uuid.uuid4()}"}
         file_path = generate_file(complex_object_size)
         file_name = self.object_key_from_file_path(file_path)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path, Metadata=object_metadata)
+            version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_path, Metadata=object_metadata)
             file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Get head of first version of object"):
-            response = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
+            response = s3_object.head_object_s3(self.s3_client, bucket, file_name)
             assert "LastModified" in response, "Expected LastModified field"
             assert "ETag" in response, "Expected ETag field"
             assert response.get("Metadata") == {}, "Expected Metadata empty"
@@ -445,7 +441,7 @@ def test_s3_head_object(self, bucket, complex_object_size, simple_object_size):
             assert response.get("ContentLength") != 0, "Expected ContentLength is not zero"

         with allure.step("Get head of first version of object"):
-            response = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name, version_id=version_id_1)
+            response = s3_object.head_object_s3(self.s3_client, bucket, file_name, version_id=version_id_1)
             assert "LastModified" in response, "Expected LastModified field"
             assert "ETag" in response, "Expected ETag field"
             assert response.get("Metadata") == object_metadata, f"Expected Metadata is {object_metadata}"
@@ -460,27 +456,27 @@ def test_s3_list_object(self, list_type: str, bucket, complex_object_size):
         file_path_2 = generate_file(complex_object_size)
         file_name_2 = self.object_key_from_file_path(file_path_2)

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1)
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_1)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_2)

         with allure.step("Get list of object"):
             if list_type == "v1":
-                list_obj = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+                list_obj = s3_object.list_objects_s3(self.s3_client, bucket)
             elif list_type == "v2":
-                list_obj = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket)
+                list_obj = s3_object.list_objects_s3_v2(self.s3_client, bucket)
             assert len(list_obj) == 2, "bucket must have 2 objects"
             assert (
                 sorted(list_obj) == sorted([file_name, file_name_2])
             ), f"bucket must have object keys {file_name, file_name_2}"

         with allure.step("Delete object"):
-            delete_obj = s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name)
+            delete_obj = s3_object.delete_object_s3(self.s3_client, bucket, file_name)
             if list_type == "v1":
-                list_obj_1 = s3_gate_object.list_objects_s3(self.s3_client, bucket, full_output=True)
+                list_obj_1 = s3_object.list_objects_s3(self.s3_client, bucket, full_output=True)
             elif list_type == "v2":
-                list_obj_1 = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket, full_output=True)
+                list_obj_1 = s3_object.list_objects_s3_v2(self.s3_client, bucket, full_output=True)
             contents = list_obj_1.get("Contents", [])
             assert len(contents) == 1, "bucket must have only 1 object"
             assert contents[0].get("Key") == file_name_2, f"bucket has object key {file_name_2}"

@@ -498,26 +494,26 @@ def test_s3_put_object(self, bucket, complex_object_size, simple_object_size):
         tag_key_2 = "tag2"
         tag_value_2 = uuid.uuid4()
         tag_2 = f"{tag_key_2}={tag_value_2}"
-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.SUSPENDED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.SUSPENDED)

         with allure.step("Put first object into bucket"):
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1, Metadata=object_1_metadata, Tagging=tag_1)
-            obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_1, Metadata=object_1_metadata, Tagging=tag_1)
+            obj_head = s3_object.head_object_s3(self.s3_client, bucket, file_name)
             assert obj_head.get("Metadata") == object_1_metadata, "Metadata must be the same"
-            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name)
+            got_tags = s3_object.get_object_tagging(self.s3_client, bucket, file_name)
             assert got_tags, f"Expected tags, got {got_tags}"
             assert got_tags == [{"Key": tag_key_1, "Value": str(tag_value_1)}], "Tags must be the same"

         with allure.step("Rewrite file into bucket"):
             file_path_2 = generate_file_with_content(simple_object_size, file_path=file_path_1)
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2, Metadata=object_2_metadata, Tagging=tag_2)
-            obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_2, Metadata=object_2_metadata, Tagging=tag_2)
+            obj_head = s3_object.head_object_s3(self.s3_client, bucket, file_name)
             assert obj_head.get("Metadata") == object_2_metadata, "Metadata must be the same"
-            got_tags_1 = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name)
+            got_tags_1 = s3_object.get_object_tagging(self.s3_client, bucket, file_name)
             assert got_tags_1, f"Expected tags, got {got_tags_1}"
             assert got_tags_1 == [{"Key": tag_key_2, "Value": str(tag_value_2)}], "Tags must be the same"

-        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED)

         file_path_3 = generate_file(complex_object_size)
         file_hash = get_file_hash(file_path_3)
@@ -528,41 +524,41 @@ def test_s3_put_object(self, bucket, complex_object_size, simple_object_size):
         tag_3 = f"{tag_key_3}={tag_value_3}"

         with allure.step("Put third object into bucket"):
-            version_id_1 = s3_gate_object.put_object_s3(
+            version_id_1 = s3_object.put_object_s3(
                 self.s3_client, bucket, file_path_3, Metadata=object_3_metadata, Tagging=tag_3
             )
-            obj_head_3 = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name_3)
+            obj_head_3 = s3_object.head_object_s3(self.s3_client, bucket, file_name_3)
             assert obj_head_3.get("Metadata") == object_3_metadata, "Metadata must be the same"
-            got_tags_3 = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name_3)
+            got_tags_3 = s3_object.get_object_tagging(self.s3_client, bucket, file_name_3)
            assert got_tags_3, f"Expected tags, got {got_tags_3}"
             assert got_tags_3 == [{"Key": tag_key_3, "Value": str(tag_value_3)}], "Tags must be the same"

         with allure.step("Put new version of file into bucket"):
             file_path_4 = generate_file_with_content(simple_object_size, file_path=file_path_3)
-            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_4)
-            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_path_4)
+            versions = s3_object.list_objects_versions_s3(self.s3_client, bucket)
             obj_versions = {version.get("VersionId") for version in versions if version.get("Key") == file_name_3}
             assert obj_versions == {
                 version_id_1,
                 version_id_2,
             }, f"Expected object has versions: {version_id_1, version_id_2}"
-            got_tags_4 = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name_3)
+            got_tags_4 = s3_object.get_object_tagging(self.s3_client, bucket, file_name_3)
             assert not got_tags_4, "Expected no tags"

         with allure.step("Get object"):
-            object_3 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_3, full_output=True)
+            object_3 = s3_object.get_object_s3(self.s3_client, bucket, file_name_3, full_output=True)
             assert object_3.get("VersionId") == version_id_2, f"get object with version {version_id_2}"
-            object_3 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_3)
+            object_3 = s3_object.get_object_s3(self.s3_client, bucket, file_name_3)
             assert get_file_hash(file_path_4) == get_file_hash(object_3), "Hashes must be the same"

         with allure.step("Get first version of object"):
-            object_4 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_3, version_id_1, full_output=True)
+            object_4 = s3_object.get_object_s3(self.s3_client, bucket, file_name_3, version_id_1, full_output=True)
             assert object_4.get("VersionId") == version_id_1, f"get object with version {version_id_1}"
-            object_4 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_3, version_id_1)
+            object_4 = s3_object.get_object_s3(self.s3_client, bucket, file_name_3, version_id_1)
             assert file_hash == get_file_hash(object_4), "Hashes must be the same"
-            obj_head_3 = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name_3, version_id_1)
+            obj_head_3 = s3_object.head_object_s3(self.s3_client, bucket, file_name_3, version_id_1)
             assert obj_head_3.get("Metadata") == object_3_metadata, "Metadata must be the same"
-            got_tags_3 = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name_3, version_id_1)
+            got_tags_3 = s3_object.get_object_tagging(self.s3_client, bucket, file_name_3, version_id_1)
             assert got_tags_3, f"Expected tags, got {got_tags_3}"
             assert got_tags_3 == [{"Key": tag_key_3, "Value": str(tag_value_3)}], "Tags must be the same"

@@ -588,44 +584,44 @@ def test_s3_put_object_acl(
         file_path_1 = generate_file(complex_object_size)
         file_name = self.object_key_from_file_path(file_path_1)
         if bucket_versioning == "ENABLED":
-            status = s3_gate_bucket.VersioningStatus.ENABLED
+            status = s3_bucket.VersioningStatus.ENABLED
         elif bucket_versioning == "SUSPENDED":
-            status = s3_gate_bucket.VersioningStatus.SUSPENDED
+            status = s3_bucket.VersioningStatus.SUSPENDED
         set_bucket_versioning(self.s3_client, bucket, status)

         with allure.step("Put object with acl private"):
             acl = "private"
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1, ACL=acl)
-            obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path_1, ACL=acl)
+            obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name)
verify_acls(obj_acl, ACLType.PRIVATE) - object_1 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name) + object_1 = s3_object.get_object_s3(self.s3_client, bucket, file_name) assert get_file_hash(file_path_1) == get_file_hash(object_1), "Hashes must be the same" with allure.step("Put object with acl public-read"): acl = "public-read" file_path_2 = generate_file_with_content(simple_object_size, file_path=file_path_1) - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2, ACL=acl) - obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name) + s3_object.put_object_s3(self.s3_client, bucket, file_path_2, ACL=acl) + obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name) verify_acls(obj_acl, ACLType.PUBLIC_READ) - object_2 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name) + object_2 = s3_object.get_object_s3(self.s3_client, bucket, file_name) assert get_file_hash(file_path_2) == get_file_hash(object_2), "Hashes must be the same" with allure.step("Put object with acl public-read-write"): acl = "public-read-write" file_path_3 = generate_file_with_content(simple_object_size, file_path=file_path_1) - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_3, ACL=acl) - obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name) + s3_object.put_object_s3(self.s3_client, bucket, file_path_3, ACL=acl) + obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name) verify_acls(obj_acl, ACLType.PUBLIC_READ_WRITE) - object_3 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name) + object_3 = s3_object.get_object_s3(self.s3_client, bucket, file_name) assert get_file_hash(file_path_3) == get_file_hash(object_3), "Hashes must be the same" with allure.step("Put object with acl authenticated-read"): acl = "authenticated-read" file_path_4 = generate_file_with_content(simple_object_size, file_path=file_path_1) - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_4, ACL=acl) - obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name) + s3_object.put_object_s3(self.s3_client, bucket, file_path_4, ACL=acl) + obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name) verify_acls(obj_acl, ACLType.PUBLIC_READ) - object_4 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name) + object_4 = s3_object.get_object_s3(self.s3_client, bucket, file_name) assert get_file_hash(file_path_4) == get_file_hash(object_4), "Hashes must be the same" file_path_5 = generate_file(complex_object_size) @@ -633,40 +629,40 @@ def test_s3_put_object_acl( with allure.step("Put object with --grant-full-control id=mycanonicaluserid"): file_path_6 = generate_file_with_content(simple_object_size, file_path=file_path_5) - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_6, GrantFullControl=f"id={self.other_wallet.address}", ) - obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name_5) + obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name_5) verify_acls(obj_acl, ACLType.PRIVATE) - object_4 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_5) + object_4 = s3_object.get_object_s3(self.s3_client, bucket, file_name_5) assert get_file_hash(file_path_5) == get_file_hash(object_4), "Hashes must be the same" with allure.step("Put object with --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers"): file_path_7 = generate_file_with_content(simple_object_size, 
file_path=file_path_5) - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_7, GrantRead="uri=http://acs.amazonaws.com/groups/global/AllUsers", ) - obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name_5) + obj_acl = s3_object.get_object_acl_s3(self.s3_client, bucket, file_name_5) verify_acls(obj_acl, ACLType.PUBLIC_READ) - object_7 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name_5) + object_7 = s3_object.get_object_s3(self.s3_client, bucket, file_name_5) assert get_file_hash(file_path_7) == get_file_hash(object_7), "Hashes must be the same" @allure.title("Test S3: put object with lock-mode") def test_s3_put_object_lock_mode(self, complex_object_size, simple_object_size): file_path_1 = generate_file(complex_object_size) file_name = self.object_key_from_file_path(file_path_1) - bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True) - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) + bucket = s3_bucket.create_bucket_s3(self.s3_client, True) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED) with allure.step("Put object with lock-mode GOVERNANCE lock-retain-until-date +1day, lock-legal-hold-status"): date_obj = datetime.utcnow() + timedelta(days=1) - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_1, @@ -681,7 +677,7 @@ def test_s3_put_object_lock_mode(self, complex_object_size, simple_object_size): ): date_obj = datetime.utcnow() + timedelta(days=2) generate_file_with_content(simple_object_size, file_path=file_path_1) - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_1, @@ -695,7 +691,7 @@ def test_s3_put_object_lock_mode(self, complex_object_size, simple_object_size): ): date_obj = datetime.utcnow() + timedelta(days=3) generate_file_with_content(simple_object_size, file_path=file_path_1) - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_1, @@ -711,7 +707,7 @@ def test_s3_put_object_lock_mode(self, complex_object_size, simple_object_size): match=r".*must both be supplied*", ): # x-amz-object-lock-retain-until-date and x-amz-object-lock-mode must both be supplied - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1, ObjectLockMode="COMPLIANCE") + s3_object.put_object_s3(self.s3_client, bucket, file_path_1, ObjectLockMode="COMPLIANCE") with allure.step("Put object with lock-mode and past date"): date_obj = datetime.utcnow() - timedelta(days=3) @@ -720,7 +716,7 @@ def test_s3_put_object_lock_mode(self, complex_object_size, simple_object_size): match=r".*until date must be in the future*", ): # The retain until date must be in the future - s3_gate_object.put_object_s3( + s3_object.put_object_s3( self.s3_client, bucket, file_path_1, @@ -739,7 +735,7 @@ def test_s3_sync_dir(self, sync_type, bucket, simple_object_size): generate_file_with_content(simple_object_size, file_path=file_path_1) generate_file_with_content(simple_object_size, file_path=file_path_2) - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED) # TODO: return ACL, when https://github.com/nspcc-dev/neofs-s3-gw/issues/685 will be closed if sync_type == "sync": self.s3_client.sync( @@ -757,16 +753,16 @@ def test_s3_sync_dir(self, sync_type, bucket, simple_object_size): ) with allure.step("Check objects are 
synced"): - objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) + objects = s3_object.list_objects_s3(self.s3_client, bucket) assert set(key_to_path.keys()) == set(objects), f"Expected all abjects saved. Got {objects}" with allure.step("Check these are the same objects"): for obj_key in objects: - got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key) + got_object = s3_object.get_object_s3(self.s3_client, bucket, obj_key) assert get_file_hash(got_object) == get_file_hash( key_to_path.get(obj_key) ), "Expected hashes are the same" - obj_head = s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key) + obj_head = s3_object.head_object_s3(self.s3_client, bucket, obj_key) assert obj_head.get("Metadata") == object_metadata, f"Metadata of object is {object_metadata}" # Uncomment after https://github.com/nspcc-dev/neofs-s3-gw/issues/685 is solved # obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, obj_key) @@ -778,9 +774,9 @@ def test_s3_put_10_folder(self, bucket, temp_directory, simple_object_size): file_path_1 = os.path.join(temp_directory, path, "test_file_1") generate_file_with_content(simple_object_size, file_path=file_path_1) file_name = self.object_key_from_file_path(file_path_1) - objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + objects_list = s3_object.list_objects_s3(self.s3_client, bucket) assert not objects_list, f"Expected empty bucket, got {objects_list}" with allure.step("Put object"): - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1) + s3_object.put_object_s3(self.s3_client, bucket, file_path_1) check_objects_in_bucket(self.s3_client, bucket, [file_name]) diff --git a/pytest_tests/tests/s3/test_s3_policy.py b/pytest_tests/tests/s3/test_s3_policy.py new file mode 100644 index 000000000..fb28bbc55 --- /dev/null +++ b/pytest_tests/tests/s3/test_s3_policy.py @@ -0,0 +1,49 @@ +import os + +import allure +import pytest +from helpers.s3_helper import set_bucket_versioning +from s3 import s3_bucket +from s3.s3_base import TestNeofsS3Base + + +def pytest_generate_tests(metafunc): + policy = f"{os.getcwd()}/pytest_tests/data/policy.json" + if "s3_client" in metafunc.fixturenames: + metafunc.parametrize( + "s3_client", + [("aws cli", policy), ("boto3", policy)], + indirect=True, + ids=["aws cli", "boto3"], + ) + + +class TestS3Policy(TestNeofsS3Base): + @allure.title("Test S3: bucket policy ") + def test_s3_bucket_policy(self): + with allure.step("Create bucket with default policy"): + bucket = s3_bucket.create_bucket_s3(self.s3_client) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED) + + with allure.step("GetBucketPolicy"): + s3_bucket.get_bucket_policy(self.s3_client, bucket) + + with allure.step("Put new policy"): + custom_policy = { + "Version": "2008-10-17", + "Id": "aaaa-bbbb-cccc-dddd", + "Statement": [ + { + "Sid": "AddPerm", + "Effect": "Allow", + "Principal": {"AWS": "*"}, + "Action": ["s3:GetObject"], + "Resource": [f"arn:aws:s3:::{bucket}/*"], + } + ], + } + + s3_bucket.put_bucket_policy(self.s3_client, bucket, custom_policy) + with allure.step("GetBucketPolicy"): + policy_1 = s3_bucket.get_bucket_policy(self.s3_client, bucket) + print(policy_1) diff --git a/pytest_tests/tests/services/s3_gate/test_s3_tagging.py b/pytest_tests/tests/s3/test_s3_tagging.py similarity index 71% rename from pytest_tests/tests/services/s3_gate/test_s3_tagging.py rename to pytest_tests/tests/s3/test_s3_tagging.py index ee4a793d3..c80bc11de 100644 --- 
+++ b/pytest_tests/tests/s3/test_s3_tagging.py
@@ -11,8 +11,8 @@
     check_tags_by_object,
     object_key_from_file_path,
 )
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -20,9 +20,7 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
 
 
-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_tagging
-class TestS3GateTagging(TestNeofsS3GateBase):
+class TestS3Tagging(TestNeofsS3Base):
     @staticmethod
     def create_tags(count: int) -> Tuple[list, list]:
         tags = []
@@ -39,74 +37,74 @@ def test_s3_object_tagging(self, bucket, simple_object_size):
 
         with allure.step("Put object with tag into bucket"):
             tag_1 = "Tag1=Value1"
-            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path, Tagging=tag_1)
-            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name)
+            s3_object.put_object_s3(self.s3_client, bucket, file_path, Tagging=tag_1)
+            got_tags = s3_object.get_object_tagging(self.s3_client, bucket, file_name)
             assert got_tags, f"Expected tags, got {got_tags}"
             assert got_tags == [{"Key": "Tag1", "Value": "Value1"}], "Tags must be the same"
 
         with allure.step("Put 10 new tags for object"):
             tags_2 = self.create_tags(10)
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_2)
+            s3_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_2)
             check_tags_by_object(self.s3_client, bucket, file_name, tags_2, [("Tag1", "Value1")])
 
         with allure.step("Put 10 extra new tags for object"):
             tags_3 = self.create_tags(10)
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_3)
+            s3_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_3)
             check_tags_by_object(self.s3_client, bucket, file_name, tags_3, tags_2)
 
         with allure.step("Copy one object with tag"):
-            copy_obj_path_1 = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name, tagging_directive="COPY")
+            copy_obj_path_1 = s3_object.copy_object_s3(self.s3_client, bucket, file_name, tagging_directive="COPY")
             check_tags_by_object(self.s3_client, bucket, copy_obj_path_1, tags_3, tags_2)
 
         with allure.step("Put 11 new tags to object and expect an error"):
             tags_4 = self.create_tags(11)
             with pytest.raises(Exception, match=r".*Object tags cannot be greater than 10*"):
                 # An error occurred (BadRequest) when calling the PutObjectTagging operation: Object tags cannot be greater than 10
-                s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_4)
+                s3_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_4)
 
         with allure.step("Put empty tag"):
             tags_5 = []
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_5)
+            s3_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_5)
             check_tags_by_object(self.s3_client, bucket, file_name, [])
 
         with allure.step("Put 10 object tags"):
             tags_6 = self.create_tags(10)
-            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_6)
+            s3_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_6)
             check_tags_by_object(self.s3_client, bucket, file_name, tags_6)
 
         with allure.step("Delete tags by delete-object-tagging"):
-            s3_gate_object.delete_object_tagging(self.s3_client, bucket, file_name)
+            s3_object.delete_object_tagging(self.s3_client, bucket, file_name)
             check_tags_by_object(self.s3_client, bucket, file_name, [])
 
     @allure.title("Test S3: bucket tagging")
     def test_s3_bucket_tagging(self, bucket):
         with allure.step("Put 10 bucket tags"):
             tags_1 = self.create_tags(10)
-            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_1)
+            s3_bucket.put_bucket_tagging(self.s3_client, bucket, tags_1)
             check_tags_by_bucket(self.s3_client, bucket, tags_1)
 
         with allure.step("Put 10 new bucket tags"):
             tags_2 = self.create_tags(10)
-            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_2)
+            s3_bucket.put_bucket_tagging(self.s3_client, bucket, tags_2)
             check_tags_by_bucket(self.s3_client, bucket, tags_2, tags_1)
 
         with allure.step("Put 11 new tags to bucket and expect an error"):
             tags_3 = self.create_tags(11)
             with pytest.raises(Exception, match=r".*Object tags cannot be greater than 10.*"):
                 # An error occurred (BadRequest) when calling the PutBucketTagging operation (reached max retries: 0): Object tags cannot be greater than 10
-                s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_3)
+                s3_bucket.put_bucket_tagging(self.s3_client, bucket, tags_3)
 
         with allure.step("Put empty tag"):
             tags_4 = []
-            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_4)
+            s3_bucket.put_bucket_tagging(self.s3_client, bucket, tags_4)
             check_tags_by_bucket(self.s3_client, bucket, tags_4)
 
         with allure.step("Put 10 new bucket tags"):
             tags_5 = self.create_tags(10)
-            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_5)
+            s3_bucket.put_bucket_tagging(self.s3_client, bucket, tags_5)
             check_tags_by_bucket(self.s3_client, bucket, tags_5, tags_2)
 
         with allure.step("Delete tags by delete-bucket-tagging"):
-            s3_gate_bucket.delete_bucket_tagging(self.s3_client, bucket)
+            s3_bucket.delete_bucket_tagging(self.s3_client, bucket)
             with pytest.raises(Exception, match=NO_SUCH_TAGS_ERROR):
                 check_tags_by_bucket(self.s3_client, bucket, [])
diff --git a/pytest_tests/tests/services/s3_gate/test_s3_versioning.py b/pytest_tests/tests/s3/test_s3_versioning.py
similarity index 63%
rename from pytest_tests/tests/services/s3_gate/test_s3_versioning.py
rename to pytest_tests/tests/s3/test_s3_versioning.py
index 00860bda0..d984ca316 100644
--- a/pytest_tests/tests/services/s3_gate/test_s3_versioning.py
+++ b/pytest_tests/tests/s3/test_s3_versioning.py
@@ -4,8 +4,8 @@
 import pytest
 from helpers.file_helper import generate_file, generate_file_with_content
 from helpers.s3_helper import set_bucket_versioning
-from s3 import s3_gate_bucket, s3_gate_object
-from s3.s3_gate_base import TestNeofsS3GateBase
+from s3 import s3_bucket, s3_object
+from s3.s3_base import TestNeofsS3Base
 
 
 def pytest_generate_tests(metafunc):
@@ -13,65 +13,63 @@ def pytest_generate_tests(metafunc):
     metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
 
 
-@pytest.mark.s3_gate
-@pytest.mark.s3_gate_versioning
-class TestS3GateVersioning(TestNeofsS3GateBase):
+class TestS3Versioning(TestNeofsS3Base):
     @staticmethod
     def object_key_from_file_path(full_path: str) -> str:
         return os.path.basename(full_path)
 
     @allure.title("Test S3: try to disable versioning")
     def test_s3_version_off(self):
-        bucket = s3_gate_bucket.create_bucket_s3(
+        bucket = s3_bucket.create_bucket_s3(
             self.s3_client, object_lock_enabled_for_bucket=True, bucket_configuration="rep-1"
         )
         with pytest.raises(Exception):
-            set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.SUSPENDED)
+            set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.SUSPENDED)
 
     @allure.title("Test S3: Enable and disable versioning")
versioning") def test_s3_version(self, simple_object_size): file_path = generate_file(simple_object_size) file_name = self.object_key_from_file_path(file_path) bucket_objects = [file_name] - bucket = s3_gate_bucket.create_bucket_s3( + bucket = s3_bucket.create_bucket_s3( self.s3_client, object_lock_enabled_for_bucket=False, bucket_configuration="rep-1" ) - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.SUSPENDED) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.SUSPENDED) with allure.step("Put object into bucket"): - s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) - objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + s3_object.put_object_s3(self.s3_client, bucket, file_path) + objects_list = s3_object.list_objects_s3(self.s3_client, bucket) assert objects_list == bucket_objects, f"Expected list with single objects in bucket, got {objects_list}" - object_version = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket) + object_version = s3_object.list_objects_versions_s3(self.s3_client, bucket) actual_version = [version.get("VersionId") for version in object_version if version.get("Key") == file_name] assert actual_version == ["null"], f"Expected version is null in list-object-versions, got {object_version}" - object_0 = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name) + object_0 = s3_object.head_object_s3(self.s3_client, bucket, file_name) assert ( object_0.get("VersionId") == "null" ), f"Expected version is null in head-object, got {object_0.get('VersionId')}" - set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) + set_bucket_versioning(self.s3_client, bucket, s3_bucket.VersioningStatus.ENABLED) with allure.step("Put several versions of object into bucket"): - version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + version_id_1 = s3_object.put_object_s3(self.s3_client, bucket, file_path) file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path) - version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1) + version_id_2 = s3_object.put_object_s3(self.s3_client, bucket, file_name_1) with allure.step("Check bucket shows all versions"): - versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket) + versions = s3_object.list_objects_versions_s3(self.s3_client, bucket) obj_versions = [version.get("VersionId") for version in versions if version.get("Key") == file_name] assert ( obj_versions.sort() == [version_id_1, version_id_2, "null"].sort() ), f"Expected object has versions: {version_id_1, version_id_2, 'null'}" with allure.step("Get object"): - object_1 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name, full_output=True) + object_1 = s3_object.get_object_s3(self.s3_client, bucket, file_name, full_output=True) assert object_1.get("VersionId") == version_id_2, f"Get object with version {version_id_2}" with allure.step("Get first version of object"): - object_2 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name, version_id_1, full_output=True) + object_2 = s3_object.get_object_s3(self.s3_client, bucket, file_name, version_id_1, full_output=True) assert object_2.get("VersionId") == version_id_1, f"Get object with version {version_id_1}" with allure.step("Get second version of object"): - object_3 = s3_gate_object.get_object_s3(self.s3_client, bucket, file_name, version_id_2, full_output=True) + object_3 = 
+            object_3 = s3_object.get_object_s3(self.s3_client, bucket, file_name, version_id_2, full_output=True)
             assert object_3.get("VersionId") == version_id_2, f"Get object with version {version_id_2}"
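
Reviewer note: the lock-mode cases in test_s3_put_object_lock_mode exercise S3 Object Lock through the put_object_s3 helper. A minimal boto3 sketch of the same request, assuming a local gateway endpoint and placeholder credentials (both illustrative, not part of this change):

    import boto3
    from datetime import datetime, timedelta, timezone

    # Endpoint and keys are placeholders for whatever the test environment provides.
    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:8080",
        aws_access_key_id="<access-key>",
        aws_secret_access_key="<secret-key>",
    )

    # Object Lock only works on buckets created with it enabled.
    s3.create_bucket(Bucket="lock-bucket", ObjectLockEnabledForBucket=True)

    # Mode and retain-until date must be supplied together, and the date must lie
    # in the future -- the two pytest.raises cases above assert exactly that.
    s3.put_object(
        Bucket="lock-bucket",
        Key="locked-object",
        Body=b"payload",
        ObjectLockMode="GOVERNANCE",
        ObjectLockRetainUntilDate=datetime.now(timezone.utc) + timedelta(days=1),
        ObjectLockLegalHoldStatus="ON",
    )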
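The sync/cp parametrization in test_s3_sync_dir drives the suite's AWS CLI wrapper; stripped of the wrapper, the invocation is roughly the following (endpoint and paths are placeholders):

    import subprocess

    # "aws s3 sync" uploads a directory tree; the "cp" branch needs --recursive.
    subprocess.run(
        ["aws", "--endpoint-url", "http://localhost:8080",
         "s3", "sync", "/tmp/sync-dir", "s3://sync-bucket"],
        check=True,
    )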
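test_s3_bucket_policy now asserts that a policy comes back after PutBucketPolicy. Against plain boto3 the round trip looks like the sketch below (reusing the client from the first sketch; bucket name illustrative). Note that put_bucket_policy takes the document as a JSON string and get_bucket_policy returns one under the "Policy" key:

    import json

    policy = {
        "Version": "2008-10-17",
        "Statement": [
            {
                "Sid": "AddPerm",
                "Effect": "Allow",
                "Principal": {"AWS": "*"},
                "Action": ["s3:GetObject"],
                "Resource": ["arn:aws:s3:::example-bucket/*"],
            }
        ],
    }
    s3.put_bucket_policy(Bucket="example-bucket", Policy=json.dumps(policy))

    # The stored policy comes back as a JSON string; parse it to compare fields.
    stored = json.loads(s3.get_bucket_policy(Bucket="example-bucket")["Policy"])
    assert stored["Statement"][0]["Sid"] == "AddPerm"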
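The 10-tag ceiling asserted in both tagging tests matches stock S3's limit of 10 tags per object (the gateway applies the same cap to bucket tags); an 11th tag is rejected with BadRequest, which the tests match on. At the boto3 level the helpers boil down to TagSet structures (client and names reused from the sketches above):

    tag_set = [{"Key": f"key-{i}", "Value": f"value-{i}"} for i in range(10)]
    s3.put_object_tagging(
        Bucket="example-bucket",
        Key="tagged-object",
        Tagging={"TagSet": tag_set},
    )
    got = s3.get_object_tagging(Bucket="example-bucket", Key="tagged-object")["TagSet"]
    assert len(got) == 10, f"Expected 10 tags, got {len(got)}"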
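One behavioral detail test_s3_version leans on: objects written while versioning is suspended carry the literal version ID "null", which then coexists with real IDs once versioning is enabled. This is also why the comparison fix above matters: list.sort() sorts in place and returns None, so the old obj_versions.sort() == [...].sort() assertion compared None with None and always passed, whereas sorted() returns the sorted list. An illustrative boto3 fragment, same placeholder client and names:

    s3.put_bucket_versioning(
        Bucket="example-bucket",
        VersioningConfiguration={"Status": "Enabled"},
    )
    versions = s3.list_object_versions(Bucket="example-bucket").get("Versions", [])
    # A write that predates versioning surfaces with the literal ID "null";
    # sorted(...) returns a new list, unlike list.sort(), so it is safe to compare.
    ids = sorted(v["VersionId"] for v in versions if v["Key"] == "some-object")
    print(ids)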