From 9304aefad781bd3a8ccf02f8b52c5a73222acaf6 Mon Sep 17 00:00:00 2001 From: Oleg Kulachenko Date: Sat, 30 Dec 2023 00:21:53 +0400 Subject: [PATCH] Migrate part of s3 tests to dynamic env Signed-off-by: Oleg Kulachenko --- .../tests/services/s3_gate/test_s3_bucket.py | 165 ++++++ .../tests/services/s3_gate/test_s3_gate.py | 559 ++++++++++++++++++ .../tests/services/s3_gate/test_s3_locking.py | 228 +++++++ .../services/s3_gate/test_s3_multipart.py | 131 ++++ .../tests/services/s3_gate/test_s3_tagging.py | 112 ++++ .../services/s3_gate/test_s3_versioning.py | 97 +++ 6 files changed, 1292 insertions(+) create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_bucket.py create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_gate.py create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_locking.py create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_multipart.py create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_tagging.py create mode 100644 dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_versioning.py diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_bucket.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_bucket.py new file mode 100644 index 000000000..1b30e4252 --- /dev/null +++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_bucket.py @@ -0,0 +1,165 @@ +import allure +import pytest +from file_helper import generate_file +from s3_helper import ( + assert_object_lock_mode, + check_objects_in_bucket, + object_key_from_file_path, + assert_bucket_s3_acl, +) + +from datetime import datetime, timedelta +from pytest_tests.steps import s3_gate_bucket, s3_gate_object +from s3.s3_gate_base import TestNeofsS3GateBase + + +def pytest_generate_tests(metafunc): + if "s3_client" in metafunc.fixturenames: + metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True) + + +@pytest.mark.s3_gate +@pytest.mark.s3_gate_bucket +class TestS3GateBucket(TestNeofsS3GateBase): + @pytest.mark.acl + @pytest.mark.sanity + @allure.title("Test S3: Create Bucket with different ACL") + def test_s3_create_bucket_with_ACL(self): + + with allure.step("Create bucket with ACL private"): + acl="private" + bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, acl=acl, bucket_configuration="rep-1") + bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket) + assert_bucket_s3_acl( + acl_grants=bucket_acl, permitted_users="CanonicalUser", acl=acl + ) + + with allure.step("Create bucket with ACL = public-read"): + acl="public-read" + bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, True, acl=acl, bucket_configuration="rep-1") + bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1) + assert_bucket_s3_acl( + acl_grants=bucket_acl_1, permitted_users="AllUsers", acl=acl + ) + + with allure.step("Create bucket with ACL public-read-write"): + acl="public-read-write" + bucket_2 = s3_gate_bucket.create_bucket_s3( + self.s3_client, True, acl=acl, bucket_configuration="rep-1" + ) + bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2) + assert_bucket_s3_acl( + acl_grants=bucket_acl_2, permitted_users="AllUsers", acl=acl + ) + + with allure.step("Create bucket with ACL = authenticated-read"): + acl="authenticated-read" + bucket_3 = s3_gate_bucket.create_bucket_s3( + self.s3_client, True, acl=acl, bucket_configuration="rep-1" + ) + bucket_acl_3 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_3) + assert_bucket_s3_acl( + 
acl_grants=bucket_acl_3, permitted_users="AllUsers", acl=acl
+            )
+
+    @pytest.mark.acl
+    @allure.title("Test S3: Create Bucket with different ACL by grant")
+    def test_s3_create_bucket_with_grants(self):
+
+        with allure.step("Create bucket with --grant-read"):
+            bucket = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_read="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+                bucket_configuration="rep-1",
+            )
+            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            assert_bucket_s3_acl(
+                acl_grants=bucket_acl, permitted_users="AllUsers", acl="grant-read"
+            )
+
+        with allure.step("Create bucket with --grant-write"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_write="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+                bucket_configuration="rep-1",
+            )
+            bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1)
+            assert_bucket_s3_acl(
+                acl_grants=bucket_acl_1, permitted_users="AllUsers", acl="grant-write"
+            )
+
+        with allure.step("Create bucket with --grant-full-control"):
+            bucket_2 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_full_control="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+                bucket_configuration="rep-1",
+            )
+            bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2)
+            assert_bucket_s3_acl(
+                acl_grants=bucket_acl_2, permitted_users="AllUsers", acl="grant-full-control"
+            )
+
+    @allure.title("Test S3: create bucket with object lock")
+    def test_s3_bucket_object_lock(self, simple_object_size):
+        file_path = generate_file(simple_object_size)
+        file_name = object_key_from_file_path(file_path)
+
+        with allure.step("Create bucket with --no-object-lock-enabled-for-bucket"):
+            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, False, bucket_configuration="rep-1")
+            date_obj = datetime.utcnow() + timedelta(days=1)
+            with pytest.raises(
+                Exception, match=r".*Object Lock configuration does not exist for this bucket.*"
+            ):
+                # An error occurred (ObjectLockConfigurationNotFoundError) when calling the PutObject operation (reached max retries: 0):
+                # Object Lock configuration does not exist for this bucket
+                s3_gate_object.put_object_s3(
+                    self.s3_client,
+                    bucket,
+                    file_path,
+                    ObjectLockMode="COMPLIANCE",
+                    ObjectLockRetainUntilDate=date_obj.strftime("%Y-%m-%dT%H:%M:%S"),
+                )
+        with allure.step("Create bucket with --object-lock-enabled-for-bucket"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1")
+            date_obj_1 = datetime.utcnow() + timedelta(days=1)
+            s3_gate_object.put_object_s3(
+                self.s3_client,
+                bucket_1,
+                file_path,
+                ObjectLockMode="COMPLIANCE",
+                ObjectLockRetainUntilDate=date_obj_1.strftime("%Y-%m-%dT%H:%M:%S"),
+                ObjectLockLegalHoldStatus="ON",
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket_1, file_name, "COMPLIANCE", date_obj_1, "ON"
+            )
+
+    @allure.title("Test S3: delete bucket")
+    def test_s3_delete_bucket(self, simple_object_size):
+        file_path_1 = generate_file(simple_object_size)
+        file_name_1 = object_key_from_file_path(file_path_1)
+        file_path_2 = generate_file(simple_object_size)
+        file_name_2 = object_key_from_file_path(file_path_2)
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+
+        with allure.step("Put two objects into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1)
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2)
+            check_objects_in_bucket(self.s3_client, bucket, [file_name_1, file_name_2])
+
+        with allure.step("Try to delete not empty bucket and get error"):
+            with pytest.raises(Exception, match=r".*The bucket you tried to delete is not empty.*"):
+                s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+
+        with allure.step("Delete object in bucket"):
+            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_1)
+            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_2)
+            check_objects_in_bucket(self.s3_client, bucket, [])
+
+        with allure.step("Delete empty bucket"):
+            s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+            with pytest.raises(Exception, match=r".*Not Found.*"):
+                s3_gate_bucket.head_bucket(self.s3_client, bucket)
diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_gate.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_gate.py
new file mode 100644
index 000000000..4dad825e2
--- /dev/null
+++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_gate.py
@@ -0,0 +1,559 @@
+import logging
+import os
+from random import choice, choices
+
+import allure
+import pytest
+from aws_cli_client import AwsCliClient
+from common import ASSETS_DIR
+from file_helper import (
+    generate_file,
+    generate_file_with_content,
+    get_file_content,
+    get_file_hash,
+    split_file,
+)
+from s3_helper import (
+    check_objects_in_bucket,
+    check_tags_by_bucket,
+    check_tags_by_object,
+    set_bucket_versioning,
+    try_to_get_objects_and_expect_error,
+    parametrize_clients,
+)
+
+from pytest_tests.steps import s3_gate_bucket, s3_gate_object
+from s3.s3_gate_base import TestNeofsS3GateBase
+
+logger = logging.getLogger("NeoLogger")
+
+
+def pytest_generate_tests(metafunc):
+    parametrize_clients(metafunc)
+
+
+@allure.link("https://github.com/nspcc-dev/neofs-s3-gw#neofs-s3-gateway", name="neofs-s3-gateway")
+@pytest.mark.s3_gate
+@pytest.mark.s3_gate_base
+class TestS3Gate(TestNeofsS3GateBase):
+    @allure.title("Test S3 Bucket API")
+    def test_s3_buckets(self, simple_object_size):
+        """
+        Test base S3 Bucket API (Create/List/Head/Delete).
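+
+        For reference, the equivalent flow against a bare boto3 client looks
+        roughly like this (illustrative sketch only; the test itself goes
+        through the s3_gate_bucket helpers, and the endpoint is an assumption):
+
+            import boto3
+
+            client = boto3.client("s3", endpoint_url="http://<s3-gate>:8080")
+            client.create_bucket(Bucket="demo")
+            client.head_bucket(Bucket="demo")
+            names = [b["Name"] for b in client.list_buckets()["Buckets"]]
+            client.delete_bucket(Bucket="demo")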
+ """ + + file_path = generate_file(simple_object_size) + file_name = self.object_key_from_file_path(file_path) + + with allure.step("Create buckets"): + bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1") + set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED) + bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1") + + with allure.step("Check buckets are presented in the system"): + buckets = s3_gate_bucket.list_buckets_s3(self.s3_client) + assert bucket_1 in buckets, f"Expected bucket {bucket_1} is in the list" + assert bucket_2 in buckets, f"Expected bucket {bucket_2} is in the list" + + with allure.step("Bucket must be empty"): + for bucket in (bucket_1, bucket_2): + objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + assert not objects_list, f"Expected empty bucket, got {objects_list}" + + with allure.step("Check buckets are visible with S3 head command"): + s3_gate_bucket.head_bucket(self.s3_client, bucket_1) + s3_gate_bucket.head_bucket(self.s3_client, bucket_2) + + with allure.step("Check we can put/list object with S3 commands"): + version_id = s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path) + s3_gate_object.head_object_s3(self.s3_client, bucket_1, file_name) + + bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket_1) + assert ( + file_name in bucket_objects + ), f"Expected file {file_name} in objects list {bucket_objects}" + + with allure.step("Try to delete not empty bucket and get error"): + with pytest.raises(Exception, match=r".*The bucket you tried to delete is not empty.*"): + s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_1) + + s3_gate_bucket.head_bucket(self.s3_client, bucket_1) + + with allure.step(f"Delete empty bucket {bucket_2}"): + s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_2) + self.tick_epochs_and_wait(1) + + with allure.step(f"Check bucket {bucket_2} deleted"): + with pytest.raises(Exception, match=r".*Not Found.*"): + s3_gate_bucket.head_bucket(self.s3_client, bucket_2) + + buckets = s3_gate_bucket.list_buckets_s3(self.s3_client) + assert bucket_1 in buckets, f"Expected bucket {bucket_1} is in the list" + assert bucket_2 not in buckets, f"Expected bucket {bucket_2} is not in the list" + + with allure.step(f"Delete object from {bucket_1}"): + s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name, version_id) + check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=[]) + + with allure.step(f"Delete bucket {bucket_1}"): + s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_1) + self.tick_epochs_and_wait(1) + + with allure.step(f"Check bucket {bucket_1} deleted"): + with pytest.raises(Exception, match=r".*Not Found.*"): + s3_gate_bucket.head_bucket(self.s3_client, bucket_1) + + @allure.title("Test S3 Object API") + @pytest.mark.parametrize( + "file_type", ["simple", "large"], ids=["Simple object", "Large object"] + ) + def test_s3_api_object(self, file_type, two_buckets, simple_object_size, complex_object_size): + """ + Test base S3 Object API (Put/Head/List) for simple and large objects. 
+ """ + file_path = generate_file( + simple_object_size if file_type == "simple" else complex_object_size + ) + file_name = self.object_key_from_file_path(file_path) + + bucket_1, bucket_2 = two_buckets + + for bucket in (bucket_1, bucket_2): + with allure.step("Bucket must be empty"): + objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + assert not objects_list, f"Expected empty bucket, got {objects_list}" + + s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + s3_gate_object.head_object_s3(self.s3_client, bucket, file_name) + + bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) + assert ( + file_name in bucket_objects + ), f"Expected file {file_name} in objects list {bucket_objects}" + + with allure.step("Check object's attributes"): + for attrs in (["ETag"], ["ObjectSize", "StorageClass"]): + s3_gate_object.get_object_attributes(self.s3_client, bucket, file_name, *attrs) + + @allure.title("Test S3 Sync directory") + @pytest.mark.aws_cli_only + def test_s3_sync_dir(self, bucket, simple_object_size): + """ + Test checks sync directory with AWS CLI utility. + """ + file_path_1 = os.path.join(os.getcwd(), ASSETS_DIR, "test_sync", "test_file_1") + file_path_2 = os.path.join(os.getcwd(), ASSETS_DIR, "test_sync", "test_file_2") + key_to_path = {"test_file_1": file_path_1, "test_file_2": file_path_2} + + + generate_file_with_content(simple_object_size, file_path=file_path_1) + generate_file_with_content(simple_object_size, file_path=file_path_2) + + self.s3_client.sync(bucket_name=bucket, dir_path=os.path.dirname(file_path_1)) + + with allure.step("Check objects are synced"): + objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) + + with allure.step("Check these are the same objects"): + assert set(key_to_path.keys()) == set( + objects + ), f"Expected all objects saved. Got {objects}" + for obj_key in objects: + got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key) + assert get_file_hash(got_object) == get_file_hash( + key_to_path.get(obj_key) + ), "Expected hashes are the same" + + @allure.title("Test S3 Object versioning") + def test_s3_api_versioning(self, bucket, simple_object_size): + """ + Test checks basic versioning functionality for S3 bucket. 
+ """ + version_1_content = "Version 1" + version_2_content = "Version 2" + file_name_simple = generate_file_with_content(simple_object_size, content=version_1_content) + obj_key = os.path.basename(file_name_simple) + set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED) + + with allure.step("Put several versions of object into bucket"): + version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple) + generate_file_with_content( + simple_object_size, file_path=file_name_simple, content=version_2_content + ) + version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple) + + with allure.step("Check bucket shows all versions"): + versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket) + obj_versions = { + version.get("VersionId") for version in versions if version.get("Key") == obj_key + } + assert obj_versions == { + version_id_1, + version_id_2, + }, f"Expected object has versions: {version_id_1, version_id_2}" + + with allure.step("Show information about particular version"): + for version_id in (version_id_1, version_id_2): + response = s3_gate_object.head_object_s3( + self.s3_client, bucket, obj_key, version_id=version_id + ) + assert "LastModified" in response, "Expected LastModified field" + assert "ETag" in response, "Expected ETag field" + assert ( + response.get("VersionId") == version_id + ), f"Expected VersionId is {version_id}" + assert response.get("ContentLength") != 0, "Expected ContentLength is not zero" + + with allure.step("Check object's attributes"): + for version_id in (version_id_1, version_id_2): + got_attrs = s3_gate_object.get_object_attributes( + self.s3_client, bucket, obj_key, "ETag", version_id=version_id + ) + if got_attrs: + assert ( + got_attrs.get("VersionId") == version_id + ), f"Expected VersionId is {version_id}" + + with allure.step("Delete object and check it was deleted"): + response = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key) + version_id_delete = response.get("VersionId") + + with pytest.raises(Exception, match=r".*Not Found.*"): + s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key) + + with allure.step("Get content for all versions and check it is correct"): + for version, content in ( + (version_id_2, version_2_content), + (version_id_1, version_1_content), + ): + file_name = s3_gate_object.get_object_s3( + self.s3_client, bucket, obj_key, version_id=version + ) + got_content = get_file_content(file_name) + assert ( + got_content == content + ), f"Expected object content is\n{content}\nGot\n{got_content}" + + with allure.step("Restore previous object version"): + s3_gate_object.delete_object_s3( + self.s3_client, bucket, obj_key, version_id=version_id_delete + ) + + file_name = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key) + got_content = get_file_content(file_name) + assert ( + got_content == version_2_content + ), f"Expected object content is\n{version_2_content}\nGot\n{got_content}" + + @pytest.mark.s3_gate_multipart + @allure.title("Test S3 Object Multipart API") + def test_s3_api_multipart(self, bucket, simple_object_size): + """ + Test checks S3 Multipart API (Create multipart upload/Abort multipart upload/List multipart upload/ + Upload part/List parts/Complete multipart upload). 
+ """ + parts_count = 3 + file_name_large = generate_file( + simple_object_size * 1024 * 6 * parts_count + ) # 5Mb - min part + object_key = self.object_key_from_file_path(file_name_large) + part_files = split_file(file_name_large, parts_count) + parts = [] + + uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + assert not uploads, f"Expected there is no uploads in bucket {bucket}" + + with allure.step("Create and abort multipart upload"): + upload_id = s3_gate_object.create_multipart_upload_s3( + self.s3_client, bucket, object_key + ) + uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + assert uploads, f"Expected there one upload in bucket {bucket}" + assert ( + uploads[0].get("Key") == object_key + ), f"Expected correct key {object_key} in upload {uploads}" + assert ( + uploads[0].get("UploadId") == upload_id + ), f"Expected correct UploadId {upload_id} in upload {uploads}" + + s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id) + uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + assert not uploads, f"Expected there is no uploads in bucket {bucket}" + + with allure.step("Create new multipart upload and upload several parts"): + upload_id = s3_gate_object.create_multipart_upload_s3( + self.s3_client, bucket, object_key + ) + for part_id, file_path in enumerate(part_files, start=1): + etag = s3_gate_object.upload_part_s3( + self.s3_client, bucket, object_key, upload_id, part_id, file_path + ) + parts.append((part_id, etag)) + + with allure.step("Check all parts are visible in bucket"): + got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id) + assert len(got_parts) == len( + part_files + ), f"Expected {parts_count} parts, got\n{got_parts}" + + s3_gate_object.complete_multipart_upload_s3( + self.s3_client, bucket, object_key, upload_id, parts + ) + + uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket) + assert not uploads, f"Expected there is no uploads in bucket {bucket}" + + with allure.step("Check we can get whole object from bucket"): + got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key) + assert get_file_hash(got_object) == get_file_hash(file_name_large) + + self.check_object_attributes(bucket, object_key, parts_count) + + @allure.title("Test S3 Bucket tagging API") + def test_s3_api_bucket_tagging(self, bucket): + """ + Test checks S3 Bucket tagging API (Put tag/Get tag). + """ + key_value_pair = [("some-key", "some-value"), ("some-key-2", "some-value-2")] + + s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair) + check_tags_by_bucket(self.s3_client, bucket, key_value_pair) + + s3_gate_bucket.delete_bucket_tagging(self.s3_client, bucket) + check_tags_by_bucket(self.s3_client, bucket, []) + + @allure.title("Test S3 Object tagging API") + def test_s3_api_object_tagging(self, bucket, simple_object_size): + """ + Test checks S3 Object tagging API (Put tag/Get tag/Update tag). 
+ """ + key_value_pair_bucket = [("some-key", "some-value"), ("some-key-2", "some-value-2")] + key_value_pair_obj = [ + ("some-key-obj", "some-value-obj"), + ("some-key--obj2", "some-value--obj2"), + ] + key_value_pair_obj_new = [("some-key-obj-new", "some-value-obj-new")] + file_name_simple = generate_file(simple_object_size) + obj_key = self.object_key_from_file_path(file_name_simple) + + s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair_bucket) + + s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple) + + for tags in (key_value_pair_obj, key_value_pair_obj_new): + s3_gate_object.put_object_tagging(self.s3_client, bucket, obj_key, tags) + check_tags_by_object( + self.s3_client, + bucket, + obj_key, + tags, + ) + + s3_gate_object.delete_object_tagging(self.s3_client, bucket, obj_key) + check_tags_by_object(self.s3_client, bucket, obj_key, []) + + @allure.title("Test S3: Delete object & delete objects S3 API") + def test_s3_api_delete(self, two_buckets, simple_object_size, complex_object_size): + """ + Check delete_object and delete_objects S3 API operation. From first bucket some objects deleted one by one. + From second bucket some objects deleted all at once. + """ + max_obj_count = 20 + max_delete_objects = 17 + put_objects = [] + file_paths = [] + obj_sizes = [simple_object_size, complex_object_size] + + bucket_1, bucket_2 = two_buckets + + with allure.step(f"Generate {max_obj_count} files"): + for _ in range(max_obj_count): + file_paths.append(generate_file(choice(obj_sizes))) + + for bucket in (bucket_1, bucket_2): + with allure.step(f"Bucket {bucket} must be empty as it just created"): + objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket) + assert not objects_list, f"Expected empty bucket, got {objects_list}" + + for file_path in file_paths: + s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + put_objects.append(self.object_key_from_file_path(file_path)) + + with allure.step(f"Check all objects put in bucket {bucket} successfully"): + bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket) + assert set(put_objects) == set( + bucket_objects + ), f"Expected all objects {put_objects} in objects list {bucket_objects}" + + with allure.step("Delete some objects from bucket_1 one by one"): + objects_to_delete_b1 = choices(put_objects, k=max_delete_objects) + for obj in objects_to_delete_b1: + s3_gate_object.delete_object_s3(self.s3_client, bucket_1, obj) + + with allure.step("Check deleted objects are not visible in bucket bucket_1"): + bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_1) + assert set(put_objects).difference(set(objects_to_delete_b1)) == set( + bucket_objects + ), f"Expected all objects {put_objects} in objects list {bucket_objects}" + try_to_get_objects_and_expect_error(self.s3_client, bucket_1, objects_to_delete_b1) + + with allure.step("Delete some objects from bucket_2 at once"): + objects_to_delete_b2 = choices(put_objects, k=max_delete_objects) + s3_gate_object.delete_objects_s3(self.s3_client, bucket_2, objects_to_delete_b2) + + with allure.step("Check deleted objects are not visible in bucket bucket_2"): + objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_2) + assert set(put_objects).difference(set(objects_to_delete_b2)) == set( + objects_list + ), f"Expected all objects {put_objects} in objects list {bucket_objects}" + try_to_get_objects_and_expect_error(self.s3_client, bucket_2, objects_to_delete_b2) + + @allure.title("Test 
S3: Copy object to the same bucket") + def test_s3_copy_same_bucket(self, bucket, complex_object_size, simple_object_size): + """ + Test object can be copied to the same bucket. + #TODO: delete after test_s3_copy_object will be merge + """ + file_path_simple, file_path_large = generate_file(simple_object_size), generate_file( + complex_object_size + ) + file_name_simple = self.object_key_from_file_path(file_path_simple) + file_name_large = self.object_key_from_file_path(file_path_large) + bucket_objects = [file_name_simple, file_name_large] + + with allure.step("Bucket must be empty"): + objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) + assert not objects_list, f"Expected empty bucket, got {objects_list}" + + with allure.step("Put objects into bucket"): + for file_path in (file_path_simple, file_path_large): + s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + + with allure.step("Copy one object into the same bucket"): + copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple) + bucket_objects.append(copy_obj_path) + + check_objects_in_bucket(self.s3_client, bucket, bucket_objects) + + with allure.step("Check copied object has the same content"): + got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path) + assert get_file_hash(file_path_simple) == get_file_hash( + got_copied_file + ), "Hashes must be the same" + + with allure.step("Delete one object from bucket"): + s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_simple) + bucket_objects.remove(file_name_simple) + + check_objects_in_bucket( + self.s3_client, + bucket, + expected_objects=bucket_objects, + unexpected_objects=[file_name_simple], + ) + + @allure.title("Test S3: Copy object to another bucket") + def test_s3_copy_to_another_bucket(self, two_buckets, complex_object_size, simple_object_size): + """ + Test object can be copied to another bucket. 
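+
+        In bare boto3 terms this is a single call (illustrative; bucket and
+        key names are placeholders):
+
+            client.copy_object(
+                Bucket="dst-bucket",
+                Key="obj",
+                CopySource={"Bucket": "src-bucket", "Key": "obj"},
+            )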
+        # TODO: delete after test_s3_copy_object is merged
+        """
+        file_path_simple, file_path_large = generate_file(simple_object_size), generate_file(
+            complex_object_size
+        )
+        file_name_simple = self.object_key_from_file_path(file_path_simple)
+        file_name_large = self.object_key_from_file_path(file_path_large)
+        bucket_1_objects = [file_name_simple, file_name_large]
+
+        bucket_1, bucket_2 = two_buckets
+
+        with allure.step("Buckets must be empty"):
+            for bucket in (bucket_1, bucket_2):
+                objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+                assert not objects_list, f"Expected empty bucket, got {objects_list}"
+
+        with allure.step("Put objects into one bucket"):
+            for file_path in (file_path_simple, file_path_large):
+                s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path)
+
+        with allure.step("Copy object from first bucket into second"):
+            copy_obj_path_b2 = s3_gate_object.copy_object_s3(
+                self.s3_client, bucket_1, file_name_large, bucket_dst=bucket_2
+            )
+            check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
+            check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])
+
+        with allure.step("Check copied object has the same content"):
+            got_copied_file_b2 = s3_gate_object.get_object_s3(
+                self.s3_client, bucket_2, copy_obj_path_b2
+            )
+            assert get_file_hash(file_path_large) == get_file_hash(
+                got_copied_file_b2
+            ), "Hashes must be the same"
+
+        with allure.step("Delete one object from first bucket"):
+            s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)
+            bucket_1_objects.remove(file_name_simple)
+
+        check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
+        check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])
+
+        with allure.step("Delete one object from second bucket and check it is empty"):
+            s3_gate_object.delete_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
+            check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[])
+
+    def check_object_attributes(self, bucket: str, object_key: str, parts_count: int):
+        if not isinstance(self.s3_client, AwsCliClient):
+            logger.warning("Attributes check is not supported for boto3 implementation")
+            return
+
+        with allure.step("Check object's attributes"):
+            obj_parts = s3_gate_object.get_object_attributes(
+                self.s3_client, bucket, object_key, "ObjectParts", get_full_resp=False
+            )
+            assert (
+                obj_parts.get("TotalPartsCount") == parts_count
+            ), f"Expected TotalPartsCount is {parts_count}"
+            assert (
+                len(obj_parts.get("Parts")) == parts_count
+            ), f"Expected Parts count is {parts_count}"
+
+        with allure.step("Check object's attribute max-parts"):
+            max_parts = 2
+            obj_parts = s3_gate_object.get_object_attributes(
+                self.s3_client,
+                bucket,
+                object_key,
+                "ObjectParts",
+                max_parts=max_parts,
+                get_full_resp=False,
+            )
+            assert (
+                obj_parts.get("TotalPartsCount") == parts_count
+            ), f"Expected TotalPartsCount is {parts_count}"
+            assert obj_parts.get("MaxParts") == max_parts, f"Expected MaxParts is {max_parts}"
+            assert (
+                len(obj_parts.get("Parts")) == max_parts
+            ), f"Expected Parts count is {max_parts}"
+
+        with allure.step("Check object's attribute part-number-marker"):
+            part_number_marker = 3
+            obj_parts = s3_gate_object.get_object_attributes(
+                self.s3_client,
+                bucket,
+                object_key,
+                "ObjectParts",
+                part_number=part_number_marker,
+                get_full_resp=False,
+            )
+            assert (
+                obj_parts.get("TotalPartsCount") == parts_count
+            ), f"Expected TotalPartsCount is {parts_count}"
+            assert (
+                obj_parts.get("PartNumberMarker") == part_number_marker
+            ), f"Expected PartNumberMarker is {part_number_marker}"
+            assert len(obj_parts.get("Parts")) == 1, "Expected Parts count is 1"
+
+    @staticmethod
+    def object_key_from_file_path(full_path: str) -> str:
+        return os.path.basename(full_path)
diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_locking.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_locking.py
new file mode 100644
index 000000000..b582656de
--- /dev/null
+++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_locking.py
@@ -0,0 +1,228 @@
+import time
+from datetime import datetime, timedelta
+
+import allure
+import pytest
+from file_helper import generate_file, generate_file_with_content
+from s3_helper import assert_object_lock_mode, check_objects_in_bucket, object_key_from_file_path
+
+from pytest_tests.steps import s3_gate_bucket, s3_gate_object
+from s3.s3_gate_base import TestNeofsS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+@pytest.mark.s3_gate_locking
+@pytest.mark.parametrize("version_id", [None, "second"])
+class TestS3GateLocking(TestNeofsS3GateBase):
+    @allure.title("Test S3: Checking the operation of retention period & legal lock on the object")
+    def test_s3_object_locking(self, version_id, simple_object_size):
+        file_path = generate_file(simple_object_size)
+        file_name = object_key_from_file_path(file_path)
+        retention_period = 2
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1")
+
+        with allure.step("Put several versions of object into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path)
+            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            check_objects_in_bucket(self.s3_client, bucket, [file_name])
+            if version_id:
+                version_id = version_id_2
+
+        with allure.step(f"Put retention period {retention_period}min to object {file_name}"):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period)
+            retention = {
+                "Mode": "COMPLIANCE",
+                "RetainUntilDate": date_obj,
+            }
+            s3_gate_object.put_object_retention(
+                self.s3_client, bucket, file_name, retention, version_id
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF"
+            )
+
+        with allure.step(f"Put legal hold to object {file_name}"):
+            s3_gate_object.put_object_legal_hold(
+                self.s3_client, bucket, file_name, "ON", version_id
+            )
+            assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "ON")
+
+        with allure.step("Fail with deleting object with legal hold and retention period"):
+            if version_id:
+                with pytest.raises(Exception):
+                    # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied.
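+                    # A COMPLIANCE retention period combined with an "ON" legal hold
+                    # is expected to block deletion of this specific version, so the
+                    # versioned DeleteObject below should be denied by the gate.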
+ s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id) + + with allure.step(f"Check retention period is no longer set on the uploaded object"): + time.sleep((retention_period + 1) * 60) + assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "ON") + + with allure.step(f"Fail with deleting object with legal hold and retention period"): + if version_id: + with pytest.raises(Exception): + # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied. + s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id) + else: + s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id) + + @allure.title("Test S3: Checking the impossibility to change the retention mode COMPLIANCE") + def test_s3_mode_compliance(self, version_id, simple_object_size): + file_path = generate_file(simple_object_size) + file_name = object_key_from_file_path(file_path) + retention_period = 2 + retention_period_1 = 1 + + bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1") + + with allure.step("Put object into bucket"): + obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + if version_id: + version_id = obj_version + check_objects_in_bucket(self.s3_client, bucket, [file_name]) + + with allure.step(f"Put retention period {retention_period}min to object {file_name}"): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period) + retention = { + "Mode": "COMPLIANCE", + "RetainUntilDate": date_obj, + } + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id + ) + assert_object_lock_mode( + self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF" + ) + + with allure.step( + f"Try to change retention period {retention_period_1}min to object {file_name}" + ): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1) + retention = { + "Mode": "COMPLIANCE", + "RetainUntilDate": date_obj, + } + with pytest.raises(Exception): + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id + ) + + @allure.title("Test S3: Checking the ability to change retention mode GOVERNANCE") + def test_s3_mode_governance(self, version_id, simple_object_size): + file_path = generate_file(simple_object_size) + file_name = object_key_from_file_path(file_path) + retention_period = 3 + retention_period_1 = 2 + retention_period_2 = 5 + + bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1") + + with allure.step("Put object into bucket"): + obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + if version_id: + version_id = obj_version + check_objects_in_bucket(self.s3_client, bucket, [file_name]) + + with allure.step(f"Put retention period {retention_period}min to object {file_name}"): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period) + retention = { + "Mode": "GOVERNANCE", + "RetainUntilDate": date_obj, + } + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id + ) + assert_object_lock_mode( + self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF" + ) + + with allure.step( + f"Try to change retention period {retention_period_1}min to object {file_name}" + ): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1) + retention = { + "Mode": "GOVERNANCE", + "RetainUntilDate": date_obj, + } + 
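+            # GOVERNANCE mode, unlike COMPLIANCE, is expected to allow relaxing the
+            # retention, but only with the governance-bypass flag (passed as the
+            # final argument further below); a plain put of a shorter period fails.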
with pytest.raises(Exception): + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id + ) + + with allure.step( + f"Try to change retention period {retention_period_1}min to object {file_name}" + ): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1) + retention = { + "Mode": "GOVERNANCE", + "RetainUntilDate": date_obj, + } + with pytest.raises(Exception): + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id + ) + + with allure.step(f"Put new retention period {retention_period_2}min to object {file_name}"): + date_obj = datetime.utcnow() + timedelta(minutes=retention_period_2) + retention = { + "Mode": "GOVERNANCE", + "RetainUntilDate": date_obj, + } + s3_gate_object.put_object_retention( + self.s3_client, bucket, file_name, retention, version_id, True + ) + assert_object_lock_mode( + self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF" + ) + + @allure.title("Test S3: Checking if an Object Cannot Be Locked") + def test_s3_legal_hold(self, version_id, simple_object_size): + file_path = generate_file(simple_object_size) + file_name = object_key_from_file_path(file_path) + + bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, False, bucket_configuration="rep-1") + + with allure.step("Put object into bucket"): + obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + if version_id: + version_id = obj_version + check_objects_in_bucket(self.s3_client, bucket, [file_name]) + + with allure.step(f"Put legal hold to object {file_name}"): + with pytest.raises(Exception): + s3_gate_object.put_object_legal_hold( + self.s3_client, bucket, file_name, "ON", version_id + ) + + +@pytest.mark.s3_gate +class TestS3GateLockingBucket(TestNeofsS3GateBase): + @allure.title("Test S3: Bucket Lock") + def test_s3_bucket_lock(self, simple_object_size): + file_path = generate_file(simple_object_size) + file_name = object_key_from_file_path(file_path) + configuration = {"Rule": {"DefaultRetention": {"Mode": "COMPLIANCE", "Days": 1}}} + + bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1") + + with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=False"): + s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration) + + with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=True"): + configuration["ObjectLockEnabled"] = "Enabled" + s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration) + + with allure.step("GetObjectLockConfiguration"): + config = s3_gate_bucket.get_object_lock_configuration(self.s3_client, bucket) + configuration["Rule"]["DefaultRetention"]["Years"] = 0 + assert config == configuration, f"Configurations must be equal {configuration}" + + with allure.step("Put object into bucket"): + s3_gate_object.put_object_s3(self.s3_client, bucket, file_path) + assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", None, "OFF", 1) diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_multipart.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_multipart.py new file mode 100644 index 000000000..77e545729 --- /dev/null +++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_multipart.py @@ -0,0 +1,131 @@ +import allure +import pytest +from file_helper import generate_file, get_file_hash, split_file +from s3_helper import check_objects_in_bucket, object_key_from_file_path, set_bucket_versioning + +from 
pytest_tests.steps import s3_gate_bucket, s3_gate_object
+from s3.s3_gate_base import TestNeofsS3GateBase
+
+PART_SIZE = 5 * 1024 * 1024
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+@pytest.mark.s3_gate_multipart
+class TestS3GateMultipart(TestNeofsS3GateBase):
+    @allure.title("Test S3 Object Multipart API")
+    def test_s3_object_multipart(self):
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        parts_count = 5
+        file_name_large = generate_file(PART_SIZE * parts_count)  # 5 MB is the minimum part size
+        object_key = object_key_from_file_path(file_name_large)
+        part_files = split_file(file_name_large, parts_count)
+        parts = []
+
+        with allure.step("Upload first part"):
+            upload_id = s3_gate_object.create_multipart_upload_s3(
+                self.s3_client, bucket, object_key
+            )
+            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            etag = s3_gate_object.upload_part_s3(
+                self.s3_client, bucket, object_key, upload_id, 1, part_files[0]
+            )
+            parts.append((1, etag))
+            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            assert len(got_parts) == 1, f"Expected 1 part, got\n{got_parts}"
+
+        with allure.step("Upload remaining parts"):
+            for part_id, file_path in enumerate(part_files[1:], start=2):
+                etag = s3_gate_object.upload_part_s3(
+                    self.s3_client, bucket, object_key, upload_id, part_id, file_path
+                )
+                parts.append((part_id, etag))
+            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            s3_gate_object.complete_multipart_upload_s3(
+                self.s3_client, bucket, object_key, upload_id, parts
+            )
+            assert len(got_parts) == len(
+                part_files
+            ), f"Expected {parts_count} parts, got\n{got_parts}"
+
+        with allure.step("Check upload list is empty"):
+            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            assert not uploads, f"Expected there are no uploads in bucket {bucket}"
+
+        with allure.step("Check we can get whole object from bucket"):
+            got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
+            assert get_file_hash(got_object) == get_file_hash(file_name_large)
+
+    @allure.title("Test S3 Multipart abort")
+    def test_s3_abort_multipart(self):
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        parts_count = 5
+        file_name_large = generate_file(PART_SIZE * parts_count)  # 5 MB is the minimum part size
+        object_key = object_key_from_file_path(file_name_large)
+        part_files = split_file(file_name_large, parts_count)
+        parts = []
+
+        with allure.step("Upload first part"):
+            upload_id = s3_gate_object.create_multipart_upload_s3(
+                self.s3_client, bucket, object_key
+            )
+            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            etag = s3_gate_object.upload_part_s3(
+                self.s3_client, bucket, object_key, upload_id, 1, part_files[0]
+            )
+            parts.append((1, etag))
+            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            assert len(got_parts) == 1, f"Expected 1 part, got\n{got_parts}"
+
+        with allure.step("Abort multipart upload"):
+            s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
+            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            assert not uploads, f"Expected there are no uploads in bucket {bucket}"
+
+    @allure.title("Test S3 Upload Part Copy")
+    def test_s3_multipart_copy(self):
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+        parts_count = 3
+        file_name_large = generate_file(PART_SIZE * parts_count)  # 5 MB is the minimum part size
+        object_key = object_key_from_file_path(file_name_large)
+        part_files = split_file(file_name_large, parts_count)
+        parts = []
+        objs = []
+
+        with allure.step(f"Put {parts_count} objects in bucket"):
+            for part in part_files:
+                s3_gate_object.put_object_s3(self.s3_client, bucket, part)
+                objs.append(object_key_from_file_path(part))
+            check_objects_in_bucket(self.s3_client, bucket, objs)
+
+        with allure.step("Create multipart upload object"):
+            upload_id = s3_gate_object.create_multipart_upload_s3(
+                self.s3_client, bucket, object_key
+            )
+            uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
+            assert uploads, f"Expected there are uploads in bucket {bucket}"
+
+        with allure.step("Upload parts by copying existing objects"):
+            for part_id, obj_key in enumerate(objs, start=1):
+                etag = s3_gate_object.upload_part_copy_s3(
+                    self.s3_client, bucket, object_key, upload_id, part_id, f"{bucket}/{obj_key}"
+                )
+                parts.append((part_id, etag))
+            got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
+            s3_gate_object.complete_multipart_upload_s3(
+                self.s3_client, bucket, object_key, upload_id, parts
+            )
+            assert len(got_parts) == len(
+                part_files
+            ), f"Expected {parts_count} parts, got\n{got_parts}"
+
+        with allure.step("Check we can get whole object from bucket"):
+            got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
+            assert get_file_hash(got_object) == get_file_hash(file_name_large)
diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_tagging.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_tagging.py
new file mode 100644
index 000000000..62a1baf69
--- /dev/null
+++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_tagging.py
@@ -0,0 +1,112 @@
+import os
+import uuid
+from random import choice
+from string import ascii_letters
+
+import allure
+import pytest
+from file_helper import generate_file
+from s3_helper import check_tags_by_bucket, check_tags_by_object, object_key_from_file_path
+
+from pytest_tests.steps import s3_gate_bucket, s3_gate_object
+from s3.s3_gate_base import TestNeofsS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+@pytest.mark.s3_gate_tagging
+class TestS3GateTagging(TestNeofsS3GateBase):
+    @staticmethod
+    def create_tags(count: int) -> list:
+        tags = []
+        for _ in range(count):
+            tag_key = "".join(choice(ascii_letters) for _ in range(8))
+            tag_value = "".join(choice(ascii_letters) for _ in range(12))
+            tags.append((tag_key, tag_value))
+        return tags
+
+    @allure.title("Test S3: Object tagging")
+    def test_s3_object_tagging(self, bucket, simple_object_size):
+        file_path = generate_file(simple_object_size)
+        file_name = object_key_from_file_path(file_path)
+
+        with allure.step("Put object with 1 tag into bucket"):
+            tag_1 = "Tag1=Value1"
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path, Tagging=tag_1)
+            got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, file_name)
+            assert got_tags, f"Expected tags, got {got_tags}"
+            assert got_tags == [{"Key": "Tag1", "Value": "Value1"}], "Tags must be the same"
+
+        with allure.step("Put 10 new tags for object"):
+            tags_2 = self.create_tags(10)
+            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_2)
+            check_tags_by_object(self.s3_client, bucket, file_name, tags_2, [("Tag1", "Value1")])
+
+        with allure.step("Put 10 extra new tags for object"):
+            tags_3 = self.create_tags(10)
+            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_3)
+            check_tags_by_object(self.s3_client, bucket, file_name, tags_3, tags_2)
+
+        with allure.step("Copy one object with tag"):
+            copy_obj_path_1 = s3_gate_object.copy_object_s3(
+                self.s3_client, bucket, file_name, tagging_directive="COPY"
+            )
+            check_tags_by_object(self.s3_client, bucket, copy_obj_path_1, tags_3, tags_2)
+
+        with allure.step("Put 11 new tags to object and expect an error"):
+            tags_4 = self.create_tags(11)
+            with pytest.raises(Exception, match=r".*Object tags cannot be greater than 10.*"):
+                # An error occurred (BadRequest) when calling the PutObjectTagging operation: Object tags cannot be greater than 10
+                s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_4)
+
+        with allure.step("Put empty tag"):
+            tags_5 = []
+            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_5)
+            check_tags_by_object(self.s3_client, bucket, file_name, [])
+
+        with allure.step("Put 10 object tags"):
+            tags_6 = self.create_tags(10)
+            s3_gate_object.put_object_tagging(self.s3_client, bucket, file_name, tags=tags_6)
+            check_tags_by_object(self.s3_client, bucket, file_name, tags_6)
+
+        with allure.step("Delete tags by delete-object-tagging"):
+            s3_gate_object.delete_object_tagging(self.s3_client, bucket, file_name)
+            check_tags_by_object(self.s3_client, bucket, file_name, [])
+
+    @allure.title("Test S3: bucket tagging")
+    def test_s3_bucket_tagging(self, bucket):
+
+        with allure.step("Put 10 bucket tags"):
+            tags_1 = self.create_tags(10)
+            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_1)
+            check_tags_by_bucket(self.s3_client, bucket, tags_1)
+
+        with allure.step("Put 10 new bucket tags"):
+            tags_2 = self.create_tags(10)
+            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_2)
+            check_tags_by_bucket(self.s3_client, bucket, tags_2, tags_1)
+
+        with allure.step("Put 11 new tags to bucket and expect an error"):
+            tags_3 = self.create_tags(11)
+            with pytest.raises(Exception, match=r".*Object tags cannot be greater than 10.*"):
+                # An error occurred (BadRequest) when calling the PutBucketTagging operation (reached max retries: 0): Object tags cannot be greater than 10
+                s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_3)
+
+        with allure.step("Put empty tag"):
+            tags_4 = []
+            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_4)
+            check_tags_by_bucket(self.s3_client, bucket, tags_4)
+
+        with allure.step("Put 10 new bucket tags"):
+            tags_5 = self.create_tags(10)
+            s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, tags_5)
+            check_tags_by_bucket(self.s3_client, bucket, tags_5, tags_2)
+
+        with allure.step("Delete tags by delete-bucket-tagging"):
+            s3_gate_bucket.delete_bucket_tagging(self.s3_client, bucket)
+            check_tags_by_bucket(self.s3_client, bucket, [])
diff --git a/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_versioning.py b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_versioning.py
new file mode 100644
index 000000000..9f3352b50
--- /dev/null
+++ b/dynamic_env_pytest_tests/tests/services/s3_gate/test_s3_versioning.py
@@ -0,0 +1,97 @@
+import os
+
+import allure
+import pytest
+from file_helper import generate_file, generate_file_with_content
+from s3_helper import set_bucket_versioning
+
+from pytest_tests.steps import s3_gate_bucket, s3_gate_object
+from s3.s3_gate_base import TestNeofsS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+@pytest.mark.s3_gate_versioning
+class TestS3GateVersioning(TestNeofsS3GateBase):
+    @staticmethod
+    def object_key_from_file_path(full_path: str) -> str:
+        return os.path.basename(full_path)
+
+    @allure.title("Test S3: try to disable versioning")
+    def test_s3_version_off(self):
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, bucket_configuration="rep-1")
+        with pytest.raises(Exception):
+            set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.SUSPENDED)
+
+    @allure.title("Test S3: Enable and disable versioning")
+    def test_s3_version(self, simple_object_size):
+        file_path = generate_file(simple_object_size)
+        file_name = self.object_key_from_file_path(file_path)
+        bucket_objects = [file_name]
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, False, bucket_configuration="rep-1")
+        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.SUSPENDED)
+
+        with allure.step("Put object into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
+            assert (
+                objects_list == bucket_objects
+            ), f"Expected a single object in bucket, got {objects_list}"
+            object_version = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            actual_version = [
+                version.get("VersionId")
+                for version in object_version
+                if version.get("Key") == file_name
+            ]
+            assert actual_version == [
+                "null"
+            ], f"Expected version is null in list-object-versions, got {object_version}"
+            object_0 = s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
+            assert (
+                object_0.get("VersionId") == "null"
+            ), f"Expected version is null in head-object, got {object_0.get('VersionId')}"
+
+        set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+
+        with allure.step("Put several versions of object into bucket"):
+            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            file_name_1 = generate_file_with_content(simple_object_size, file_path=file_path)
+            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+
+        with allure.step("Check bucket shows all versions"):
+            versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
+            obj_versions = [
+                version.get("VersionId") for version in versions if version.get("Key") == file_name
+            ]
+            assert sorted(obj_versions) == sorted(
+                [version_id_1, version_id_2, "null"]
+            ), f"Expected object has versions: {version_id_1, version_id_2, 'null'}"
+
+        with allure.step("Get object"):
+            object_1 = s3_gate_object.get_object_s3(
+                self.s3_client, bucket, file_name, full_output=True
+            )
+            assert (
+                object_1.get("VersionId") == version_id_2
+            ), f"Get object with version {version_id_2}"
+
+        with allure.step("Get first version of object"):
+            object_2 = 
s3_gate_object.get_object_s3( + self.s3_client, bucket, file_name, version_id_1, full_output=True + ) + assert ( + object_2.get("VersionId") == version_id_1 + ), f"Get object with version {version_id_1}" + + with allure.step("Get second version of object"): + object_3 = s3_gate_object.get_object_s3( + self.s3_client, bucket, file_name, version_id_2, full_output=True + ) + assert ( + object_3.get("VersionId") == version_id_2 + ), f"Get object with version {version_id_2}"