Skip to content

Commit

Permalink
Remove Redundant EsBlobStoreTestCase (elastic#49603) (elastic#49605)
Browse files Browse the repository at this point in the history
All the implementations of `ESBlobStoreTestCase` use exactly the same
bootstrap code that is also used by their implementation of
`ESBlobStoreContainerTestCase`.
This means all tests might as well live under `ESBlobStoreContainerTestCase`,
saving a lot of code duplication. Also, there was no HDFS implementation of
`ESBlobStoreTestCase`; moving the tests over resolves this automatically,
since an HDFS implementation exists for the container tests.
  • Loading branch information
original-brownbear authored Nov 26, 2019
1 parent 7774312 commit 3862400
Show file tree
Hide file tree
Showing 13 changed files with 185 additions and 409 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@
import java.util.stream.Collectors;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.elasticsearch.repositories.ESBlobStoreContainerTestCase.randomBytes;
import static org.elasticsearch.repositories.azure.AzureRepository.Repository.CONTAINER_SETTING;
import static org.elasticsearch.repositories.azure.AzureStorageSettings.ACCOUNT_SETTING;
import static org.elasticsearch.repositories.azure.AzureStorageSettings.ENDPOINT_SUFFIX_SETTING;
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@
import static fixture.gcs.GoogleCloudStorageHttpHandler.getContentRangeStart;
import static fixture.gcs.GoogleCloudStorageHttpHandler.parseMultipartRequestBody;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.elasticsearch.repositories.ESBlobStoreContainerTestCase.randomBytes;
import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING;
import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING;
import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;

import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,6 @@
import java.security.PrivilegedExceptionAction;
import java.util.Collections;

import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.readBlobFully;


@ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class})
public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,14 @@
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.BlobStoreException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
import org.mockito.ArgumentCaptor;

Expand All @@ -46,10 +50,12 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.elasticsearch.repositories.s3.S3BlobStoreTests.randomMockS3BlobStore;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doNothing;
Expand Down Expand Up @@ -397,10 +403,99 @@ public void testNumberOfMultiparts() {
assertNumberOfMultiparts(factor + 1, remaining, (size * factor) + remaining, size);
}

public void testInitCannedACL() {
    // A null or empty setting must fall back to the private ACL.
    assertThat(S3BlobStore.initCannedACL(null), equalTo(CannedAccessControlList.Private));
    assertThat(S3BlobStore.initCannedACL(""), equalTo(CannedAccessControlList.Private));

    // Every documented ACL name must round-trip through initCannedACL.
    final String[] supportedAcls = new String[]{
        "private", "public-read", "public-read-write", "authenticated-read",
        "log-delivery-write", "bucket-owner-read", "bucket-owner-full-control"};
    for (final String name : supportedAcls) {
        final CannedAccessControlList resolved = S3BlobStore.initCannedACL(name);
        assertThat(resolved.toString(), equalTo(name));
    }

    // Any canned ACL known to the AWS SDK must be accepted as well.
    for (final CannedAccessControlList sdkAcl : CannedAccessControlList.values()) {
        final CannedAccessControlList resolved = S3BlobStore.initCannedACL(sdkAcl.toString());
        assertThat(resolved, equalTo(sdkAcl));
    }
}

public void testInvalidCannedACL() {
    // An unknown ACL name must be rejected with a descriptive BlobStoreException.
    final BlobStoreException e = expectThrows(BlobStoreException.class, () -> S3BlobStore.initCannedACL("test_invalid"));
    assertThat(e.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
}

public void testInitStorageClass() {
    // A null or empty setting defaults to the `standard` storage class.
    assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard));
    assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard));

    // Each supported setting value maps onto the matching SDK constant.
    final String[] settingValues = {"standard", "standard_ia", "onezone_ia", "reduced_redundancy", "intelligent_tiering"};
    final StorageClass[] expectedClasses = {
        StorageClass.Standard,
        StorageClass.StandardInfrequentAccess,
        StorageClass.OneZoneInfrequentAccess,
        StorageClass.ReducedRedundancy,
        StorageClass.IntelligentTiering};
    for (int i = 0; i < settingValues.length; i++) {
        assertThat(S3BlobStore.initStorageClass(settingValues[i]), equalTo(expectedClasses[i]));
    }
}

public void testCaseInsensitiveStorageClass() {
    // Storage class parsing must ignore the casing of the setting value.
    final String[] mixedCaseValues = {"sTandaRd", "sTandaRd_Ia", "oNeZoNe_iA", "reduCED_redundancy", "intelLigeNt_tieriNG"};
    final StorageClass[] expectedClasses = {
        StorageClass.Standard,
        StorageClass.StandardInfrequentAccess,
        StorageClass.OneZoneInfrequentAccess,
        StorageClass.ReducedRedundancy,
        StorageClass.IntelligentTiering};
    for (int i = 0; i < mixedCaseValues.length; i++) {
        assertThat(S3BlobStore.initStorageClass(mixedCaseValues[i]), equalTo(expectedClasses[i]));
    }
}

public void testInvalidStorageClass() {
    // An unrecognized storage class name must be rejected with a descriptive message.
    final BlobStoreException e = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("whatever"));
    assertThat(e.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
}

public void testRejectGlacierStorageClass() {
    // Glacier is a valid SDK storage class but is explicitly unsupported by the repository.
    final BlobStoreException e = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("glacier"));
    assertThat(e.getMessage(), equalTo("Glacier storage class is not supported"));
}

// Asserts that splitting totalSize into partSize chunks yields the expected
// part count and the expected size of the trailing partial part.
private static void assertNumberOfMultiparts(final int expectedParts, final long expectedRemaining, long totalSize, long partSize) {
    final Tuple<Long, Long> parts = S3BlobContainer.numberOfMultiparts(totalSize, partSize);
    final long actualParts = parts.v1();
    final long actualRemaining = parts.v2();

    assertEquals("Expected number of parts [" + expectedParts + "] but got [" + actualParts + "]", expectedParts, actualParts);
    assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + actualRemaining + "]", expectedRemaining, actualRemaining);
}

/**
 * Creates a new {@link S3BlobStore} with random settings.
 * <p>
 * The blobstore uses a {@link MockAmazonS3} client.
 */
public static S3BlobStore randomMockS3BlobStore() {
    final String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
    final ByteSizeValue bufferSize = new ByteSizeValue(randomIntBetween(5, 100), ByteSizeUnit.MB);
    final boolean serverSideEncryption = randomBoolean();

    // Randomly pick a canned ACL, or leave it unset (null).
    final String cannedACL = randomBoolean() ? randomFrom(CannedAccessControlList.values()).toString() : null;

    // Randomly pick a storage class, or leave it unset; Glacier is excluded because
    // S3BlobStore rejects it.
    final String storageClass = randomBoolean()
        ? randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString()
        : null;

    final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass);
    // Service stub that always hands out the mock client regardless of repository metadata.
    final S3Service service = new S3Service() {
        @Override
        public synchronized AmazonS3Reference client(RepositoryMetaData repositoryMetaData) {
            return new AmazonS3Reference(client);
        }
    };
    return new S3BlobStore(service, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass,
        new RepositoryMetaData(bucket, "s3", Settings.EMPTY));
}
}

This file was deleted.

Loading

0 comments on commit 3862400

Please sign in to comment.