diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 23ca7a299e588..c8e44daa5a073 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -202,14 +202,14 @@ Run all build commands from within the root directory: cd elasticsearch/ ``` -To build a tar distribution, run this command: +To build a darwin-tar distribution, run this command: ```sh -./gradlew -p distribution/archives/tar assemble --parallel +./gradlew -p distribution/archives/darwin-tar assemble --parallel ``` You will find the distribution under: -`./distribution/archives/tar/build/distributions/` +`./distribution/archives/darwin-tar/build/distributions/` To create all build artifacts (e.g., plugins and Javadocs) as well as distributions in all formats, run this command: @@ -219,10 +219,10 @@ distributions in all formats, run this command: ``` The package distributions (Debian and RPM) can be found under: -`./distribution/packages/(deb|rpm)/build/distributions/` +`./distribution/packages/(deb|rpm|oss-deb|oss-rpm)/build/distributions/` The archive distributions (tar and zip) can be found under: -`./distribution/archives/(tar|zip)/build/distributions/` +`./distribution/archives/(darwin-tar|linux-tar|windows-zip|oss-darwin-tar|oss-linux-tar|oss-windows-zip)/build/distributions/` ### Running The Full Test Suite diff --git a/distribution/build.gradle b/distribution/build.gradle index 295dc35d270ea..6b90c1415cd26 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -385,7 +385,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { copySpec { from project(':distribution').tasks.getByName("extract${platform.capitalize()}Jdk") eachFile { FileCopyDetails details -> - if (details.relativePath.segments[-2] == 'bin') { + if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { details.mode = 0755 } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index b0b7a1d4b8295..8d1bbbcc39987 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -145,7 +145,7 @@ Closure commonPackageConfig(String type, boolean oss) { String[] segments = fcp.relativePath.segments for (int i = segments.length - 2; i > 2; --i) { directory('/' + segments[0..i].join('/'), 0755) - if (segments[-2] == 'bin') { + if (segments[-2] == 'bin' || segments[-1] == 'jspawnhelper') { fcp.mode = 0755 } else { fcp.mode = 0644 diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java index 6653117c62afb..7fb4a18c66b9a 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java @@ -400,7 +400,7 @@ public void testMultipleJoinFields() throws Exception { .endObject()); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); + assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice.")); } { @@ -426,7 +426,7 @@ public void testMultipleJoinFields() throws Exception { .endObject()); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> 
indexService.mapperService().merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); + assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice.")); } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml index 5114b7f7521ab..a05134866628b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml @@ -104,8 +104,8 @@ "Implicitly create a typeless index while there is a typed template": - skip: - version: "all" - reason: "awaits fix in #39198" + version: " - 6.99.99" + reason: needs typeless index operations to work on typed indices - do: indices.put_template: @@ -124,6 +124,11 @@ index: test-1 body: { bar: 42 } +# ensures dynamic mapping update is visible to get_mapping + - do: + cluster.health: + wait_for_events: normal + - do: indices.get_mapping: include_type_name: true diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java index 440be98ad9ec9..3ca70f4c99996 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMergeValidator.java @@ -37,37 +37,18 @@ class MapperMergeValidator { * duplicate fields are present, and if the provided fields have already been * defined with a different data type. * - * @param type The mapping type, for use in error messages. * @param objectMappers The newly added object mappers. * @param fieldMappers The newly added field mappers. * @param fieldAliasMappers The newly added field alias mappers. - * @param fullPathObjectMappers All object mappers, indexed by their full path. - * @param fieldTypes All field and field alias mappers, collected into a lookup structure. 
*/ - public static void validateMapperStructure(String type, - Collection objectMappers, + public static void validateMapperStructure(Collection objectMappers, Collection fieldMappers, - Collection fieldAliasMappers, - Map fullPathObjectMappers, - FieldTypeLookup fieldTypes) { - checkFieldUniqueness(type, objectMappers, fieldMappers, - fieldAliasMappers, fullPathObjectMappers, fieldTypes); - checkObjectsCompatibility(objectMappers, fullPathObjectMappers); - } - - private static void checkFieldUniqueness(String type, - Collection objectMappers, - Collection fieldMappers, - Collection fieldAliasMappers, - Map fullPathObjectMappers, - FieldTypeLookup fieldTypes) { - - // first check within mapping + Collection fieldAliasMappers) { Set objectFullNames = new HashSet<>(); for (ObjectMapper objectMapper : objectMappers) { String fullPath = objectMapper.fullPath(); if (objectFullNames.add(fullPath) == false) { - throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); + throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice."); } } @@ -76,38 +57,11 @@ private static void checkFieldUniqueness(String type, .forEach(mapper -> { String name = mapper.name(); if (objectFullNames.contains(name)) { - throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); + throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field."); } else if (fieldNames.add(name) == false) { - throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); + throw new IllegalArgumentException("Field [" + name + "] is defined twice."); } }); - - // then check other types - for (String fieldName : fieldNames) { - if (fullPathObjectMappers.containsKey(fieldName)) { - throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type - + "] but this name is already used for an object in other types"); - } - } - - for (String objectPath : objectFullNames) { - if (fieldTypes.get(objectPath) != null) { - throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type - + "] but this name is already used for a field in other types"); - } - } - } - - private static void checkObjectsCompatibility(Collection objectMappers, - Map fullPathObjectMappers) { - for (ObjectMapper newObjectMapper : objectMappers) { - ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); - if (existingObjectMapper != null) { - // simulate a merge and ignore the result, we are just interested - // in exceptions here - existingObjectMapper.merge(newObjectMapper); - } - } } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 398ce4cdd17ce..0716b5a8c280b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -471,8 +471,7 @@ private synchronized Map internalMerge(@Nullable Documen Collections.addAll(fieldMappers, metadataMappers); MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers); - MapperMergeValidator.validateMapperStructure(newMapper.type(), objectMappers, fieldMappers, - fieldAliasMappers, fullPathObjectMappers, fieldTypes); + 
MapperMergeValidator.validateMapperStructure(objectMappers, fieldMappers, fieldAliasMappers); checkPartitionedIndexConstraints(newMapper); // update lookup data-structures diff --git a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java index b5a17534903ee..e9d593b728edc 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java @@ -58,16 +58,7 @@ public int totalOperations() { @Override public int skippedOperations() { - int skippedOperations = overriddenOperations; - for (TranslogSnapshot translog : translogs) { - skippedOperations += translog.skippedOperations(); - } - return skippedOperations; - } - - @Override - public int overriddenOperations() { - return overriddenOperations; + return Arrays.stream(translogs).mapToInt(TranslogSnapshot::skippedOperations).sum() + overriddenOperations; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index d8acba635f822..63d21ffea158f 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -936,20 +936,12 @@ public interface Snapshot extends Closeable { /** * The number of operations have been skipped (overridden or trimmed) in the snapshot so far. + * Unlike {@link #totalOperations()}, this value is updated each time {@link #next()} is called. */ default int skippedOperations() { return 0; } - /** - * The number of operations have been overridden (eg. superseded) in the snapshot so far. - * If two operations have the same sequence number, the operation with a lower term will be overridden by the operation - * with a higher term. Unlike {@link #totalOperations()}, this value is updated each time after {@link #next()}) is called. - */ - default int overriddenOperations() { - return 0; - } - /** * Returns the next operation in the snapshot or null if we reached the end.
*/ @@ -985,11 +977,6 @@ public int skippedOperations() { return filteredOpsCount + delegate.skippedOperations(); } - @Override - public int overriddenOperations() { - return delegate.overriddenOperations(); - } - @Override public Operation next() throws IOException { Translog.Operation op; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java index af17918baacb9..353b2ccdd1cda 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperMergeValidatorTests.java @@ -36,13 +36,11 @@ public void testDuplicateFieldAliasAndObject() { FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - MapperMergeValidator.validateMapperStructure("type", + MapperMergeValidator.validateMapperStructure( singletonList(objectMapper), emptyList(), - singletonList(aliasMapper), - emptyMap(), - new FieldTypeLookup())); - assertEquals("Field [some.path] is defined both as an object and a field in [type]", e.getMessage()); + singletonList(aliasMapper))); + assertEquals("Field [some.path] is defined both as an object and a field.", e.getMessage()); } public void testFieldAliasWithNestedScope() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index e527f98f73c20..7314ecb1de7c2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -880,7 +880,7 @@ public void testIndexPrefixMapping() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(illegalMapping), MergeReason.MAPPING_UPDATE)); - assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice in [type]")); + assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice.")); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index 84f0dc31093a6..bd6a373f3cd9a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -151,14 +151,14 @@ public void testReuseMetaField() throws IOException { mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + assertTrue(e.getMessage().contains("Field [_id] is defined twice.")); } try { mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); fail(); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); + assertTrue(e.getMessage().contains("Field [_id] is defined twice.")); } } diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java 
b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index d24124a1048e9..1af143c1c98d7 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -584,7 +584,6 @@ public void testSeqNoCollision() throws Exception { assertThat(op2.seqNo(), equalTo(op1.seqNo())); assertThat(op2.primaryTerm(), greaterThan(op1.primaryTerm())); assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations)); - assertThat(snapshot.overriddenOperations(), equalTo(0)); assertThat(snapshot.skippedOperations(), equalTo(1)); } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 3eddeea2f2a8a..dd477777e7962 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -715,7 +715,6 @@ public void testSeqNoFilterSnapshot() throws Exception { Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, between(200, 300), between(300, 400)); // out range assertThat(filter, SnapshotMatchers.size(0)); assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations())); - assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations())); assertThat(filter.skippedOperations(), equalTo(snapshot.totalOperations())); } try (Translog.Snapshot snapshot = translog.newSnapshot()) { @@ -726,7 +725,6 @@ public void testSeqNoFilterSnapshot() throws Exception { Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo); assertThat(filter, SnapshotMatchers.containsOperationsInAnyOrder(selectedOps)); assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations())); - assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations())); assertThat(filter.skippedOperations(), equalTo(snapshot.skippedOperations() + operations.size() - selectedOps.size())); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java index 11d13d48b1dc5..13e62da090c3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java @@ -49,7 +49,7 @@ public Request(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); } - public Request() { + private Request() { } public Request(StreamInput in) throws IOException { @@ -113,7 +113,7 @@ protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameTransformAct public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { private boolean acknowledged; public Response(StreamInput in) throws IOException { - super(Collections.emptyList(), Collections.emptyList()); + super(in); readFrom(in); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java 
index 2d48980d07247..7759e7d3f1611 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsAction.java @@ -63,7 +63,7 @@ public Request(String id) { } } - public Request() {} + private Request() {} public Request(StreamInput in) throws IOException { super(in); @@ -149,7 +149,7 @@ public Response() { } public Response(StreamInput in) throws IOException { - super(Collections.emptyList(), Collections.emptyList()); + super(in); readFrom(in); } @@ -173,6 +173,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { List invalidTransforms = new ArrayList<>(); builder.startObject(); + toXContentCommon(builder, params); builder.field(DataFrameField.COUNT.getPreferredName(), transformConfigurations.size()); // XContentBuilder does not support passing the params object for Iterables builder.field(DataFrameField.TRANSFORMS.getPreferredName()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java index f24666d1a4a3a..47e922033072b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsAction.java @@ -55,7 +55,7 @@ public Request(String id) { } } - public Request() {} + private Request() {} public Request(StreamInput in) throws IOException { super(in); @@ -138,7 +138,7 @@ public Response() { } public Response(StreamInput in) throws IOException { - super(Collections.emptyList(), Collections.emptyList()); + super(in); readFrom(in); } @@ -161,6 +161,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + toXContentCommon(builder, params); builder.field(DataFrameField.COUNT.getPreferredName(), transformsStateAndStats.size()); builder.field(DataFrameField.TRANSFORMS.getPreferredName(), transformsStateAndStats); builder.endObject(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java index c5820e7ea694e..a9e9538288f89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformAction.java @@ -46,7 +46,7 @@ public Request(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); } - public Request() { + private Request() { } public Request(StreamInput in) throws IOException { @@ -108,7 +108,7 @@ public Response() { } public Response(StreamInput in) throws IOException { - super(Collections.emptyList(), Collections.emptyList()); + super(in); readFrom(in); } @@ -136,6 +136,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { builder.startObject(); + toXContentCommon(builder, params); builder.field("started", started); builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java index 7a4317a856548..250442b693794 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java @@ -56,7 +56,7 @@ public Request(String id, boolean waitForCompletion, @Nullable TimeValue timeout this.setTimeout(timeout == null ? DEFAULT_TIMEOUT : timeout); } - public Request() { + private Request() { this(null, false, null); } @@ -149,7 +149,7 @@ public Response() { } public Response(StreamInput in) throws IOException { - super(Collections.emptyList(), Collections.emptyList()); + super(in); readFrom(in); } @@ -177,6 +177,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + toXContentCommon(builder, params); builder.field("stopped", stopped); builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/IndexUpgradeCheckVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/IndexUpgradeCheckVersion.java index e09f73a688e57..298c8ac95c2e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/IndexUpgradeCheckVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/IndexUpgradeCheckVersion.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.core.upgrade; public final class IndexUpgradeCheckVersion { - public static final int UPRADE_VERSION = 6; + public static final int UPGRADE_VERSION = 6; private IndexUpgradeCheckVersion() {} diff --git a/x-pack/plugin/core/src/main/resources/logstash-management.json b/x-pack/plugin/core/src/main/resources/logstash-management.json index 1456b2d7b5e08..d9528238dc0fb 100644 --- a/x-pack/plugin/core/src/main/resources/logstash-management.json +++ b/x-pack/plugin/core/src/main/resources/logstash-management.json @@ -8,7 +8,7 @@ } }, "mappings" : { - "doc" : { + "_doc" : { "_meta": { "logstash-version": "${logstash.template.version}" }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/AbstractWireSerializingDataFrameTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/AbstractWireSerializingDataFrameTestCase.java new file mode 100644 index 0000000000000..e092bbbb768df --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/AbstractWireSerializingDataFrameTestCase.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.junit.Before; + +import static java.util.Collections.emptyList; + +public abstract class AbstractWireSerializingDataFrameTestCase extends AbstractWireSerializingTestCase { + /** + * Test case that ensures aggregation named objects are registered + */ + private NamedWriteableRegistry namedWriteableRegistry; + private NamedXContentRegistry namedXContentRegistry; + + @Before + public void registerAggregationNamedObjects() throws Exception { + // register aggregations as NamedWriteable + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); + + namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return namedWriteableRegistry; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java new file mode 100644 index 0000000000000..54501fde5cfe8 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction.Response; + +public class DeleteDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase { + @Override + protected Response createTestInstance() { + return new Response(randomBoolean()); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsActionResponseTests.java index 5f1c2c84d3743..516b791591679 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsActionResponseTests.java @@ -6,23 +6,23 @@ package org.elasticsearch.xpack.core.dataframe.action; +import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsAction.Response; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; -public class GetDataFrameTransformsActionResponseTests extends ESTestCase { +public class GetDataFrameTransformsActionResponseTests extends AbstractWireSerializingDataFrameTestCase { public void testInvalidTransforms() throws IOException { List transforms = new ArrayList<>(); @@ -66,4 +66,19 @@ public void testNoHeaderInResponse() throws IOException { assertEquals(null, XContentMapValues.extractValue("headers", transformsResponse.get(i))); } } + + @Override + protected Response createTestInstance() { + List configs = new ArrayList<>(); + for (int i = 0; i < randomInt(10); ++i) { + configs.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + } + + return new Response(configs); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsActionResponseTests.java new file mode 100644 index 0000000000000..4a293f42bb309 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/GetDataFrameTransformsStatsActionResponseTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Response; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStatsTests; + +import java.util.ArrayList; +import java.util.List; + +public class GetDataFrameTransformsStatsActionResponseTests extends AbstractWireSerializingDataFrameTestCase { + @Override + protected Response createTestInstance() { + List stats = new ArrayList<>(); + for (int i = 0; i < randomInt(10); ++i) { + stats.add(DataFrameTransformStateAndStatsTests.randomDataFrameTransformStateAndStats()); + } + + return new Response(stats); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformsActionResponseWireTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformsActionResponseWireTests.java new file mode 100644 index 0000000000000..df4fe1c14b9e8 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PreviewDataFrameTransformsActionResponseWireTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.xpack.core.dataframe.action.PreviewDataFrameTransformAction.Response; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class PreviewDataFrameTransformsActionResponseWireTests extends AbstractWireSerializingDataFrameTestCase { + + @Override + protected Response createTestInstance() { + int size = randomIntBetween(0, 10); + List> data = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + Map datum = new HashMap<>(); + Map entry = new HashMap<>(); + entry.put("value1", randomIntBetween(1, 100)); + datum.put(randomAlphaOfLength(10), entry); + data.add(datum); + } + return new Response(data); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformActionResponseTests.java new file mode 100644 index 0000000000000..d16b8fbf168fe --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformActionResponseTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.dataframe.action.PutDataFrameTransformAction.Response; + +public class PutDataFrameTransformActionResponseTests extends AbstractStreamableTestCase { + + @Override + protected Response createBlankInstance() { + return new Response(); + } + + @Override + protected Response createTestInstance() { + return new Response(randomBoolean()); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionRequestTests.java similarity index 87% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionRequestTests.java index c54d795a5e4c6..976db70c45f49 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionRequestTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformAction.Request; -public class StartDataFrameTransformActionTests extends AbstractWireSerializingTestCase { +public class StartDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionResponseTests.java new file mode 100644 index 0000000000000..d2cd377ffba04 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StartDataFrameTransformActionResponseTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformAction.Response; + +public class StartDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase { + + @Override + protected Response createTestInstance() { + return new Response(randomBoolean()); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionResponseTests.java new file mode 100644 index 0000000000000..afd5b6fa2dbca --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformActionResponseTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.xpack.core.dataframe.action.StopDataFrameTransformAction.Response; + +public class StopDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase { + + @Override + protected Response createTestInstance() { + return new Response(randomBoolean()); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } + +} diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/PolicyStepsRegistry.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/PolicyStepsRegistry.java index d753a5035f756..b1c1785cb1cf6 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/PolicyStepsRegistry.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/PolicyStepsRegistry.java @@ -39,6 +39,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -125,7 +126,7 @@ public LifecyclePolicyMetadata read(StreamInput in, String key) { List policyAsSteps = policyMetadata.getPolicy().toSteps(policyClient); if (policyAsSteps.isEmpty() == false) { firstStepMap.put(policyMetadata.getName(), policyAsSteps.get(0)); - final Map stepMapForPolicy = new HashMap<>(); + final Map stepMapForPolicy = new LinkedHashMap<>(); for (Step step : policyAsSteps) { assert ErrorStep.NAME.equals(step.getKey().getName()) == false : "unexpected error step in policy"; stepMapForPolicy.put(step.getKey(), step); diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java index cf70c16cdb0b9..d3ca4dd0098b7 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/Logstash.java @@ -10,6 +10,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.core.XPackPlugin; @@ -63,6 +64,8 @@ public UnaryOperator> getIndexTemplateMetaDat templates.keySet().removeIf(OLD_LOGSTASH_INDEX_NAME::equals); TemplateUtils.loadTemplateIntoMap("/" + LOGSTASH_TEMPLATE_FILE_NAME + ".json", templates, LOGSTASH_INDEX_TEMPLATE_NAME, Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN, LogManager.getLogger(Logstash.class)); + //internal representation of typeless templates requires the default "_doc" type, which is also required for internal templates + assert templates.get(LOGSTASH_INDEX_TEMPLATE_NAME).mappings().get(MapperService.SINGLE_MAPPING_NAME) != null; return templates; }; } diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java index 62a2829b9258c..102827f87f771 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java @@ -5,10 +5,13 @@ */ package org.elasticsearch.xpack.upgrade; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; @@ -18,7 +21,6 @@ import org.elasticsearch.xpack.core.upgrade.IndexUpgradeCheckVersion; import java.util.function.BiConsumer; -import java.util.function.Consumer; import java.util.function.Function; /** @@ -51,7 +53,17 @@ public IndexUpgradeCheck(String name, Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript) { this(name, actionRequired, client, clusterService, types, updateScript, - listener -> listener.onResponse(null), (t, listener) -> listener.onResponse(TransportResponse.Empty.INSTANCE)); + (cs, listener) -> { + Allocation clusterRoutingAllocation = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING + .get(cs.getMetaData().settings()); + if (Allocation.NONE == clusterRoutingAllocation) { + listener.onFailure(new ElasticsearchException( + "pre-upgrade check failed, please enable cluster routing allocation using setting [{}]", + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey())); + } else { + listener.onResponse(null); + } + }, (t, listener) -> listener.onResponse(TransportResponse.Empty.INSTANCE)); } /** @@ -69,11 +81,11 @@ public IndexUpgradeCheck(String name, public IndexUpgradeCheck(String name, Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript, - Consumer> preUpgrade, + BiConsumer> preUpgrade, BiConsumer> postUpgrade) { this.name = name; this.actionRequired = actionRequired; - this.reindexer = new InternalIndexReindexer<>(client, clusterService, IndexUpgradeCheckVersion.UPRADE_VERSION, updateScript, + this.reindexer = new InternalIndexReindexer<>(client, 
clusterService, IndexUpgradeCheckVersion.UPGRADE_VERSION, updateScript, types, preUpgrade, postUpgrade); } @@ -106,4 +118,9 @@ public void upgrade(TaskId task, IndexMetaData indexMetaData, ClusterState state ActionListener listener) { reindexer.upgrade(task, indexMetaData.getIndex().getName(), state, listener); } + + // pkg scope for testing + InternalIndexReindexer getInternalIndexReindexer() { + return reindexer; + } } diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java index 6ab920555bb0b..763fc7d92deb8 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.upgrade; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; @@ -15,6 +18,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -25,7 +29,6 @@ import org.elasticsearch.transport.TransportResponse; import java.util.function.BiConsumer; -import java.util.function.Consumer; import static org.elasticsearch.index.IndexSettings.same; @@ -39,17 +42,18 @@ * - Delete index .{name} and add alias .{name} to .{name}-6 */ public class InternalIndexReindexer { + private static final Logger logger = LogManager.getLogger(InternalIndexReindexer.class); private final Client client; private final ClusterService clusterService; private final Script transformScript; private final String[] types; private final int version; - private final Consumer> preUpgrade; + private final BiConsumer> preUpgrade; private final BiConsumer> postUpgrade; public InternalIndexReindexer(Client client, ClusterService clusterService, int version, Script transformScript, String[] types, - Consumer> preUpgrade, + BiConsumer> preUpgrade, BiConsumer> postUpgrade) { this.client = client; this.clusterService = clusterService; @@ -62,7 +66,7 @@ public InternalIndexReindexer(Client client, ClusterService clusterService, int public void upgrade(TaskId task, String index, ClusterState clusterState, ActionListener listener) { ParentTaskAssigningClient parentAwareClient = new ParentTaskAssigningClient(client, task); - preUpgrade.accept(ActionListener.wrap( + preUpgrade.accept(clusterState, ActionListener.wrap( t -> innerUpgrade(parentAwareClient, index, clusterState, ActionListener.wrap( response -> postUpgrade.accept(t, ActionListener.wrap( empty -> listener.onResponse(response), @@ -76,32 +80,61 @@ public void upgrade(TaskId task, String index, ClusterState clusterState, Action private void innerUpgrade(ParentTaskAssigningClient parentAwareClient, String index, ClusterState clusterState, ActionListener listener) { String newIndex = index + "-" + version; + logger.trace("upgrading index {} to new index {}", index, newIndex); try { 
checkMasterAndDataNodeVersion(clusterState); - parentAwareClient.admin().indices().prepareCreate(newIndex).execute(ActionListener.wrap(createIndexResponse -> - setReadOnlyBlock(index, ActionListener.wrap(setReadOnlyResponse -> - reindex(parentAwareClient, index, newIndex, ActionListener.wrap( - bulkByScrollResponse -> // Successful completion of reindexing - delete old index - removeReadOnlyBlock(parentAwareClient, index, ActionListener.wrap(unsetReadOnlyResponse -> - parentAwareClient.admin().indices().prepareAliases().removeIndex(index) - .addAlias(newIndex, index).execute(ActionListener.wrap(deleteIndexResponse -> - listener.onResponse(bulkByScrollResponse), listener::onFailure - )), listener::onFailure - )), - e -> // Something went wrong during reindexing - remove readonly flag and report the error - removeReadOnlyBlock(parentAwareClient, index, ActionListener.wrap(unsetReadOnlyResponse -> { - listener.onFailure(e); - }, e1 -> { - listener.onFailure(e); - })) - )), listener::onFailure - )), listener::onFailure - )); + parentAwareClient.admin().indices().prepareCreate(newIndex).execute(ActionListener.wrap(createIndexResponse -> { + setReadOnlyBlock(index, ActionListener.wrap( + setReadOnlyResponse -> reindex(parentAwareClient, index, newIndex, ActionListener.wrap(bulkByScrollResponse -> { + if ((bulkByScrollResponse.getBulkFailures() != null + && bulkByScrollResponse.getBulkFailures().isEmpty() == false) + || (bulkByScrollResponse.getSearchFailures() != null + && bulkByScrollResponse.getSearchFailures().isEmpty() == false)) { + ElasticsearchException ex = logAndThrowExceptionForFailures(bulkByScrollResponse); + removeReadOnlyBlockOnReindexFailure(parentAwareClient, index, listener, ex); + } else { + // Successful completion of reindexing - remove read only and delete old index + removeReadOnlyBlock(parentAwareClient, index, + ActionListener.wrap(unsetReadOnlyResponse -> parentAwareClient.admin().indices().prepareAliases() + .removeIndex(index).addAlias(newIndex, index) + .execute(ActionListener.wrap( + deleteIndexResponse -> listener.onResponse(bulkByScrollResponse), + listener::onFailure)), + listener::onFailure)); + } + }, e -> { + logger.error("error occurred while reindexing", e); + removeReadOnlyBlockOnReindexFailure(parentAwareClient, index, listener, e); + })), listener::onFailure)); + }, listener::onFailure)); } catch (Exception ex) { + logger.error("error occurred while upgrading index", ex); + removeReadOnlyBlockOnReindexFailure(parentAwareClient, index, listener, ex); listener.onFailure(ex); } } + private void removeReadOnlyBlockOnReindexFailure(ParentTaskAssigningClient parentAwareClient, String index, + ActionListener listener, Exception ex) { + removeReadOnlyBlock(parentAwareClient, index, ActionListener.wrap(unsetReadOnlyResponse -> { + listener.onFailure(ex); + }, e1 -> { + listener.onFailure(ex); + })); + } + + private ElasticsearchException logAndThrowExceptionForFailures(BulkByScrollResponse bulkByScrollResponse) { + String bulkFailures = (bulkByScrollResponse.getBulkFailures() != null) + ? Strings.collectionToCommaDelimitedString(bulkByScrollResponse.getBulkFailures()) + : ""; + String searchFailures = (bulkByScrollResponse.getSearchFailures() != null) + ? 
Strings.collectionToCommaDelimitedString(bulkByScrollResponse.getSearchFailures()) + : ""; + logger.error("error occurred while reindexing, bulk failures [{}], search failures [{}]", bulkFailures, searchFailures); + return new ElasticsearchException("error occurred while reindexing, bulk failures [{}], search failures [{}]", bulkFailures, + searchFailures); + } + private void checkMasterAndDataNodeVersion(ClusterState clusterState) { if (clusterState.nodes().getMinNodeVersion().before(Upgrade.UPGRADE_INTRODUCED)) { throw new IllegalStateException("All nodes should have at least version [" + Upgrade.UPGRADE_INTRODUCED + "] to upgrade"); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 3663d586159d9..c764966d1132c 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -96,7 +96,7 @@ public void testInternalUpgradePrePostChecks() throws Exception { } }, client(), internalCluster().clusterService(internalCluster().getMasterName()), Strings.EMPTY_ARRAY, null, - listener -> { + (cs, listener) -> { assertFalse(preUpgradeIsCalled.getAndSet(true)); assertFalse(postUpgradeIsCalled.get()); listener.onResponse(val); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java index 013680ee2d17b..9f9c7353ad62b 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java @@ -6,8 +6,11 @@ package org.elasticsearch.xpack.upgrade; import com.carrotsearch.hppc.cursors.ObjectCursor; + +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; @@ -19,6 +22,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; @@ -26,13 +30,16 @@ import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import 
org.elasticsearch.xpack.core.upgrade.IndexUpgradeCheckVersion; import java.util.ArrayList; import java.util.Arrays; @@ -45,10 +52,13 @@ import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.IsEqual.equalTo; +@ClusterScope(scope=Scope.TEST) public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { @Override @@ -77,13 +87,13 @@ protected Map, Object>> pluginScripts() { public void testUpgradeIndex() throws Exception { createTestIndex("test"); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); BulkByScrollResponse response = future.actionGet(); assertThat(response.getCreated(), equalTo(2L)); - SearchResponse searchResponse = client().prepareSearch("test-123").get(); + SearchResponse searchResponse = client().prepareSearch("test-" + IndexUpgradeCheckVersion.UPGRADE_VERSION).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -94,7 +104,7 @@ public void testUpgradeIndex() throws Exception { GetAliasesResponse aliasesResponse = client().admin().indices().prepareGetAliases("test").get(); assertThat(aliasesResponse.getAliases().size(), equalTo(1)); - List testAlias = aliasesResponse.getAliases().get("test-123"); + List testAlias = aliasesResponse.getAliases().get("test-" + IndexUpgradeCheckVersion.UPGRADE_VERSION); assertNotNull(testAlias); assertThat(testAlias.size(), equalTo(1)); assertThat(testAlias.get(0).alias(), equalTo("test")); @@ -102,8 +112,8 @@ public void testUpgradeIndex() throws Exception { public void testTargetIndexExists() throws Exception { createTestIndex("test"); - createTestIndex("test-123"); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); + createTestIndex("test-" + IndexUpgradeCheckVersion.UPGRADE_VERSION); + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, ResourceAlreadyExistsException.class); @@ -115,14 +125,14 @@ public void testTargetIndexExists() throws Exception { public void testTargetIndexExistsAsAlias() throws Exception { createTestIndex("test"); createTestIndex("test-foo"); - client().admin().indices().prepareAliases().addAlias("test-foo", "test-123").get(); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); + client().admin().indices().prepareAliases().addAlias("test-foo", "test-" + IndexUpgradeCheckVersion.UPGRADE_VERSION).get(); + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new 
TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, InvalidIndexNameException.class); // Make sure that the index is not marked as read-only - client().prepareIndex("test-123", "doc").setSource("foo", "bar").get(); + client().prepareIndex("test-" + IndexUpgradeCheckVersion.UPGRADE_VERSION, "doc").setSource("foo", "bar").get(); } public void testSourceIndexIsReadonly() throws Exception { @@ -130,7 +140,7 @@ public void testSourceIndexIsReadonly() throws Exception { try { Settings settings = Settings.builder().put(IndexMetaData.INDEX_READ_ONLY_SETTING.getKey(), true).build(); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get()); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, IllegalStateException.class); @@ -144,12 +154,30 @@ public void testSourceIndexIsReadonly() throws Exception { } } + public void testReindexingFailureWithClusterRoutingAllocationDisabled() throws Exception { + createTestIndex("test"); + + Settings settings = Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") + .build(); + ClusterUpdateSettingsResponse clusterUpdateResponse = client().admin().cluster().prepareUpdateSettings() + .setTransientSettings(settings).get(); + assertThat(clusterUpdateResponse.isAcknowledged(), is(true)); + assertThat(clusterUpdateResponse.getTransientSettings() + .get(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey()), is("none")); + + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); + PlainActionFuture future = PlainActionFuture.newFuture(); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> future.actionGet()); + assertThat(e.getMessage(), containsString( + "pre-upgrade check failed, please enable cluster routing allocation using setting [cluster.routing.allocation.enable]")); + } public void testReindexingFailure() throws Exception { createTestIndex("test"); // Make sure that the index is not marked as read-only client().prepareIndex("test", "doc").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("fail"), Strings.EMPTY_ARRAY); + InternalIndexReindexer reindexer = createIndexReindexer(script("fail"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, RuntimeException.class); @@ -161,7 +189,7 @@ public void testReindexingFailure() throws Exception { public void testMixedNodeVersion() throws Exception { createTestIndex("test"); - InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); + InternalIndexReindexer reindexer = createIndexReindexer(script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); reindexer.upgrade(new TaskId("abc", 123), "test", withRandomOldNode(), future); assertThrows(future, IllegalStateException.class); @@ -183,11 +211,9 @@ private Script script(String name) { 
return new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, name, new HashMap<>()); } - private InternalIndexReindexer createIndexReindexer(int version, Script transformScript, String[] types) { - return new InternalIndexReindexer(client(), internalCluster().clusterService(internalCluster().getMasterName()), - version, transformScript, types, voidActionListener -> voidActionListener.onResponse(null), - (aVoid, listener) -> listener.onResponse(TransportResponse.Empty.INSTANCE)); - + private InternalIndexReindexer createIndexReindexer(Script transformScript, String[] types) { + return new IndexUpgradeCheck("test", imd -> UpgradeActionRequired.UPGRADE, client(), + internalCluster().clusterService(internalCluster().getMasterName()), types, transformScript).getInternalIndexReindexer(); } private ClusterState clusterState() {
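
The MapperMergeValidator changes earlier in this diff drop the cross-type checks and keep only per-mapping uniqueness: an object path may not be defined twice, and a field name may neither repeat nor collide with an object path, with the error messages losing the `in [type]` suffix. Below is a minimal stand-alone sketch of that duplicate-detection logic using plain `java.util` collections; `FieldUniquenessSketch`, `validateNames`, and the sample names are illustrative assumptions, not the actual Elasticsearch classes.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative stand-in for the per-mapping uniqueness check: object paths must be
// unique, and a field name may neither repeat nor reuse an object path.
public final class FieldUniquenessSketch {

    static void validateNames(List<String> objectPaths, List<String> fieldNames) {
        Set<String> objectFullNames = new HashSet<>();
        for (String fullPath : objectPaths) {
            if (objectFullNames.add(fullPath) == false) {
                throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice.");
            }
        }

        Set<String> seenFields = new HashSet<>();
        for (String name : fieldNames) {
            if (objectFullNames.contains(name)) {
                throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field.");
            } else if (seenFields.add(name) == false) {
                throw new IllegalArgumentException("Field [" + name + "] is defined twice.");
            }
        }
    }

    public static void main(String[] args) {
        // Passes: no duplicates, no object/field collisions.
        validateNames(Arrays.asList("user", "user.address"), Arrays.asList("user.name", "user.age"));
        try {
            // Fails: "user.address" is both an object path and a field name.
            validateNames(Arrays.asList("user", "user.address"), Arrays.asList("user.address"));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```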
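
The new `*ResponseTests` classes in this diff all exercise the same wire round trip: build a random `Response`, write it out, read it back through the `Response(StreamInput)` constructor supplied by `instanceReader()`, and compare. The sketch below illustrates that round trip with plain `java.io` streams standing in for Elasticsearch's `StreamOutput`/`StreamInput`; `WireRoundTripSketch`, `FakeResponse`, and its single `acknowledged` field are illustrative assumptions, not the project's API.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Objects;

public final class WireRoundTripSketch {

    // Simplified stand-in for a transport response; the real classes extend BaseTasksResponse
    // and use StreamInput/StreamOutput, which are not reproduced here.
    static final class FakeResponse {
        final boolean acknowledged;

        FakeResponse(boolean acknowledged) {
            this.acknowledged = acknowledged;
        }

        // Mirrors the Response(StreamInput in) constructor pattern: read fields in write order.
        FakeResponse(DataInputStream in) throws IOException {
            this.acknowledged = in.readBoolean();
        }

        void writeTo(DataOutputStream out) throws IOException {
            out.writeBoolean(acknowledged);
        }

        @Override
        public boolean equals(Object other) {
            return other instanceof FakeResponse && ((FakeResponse) other).acknowledged == acknowledged;
        }

        @Override
        public int hashCode() {
            return Objects.hash(acknowledged);
        }
    }

    public static void main(String[] args) throws IOException {
        FakeResponse original = new FakeResponse(true);

        // Serialize, deserialize through the stream constructor, then compare: the same
        // shape that the AbstractWireSerializingDataFrameTestCase subclasses verify.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.writeTo(new DataOutputStream(bytes));
        FakeResponse copy = new FakeResponse(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        if (original.equals(copy) == false) {
            throw new AssertionError("round trip lost information");
        }
        System.out.println("round trip ok: acknowledged=" + copy.acknowledged);
    }
}
```

The symmetry of the two constructors is the point: whatever `writeTo` emits must be read back in the same order, which is why the tests randomize the instance rather than fixing a single value.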
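
The `IndexUpgradeCheck` change adds a pre-upgrade step that refuses to start reindexing when `cluster.routing.allocation.enable` is set to `none`, which the new `testReindexingFailureWithClusterRoutingAllocationDisabled` test asserts. A reduced sketch of that guard, assuming a plain settings map and callbacks in place of Elasticsearch's `ClusterState` and `ActionListener` (all names here are illustrative):

```java
import java.util.Map;
import java.util.function.Consumer;

// Simplified pre-upgrade guard: fail fast when shard allocation is disabled cluster-wide,
// otherwise signal that the upgrade may proceed.
public final class PreUpgradeCheckSketch {

    static final String ALLOCATION_ENABLE_KEY = "cluster.routing.allocation.enable";

    static void preUpgradeCheck(Map<String, String> clusterSettings,
                                Runnable onReady,
                                Consumer<Exception> onFailure) {
        String allocation = clusterSettings.getOrDefault(ALLOCATION_ENABLE_KEY, "all");
        if ("none".equals(allocation)) {
            onFailure.accept(new IllegalStateException(
                "pre-upgrade check failed, please enable cluster routing allocation using setting ["
                    + ALLOCATION_ENABLE_KEY + "]"));
        } else {
            onReady.run();
        }
    }

    public static void main(String[] args) {
        preUpgradeCheck(Map.of(ALLOCATION_ENABLE_KEY, "none"),
            () -> System.out.println("proceeding with reindex"),
            e -> System.out.println("blocked: " + e.getMessage()));
    }
}
```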