Skip to content

Commit

Permalink
Merge remote-tracking branch 'elastic/master' into watcher-stats-cce
Browse files Browse the repository at this point in the history
* elastic/master:
  Add pre-upgrade check to test cluster routing allocation is enabled (elastic#39340)
  Update logstash-management.json to use typeless template (elastic#38653)
  Small simplifications to mapping validation. (elastic#39777)
  Update distribution build instructions to reflect file names with OS/architecture classifiers. (elastic#39762)
  Give jspawnhelper execute permissions in bundled JDK (elastic#39787)
  Maintain step order for ILM trace logging (elastic#39522)
  [ML-DataFrame] fix wire serialization issues in data frame response objects (elastic#39790)
  fix index refresh in test within 20_mix_typeless_typeful (elastic#39198)
  Combine overriddenOps and skippedOps in translog (elastic#39771)
  • Loading branch information
jasontedor committed Mar 8, 2019
2 parents f7e329e + bae7e71 commit 34d9363
Show file tree
Hide file tree
Showing 36 changed files with 399 additions and 161 deletions.
10 changes: 5 additions & 5 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -202,14 +202,14 @@ Run all build commands from within the root directory:
cd elasticsearch/
```

To build a tar distribution, run this command:
To build a darwin-tar distribution, run this command:

```sh
./gradlew -p distribution/archives/tar assemble --parallel
./gradlew -p distribution/archives/darwin-tar assemble --parallel
```

You will find the distribution under:
`./distribution/archives/tar/build/distributions/`
`./distribution/archives/darwin-tar/build/distributions/`

To create all build artifacts (e.g., plugins and Javadocs) as well as
distributions in all formats, run this command:
Expand All @@ -219,10 +219,10 @@ distributions in all formats, run this command:
```

The package distributions (Debian and RPM) can be found under:
`./distribution/packages/(deb|rpm)/build/distributions/`
`./distribution/packages/(deb|rpm|oss-deb|oss-rpm)/build/distributions/`

The archive distributions (tar and zip) can be found under:
`./distribution/archives/(tar|zip)/build/distributions/`
`./distribution/archives/(darwin-tar|linux-tar|windows-zip|oss-darwin-tar|oss-linux-tar|oss-windows-zip)/build/distributions/`

### Running The Full Test Suite

Expand Down
2 changes: 1 addition & 1 deletion distribution/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
copySpec {
from project(':distribution').tasks.getByName("extract${platform.capitalize()}Jdk")
eachFile { FileCopyDetails details ->
if (details.relativePath.segments[-2] == 'bin') {
if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
details.mode = 0755
}
}
Expand Down
2 changes: 1 addition & 1 deletion distribution/packages/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ Closure commonPackageConfig(String type, boolean oss) {
String[] segments = fcp.relativePath.segments
for (int i = segments.length - 2; i > 2; --i) {
directory('/' + segments[0..i].join('/'), 0755)
if (segments[-2] == 'bin') {
if (segments[-2] == 'bin' || segments[-1] == 'jspawnhelper') {
fcp.mode = 0755
} else {
fcp.mode = 0644
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -400,7 +400,7 @@ public void testMultipleJoinFields() throws Exception {
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice."));
}

{
Expand All @@ -426,7 +426,7 @@ public void testMultipleJoinFields() throws Exception {
.endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice."));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,8 @@
"Implicitly create a typeless index while there is a typed template":

- skip:
version: "all"
reason: "awaits fix in #39198"
version: " - 6.99.99"
reason: needs typeless index operations to work on typed indices

- do:
indices.put_template:
Expand All @@ -124,6 +124,11 @@
index: test-1
body: { bar: 42 }

# ensures dynamic mapping update is visible to get_mapping
- do:
cluster.health:
wait_for_events: normal

- do:
indices.get_mapping:
include_type_name: true
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,37 +37,18 @@ class MapperMergeValidator {
* duplicate fields are present, and if the provided fields have already been
* defined with a different data type.
*
* @param type The mapping type, for use in error messages.
* @param objectMappers The newly added object mappers.
* @param fieldMappers The newly added field mappers.
* @param fieldAliasMappers The newly added field alias mappers.
* @param fullPathObjectMappers All object mappers, indexed by their full path.
* @param fieldTypes All field and field alias mappers, collected into a lookup structure.
*/
public static void validateMapperStructure(String type,
Collection<ObjectMapper> objectMappers,
public static void validateMapperStructure(Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers,
Map<String, ObjectMapper> fullPathObjectMappers,
FieldTypeLookup fieldTypes) {
checkFieldUniqueness(type, objectMappers, fieldMappers,
fieldAliasMappers, fullPathObjectMappers, fieldTypes);
checkObjectsCompatibility(objectMappers, fullPathObjectMappers);
}

private static void checkFieldUniqueness(String type,
Collection<ObjectMapper> objectMappers,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers,
Map<String, ObjectMapper> fullPathObjectMappers,
FieldTypeLookup fieldTypes) {

// first check within mapping
Collection<FieldAliasMapper> fieldAliasMappers) {
Set<String> objectFullNames = new HashSet<>();
for (ObjectMapper objectMapper : objectMappers) {
String fullPath = objectMapper.fullPath();
if (objectFullNames.add(fullPath) == false) {
throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]");
throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice.");
}
}

Expand All @@ -76,38 +57,11 @@ private static void checkFieldUniqueness(String type,
.forEach(mapper -> {
String name = mapper.name();
if (objectFullNames.contains(name)) {
throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]");
throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field.");
} else if (fieldNames.add(name) == false) {
throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]");
throw new IllegalArgumentException("Field [" + name + "] is defined twice.");
}
});

// then check other types
for (String fieldName : fieldNames) {
if (fullPathObjectMappers.containsKey(fieldName)) {
throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type
+ "] but this name is already used for an object in other types");
}
}

for (String objectPath : objectFullNames) {
if (fieldTypes.get(objectPath) != null) {
throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type
+ "] but this name is already used for a field in other types");
}
}
}

private static void checkObjectsCompatibility(Collection<ObjectMapper> objectMappers,
Map<String, ObjectMapper> fullPathObjectMappers) {
for (ObjectMapper newObjectMapper : objectMappers) {
ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
if (existingObjectMapper != null) {
// simulate a merge and ignore the result, we are just interested
// in exceptions here
existingObjectMapper.merge(newObjectMapper);
}
}
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -471,8 +471,7 @@ private synchronized Map<String, DocumentMapper> internalMerge(@Nullable Documen
Collections.addAll(fieldMappers, metadataMappers);
MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers, fieldAliasMappers);

MapperMergeValidator.validateMapperStructure(newMapper.type(), objectMappers, fieldMappers,
fieldAliasMappers, fullPathObjectMappers, fieldTypes);
MapperMergeValidator.validateMapperStructure(objectMappers, fieldMappers, fieldAliasMappers);
checkPartitionedIndexConstraints(newMapper);

// update lookup data-structures
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,16 +58,7 @@ public int totalOperations() {

@Override
public int skippedOperations() {
int skippedOperations = overriddenOperations;
for (TranslogSnapshot translog : translogs) {
skippedOperations += translog.skippedOperations();
}
return skippedOperations;
}

@Override
public int overriddenOperations() {
return overriddenOperations;
return Arrays.stream(translogs).mapToInt(TranslogSnapshot::skippedOperations).sum() + overriddenOperations;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -936,20 +936,12 @@ public interface Snapshot extends Closeable {

/**
     * The number of operations that have been skipped (overridden or trimmed) in the snapshot so far.
     * Unlike {@link #totalOperations()}, this value is updated each time {@link #next()} is called.
*/
default int skippedOperations() {
return 0;
}

/**
* The number of operations have been overridden (eg. superseded) in the snapshot so far.
* If two operations have the same sequence number, the operation with a lower term will be overridden by the operation
* with a higher term. Unlike {@link #totalOperations()}, this value is updated each time after {@link #next()}) is called.
*/
default int overriddenOperations() {
return 0;
}

/**
* Returns the next operation in the snapshot or <code>null</code> if we reached the end.
*/
Expand Down Expand Up @@ -985,11 +977,6 @@ public int skippedOperations() {
return filteredOpsCount + delegate.skippedOperations();
}

@Override
public int overriddenOperations() {
return delegate.overriddenOperations();
}

@Override
public Operation next() throws IOException {
Translog.Operation op;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,11 @@ public void testDuplicateFieldAliasAndObject() {
FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field");

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
MapperMergeValidator.validateMapperStructure("type",
MapperMergeValidator.validateMapperStructure(
singletonList(objectMapper),
emptyList(),
singletonList(aliasMapper),
emptyMap(),
new FieldTypeLookup()));
assertEquals("Field [some.path] is defined both as an object and a field in [type]", e.getMessage());
singletonList(aliasMapper)));
assertEquals("Field [some.path] is defined both as an object and a field.", e.getMessage());
}

public void testFieldAliasWithNestedScope() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -880,7 +880,7 @@ public void testIndexPrefixMapping() throws IOException {

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
indexService.mapperService().merge("type", new CompressedXContent(illegalMapping), MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice in [type]"));
assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice."));

}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,14 +151,14 @@ public void testReuseMetaField() throws IOException {
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]"));
assertTrue(e.getMessage().contains("Field [_id] is defined twice."));
}

try {
mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
fail();
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]"));
assertTrue(e.getMessage().contains("Field [_id] is defined twice."));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -584,7 +584,6 @@ public void testSeqNoCollision() throws Exception {
assertThat(op2.seqNo(), equalTo(op1.seqNo()));
assertThat(op2.primaryTerm(), greaterThan(op1.primaryTerm()));
assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations));
assertThat(snapshot.overriddenOperations(), equalTo(0));
assertThat(snapshot.skippedOperations(), equalTo(1));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -715,7 +715,6 @@ public void testSeqNoFilterSnapshot() throws Exception {
Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, between(200, 300), between(300, 400)); // out range
assertThat(filter, SnapshotMatchers.size(0));
assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations()));
assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations()));
assertThat(filter.skippedOperations(), equalTo(snapshot.totalOperations()));
}
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
Expand All @@ -726,7 +725,6 @@ public void testSeqNoFilterSnapshot() throws Exception {
Translog.Snapshot filter = new Translog.SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo);
assertThat(filter, SnapshotMatchers.containsOperationsInAnyOrder(selectedOps));
assertThat(filter.totalOperations(), equalTo(snapshot.totalOperations()));
assertThat(filter.overriddenOperations(), equalTo(snapshot.overriddenOperations()));
assertThat(filter.skippedOperations(), equalTo(snapshot.skippedOperations() + operations.size() - selectedOps.size()));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ public Request(String id) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
}

public Request() {
private Request() {
}

public Request(StreamInput in) throws IOException {
Expand Down Expand Up @@ -113,7 +113,7 @@ protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameTransformAct
public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {
private boolean acknowledged;
public Response(StreamInput in) throws IOException {
super(Collections.emptyList(), Collections.emptyList());
super(in);
readFrom(in);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public Request(String id) {
}
}

public Request() {}
private Request() {}

public Request(StreamInput in) throws IOException {
super(in);
Expand Down Expand Up @@ -149,7 +149,7 @@ public Response() {
}

public Response(StreamInput in) throws IOException {
super(Collections.emptyList(), Collections.emptyList());
super(in);
readFrom(in);
}

Expand All @@ -173,6 +173,7 @@ public void writeTo(StreamOutput out) throws IOException {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
List<String> invalidTransforms = new ArrayList<>();
builder.startObject();
toXContentCommon(builder, params);
builder.field(DataFrameField.COUNT.getPreferredName(), transformConfigurations.size());
// XContentBuilder does not support passing the params object for Iterables
builder.field(DataFrameField.TRANSFORMS.getPreferredName());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ public Request(String id) {
}
}

public Request() {}
private Request() {}

public Request(StreamInput in) throws IOException {
super(in);
Expand Down Expand Up @@ -138,7 +138,7 @@ public Response() {
}

public Response(StreamInput in) throws IOException {
super(Collections.emptyList(), Collections.emptyList());
super(in);
readFrom(in);
}

Expand All @@ -161,6 +161,7 @@ public void writeTo(StreamOutput out) throws IOException {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toXContentCommon(builder, params);
builder.field(DataFrameField.COUNT.getPreferredName(), transformsStateAndStats.size());
builder.field(DataFrameField.TRANSFORMS.getPreferredName(), transformsStateAndStats);
builder.endObject();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ public Request(String id) {
this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName());
}

public Request() {
private Request() {
}

public Request(StreamInput in) throws IOException {
Expand Down Expand Up @@ -108,7 +108,7 @@ public Response() {
}

public Response(StreamInput in) throws IOException {
super(Collections.emptyList(), Collections.emptyList());
super(in);
readFrom(in);
}

Expand Down Expand Up @@ -136,6 +136,7 @@ public void writeTo(StreamOutput out) throws IOException {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
toXContentCommon(builder, params);
builder.field("started", started);
builder.endObject();
return builder;
Expand Down
Loading

0 comments on commit 34d9363

Please sign in to comment.