remove 6.4.x version constants (#42127)
Relates to refactoring initiative #41164.
talevy authored May 28, 2019
1 parent ae78387 commit 4a08b3d
Showing 18 changed files with 90 additions and 181 deletions.
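The pattern removed throughout these files is the wire-version gate: serialization and mapping code that branches on whether the peer or index is older than 6.4.0. Since 7.x nodes are only wire-compatible back to the last 6.x minor, that branch is dead code and collapses to the unconditional form. A minimal sketch of the shape being deleted (the class and field names here are illustrative, not taken from the files in this commit):

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

// Illustrative only: the general shape of the gate deleted in this commit.
// Once no wire-compatible peer can be older than 6.4.0, the before(V_6_4_0)
// branch is unreachable and both methods keep only the else branch.
class VersionGateSketch {
    private String contextName; // stand-in field for whatever is serialized

    void readFrom(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_6_4_0)) { // gate removed by this commit
            in.readByte();                             // legacy placeholder byte
        } else {
            contextName = in.readString();
        }
    }

    void writeTo(StreamOutput out) throws IOException {
        if (out.getVersion().before(Version.V_6_4_0)) { // gate removed by this commit
            out.writeByte((byte) 0);
        } else {
            out.writeString(contextName);
        }
    }
}

The first hunk below is exactly this collapse applied to a readFrom/writeTo pair.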
@@ -30,7 +30,6 @@
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.RAMDirectory;
-import org.elasticsearch.Version;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
@@ -313,25 +312,16 @@ public ActionRequestValidationException validate() {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
script = new Script(in);
-if (in.getVersion().before(Version.V_6_4_0)) {
-byte scriptContextId = in.readByte();
-assert scriptContextId == 0;
-} else {
-context = fromScriptContextName(in.readString());
-contextSetup = in.readOptionalWriteable(ContextSetup::new);
-}
+context = fromScriptContextName(in.readString());
+contextSetup = in.readOptionalWriteable(ContextSetup::new);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
script.writeTo(out);
-if (out.getVersion().before(Version.V_6_4_0)) {
-out.writeByte((byte) 0);
-} else {
-out.writeString(context.name);
-out.writeOptionalWriteable(contextSetup);
-}
+out.writeString(context.name);
+out.writeOptionalWriteable(contextSetup);
}

// For testing only:
16 changes: 0 additions & 16 deletions server/src/main/java/org/elasticsearch/Version.java
@@ -46,14 +46,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
*/
public static final int V_EMPTY_ID = 0;
public static final Version V_EMPTY = new Version(V_EMPTY_ID, org.apache.lucene.util.Version.LATEST);
-public static final int V_6_4_0_ID = 6040099;
-public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
-public static final int V_6_4_1_ID = 6040199;
-public static final Version V_6_4_1 = new Version(V_6_4_1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
-public static final int V_6_4_2_ID = 6040299;
-public static final Version V_6_4_2 = new Version(V_6_4_2_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
-public static final int V_6_4_3_ID = 6040399;
-public static final Version V_6_4_3 = new Version(V_6_4_3_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
public static final int V_6_5_0_ID = 6050099;
public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0);
public static final int V_6_5_1_ID = 6050199;
@@ -136,14 +128,6 @@ public static Version fromId(int id) {
return V_6_5_1;
case V_6_5_0_ID:
return V_6_5_0;
-case V_6_4_3_ID:
-return V_6_4_3;
-case V_6_4_2_ID:
-return V_6_4_2;
-case V_6_4_1_ID:
-return V_6_4_1;
-case V_6_4_0_ID:
-return V_6_4_0;
case V_EMPTY_ID:
return V_EMPTY;
default:
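For orientation, the numeric IDs attached to these constants appear to encode the version as major * 1,000,000 + minor * 10,000 + revision * 100 plus a trailing build component (99 for released builds). This is a reading of the values above, not code from the commit:

// Hypothetical helper reproducing the ID scheme suggested by the constants above.
public class VersionIdSketch {
    static int id(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(6, 4, 0, 99)); // 6040099 -- the removed V_6_4_0_ID
        System.out.println(id(6, 5, 0, 99)); // 6050099 -- V_6_5_0_ID, which remains
    }
}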
@@ -54,7 +54,6 @@
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
-import org.elasticsearch.Version;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.settings.Settings;
@@ -193,15 +192,11 @@ public TextFieldMapper build(BuilderContext context) {
}
// Copy the index options of the main field to allow phrase queries on
// the prefix field.
-if (context.indexCreatedVersion().onOrAfter(Version.V_6_4_0)) {
-if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS) {
-// frequencies are not needed because prefix queries always use a constant score
-prefixFieldType.setIndexOptions(IndexOptions.DOCS);
-} else {
-prefixFieldType.setIndexOptions(fieldType.indexOptions());
-}
-} else if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
-prefixFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS) {
+// frequencies are not needed because prefix queries always use a constant score
+prefixFieldType.setIndexOptions(IndexOptions.DOCS);
+} else {
+prefixFieldType.setIndexOptions(fieldType.indexOptions());
}
if (fieldType.storeTermVectorOffsets()) {
prefixFieldType.setStoreTermVectorOffsets(true);
@@ -217,19 +217,15 @@ public Query toFilter(ClusterService clusterService, ShardSearchRequest request,

int shardId = request.shardId().id();
int numShards = context.getIndexSettings().getNumberOfShards();
-if (minNodeVersion.onOrAfter(Version.V_6_4_0) &&
-(request.preference() != null || request.indexRoutings().length > 0)) {
+if (request.preference() != null || request.indexRoutings().length > 0) {
GroupShardsIterator<ShardIterator> group = buildShardIterator(clusterService, request);
assert group.size() <= numShards : "index routing shards: " + group.size() +
" cannot be greater than total number of shards: " + numShards;
if (group.size() < numShards) {
-/**
+/*
* The routing of this request targets a subset of the shards of this index so we need to we retrieve
* the original {@link GroupShardsIterator} and compute the request shard id and number of
* shards from it.
-* This behavior has been added in {@link Version#V_6_4_0} so if there is another node in the cluster
-* with an older version we use the original shard id and number of shards in order to ensure that all
-* slices use the same numbers.
*/
numShards = group.size();
int ord = 0;
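The body elided below that comment recomputes the slice coordinates against the routed subset: numShards becomes group.size() and the effective shard id becomes the ordinal of the request's shard inside the group. A hedged sketch of that idea over a plain list of shard ids (the helper name and representation are mine, not the real GroupShardsIterator API):

// Sketch: position of the request's shard within the routed subset, used as
// the effective shard id for slicing when routing targets fewer shards.
static int ordinalWithinGroup(java.util.List<Integer> routedShardIds, int requestShardId) {
    int ord = 0;
    for (int routedShard : routedShardIds) {
        if (routedShard == requestShardId) {
            return ord;
        }
        ord++;
    }
    throw new IllegalStateException("request shard " + requestShardId + " is not in the routed group");
}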
@@ -28,8 +28,6 @@
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

-import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
-
public class CloseIndexRequestTests extends ESTestCase {

public void testSerialization() throws Exception {
@@ -54,7 +52,8 @@ public void testBwcSerialization() throws Exception {
{
final CloseIndexRequest request = randomRequest();
try (BytesStreamOutput out = new BytesStreamOutput()) {
-out.setVersion(randomVersionBetween(random(), Version.V_6_4_0, VersionUtils.getPreviousVersion(Version.V_7_2_0)));
+out.setVersion(VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
+VersionUtils.getPreviousVersion(Version.V_7_2_0)));
request.writeTo(out);

try (StreamInput in = out.bytes().streamInput()) {
@@ -77,7 +76,8 @@ public void testBwcSerialization() throws Exception {

final CloseIndexRequest deserializedRequest = new CloseIndexRequest();
try (StreamInput in = out.bytes().streamInput()) {
-in.setVersion(randomVersionBetween(random(), Version.V_6_4_0, VersionUtils.getPreviousVersion(Version.V_7_2_0)));
+in.setVersion(VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
+VersionUtils.getPreviousVersion(Version.V_7_2_0)));
deserializedRequest.readFrom(in);
}
assertEquals(sample.getParentTask(), deserializedRequest.getParentTask());
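The edit above only widens the random wire-version range; the shape of the round trip is unchanged: write with a randomly chosen version, read back with the same version, then compare fields. A condensed sketch of that pattern (the asserted field mirrors the getParentTask check above; other details are illustrative):

Version wireVersion = VersionUtils.randomVersionBetween(random(),
        VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_7_2_0));
try (BytesStreamOutput out = new BytesStreamOutput()) {
    out.setVersion(wireVersion);                 // writer side
    request.writeTo(out);
    try (StreamInput in = out.bytes().streamInput()) {
        in.setVersion(wireVersion);              // reader must match the writer's version
        CloseIndexRequest roundTripped = new CloseIndexRequest();
        roundTripped.readFrom(in);
        assertEquals(request.getParentTask(), roundTripped.getParentTask());
    }
}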
@@ -31,7 +31,6 @@
import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
import static org.elasticsearch.test.VersionUtils.maxCompatibleVersion;
import static org.elasticsearch.test.VersionUtils.randomCompatibleVersion;
-import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;

public class JoinTaskExecutorTests extends ESTestCase {
@@ -69,27 +68,25 @@ public void testPreventJoinClusterWithUnsupportedIndices() {

public void testPreventJoinClusterWithUnsupportedNodeVersions() {
DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
-final Version version = randomVersion(random());
+final Version version = randomCompatibleVersion(random(), Version.CURRENT);
builder.add(new DiscoveryNode(UUIDs.base64UUID(), buildNewFakeTransportAddress(), version));
builder.add(new DiscoveryNode(UUIDs.base64UUID(), buildNewFakeTransportAddress(), randomCompatibleVersion(random(), version)));
DiscoveryNodes nodes = builder.build();

final Version maxNodeVersion = nodes.getMaxNodeVersion();
final Version minNodeVersion = nodes.getMinNodeVersion();
-if (maxNodeVersion.onOrAfter(Version.V_7_0_0)) {
-final Version tooLow = getPreviousVersion(maxNodeVersion.minimumCompatibilityVersion());
-expectThrows(IllegalStateException.class, () -> {
-if (randomBoolean()) {
-JoinTaskExecutor.ensureNodesCompatibility(tooLow, nodes);
-} else {
-JoinTaskExecutor.ensureNodesCompatibility(tooLow, minNodeVersion, maxNodeVersion);
-}
-});
-}

-Version oldMajor = Version.V_6_4_0.minimumCompatibilityVersion();
-expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
+final Version tooLow = getPreviousVersion(maxNodeVersion.minimumCompatibilityVersion());
+expectThrows(IllegalStateException.class, () -> {
+if (randomBoolean()) {
+JoinTaskExecutor.ensureNodesCompatibility(tooLow, nodes);
+} else {
+JoinTaskExecutor.ensureNodesCompatibility(tooLow, minNodeVersion, maxNodeVersion);
+}
+});

+Version oldMajor = minNodeVersion.minimumCompatibilityVersion();
+expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion));

final Version minGoodVersion = maxNodeVersion.major == minNodeVersion.major ?
// we have to stick with the same major
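A hedged reading of what the reworked test exercises (simplified, not the actual JoinTaskExecutor logic): a joining node must be wire-compatible with the newest node already in the cluster, so any version older than maxNodeVersion.minimumCompatibilityVersion() is rejected, and ensureMajorVersionBarrier additionally keeps a node from the previous major out once the whole cluster has moved past it. The core predicate, as a sketch:

// Simplified wire-compatibility check, assuming the semantics described above.
static boolean wireCompatible(Version joining, Version maxInCluster) {
    return joining.onOrAfter(maxInCluster.minimumCompatibilityVersion());
}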
@@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper;

import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -32,6 +33,7 @@
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
+import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.io.UncheckedIOException;
@@ -695,4 +697,44 @@ protected boolean forbidPrivateIndexSettings() {
*/
return false;
}

+public void testReorderParent() throws IOException {
+String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+.startObject("nested1").field("type", "nested").endObject()
+.endObject().endObject().endObject());
+
+DocumentMapper docMapper = createIndex("test",
+Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(),
+VersionUtils.randomIndexCompatibleVersion(random())).build())
+.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+assertThat(docMapper.hasNestedObjects(), equalTo(true));
+ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
+assertThat(nested1Mapper.nested().isNested(), equalTo(true));
+
+ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
+BytesReference.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.field("field", "value")
+.startArray("nested1")
+.startObject()
+.field("field1", "1")
+.field("field2", "2")
+.endObject()
+.startObject()
+.field("field1", "3")
+.field("field2", "4")
+.endObject()
+.endArray()
+.endObject()),
+XContentType.JSON));
+
+assertThat(doc.docs().size(), equalTo(3));
+assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
+assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
+assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
+assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
+assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
+assertThat(doc.docs().get(2).get("field"), equalTo("value"));
+}
}
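The new test leans on the way nested documents are stored: each nested object becomes its own Lucene document, and the whole block is indexed with the children first and the root document last, which is why doc.docs() has the two nested1 entries at positions 0 and 1 and the parent at position 2. A background sketch in plain Lucene (not Elasticsearch code and not part of this commit) of that block ordering:

import java.util.Arrays;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;

public class NestedBlockSketch {
    public static void main(String[] args) throws Exception {
        try (IndexWriter writer = new IndexWriter(new RAMDirectory(),
                new IndexWriterConfig(new StandardAnalyzer()))) {
            Document child1 = new Document();
            child1.add(new StringField("nested1.field1", "1", Field.Store.YES));
            Document child2 = new Document();
            child2.add(new StringField("nested1.field1", "3", Field.Store.YES));
            Document parent = new Document();
            parent.add(new StringField("field", "value", Field.Store.YES));
            // One block with consecutive doc ids: children first, parent last.
            writer.addDocuments(Arrays.asList(child1, child2, parent));
        }
    }
}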
@@ -41,7 +41,6 @@
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@@ -670,11 +669,7 @@ public void testIndexPrefixIndexTypes() throws IOException {

FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
FieldType ft = prefix.fieldType;
-if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
-assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
-} else {
-assertEquals(IndexOptions.DOCS, ft.indexOptions());
-}
+assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
assertFalse(ft.storeTermVectors());
}

@@ -691,11 +686,7 @@ public void testIndexPrefixIndexTypes() throws IOException {

FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
FieldType ft = prefix.fieldType;
-if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
-assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
-} else {
-assertEquals(IndexOptions.DOCS, ft.indexOptions());
-}
+assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
assertTrue(ft.storeTermVectorOffsets());
}

@@ -712,11 +703,7 @@ public void testIndexPrefixIndexTypes() throws IOException {

FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
FieldType ft = prefix.fieldType;
-if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
-assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
-} else {
-assertEquals(IndexOptions.DOCS, ft.indexOptions());
-}
+assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
assertFalse(ft.storeTermVectorOffsets());
}
}
@@ -38,7 +38,6 @@
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
-import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -193,22 +192,13 @@ public void testToQueryInnerTermQuery() throws IOException {
final QueryShardContext context = createShardContext();
{
Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")).toQuery(context);
-if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
-assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
-FieldMaskingSpanQuery fieldQuery = (FieldMaskingSpanQuery) query;
-assertThat(fieldQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class));
-assertThat(fieldQuery.getField(), equalTo("prefix_field"));
-SpanTermQuery termQuery = (SpanTermQuery) fieldQuery.getMaskedQuery();
-assertThat(termQuery.getTerm().field(), equalTo("prefix_field._index_prefix"));
-assertThat(termQuery.getTerm().text(), equalTo("foo"));
-} else {
-assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
-SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query;
-assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class));
-PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery();
-assertThat(prefixQuery.getField(), equalTo("prefix_field"));
-assertThat(prefixQuery.getPrefix().text(), equalTo("foo"));
-}
+assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
+FieldMaskingSpanQuery fieldQuery = (FieldMaskingSpanQuery) query;
+assertThat(fieldQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class));
+assertThat(fieldQuery.getField(), equalTo("prefix_field"));
+SpanTermQuery termQuery = (SpanTermQuery) fieldQuery.getMaskedQuery();
+assertThat(termQuery.getTerm().field(), equalTo("prefix_field._index_prefix"));
+assertThat(termQuery.getTerm().text(), equalTo("foo"));
}

{
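The surviving branch asserts the rewrite that index prefixes enable: the span prefix on "foo" is answered by a single term in the hidden prefix_field._index_prefix subfield, wrapped in a FieldMaskingSpanQuery so it still reports prefix_field and can be combined with other span clauses on that field. Building that expected shape directly in Lucene looks roughly like this (a sketch, not code from the commit):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;

class PrefixSpanSketch {
    // The query shape the updated assertions expect for a prefix of "foo".
    static SpanQuery expectedRewrite() {
        return new FieldMaskingSpanQuery(
                new SpanTermQuery(new Term("prefix_field._index_prefix", "foo")),
                "prefix_field");
    }
}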
@@ -6,12 +6,9 @@
package org.elasticsearch.license;

import org.elasticsearch.ElasticsearchSecurityException;
-import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.rest.RestStatus;

-import java.util.stream.StreamSupport;
-
public class LicenseUtils {

public static final String EXPIRED_FEATURE_METADATA = "es.license.expired.feature";
@@ -58,13 +55,6 @@ public static boolean signatureNeedsUpdate(License license, DiscoveryNodes curre

public static int compatibleLicenseVersion(DiscoveryNodes currentNodes) {
assert License.VERSION_CRYPTO_ALGORITHMS == License.VERSION_CURRENT : "update this method when adding a new version";

-if (StreamSupport.stream(currentNodes.spliterator(), false)
-.allMatch(node -> node.getVersion().onOrAfter(Version.V_6_4_0))) {
-// License.VERSION_CRYPTO_ALGORITHMS was introduced in 6.4.0
-return License.VERSION_CRYPTO_ALGORITHMS;
-} else {
-return License.VERSION_START_DATE;
-}
+return License.VERSION_CRYPTO_ALGORITHMS;
}
}