Merge branch 'main' into test-cluster-mutable-file-config
elasticmachine authored Dec 3, 2024
2 parents 2509de5 + f5ff9c6 commit 2d88421
Showing 145 changed files with 2,608 additions and 1,221 deletions.
@@ -29,6 +29,8 @@
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.Input;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
@@ -47,6 +49,8 @@

public abstract class ElasticsearchBuildCompletePlugin implements Plugin<Project> {

private static final Logger log = LoggerFactory.getLogger(ElasticsearchBuildCompletePlugin.class);

@Inject
protected abstract FlowScope getFlowScope();

@@ -241,8 +245,11 @@ private static void createBuildArchiveTar(List<File> files, File projectDir, Fil
tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
for (Path path : files.stream().map(File::toPath).toList()) {
if (!Files.isRegularFile(path)) {
throw new IOException("Support only file!");
if (Files.exists(path) == false) {
log.warn("File disappeared before it could be added to CI archive: " + path);
continue;
} else if (!Files.isRegularFile(path)) {
throw new IOException("Support only file!: " + path);
}

long entrySize = Files.size(path);
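The hunk above changes createBuildArchiveTar so that a file deleted between collection and archiving no longer aborts the CI upload; it is logged and skipped instead. Below is a minimal, self-contained sketch of that pattern, assuming Apache Commons Compress and SLF4J on the classpath; the class and method names are illustrative, not the plugin's actual structure.

```java
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

class BuildArchiveSketch {
    private static final Logger log = LoggerFactory.getLogger(BuildArchiveSketch.class);

    // Build a .tar.gz of the given files, skipping any file that has disappeared
    // instead of failing the whole build-complete step.
    static void createTarGz(List<Path> files, Path projectDir, Path target) throws IOException {
        try (
            TarArchiveOutputStream tOut = new TarArchiveOutputStream(
                new GzipCompressorOutputStream(new BufferedOutputStream(Files.newOutputStream(target)))
            )
        ) {
            tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
            for (Path path : files) {
                if (Files.exists(path) == false) {
                    // e.g. a temporary log rotated away while the build was completing
                    log.warn("File disappeared before it could be added to CI archive: {}", path);
                    continue;
                } else if (Files.isRegularFile(path) == false) {
                    throw new IOException("Only regular files are supported: " + path);
                }
                TarArchiveEntry entry = new TarArchiveEntry(path.toFile(), projectDir.relativize(path).toString());
                tOut.putArchiveEntry(entry);
                Files.copy(path, tOut);
                tOut.closeArchiveEntry();
            }
            tOut.finish();
        }
    }
}
```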
5 changes: 5 additions & 0 deletions docs/changelog/116755.yaml
@@ -0,0 +1,5 @@
pr: 116755
summary: Smarter field caps with subscribable listener
area: ES|QL
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/116904.yaml
@@ -0,0 +1,5 @@
pr: 116904
summary: Add a not-master state for desired balance
area: Allocation
type: enhancement
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/117229.yaml
@@ -0,0 +1,6 @@
pr: 117229
summary: "In this pr, a 400 error is returned when _source / _seq_no / _feature /\
\ _nested_path / _field_names is requested, rather a 5xx"
area: Search
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/117572.yaml
@@ -0,0 +1,5 @@
pr: 117572
summary: Address and remove any references of RestApiVersion version 7
area: Search
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/117731.yaml
@@ -0,0 +1,5 @@
pr: 117731
summary: Add cluster level reduction
area: ES|QL
type: enhancement
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/117762.yaml
@@ -0,0 +1,6 @@
pr: 117762
summary: "Parse the contents of dynamic objects for [subobjects:false]"
area: Mapping
type: bug
issues:
- 117544
5 changes: 5 additions & 0 deletions docs/changelog/117865.yaml
@@ -0,0 +1,5 @@
pr: 117865
summary: Fix BWC for ES|QL cluster request
area: ES|QL
type: bug
issues: []
2 changes: 1 addition & 1 deletion docs/reference/watcher/actions/email.asciidoc
@@ -129,7 +129,7 @@ killed by firewalls or load balancers in-between.
| Name | Description
| `format` | Attaches the watch data, equivalent to specifying `attach_data`
in the watch configuration. Possible values are `json` or `yaml`.
Defaults to `json` if not specified.
Defaults to `yaml` if not specified.
|======
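The table above documents the `data` attachment's `format` option, whose documented default is corrected from `json` to `yaml`. As a hypothetical illustration in the repository's own YAML REST-test style (the watch id, action name, address, and schedule below are invented, not taken from this commit), an email action attaching the watch payload as a `data` attachment:

```yaml
- do:
    watcher.put_watch:
      id: "example-watch"
      body:
        trigger:
          schedule:
            interval: "1h"
        input:
          simple:
            status: "ok"
        actions:
          notify_ops:
            email:
              to: "ops@example.com"
              subject: "Watch data attached"
              attachments:
                watch_payload:
                  data:
                    format: "yaml"   # explicit here; leaving it unset uses the same default
```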


@@ -48,7 +48,7 @@ public String typeName() {

@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
throw new IllegalArgumentException("Cannot fetch values for internal field [" + typeName() + "].");
}

@Override
11 changes: 5 additions & 6 deletions muted-tests.yml
@@ -106,9 +106,6 @@ tests:
- class: org.elasticsearch.search.StressSearchServiceReaperIT
method: testStressReaper
issue: https://github.com/elastic/elasticsearch/issues/115816
- class: org.elasticsearch.search.SearchServiceTests
method: testWaitOnRefreshTimeout
issue: https://github.com/elastic/elasticsearch/issues/115935
- class: org.elasticsearch.search.SearchServiceTests
method: testParseSourceValidation
issue: https://github.com/elastic/elasticsearch/issues/115936
@@ -141,9 +138,6 @@
- class: org.elasticsearch.xpack.shutdown.NodeShutdownIT
method: testAllocationPreventedForRemoval
issue: https://github.com/elastic/elasticsearch/issues/116363
- class: org.elasticsearch.threadpool.SimpleThreadPoolIT
method: testThreadPoolMetrics
issue: https://github.com/elastic/elasticsearch/issues/108320
- class: org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT
method: testILMDownsampleRollingRestart
issue: https://github.com/elastic/elasticsearch/issues/114233
@@ -236,6 +230,11 @@
issue: https://github.com/elastic/elasticsearch/issues/117815
- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
issue: https://github.com/elastic/elasticsearch/issues/111319
- class: org.elasticsearch.validation.DotPrefixClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/117893
- class: org.elasticsearch.xpack.core.ml.search.SparseVectorQueryBuilderTests
method: testToQuery
issue: https://github.com/elastic/elasticsearch/issues/117904

# Examples:
#
1 change: 1 addition & 0 deletions rest-api-spec/build.gradle
@@ -66,4 +66,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task ->
task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode")
task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode")
task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode")
task.skipTest("search/520_fetch_fields/fetch _seq_no via fields", "error code is changed from 5xx to 400 in 9.0")
})
@@ -1177,3 +1177,121 @@ fetch geo_point:
- is_false: hits.hits.0.fields.message
- match: { hits.hits.0._source.message.foo: 10 }
- match: { hits.hits.0._source.message.foo\.bar: 20 }

---
root with subobjects false and dynamic false:
- requires:
cluster_features: mapper.fix_parsing_subobjects_false_dynamic_false
reason: bug fix

- do:
indices.create:
index: test
body:
mappings:
subobjects: false
dynamic: false
properties:
id:
type: integer
my.keyword.field:
type: keyword

- do:
bulk:
index: test
refresh: true
body:
- '{ "index": { } }'
- '{ "id": 1, "my": { "keyword.field": "abc" } }'
- match: { errors: false }

# indexing a field that would need dynamic mapping is still silently dropped (dynamic: false)
- do:
bulk:
index: test
refresh: true
body:
- '{ "index": { } }'
- '{ "id": 2, "my": { "random.field": "abc" } }'
- match: { errors: false }

- do:
search:
index: test
body:
sort: id
fields: [ "*" ]

- match: { hits.hits.0.fields: { my.keyword.field: [ abc ], id: [ 1 ] } }
- match: { hits.hits.1.fields: { id: [ 2 ] } }

- do:
search:
index: test
body:
query:
match:
my.keyword.field: abc

- match: { hits.total.value: 1 }

---
object with subobjects false and dynamic false:
- requires:
cluster_features: mapper.fix_parsing_subobjects_false_dynamic_false
reason: bug fix

- do:
indices.create:
index: test
body:
mappings:
properties:
my:
subobjects: false
dynamic: false
properties:
id:
type: integer
nested.keyword.field:
type: keyword

- do:
bulk:
index: test
refresh: true
body:
- '{ "index": { } }'
- '{ "id": 1, "my": { "nested": { "keyword.field": "abc" } } }'
- match: { errors: false }

# indexing a field that would need dynamic mapping is still silently dropped (dynamic: false)
- do:
bulk:
index: test
refresh: true
body:
- '{ "index": { } }'
- '{ "id": 2, "my": { "nested": { "random.field": "abc" } } }'
- match: { errors: false }

- do:
search:
index: test
body:
sort: id
fields: [ "*" ]

- match: { hits.hits.0.fields: { my.nested.keyword.field: [ abc ], id: [ 1 ] } }
- match: { hits.hits.1.fields: { id: [ 2 ] } }

- do:
search:
index: test
body:
query:
match:
my.nested.keyword.field: abc

- match: { hits.total.value: 1 }
@@ -128,18 +128,88 @@ fetch _seq_no via stored_fields:

---
fetch _seq_no via fields:
- requires:
cluster_features: ["meta_fetch_fields_error_code_changed"]
reason: The fields_api returns a 400 instead of a 5xx when _seq_no is requested via fields

- do:
catch: "request"
catch: bad_request
search:
index: test
body:
fields: [ _seq_no ]

# This should be `unauthorized` (401) or `forbidden` (403) or at least `bad request` (400)
# while instead it is mapped to an `internal_server_error (500)`
- match: { status: 500 }
- match: { error.root_cause.0.type: unsupported_operation_exception }
- match: { status: 400 }
- match: { error.root_cause.0.type: illegal_argument_exception }
- match: { error.root_cause.0.reason: "error fetching [_seq_no]: Cannot fetch values for internal field [_seq_no]." }

---
fetch _source via fields:
- requires:
cluster_features: ["meta_fetch_fields_error_code_changed"]
reason: The fields_api returns a 400 instead of a 5xx when _source is requested via fields

- do:
catch: bad_request
search:
index: test
body:
fields: [ _source ]

- match: { status: 400 }
- match: { error.root_cause.0.type: illegal_argument_exception }
- match: { error.root_cause.0.reason: "error fetching [_source]: Cannot fetch values for internal field [_source]." }

---
fetch _feature via fields:
- requires:
cluster_features: ["meta_fetch_fields_error_code_changed"]
reason: The fields_api returns a 400 instead of a 5xx when _feature is requested via fields

- do:
catch: bad_request
search:
index: test
body:
fields: [ _feature ]

- match: { status: 400 }
- match: { error.root_cause.0.type: illegal_argument_exception }
- match: { error.root_cause.0.reason: "error fetching [_feature]: Cannot fetch values for internal field [_feature]." }

---
fetch _nested_path via fields:
- requires:
cluster_features: ["meta_fetch_fields_error_code_changed"]
reason: The fields_api returns a 400 instead of a 5xx when _nested_path is requested via fields

- do:
catch: bad_request
search:
index: test
body:
fields: [ _nested_path ]

- match: { status: 400 }
- match: { error.root_cause.0.type: illegal_argument_exception }
- match: { error.root_cause.0.reason: "error fetching [_nested_path]: Cannot fetch values for internal field [_nested_path]." }

---
fetch _field_names via fields:
- requires:
cluster_features: ["meta_fetch_fields_error_code_changed"]
reason: The fields_api returns a 400 instead of a 5xx when _field_names is requested via fields

- do:
catch: bad_request
search:
index: test
body:
fields: [ _field_names ]

- match: { status: 400 }
- match: { error.root_cause.0.type: illegal_argument_exception }
- match: { error.root_cause.0.reason: "error fetching [_field_names]: Cannot fetch values for internal field [_field_names]." }

---
fetch fields with none stored_fields:
@@ -495,7 +495,7 @@ public void testScriptScore() throws ExecutionException, InterruptedException, I
for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) {
assertThat(
bucket.getSignificanceScore(),
is((double) bucket.getSubsetDf() + bucket.getSubsetSize() + bucket.getSupersetDf() + bucket.getSupersetSize())
is((double) bucket.getSubsetDf() + sigTerms.getSubsetSize() + bucket.getSupersetDf() + sigTerms.getSupersetSize())
);
}
}
@@ -167,10 +167,10 @@ public void testThreadPoolMetrics() throws Exception {
tps[0].forEach(stats -> {
Map<String, Long> threadPoolStats = List.of(
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, stats.completed()),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, (long) stats.active()),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, (long) stats.threads()),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, 0L),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, 0L),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, (long) stats.largest()),
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, (long) stats.queue())
Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, 0L)
).stream().collect(toUnmodifiableSortedMap(e -> stats.name() + e.getKey(), Entry::getValue));

Function<String, List<Long>> measurementExtractor = name -> {
4 changes: 2 additions & 2 deletions server/src/main/java/module-info.java
@@ -457,8 +457,8 @@
org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat,
org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat,
org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat,
org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat,
org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat;
org.elasticsearch.index.codec.vectors.es816.ES816BinaryQuantizedVectorsFormat,
org.elasticsearch.index.codec.vectors.es816.ES816HnswBinaryQuantizedVectorsFormat;

provides org.apache.lucene.codecs.Codec
with