From cbe7921b0af4d6ad721239ebd3ce2ca4f807d437 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 16 Aug 2024 11:30:19 -0400 Subject: [PATCH 01/16] Bump Project Reactor to 3.5.20 and Reactor Netty to 1.1.22 (#15262) Signed-off-by: Andriy Redko --- CHANGELOG.md | 2 ++ buildSrc/version.properties | 4 ++-- client/rest/licenses/reactor-core-3.5.19.jar.sha1 | 1 - client/rest/licenses/reactor-core-3.5.20.jar.sha1 | 1 + .../licenses/reactor-netty-core-1.1.21.jar.sha1 | 1 - .../licenses/reactor-netty-core-1.1.22.jar.sha1 | 1 + .../licenses/reactor-netty-http-1.1.21.jar.sha1 | 1 - .../licenses/reactor-netty-http-1.1.22.jar.sha1 | 1 + .../licenses/reactor-netty-core-1.1.21.jar.sha1 | 1 - .../licenses/reactor-netty-core-1.1.22.jar.sha1 | 1 + .../licenses/reactor-netty-http-1.1.21.jar.sha1 | 1 - .../licenses/reactor-netty-http-1.1.22.jar.sha1 | 1 + server/licenses/reactor-core-3.5.19.jar.sha1 | 1 - server/licenses/reactor-core-3.5.20.jar.sha1 | 1 + 14 files changed, 10 insertions(+), 8 deletions(-) delete mode 100644 client/rest/licenses/reactor-core-3.5.19.jar.sha1 create mode 100644 client/rest/licenses/reactor-core-3.5.20.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.1.21.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.1.22.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.1.21.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.1.22.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.21.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.22.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.21.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.22.jar.sha1 delete mode 100644 server/licenses/reactor-core-3.5.19.jar.sha1 create mode 100644 server/licenses/reactor-core-3.5.20.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 511dd77c24e22..6cf1de210f98f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.azure:azure-core` from 1.49.1 to 1.51.0 ([#15111](https://github.com/opensearch-project/OpenSearch/pull/15111)) - Bump `org.xerial.snappy:snappy-java` from 1.1.10.5 to 1.1.10.6 ([#15207](https://github.com/opensearch-project/OpenSearch/pull/15207)) - Bump `com.azure:azure-xml` from 1.0.0 to 1.1.0 ([#15206](https://github.com/opensearch-project/OpenSearch/pull/15206)) +- Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) +- Bump `reactor-netty` from 1.1.21 to 1.1.22 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 08c45ef058716..9d7bbf6f8f769 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -33,8 +33,8 @@ netty = 4.1.112.Final joda = 2.12.7 # project reactor -reactor_netty = 1.1.21 -reactor = 3.5.19 +reactor_netty = 1.1.22 +reactor = 3.5.20 # client dependencies httpclient5 = 5.2.3 diff --git a/client/rest/licenses/reactor-core-3.5.19.jar.sha1 b/client/rest/licenses/reactor-core-3.5.19.jar.sha1 deleted file mode 100644 
index 04b59d2faae04..0000000000000 --- a/client/rest/licenses/reactor-core-3.5.19.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1d49ce1d0df79f28d3927da5f4c46a895b94335f \ No newline at end of file diff --git a/client/rest/licenses/reactor-core-3.5.20.jar.sha1 b/client/rest/licenses/reactor-core-3.5.20.jar.sha1 new file mode 100644 index 0000000000000..0c80be89f66c8 --- /dev/null +++ b/client/rest/licenses/reactor-core-3.5.20.jar.sha1 @@ -0,0 +1 @@ +1fc0f91e2b93778a974339d2c24363d7f34f90b4 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.21.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.21.jar.sha1 deleted file mode 100644 index 21c16c7430016..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.1.21.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -acb98bd08107287c454ce74e7b1ed8e7a018a662 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.22.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.22.jar.sha1 new file mode 100644 index 0000000000000..cc894568c5760 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-core-1.1.22.jar.sha1 @@ -0,0 +1 @@ +08356b59b29f86e7142c9daca0434653a64ae64b \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.21.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.21.jar.sha1 deleted file mode 100644 index 648df22873d56..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.1.21.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b83542bb35630ef815b4177e3c670f62e952e695 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.22.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.22.jar.sha1 new file mode 100644 index 0000000000000..2402813f831ce --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.1.22.jar.sha1 @@ -0,0 +1 @@ +2faf64b3822b0512f15d72a325e2826eb8564413 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.21.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.21.jar.sha1 deleted file mode 100644 index 21c16c7430016..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.21.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -acb98bd08107287c454ce74e7b1ed8e7a018a662 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.22.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.22.jar.sha1 new file mode 100644 index 0000000000000..cc894568c5760 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.22.jar.sha1 @@ -0,0 +1 @@ +08356b59b29f86e7142c9daca0434653a64ae64b \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.21.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.21.jar.sha1 deleted file mode 100644 index 648df22873d56..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.21.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b83542bb35630ef815b4177e3c670f62e952e695 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.22.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.22.jar.sha1 new file mode 100644 index 0000000000000..2402813f831ce --- /dev/null +++ 
b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.22.jar.sha1 @@ -0,0 +1 @@ +2faf64b3822b0512f15d72a325e2826eb8564413 \ No newline at end of file diff --git a/server/licenses/reactor-core-3.5.19.jar.sha1 b/server/licenses/reactor-core-3.5.19.jar.sha1 deleted file mode 100644 index 04b59d2faae04..0000000000000 --- a/server/licenses/reactor-core-3.5.19.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1d49ce1d0df79f28d3927da5f4c46a895b94335f \ No newline at end of file diff --git a/server/licenses/reactor-core-3.5.20.jar.sha1 b/server/licenses/reactor-core-3.5.20.jar.sha1 new file mode 100644 index 0000000000000..0c80be89f66c8 --- /dev/null +++ b/server/licenses/reactor-core-3.5.20.jar.sha1 @@ -0,0 +1 @@ +1fc0f91e2b93778a974339d2c24363d7f34f90b4 \ No newline at end of file From a900a16ce8c076488324993ec963f6a6f0e9e269 Mon Sep 17 00:00:00 2001 From: Ruirui Zhang Date: Fri, 16 Aug 2024 10:59:07 -0700 Subject: [PATCH 02/16] Add Get QueryGroup API Logic (#14709) * Add Get QueryGroup API Logic Signed-off-by: Ruirui Zhang * add to changelog Signed-off-by: Ruirui Zhang * fix javadoc Signed-off-by: Ruirui Zhang * change GetQueryGroupAction NAME and add more tests Signed-off-by: Ruirui Zhang * add more unit tests Signed-off-by: Ruirui Zhang * fix spotlessApply Signed-off-by: Ruirui Zhang * addressed comments Signed-off-by: Ruirui Zhang * incorporate comments from create api PR Signed-off-by: Ruirui Zhang * use clustermanager to get the most recent querygroups Signed-off-by: Ruirui Zhang * address comments Signed-off-by: Ruirui Zhang * rebase with main Signed-off-by: Ruirui Zhang * add IT Signed-off-by: Ruirui Zhang * address comments Signed-off-by: Ruirui Zhang * fix IT Signed-off-by: Ruirui Zhang --- CHANGELOG.md | 1 + .../plugin/wlm/WorkloadManagementPlugin.java | 10 +- .../wlm/action/GetQueryGroupAction.java | 36 +++++ .../wlm/action/GetQueryGroupRequest.java | 64 ++++++++ .../wlm/action/GetQueryGroupResponse.java | 82 ++++++++++ .../action/TransportGetQueryGroupAction.java | 98 ++++++++++++ .../wlm/rest/RestGetQueryGroupAction.java | 68 ++++++++ .../service/QueryGroupPersistenceService.java | 20 +++ .../plugin/wlm/QueryGroupTestUtils.java | 7 +- .../wlm/action/GetQueryGroupRequestTests.java | 53 ++++++ .../action/GetQueryGroupResponseTests.java | 151 ++++++++++++++++++ .../TransportGetQueryGroupActionTests.java | 47 ++++++ .../QueryGroupPersistenceServiceTests.java | 54 +++++++ .../api/get_query_group_context.json | 25 +++ ...ate_query_group.yml => 10_query_group.yml} | 20 ++- .../cluster/metadata/QueryGroup.java | 11 +- 16 files changed, 738 insertions(+), 9 deletions(-) create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupAction.java create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequest.java create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponse.java create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupAction.java create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestGetQueryGroupAction.java create mode 100644 plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequestTests.java create mode 100644 plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponseTests.java create mode 100644
plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupActionTests.java create mode 100644 plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/get_query_group_context.json rename plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/{10_create_query_group.yml => 10_query_group.yml} (77%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cf1de210f98f..9fe700d7190fa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [Concurrent Segment Search] Support composite aggregations with scripting ([#15072](https://github.com/opensearch-project/OpenSearch/pull/15072)) - Add `rangeQuery` and `regexpQuery` for `constant_keyword` field type ([#14711](https://github.com/opensearch-project/OpenSearch/pull/14711)) - Add took time to request nodes stats ([#15054](https://github.com/opensearch-project/OpenSearch/pull/15054)) +- [Workload Management] Add Get QueryGroup API Logic ([#14709](https://github.com/opensearch-project/OpenSearch/pull/14709)) - [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897)) - Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153)) diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java index 80807f0d5bc37..6b4496af76dc3 100644 --- a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java @@ -18,8 +18,11 @@ import org.opensearch.common.settings.SettingsFilter; import org.opensearch.core.action.ActionResponse; import org.opensearch.plugin.wlm.action.CreateQueryGroupAction; +import org.opensearch.plugin.wlm.action.GetQueryGroupAction; import org.opensearch.plugin.wlm.action.TransportCreateQueryGroupAction; +import org.opensearch.plugin.wlm.action.TransportGetQueryGroupAction; import org.opensearch.plugin.wlm.rest.RestCreateQueryGroupAction; +import org.opensearch.plugin.wlm.rest.RestGetQueryGroupAction; import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.Plugin; @@ -41,7 +44,10 @@ public WorkloadManagementPlugin() {} @Override public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { - return List.of(new ActionPlugin.ActionHandler<>(CreateQueryGroupAction.INSTANCE, TransportCreateQueryGroupAction.class)); + return List.of( + new ActionPlugin.ActionHandler<>(CreateQueryGroupAction.INSTANCE, TransportCreateQueryGroupAction.class), + new ActionPlugin.ActionHandler<>(GetQueryGroupAction.INSTANCE, TransportGetQueryGroupAction.class) + ); } @Override @@ -54,7 +60,7 @@ public List<RestHandler> getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier<DiscoveryNodes> nodesInCluster ) { - return List.of(new RestCreateQueryGroupAction()); + return List.of(new RestCreateQueryGroupAction(), new RestGetQueryGroupAction()); } @Override diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupAction.java new file
mode 100644 index 0000000000000..0200185580f7d --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupAction.java @@ -0,0 +1,36 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.ActionType; + +/** + * Transport action to get QueryGroup + * + * @opensearch.experimental + */ +public class GetQueryGroupAction extends ActionType { + + /** + * An instance of GetQueryGroupAction + */ + public static final GetQueryGroupAction INSTANCE = new GetQueryGroupAction(); + + /** + * Name for GetQueryGroupAction + */ + public static final String NAME = "cluster:admin/opensearch/wlm/query_group/_get"; + + /** + * Default constructor + */ + private GetQueryGroupAction() { + super(NAME, GetQueryGroupResponse::new); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequest.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequest.java new file mode 100644 index 0000000000000..0524c615a84e7 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequest.java @@ -0,0 +1,64 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.support.clustermanager.ClusterManagerNodeReadRequest; +import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * Request for get QueryGroup + * + * @opensearch.experimental + */ +public class GetQueryGroupRequest extends ClusterManagerNodeReadRequest { + final String name; + + /** + * Default constructor for GetQueryGroupRequest + * @param name - name for the QueryGroup to get + */ + public GetQueryGroupRequest(String name) { + this.name = name; + } + + /** + * Constructor for GetQueryGroupRequest + * @param in - A {@link StreamInput} object + */ + public GetQueryGroupRequest(StreamInput in) throws IOException { + super(in); + name = in.readOptionalString(); + } + + @Override + public ActionRequestValidationException validate() { + if (name != null) { + QueryGroup.validateName(name); + } + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(name); + } + + /** + * Name getter + */ + public String getName() { + return name; + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponse.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponse.java new file mode 100644 index 0000000000000..547c501e6a28e --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponse.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible 
open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collection; + +/** + * Response for the get API for QueryGroup + * + * @opensearch.experimental + */ +public class GetQueryGroupResponse extends ActionResponse implements ToXContent, ToXContentObject { + private final Collection queryGroups; + private final RestStatus restStatus; + + /** + * Constructor for GetQueryGroupResponse + * @param queryGroups - The QueryGroup list to be fetched + * @param restStatus - The rest status of the request + */ + public GetQueryGroupResponse(final Collection queryGroups, RestStatus restStatus) { + this.queryGroups = queryGroups; + this.restStatus = restStatus; + } + + /** + * Constructor for GetQueryGroupResponse + * @param in - A {@link StreamInput} object + */ + public GetQueryGroupResponse(StreamInput in) throws IOException { + this.queryGroups = in.readList(QueryGroup::new); + restStatus = RestStatus.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(queryGroups); + RestStatus.writeTo(out, restStatus); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray("query_groups"); + for (QueryGroup group : queryGroups) { + group.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + /** + * queryGroups getter + */ + public Collection getQueryGroups() { + return queryGroups; + } + + /** + * restStatus getter + */ + public RestStatus getRestStatus() { + return restStatus; + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupAction.java new file mode 100644 index 0000000000000..51bb21b255511 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupAction.java @@ -0,0 +1,98 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeReadAction; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; +import org.opensearch.search.pipeline.SearchPipelineService; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.util.Collection; + +/** + * Transport action to get QueryGroup + * + * @opensearch.experimental + */ +public class TransportGetQueryGroupAction extends TransportClusterManagerNodeReadAction<GetQueryGroupRequest, GetQueryGroupResponse> { + private static final Logger logger = LogManager.getLogger(TransportGetQueryGroupAction.class); + + /** + * Constructor for TransportGetQueryGroupAction + * + * @param clusterService - a {@link ClusterService} object + * @param transportService - a {@link TransportService} object + * @param actionFilters - a {@link ActionFilters} object + * @param threadPool - a {@link ThreadPool} object + * @param indexNameExpressionResolver - a {@link IndexNameExpressionResolver} object + */ + @Inject + public TransportGetQueryGroupAction( + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + ThreadPool threadPool, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + GetQueryGroupAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + GetQueryGroupRequest::new, + indexNameExpressionResolver, + true + ); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected GetQueryGroupResponse read(StreamInput in) throws IOException { + return new GetQueryGroupResponse(in); + } + + @Override + protected ClusterBlockException checkBlock(GetQueryGroupRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + @Override + protected void clusterManagerOperation(GetQueryGroupRequest request, ClusterState state, ActionListener<GetQueryGroupResponse> listener) + throws Exception { + final String name = request.getName(); + final Collection<QueryGroup> resultGroups = QueryGroupPersistenceService.getFromClusterStateMetadata(name, state); + + if (resultGroups.isEmpty() && name != null && !name.isEmpty()) { + logger.warn("No QueryGroup exists with the provided name: {}", name); + throw new ResourceNotFoundException("No QueryGroup exists with the provided name: " + name); + } + listener.onResponse(new GetQueryGroupResponse(resultGroups, RestStatus.OK)); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestGetQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestGetQueryGroupAction.java new file mode 100644 index 0000000000000..c250bd2979e98 ---
/dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestGetQueryGroupAction.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.rest; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.plugin.wlm.action.GetQueryGroupAction; +import org.opensearch.plugin.wlm.action.GetQueryGroupRequest; +import org.opensearch.plugin.wlm.action.GetQueryGroupResponse; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.BytesRestResponse; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.RestResponse; +import org.opensearch.rest.action.RestResponseListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.GET; + +/** + * Rest action to get a QueryGroup + * + * @opensearch.experimental + */ +public class RestGetQueryGroupAction extends BaseRestHandler { + + /** + * Constructor for RestGetQueryGroupAction + */ + public RestGetQueryGroupAction() {} + + @Override + public String getName() { + return "get_query_group"; + } + + /** + * The list of {@link Route}s that this RestHandler is responsible for handling. + */ + @Override + public List<Route> routes() { + return List.of(new Route(GET, "_wlm/query_group/{name}"), new Route(GET, "_wlm/query_group/")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + final GetQueryGroupRequest getQueryGroupRequest = new GetQueryGroupRequest(request.param("name")); + return channel -> client.execute(GetQueryGroupAction.INSTANCE, getQueryGroupRequest, getQueryGroupResponse(channel)); + } + + private RestResponseListener<GetQueryGroupResponse> getQueryGroupResponse(final RestChannel channel) { + return new RestResponseListener<>(channel) { + @Override + public RestResponse buildResponse(final GetQueryGroupResponse response) throws Exception { + return new BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)); + } + }; + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java index b2164df561bf9..fe7080da78bbe 100644 --- a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java @@ -26,9 +26,11 @@ import org.opensearch.plugin.wlm.action.CreateQueryGroupResponse; import org.opensearch.search.ResourceType; +import java.util.Collection; import java.util.EnumMap; import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; /** * This class defines the functions for QueryGroup persistence @@ -192,6 +194,24 @@ private Map calculateTotalUsage(Map ex return map; } + + /** + * Get the QueryGroups with the specified name from cluster state + * @param name - the QueryGroup name we are getting + * @param currentState - current cluster state + */ + public static Collection<QueryGroup> getFromClusterStateMetadata(String name, ClusterState
currentState) { + final Map currentGroups = currentState.getMetadata().queryGroups(); + if (name == null || name.isEmpty()) { + return currentGroups.values(); + } + return currentGroups.values() + .stream() + .filter(group -> group.getName().equals(name)) + .findAny() + .stream() + .collect(Collectors.toList()); + } + /** * maxQueryGroupCount getter */ diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/QueryGroupTestUtils.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/QueryGroupTestUtils.java index fc324853d9b34..5ba1ad5334712 100644 --- a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/QueryGroupTestUtils.java +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/QueryGroupTestUtils.java @@ -23,6 +23,7 @@ import org.opensearch.threadpool.ThreadPool; import java.util.ArrayList; +import java.util.Collection; import java.util.Comparator; import java.util.HashSet; import java.util.List; @@ -130,8 +131,10 @@ public static Tuple preparePersisten return new Tuple(queryGroupPersistenceService, clusterState); } - public static void assertEqualQueryGroups(List listOne, List listTwo) { - assertEquals(listOne.size(), listTwo.size()); + public static void assertEqualQueryGroups(Collection collectionOne, Collection collectionTwo) { + assertEquals(collectionOne.size(), collectionTwo.size()); + List listOne = new ArrayList<>(collectionOne); + List listTwo = new ArrayList<>(collectionTwo); listOne.sort(Comparator.comparing(QueryGroup::getName)); listTwo.sort(Comparator.comparing(QueryGroup::getName)); for (int i = 0; i < listOne.size(); i++) { diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequestTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequestTests.java new file mode 100644 index 0000000000000..32b5f7ec9e2c3 --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupRequestTests.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.wlm.QueryGroupTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; + +public class GetQueryGroupRequestTests extends OpenSearchTestCase { + + /** + * Test case to verify the serialization and deserialization of GetQueryGroupRequest. + */ + public void testSerialization() throws IOException { + GetQueryGroupRequest request = new GetQueryGroupRequest(QueryGroupTestUtils.NAME_ONE); + assertEquals(QueryGroupTestUtils.NAME_ONE, request.getName()); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + GetQueryGroupRequest otherRequest = new GetQueryGroupRequest(streamInput); + assertEquals(request.getName(), otherRequest.getName()); + } + + /** + * Test case to verify the serialization and deserialization of GetQueryGroupRequest when name is null. 
+ */ + public void testSerializationWithNull() throws IOException { + GetQueryGroupRequest request = new GetQueryGroupRequest((String) null); + assertNull(request.getName()); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + GetQueryGroupRequest otherRequest = new GetQueryGroupRequest(streamInput); + assertEquals(request.getName(), otherRequest.getName()); + } + + /** + * Test case to verify the validation function of GetQueryGroupRequest + */ + public void testValidation() { + GetQueryGroupRequest request = new GetQueryGroupRequest("a".repeat(51)); + assertThrows(IllegalArgumentException.class, request::validate); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponseTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponseTests.java new file mode 100644 index 0000000000000..774f4b2d8db52 --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/GetQueryGroupResponseTests.java @@ -0,0 +1,151 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.plugin.wlm.QueryGroupTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.mockito.Mockito.mock; + +public class GetQueryGroupResponseTests extends OpenSearchTestCase { + + /** + * Test case to verify the serialization and deserialization of GetQueryGroupResponse. + */ + public void testSerializationSingleQueryGroup() throws IOException { + List<QueryGroup> list = new ArrayList<>(); + list.add(QueryGroupTestUtils.queryGroupOne); + GetQueryGroupResponse response = new GetQueryGroupResponse(list, RestStatus.OK); + assertEquals(response.getQueryGroups(), list); + + BytesStreamOutput out = new BytesStreamOutput(); + response.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + + GetQueryGroupResponse otherResponse = new GetQueryGroupResponse(streamInput); + assertEquals(response.getRestStatus(), otherResponse.getRestStatus()); + QueryGroupTestUtils.assertEqualQueryGroups(response.getQueryGroups(), otherResponse.getQueryGroups()); + } + + /** + * Test case to verify the serialization and deserialization of GetQueryGroupResponse when the result contains multiple QueryGroups.
+ */ + public void testSerializationMultipleQueryGroup() throws IOException { + GetQueryGroupResponse response = new GetQueryGroupResponse(QueryGroupTestUtils.queryGroupList(), RestStatus.OK); + assertEquals(response.getQueryGroups(), QueryGroupTestUtils.queryGroupList()); + + BytesStreamOutput out = new BytesStreamOutput(); + response.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + + GetQueryGroupResponse otherResponse = new GetQueryGroupResponse(streamInput); + assertEquals(response.getRestStatus(), otherResponse.getRestStatus()); + assertEquals(2, otherResponse.getQueryGroups().size()); + QueryGroupTestUtils.assertEqualQueryGroups(response.getQueryGroups(), otherResponse.getQueryGroups()); + } + + /** + * Test case to verify the serialization and deserialization of GetQueryGroupResponse when the result is empty. + */ + public void testSerializationNull() throws IOException { + List list = new ArrayList<>(); + GetQueryGroupResponse response = new GetQueryGroupResponse(list, RestStatus.OK); + assertEquals(response.getQueryGroups(), list); + + BytesStreamOutput out = new BytesStreamOutput(); + response.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + + GetQueryGroupResponse otherResponse = new GetQueryGroupResponse(streamInput); + assertEquals(response.getRestStatus(), otherResponse.getRestStatus()); + assertEquals(0, otherResponse.getQueryGroups().size()); + } + + /** + * Test case to verify the toXContent of GetQueryGroupResponse. + */ + public void testToXContentGetSingleQueryGroup() throws IOException { + List queryGroupList = new ArrayList<>(); + queryGroupList.add(QueryGroupTestUtils.queryGroupOne); + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + GetQueryGroupResponse response = new GetQueryGroupResponse(queryGroupList, RestStatus.OK); + String actual = response.toXContent(builder, mock(ToXContent.Params.class)).toString(); + String expected = "{\n" + + " \"query_groups\" : [\n" + + " {\n" + + " \"_id\" : \"AgfUO5Ja9yfsYlONlYi3TQ==\",\n" + + " \"name\" : \"query_group_one\",\n" + + " \"resiliency_mode\" : \"monitor\",\n" + + " \"updated_at\" : 4513232413,\n" + + " \"resource_limits\" : {\n" + + " \"memory\" : 0.3\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + assertEquals(expected, actual); + } + + /** + * Test case to verify the toXContent of GetQueryGroupResponse when the result contains multiple QueryGroups. 
+ */ + public void testToXContentGetMultipleQueryGroup() throws IOException { + List queryGroupList = new ArrayList<>(); + queryGroupList.add(QueryGroupTestUtils.queryGroupOne); + queryGroupList.add(QueryGroupTestUtils.queryGroupTwo); + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + GetQueryGroupResponse response = new GetQueryGroupResponse(queryGroupList, RestStatus.OK); + String actual = response.toXContent(builder, mock(ToXContent.Params.class)).toString(); + String expected = "{\n" + + " \"query_groups\" : [\n" + + " {\n" + + " \"_id\" : \"AgfUO5Ja9yfsYlONlYi3TQ==\",\n" + + " \"name\" : \"query_group_one\",\n" + + " \"resiliency_mode\" : \"monitor\",\n" + + " \"updated_at\" : 4513232413,\n" + + " \"resource_limits\" : {\n" + + " \"memory\" : 0.3\n" + + " }\n" + + " },\n" + + " {\n" + + " \"_id\" : \"G5iIqHy4g7eK1qIAAAAIH53=1\",\n" + + " \"name\" : \"query_group_two\",\n" + + " \"resiliency_mode\" : \"monitor\",\n" + + " \"updated_at\" : 4513232415,\n" + + " \"resource_limits\" : {\n" + + " \"memory\" : 0.6\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + assertEquals(expected, actual); + } + + /** + * Test case to verify toXContent of GetQueryGroupResponse when the result contains zero QueryGroup. + */ + public void testToXContentGetZeroQueryGroup() throws IOException { + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + GetQueryGroupResponse otherResponse = new GetQueryGroupResponse(new ArrayList<>(), RestStatus.OK); + String actual = otherResponse.toXContent(builder, mock(ToXContent.Params.class)).toString(); + String expected = "{\n" + " \"query_groups\" : [ ]\n" + "}"; + assertEquals(expected, actual); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupActionTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupActionTests.java new file mode 100644 index 0000000000000..755b11a5f4b89 --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportGetQueryGroupActionTests.java @@ -0,0 +1,47 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.core.action.ActionListener; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_NONE_EXISTED; +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_ONE; +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.clusterState; +import static org.mockito.Mockito.mock; + +public class TransportGetQueryGroupActionTests extends OpenSearchTestCase { + + /** + * Test case for ClusterManagerOperation function + */ + @SuppressWarnings("unchecked") + public void testClusterManagerOperation() throws Exception { + GetQueryGroupRequest getQueryGroupRequest1 = new GetQueryGroupRequest(NAME_NONE_EXISTED); + GetQueryGroupRequest getQueryGroupRequest2 = new GetQueryGroupRequest(NAME_ONE); + TransportGetQueryGroupAction transportGetQueryGroupAction = new TransportGetQueryGroupAction( + mock(ClusterService.class), + mock(TransportService.class), + mock(ActionFilters.class), + mock(ThreadPool.class), + mock(IndexNameExpressionResolver.class) + ); + assertThrows( + ResourceNotFoundException.class, + () -> transportGetQueryGroupAction.clusterManagerOperation(getQueryGroupRequest1, clusterState(), mock(ActionListener.class)) + ); + transportGetQueryGroupAction.clusterManagerOperation(getQueryGroupRequest2, clusterState(), mock(ActionListener.class)); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java index 533c98b44685d..2aa3b9e168852 100644 --- a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java @@ -29,6 +29,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; import org.mockito.ArgumentCaptor; @@ -42,6 +44,7 @@ import static org.opensearch.plugin.wlm.QueryGroupTestUtils.assertEqualQueryGroups; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.clusterSettings; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.clusterSettingsSet; +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.clusterState; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.preparePersistenceServiceSetup; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupList; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupOne; @@ -244,4 +247,55 @@ public void testPersistInClusterStateMetadataFailure() { queryGroupPersistenceService.persistInClusterStateMetadata(queryGroupOne, listener); verify(listener).onFailure(any(RuntimeException.class)); } + + /** + * Tests getting a single QueryGroup + */ + public void testGetSingleQueryGroup() { + Collection groupsCollections = QueryGroupPersistenceService.getFromClusterStateMetadata(NAME_ONE, clusterState()); + List groups = new ArrayList<>(groupsCollections); + assertEquals(1, groups.size()); + QueryGroup 
queryGroup = groups.get(0); + List listOne = new ArrayList<>(); + List listTwo = new ArrayList<>(); + listOne.add(QueryGroupTestUtils.queryGroupOne); + listTwo.add(queryGroup); + QueryGroupTestUtils.assertEqualQueryGroups(listOne, listTwo); + } + + /** + * Tests getting all QueryGroups + */ + public void testGetAllQueryGroups() { + assertEquals(2, QueryGroupTestUtils.clusterState().metadata().queryGroups().size()); + Collection groupsCollections = QueryGroupPersistenceService.getFromClusterStateMetadata(null, clusterState()); + List res = new ArrayList<>(groupsCollections); + assertEquals(2, res.size()); + Set currentNAME = res.stream().map(QueryGroup::getName).collect(Collectors.toSet()); + assertTrue(currentNAME.contains(QueryGroupTestUtils.NAME_ONE)); + assertTrue(currentNAME.contains(QueryGroupTestUtils.NAME_TWO)); + QueryGroupTestUtils.assertEqualQueryGroups(QueryGroupTestUtils.queryGroupList(), res); + } + + /** + * Tests getting a QueryGroup with invalid name + */ + public void testGetNonExistedQueryGroups() { + Collection groupsCollections = QueryGroupPersistenceService.getFromClusterStateMetadata( + NAME_NONE_EXISTED, + clusterState() + ); + List groups = new ArrayList<>(groupsCollections); + assertEquals(0, groups.size()); + } + + /** + * Tests setting maxQueryGroupCount + */ + public void testMaxQueryGroupCount() { + assertThrows(IllegalArgumentException.class, () -> QueryGroupTestUtils.queryGroupPersistenceService().setMaxQueryGroupCount(-1)); + QueryGroupPersistenceService queryGroupPersistenceService = QueryGroupTestUtils.queryGroupPersistenceService(); + queryGroupPersistenceService.setMaxQueryGroupCount(50); + assertEquals(50, queryGroupPersistenceService.getMaxQueryGroupCount()); + } } diff --git a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/get_query_group_context.json b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/get_query_group_context.json new file mode 100644 index 0000000000000..e0d552be616b2 --- /dev/null +++ b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/get_query_group_context.json @@ -0,0 +1,25 @@ +{ + "get_query_group_context": { + "stability": "experimental", + "url": { + "paths": [ + { + "path": "/_wlm/query_group", + "methods": ["GET"], + "parts": {} + }, + { + "path": "/_wlm/query_group/{name}", + "methods": ["GET"], + "parts": { + "name": { + "type": "string", + "description": "QueryGroup name" + } + } + } + ] + }, + "params":{} + } +} diff --git a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_create_query_group.yml b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml similarity index 77% rename from plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_create_query_group.yml rename to plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml index ae82a8146e9cd..a22dfa2f4477e 100644 --- a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_create_query_group.yml +++ b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml @@ -1,4 +1,4 @@ -"test create QueryGroup API": +"test CRUD Operations for QueryGroup API ": - skip: version: " - 2.16.99" reason: "QueryGroup WorkloadManagement feature was added in 2.17" @@ -20,6 +20,15 @@ - match: { resource_limits.cpu: 0.4 } - match: { resource_limits.memory: 0.2 } + - do: + get_query_group_context: + name: "analytics" + 
+ - match: { query_groups.0.name: "analytics" } + - match: { query_groups.0.resiliency_mode: "monitor" } + - match: { query_groups.0.resource_limits.cpu: 0.4 } + - match: { query_groups.0.resource_limits.memory: 0.2 } + - do: catch: /illegal_argument_exception/ create_query_group_context: @@ -88,3 +97,12 @@ - match: { resiliency_mode: "monitor" } - match: { resource_limits.cpu: 0.35 } - match: { resource_limits.memory: 0.25 } + + - do: + get_query_group_context: + name: "analytics2" + + - match: { query_groups.0.name: "analytics2" } + - match: { query_groups.0.resiliency_mode: "monitor" } + - match: { query_groups.0.resource_limits.cpu: 0.35 } + - match: { query_groups.0.resource_limits.memory: 0.25 } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/QueryGroup.java b/server/src/main/java/org/opensearch/cluster/metadata/QueryGroup.java index 6ab11b1d6f150..9b5c6bc2369a6 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/QueryGroup.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/QueryGroup.java @@ -62,10 +62,7 @@ public QueryGroup(String name, String _id, ResiliencyMode resiliencyMode, Map MAX_CHARS_ALLOWED_IN_NAME || name.isEmpty()) { - throw new IllegalArgumentException("QueryGroup.name shouldn't be empty or more than 50 chars long"); - } + validateName(name); if (resourceLimits.isEmpty()) { throw new IllegalArgumentException("QueryGroup.resourceLimits should at least have 1 resource limit"); @@ -111,6 +108,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(updatedAtInMillis); } + public static void validateName(String name) { + if (name == null || name.isEmpty() || name.length() > MAX_CHARS_ALLOWED_IN_NAME) { + throw new IllegalArgumentException("QueryGroup.name shouldn't be null, empty or more than 50 chars long"); + } + } + private void validateResourceLimits(Map resourceLimits) { for (Map.Entry resource : resourceLimits.entrySet()) { Double threshold = resource.getValue(); From 9661e8db62e8897cb53b0bc1860ead43b28dfa21 Mon Sep 17 00:00:00 2001 From: Matthew Ridehalgh <9155962+mridehalgh@users.noreply.github.com> Date: Fri, 16 Aug 2024 20:11:13 +0100 Subject: [PATCH 03/16] fix: MinHash token filter parameters not working (#15233) * fix: minhash configuration Signed-off-by: Matt Ridehalgh * style: linting Signed-off-by: Matt Ridehalgh * chore: update CHANGELOG.md Signed-off-by: Matt Ridehalgh --------- Signed-off-by: Matt Ridehalgh --- CHANGELOG.md | 3 +- .../common/MinHashTokenFilterFactory.java | 4 +- .../common/MinHashFilterFactoryTests.java | 66 +++++++++++++++---- 3 files changed, 58 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fe700d7190fa..5103d978b361a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,7 +34,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.azure:azure-core` from 1.49.1 to 1.51.0 ([#15111](https://github.com/opensearch-project/OpenSearch/pull/15111)) - Bump `org.xerial.snappy:snappy-java` from 1.1.10.5 to 1.1.10.6 ([#15207](https://github.com/opensearch-project/OpenSearch/pull/15207)) - Bump `com.azure:azure-xml` from 1.0.0 to 1.1.0 ([#15206](https://github.com/opensearch-project/OpenSearch/pull/15206)) -- Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) +- Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) - Bump `reactor-netty` from 1.1.21 to 1.1.22 
([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) ### Changed @@ -53,6 +53,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix array_index_out_of_bounds_exception when indexing documents with field name containing only dot ([#15126](https://github.com/opensearch-project/OpenSearch/pull/15126)) - Fixed array field name omission in flat_object function for nested JSON ([#13620](https://github.com/opensearch-project/OpenSearch/pull/13620)) - Fix range aggregation optimization ignoring top level queries ([#15194](https://github.com/opensearch-project/OpenSearch/pull/15194)) +- Fix incorrect parameter names in MinHash token filter configuration handling ([#15233](https://github.com/opensearch-project/OpenSearch/pull/15233)) ### Security diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java index e76354ae3a765..40655b84794d5 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java @@ -65,10 +65,10 @@ private Map convertSettings(Settings settings) { if (settings.hasValue("hash_count")) { settingMap.put("hashCount", settings.get("hash_count")); } - if (settings.hasValue("bucketCount")) { + if (settings.hasValue("bucket_count")) { settingMap.put("bucketCount", settings.get("bucket_count")); } - if (settings.hasValue("hashSetSize")) { + if (settings.hasValue("hash_set_size")) { settingMap.put("hashSetSize", settings.get("hash_set_size")); } if (settings.hasValue("with_rotation")) { diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java index e86a939dc857b..b9f09033f49f3 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MinHashFilterFactoryTests.java @@ -50,14 +50,10 @@ public void testDefault() throws IOException { int default_bucket_size = 512; int default_hash_set_size = 1; Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( - settings, - new CommonAnalysisModulePlugin() - ); + OpenSearchTestCase.TestAnalysis analysis = getTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("min_hash"); String source = "the quick brown fox"; - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader(source)); + Tokenizer tokenizer = getTokenizer(source); // with_rotation is true by default, and hash_set_size is 1, so even though the source doesn't // have enough tokens to fill all the buckets, we still expect 512 tokens. 
@@ -73,17 +69,63 @@ public void testSettings() throws IOException { .put("index.analysis.filter.test_min_hash.with_rotation", false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( - settings, - new CommonAnalysisModulePlugin() - ); + OpenSearchTestCase.TestAnalysis analysis = getTestAnalysisFromSettings(settings); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("test_min_hash"); String source = "sushi"; - Tokenizer tokenizer = new WhitespaceTokenizer(); - tokenizer.setReader(new StringReader(source)); + Tokenizer tokenizer = getTokenizer(source); // despite the fact that bucket_count is 2 and hash_set_size is 1, // because with_rotation is false, we only expect 1 token here. assertStreamHasNumberOfTokens(tokenFilter.create(tokenizer), 1); } + + public void testBucketCountSetting() throws IOException { + // Correct case with "bucket_count" + Settings settingsWithBucketCount = Settings.builder() + .put("index.analysis.filter.test_min_hash.type", "min_hash") + .put("index.analysis.filter.test_min_hash.hash_count", "1") + .put("index.analysis.filter.test_min_hash.bucket_count", "3") + .put("index.analysis.filter.test_min_hash.hash_set_size", "1") + .put("index.analysis.filter.test_min_hash.with_rotation", false) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + OpenSearchTestCase.TestAnalysis analysisWithBucketCount = getTestAnalysisFromSettings(settingsWithBucketCount); + + TokenFilterFactory tokenFilterWithBucketCount = analysisWithBucketCount.tokenFilter.get("test_min_hash"); + String sourceWithBucketCount = "salmon avocado roll uramaki"; + Tokenizer tokenizerWithBucketCount = getTokenizer(sourceWithBucketCount); + // Expect 3 tokens due to bucket_count being set to 3 + assertStreamHasNumberOfTokens(tokenFilterWithBucketCount.create(tokenizerWithBucketCount), 3); + } + + public void testHashSetSizeSetting() throws IOException { + // Correct case with "hash_set_size" + Settings settingsWithHashSetSize = Settings.builder() + .put("index.analysis.filter.test_min_hash.type", "min_hash") + .put("index.analysis.filter.test_min_hash.hash_count", "1") + .put("index.analysis.filter.test_min_hash.bucket_count", "1") + .put("index.analysis.filter.test_min_hash.hash_set_size", "2") + .put("index.analysis.filter.test_min_hash.with_rotation", false) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + OpenSearchTestCase.TestAnalysis analysisWithHashSetSize = getTestAnalysisFromSettings(settingsWithHashSetSize); + + TokenFilterFactory tokenFilterWithHashSetSize = analysisWithHashSetSize.tokenFilter.get("test_min_hash"); + String sourceWithHashSetSize = "salmon avocado roll uramaki"; + Tokenizer tokenizerWithHashSetSize = getTokenizer(sourceWithHashSetSize); + // Expect 2 tokens due to hash_set_size being set to 2 and bucket_count being 1 + assertStreamHasNumberOfTokens(tokenFilterWithHashSetSize.create(tokenizerWithHashSetSize), 2); + } + + private static OpenSearchTestCase.TestAnalysis getTestAnalysisFromSettings(Settings settingsWithBucketCount) throws IOException { + return AnalysisTestsHelper.createTestAnalysisFromSettings(settingsWithBucketCount, new CommonAnalysisModulePlugin()); + } + + private static Tokenizer getTokenizer(String sourceWithBucketCount) { + Tokenizer tokenizerWithBucketCount = new WhitespaceTokenizer(); + tokenizerWithBucketCount.setReader(new 
StringReader(sourceWithBucketCount)); + return tokenizerWithBucketCount; + } } From a27d26bce25814b4150e7642ed88b9748e698da9 Mon Sep 17 00:00:00 2001 From: Sarthak Aggarwal Date: Mon, 19 Aug 2024 16:52:32 +0530 Subject: [PATCH 04/16] Star Tree Merge and Aggregation Fixes (#15274) --------- Signed-off-by: Sarthak Aggarwal --- .../aggregators/CountValueAggregator.java | 2 +- .../StatelessDoubleValueAggregator.java | 4 +-- .../aggregators/SumValueAggregator.java | 4 +-- .../startree/aggregators/ValueAggregator.java | 4 +-- .../startree/builder/BaseStarTreeBuilder.java | 2 +- .../AbstractValueAggregatorTests.java | 4 +-- .../CountValueAggregatorTests.java | 6 ++-- .../aggregators/MaxValueAggregatorTests.java | 18 +++------- .../aggregators/MinValueAggregatorTests.java | 18 +++------- .../aggregators/SumValueAggregatorTests.java | 14 +++----- .../builder/AbstractStarTreeBuilderTests.java | 36 +++++++++++++++++++ 11 files changed, 64 insertions(+), 48 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregator.java index 38a59d403d36b..81807cd174a10 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregator.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregator.java @@ -60,7 +60,7 @@ public Long mergeAggregatedValues(Long value, Long aggregatedValue) { } @Override - public Long toStarTreeNumericTypeValue(Long value) { + public Long toAggregatedValueType(Long value) { return value; } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/StatelessDoubleValueAggregator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/StatelessDoubleValueAggregator.java index 04c6bcc906ef2..30a1c47c0ee9b 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/StatelessDoubleValueAggregator.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/StatelessDoubleValueAggregator.java @@ -52,12 +52,12 @@ public Double mergeAggregatedValues(Double value, Double aggregatedValue) { } @Override - public Double toStarTreeNumericTypeValue(Long value) { + public Double toAggregatedValueType(Long value) { try { if (value == null) { return getIdentityMetricValue(); } - return starTreeNumericType.getDoubleValue(value); + return VALUE_AGGREGATOR_TYPE.getDoubleValue(value); } catch (Exception e) { throw new IllegalStateException("Cannot convert " + value + " to sortable aggregation type", e); } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregator.java index 1568debd91ae7..ef97a9b603df3 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregator.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregator.java @@ -87,12 +87,12 @@ public Double getInitialAggregatedValue(Double value) { } @Override - public Double toStarTreeNumericTypeValue(Long value) { + public Double toAggregatedValueType(Long value) { try { if (value == null) { return getIdentityMetricValue(); 
} - return starTreeNumericType.getDoubleValue(value); + return VALUE_AGGREGATOR_TYPE.getDoubleValue(value); } catch (Exception e) { throw new IllegalStateException("Cannot convert " + value + " to sortable aggregation type", e); } diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregator.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregator.java index 39bf235a83409..d5ca7f3493087 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregator.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregator.java @@ -50,9 +50,9 @@ default A getInitialAggregatedValue(A value) { } /** - * Converts an aggregated value from a Long type. + * Converts a segment long value to an aggregated value. */ - A toStarTreeNumericTypeValue(Long rawValue); + A toAggregatedValueType(Long rawValue); /** * Fetches a value that does not alter the result of aggregations diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java index 872826aa6db06..90b2d0727d572 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java @@ -164,7 +164,7 @@ protected StarTreeDocument getStarTreeDocument( // actual indexing field they're based on metrics[i] = metricAggregatorInfos.get(i) .getValueAggregators() - .toStarTreeNumericTypeValue(metricDocValuesIterator.value(currentDocId)); + .toAggregatedValueType(metricDocValuesIterator.value(currentDocId)); i++; } return new StarTreeDocument(dims, metrics); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/AbstractValueAggregatorTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/AbstractValueAggregatorTests.java index f6adf442bb6ab..f5d3e197aa287 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/AbstractValueAggregatorTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/AbstractValueAggregatorTests.java @@ -21,7 +21,7 @@ public abstract class AbstractValueAggregatorTests extends OpenSearchTestCase { private ValueAggregator aggregator; - private StarTreeNumericType starTreeNumericType; + protected StarTreeNumericType starTreeNumericType; public AbstractValueAggregatorTests(StarTreeNumericType starTreeNumericType) { this.starTreeNumericType = starTreeNumericType; @@ -69,7 +69,7 @@ public void testGetInitialAggregatedValueForSegmentDocValue() { assertEquals(CountValueAggregator.DEFAULT_INITIAL_VALUE, aggregator.getInitialAggregatedValueForSegmentDocValue(randomLong())); } else { assertEquals( - aggregator.toStarTreeNumericTypeValue(randomLong), + starTreeNumericType.getDoubleValue(randomLong), aggregator.getInitialAggregatedValueForSegmentDocValue(randomLong) ); } diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregatorTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregatorTests.java index 
7389d68987898..550a4fea1174a 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregatorTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/CountValueAggregatorTests.java @@ -36,10 +36,10 @@ public void testGetInitialAggregatedValue() { assertEquals(randomLong, aggregator.getInitialAggregatedValue(randomLong), 0.0); } - public void testToStarTreeNumericTypeValue() { + public void testToAggregatedValueType() { long randomLong = randomLong(); - assertEquals(randomLong, aggregator.toStarTreeNumericTypeValue(randomLong), 0.0); - assertNull(aggregator.toStarTreeNumericTypeValue(null)); + assertEquals(randomLong, aggregator.toAggregatedValueType(randomLong), 0.0); + assertNull(aggregator.toAggregatedValueType(null)); } public void testIdentityMetricValue() { diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MaxValueAggregatorTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MaxValueAggregatorTests.java index a4e01bb70492c..b103416251c46 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MaxValueAggregatorTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MaxValueAggregatorTests.java @@ -23,21 +23,13 @@ public void testMergeAggregatedValueAndSegmentValue() { Long randomLong = randomLong(); double randomDouble = randomDouble(); assertEquals( - Math.max(aggregator.toStarTreeNumericTypeValue(randomLong), randomDouble), + Math.max(starTreeNumericType.getDoubleValue(randomLong), randomDouble), aggregator.mergeAggregatedValueAndSegmentValue(randomDouble, randomLong), 0.0 ); - assertEquals( - aggregator.toStarTreeNumericTypeValue(randomLong), - aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), - 0.0 - ); + assertEquals(starTreeNumericType.getDoubleValue(randomLong), aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), 0.0); assertEquals(randomDouble, aggregator.mergeAggregatedValueAndSegmentValue(randomDouble, null), 0.0); - assertEquals( - Math.max(2.0, aggregator.toStarTreeNumericTypeValue(3L)), - aggregator.mergeAggregatedValueAndSegmentValue(2.0, 3L), - 0.0 - ); + assertEquals(Math.max(2.0, starTreeNumericType.getDoubleValue(3L)), aggregator.mergeAggregatedValueAndSegmentValue(2.0, 3L), 0.0); } public void testMergeAggregatedValues() { @@ -53,10 +45,10 @@ public void testGetInitialAggregatedValue() { assertEquals(randomDouble, aggregator.getInitialAggregatedValue(randomDouble), 0.0); } - public void testToStarTreeNumericTypeValue() { + public void testToAggregatedValueType() { MaxValueAggregator aggregator = new MaxValueAggregator(StarTreeNumericType.DOUBLE); long randomLong = randomLong(); - assertEquals(NumericUtils.sortableLongToDouble(randomLong), aggregator.toStarTreeNumericTypeValue(randomLong), 0.0); + assertEquals(NumericUtils.sortableLongToDouble(randomLong), aggregator.toAggregatedValueType(randomLong), 0.0); } public void testIdentityMetricValue() { diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MinValueAggregatorTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MinValueAggregatorTests.java index b8233b0fd7fe0..013c60d8a1b91 100644 --- 
a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MinValueAggregatorTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MinValueAggregatorTests.java @@ -22,21 +22,13 @@ public void testMergeAggregatedValueAndSegmentValue() { Long randomLong = randomLong(); double randomDouble = randomDouble(); assertEquals( - Math.min(aggregator.toStarTreeNumericTypeValue(randomLong), randomDouble), + Math.min(starTreeNumericType.getDoubleValue(randomLong), randomDouble), aggregator.mergeAggregatedValueAndSegmentValue(randomDouble, randomLong), 0.0 ); - assertEquals( - aggregator.toStarTreeNumericTypeValue(randomLong), - aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), - 0.0 - ); + assertEquals(starTreeNumericType.getDoubleValue(randomLong), aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), 0.0); assertEquals(randomDouble, aggregator.mergeAggregatedValueAndSegmentValue(randomDouble, null), 0.0); - assertEquals( - Math.min(2.0, aggregator.toStarTreeNumericTypeValue(3L)), - aggregator.mergeAggregatedValueAndSegmentValue(2.0, 3L), - 0.0 - ); + assertEquals(Math.min(2.0, starTreeNumericType.getDoubleValue(3L)), aggregator.mergeAggregatedValueAndSegmentValue(2.0, 3L), 0.0); } public void testMergeAggregatedValues() { @@ -52,10 +44,10 @@ public void testGetInitialAggregatedValue() { assertEquals(randomDouble, aggregator.getInitialAggregatedValue(randomDouble), 0.0); } - public void testToStarTreeNumericTypeValue() { + public void testToAggregatedValueType() { MinValueAggregator aggregator = new MinValueAggregator(StarTreeNumericType.DOUBLE); long randomLong = randomLong(); - assertEquals(NumericUtils.sortableLongToDouble(randomLong), aggregator.toStarTreeNumericTypeValue(randomLong), 0.0); + assertEquals(NumericUtils.sortableLongToDouble(randomLong), aggregator.toAggregatedValueType(randomLong), 0.0); } public void testIdentityMetricValue() { diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregatorTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregatorTests.java index 3a85357da5ffe..44c7f17a276b4 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregatorTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/SumValueAggregatorTests.java @@ -29,7 +29,7 @@ public void testMergeAggregatedValueAndSegmentValue() { Long randomLong = randomLong(); aggregator.getInitialAggregatedValue(randomDouble); assertEquals( - randomDouble + aggregator.toStarTreeNumericTypeValue(randomLong), + randomDouble + starTreeNumericType.getDoubleValue(randomLong), aggregator.mergeAggregatedValueAndSegmentValue(randomDouble, randomLong), 0.0 ); @@ -41,7 +41,7 @@ public void testMergeAggregatedValueAndSegmentValue_nullSegmentDocValue() { aggregator.getInitialAggregatedValue(randomDouble1); assertEquals(randomDouble1, aggregator.mergeAggregatedValueAndSegmentValue(randomDouble1, null), 0.0); assertEquals( - randomDouble1 + aggregator.toStarTreeNumericTypeValue(randomLong), + randomDouble1 + starTreeNumericType.getDoubleValue(randomLong), aggregator.mergeAggregatedValueAndSegmentValue(randomDouble1, randomLong), 0.0 ); @@ -50,11 +50,7 @@ public void testMergeAggregatedValueAndSegmentValue_nullSegmentDocValue() { public void testMergeAggregatedValueAndSegmentValue_nullInitialDocValue() { Long 
randomLong = randomLong(); aggregator.getInitialAggregatedValue(null); - assertEquals( - aggregator.toStarTreeNumericTypeValue(randomLong), - aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), - 0.0 - ); + assertEquals(starTreeNumericType.getDoubleValue(randomLong), aggregator.mergeAggregatedValueAndSegmentValue(null, randomLong), 0.0); } public void testMergeAggregatedValues() { @@ -70,9 +66,9 @@ public void testGetInitialAggregatedValue() { assertEquals(randomDouble, aggregator.getInitialAggregatedValue(randomDouble), 0.0); } - public void testToStarTreeNumericTypeValue() { + public void testToAggregatedValueType() { long randomLong = randomLong(); - assertEquals(aggregator.toStarTreeNumericTypeValue(randomLong), aggregator.toStarTreeNumericTypeValue(randomLong), 0.0); + assertEquals(aggregator.toAggregatedValueType(randomLong), aggregator.toAggregatedValueType(randomLong), 0.0); } public void testIdentityMetricValue() { diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java index d1a85949da7fe..20982359a4db6 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java @@ -1508,6 +1508,42 @@ private static StarTreeField getStarTreeFieldWithMultipleMetrics() { return sf; } + public void testMergeFlow_randomNumberTypes() throws Exception { + + DocumentMapper documentMapper = mock(DocumentMapper.class); + when(mapperService.documentMapper()).thenReturn(documentMapper); + Settings settings = Settings.builder().put(settings(org.opensearch.Version.CURRENT).build()).build(); + NumberFieldMapper numberFieldMapper1 = new NumberFieldMapper.Builder( + "field1", + randomFrom(NumberFieldMapper.NumberType.values()), + false, + true + ).build(new Mapper.BuilderContext(settings, new ContentPath())); + NumberFieldMapper numberFieldMapper2 = new NumberFieldMapper.Builder( + "field2", + randomFrom(NumberFieldMapper.NumberType.values()), + false, + true + ).build(new Mapper.BuilderContext(settings, new ContentPath())); + NumberFieldMapper numberFieldMapper3 = new NumberFieldMapper.Builder( + "field3", + randomFrom(NumberFieldMapper.NumberType.values()), + false, + true + ).build(new Mapper.BuilderContext(settings, new ContentPath())); + MappingLookup fieldMappers = new MappingLookup( + Set.of(numberFieldMapper1, numberFieldMapper2, numberFieldMapper3), + Collections.emptyList(), + Collections.emptyList(), + 0, + null + ); + when(documentMapper.mappers()).thenReturn(fieldMappers); + testMergeFlowWithSum(); + builder.close(); + testMergeFlowWithCount(); + } + public void testMergeFlowWithSum() throws IOException { List dimList = List.of(0L, 1L, 3L, 4L, 5L, 6L); List docsWithField = List.of(0, 1, 3, 4, 5, 6); From e8ee6db87e55f66e28b46fac90bf2a4b33755160 Mon Sep 17 00:00:00 2001 From: Bharathwaj G Date: Mon, 19 Aug 2024 19:41:22 +0530 Subject: [PATCH 05/16] rename count to value_count (#15147) Signed-off-by: Bharathwaj G --- .../index/mapper/StarTreeMapperIT.java | 4 +- .../compositeindex/datacube/MetricStat.java | 2 +- .../startree/StarTreeIndexSettings.java | 2 +- .../aggregators/ValueAggregatorFactory.java | 2 +- .../util/ByteArrayBackedBitsetTests.java | 56 +++++++++++++++++++ 
.../StarTreeDocValuesFormatTests.java | 2 +- .../MetricAggregatorInfoTests.java | 8 +-- .../ValueAggregatorFactoryTests.java | 2 +- .../builder/AbstractStarTreeBuilderTests.java | 22 ++++---- .../index/mapper/StarTreeMapperTests.java | 6 +- 10 files changed, 81 insertions(+), 25 deletions(-) create mode 100644 server/src/test/java/org/opensearch/common/util/ByteArrayBackedBitsetTests.java diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index 1cabb8b617ce3..6f5b4bba481dd 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -267,7 +267,7 @@ public void testValidCompositeIndex() { assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField()); List expectedMetrics = Arrays.asList( MetricStat.AVG, - MetricStat.COUNT, + MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.MAX, MetricStat.MIN @@ -351,7 +351,7 @@ public void testUpdateIndexWhenMappingIsSame() { assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField()); List expectedMetrics = Arrays.asList( MetricStat.AVG, - MetricStat.COUNT, + MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.MAX, MetricStat.MIN diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java index fbde296b15f7e..df3b2229d2c5b 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java @@ -17,7 +17,7 @@ */ @ExperimentalApi public enum MetricStat { - COUNT("count"), + VALUE_COUNT("value_count"), AVG("avg"), SUM("sum"), MIN("min"), diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java index a2ac545be3cc9..6535f8ed11da3 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java @@ -97,7 +97,7 @@ public class StarTreeIndexSettings { "index.composite_index.star_tree.field.default.metrics", Arrays.asList( MetricStat.AVG.toString(), - MetricStat.COUNT.toString(), + MetricStat.VALUE_COUNT.toString(), MetricStat.SUM.toString(), MetricStat.MAX.toString(), MetricStat.MIN.toString() diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactory.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactory.java index 5e071e2491d19..ef5b773d81d27 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactory.java @@ -30,7 +30,7 @@ public static ValueAggregator getValueAggregator(MetricStat aggregationType, Sta // avg aggregator will be covered in the part of query (using count and sum) case SUM: return new SumValueAggregator(starTreeNumericType); - case COUNT: + case VALUE_COUNT: return new 
CountValueAggregator(starTreeNumericType); case MIN: return new MinValueAggregator(starTreeNumericType); diff --git a/server/src/test/java/org/opensearch/common/util/ByteArrayBackedBitsetTests.java b/server/src/test/java/org/opensearch/common/util/ByteArrayBackedBitsetTests.java new file mode 100644 index 0000000000000..6750a9e48f033 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/util/ByteArrayBackedBitsetTests.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.util; + +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.RandomAccessInput; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Tests for {@link ByteArrayBackedBitset} + */ +public class ByteArrayBackedBitsetTests extends OpenSearchTestCase { + public void testWriteAndReadNullBitSets() throws IOException { + for (int k = 0; k < 10; k++) { + int randomArraySize = randomIntBetween(2, 300); + int randomIndex1 = randomIntBetween(0, (randomArraySize - 1) * 8); + int randomIndex2 = randomIntBetween(0, (randomArraySize - 1) * 8); + testWriteAndReadBitset(randomArraySize, randomIndex1, randomIndex2); + } + } + + private static void testWriteAndReadBitset(int randomArraySize, int randomIndex1, int randomIndex2) throws IOException { + ByteArrayBackedBitset bitset = new ByteArrayBackedBitset(randomArraySize); + Path basePath = createTempDir("OffHeapTests"); + FSDirectory fsDirectory = FSDirectory.open(basePath); + String TEST_FILE = "test_file"; + IndexOutput indexOutput = fsDirectory.createOutput(TEST_FILE, IOContext.DEFAULT); + bitset.set(randomIndex1); + bitset.set(randomIndex2); + bitset.write(indexOutput); + indexOutput.close(); + + IndexInput in = fsDirectory.openInput(TEST_FILE, IOContext.DEFAULT); + RandomAccessInput randomAccessInput = in.randomAccessSlice(0, in.length()); + ByteArrayBackedBitset bitset1 = new ByteArrayBackedBitset(randomAccessInput, 0, randomArraySize); + for (int i = 0; i < (randomArraySize * 8); i++) { + if (randomIndex1 == i || randomIndex2 == i) { + assertTrue(bitset1.get(i)); + } else { + assertFalse(bitset1.get(i)); + } + } + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java index 049d91bc42d9c..6fa88215cad48 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java @@ -126,7 +126,7 @@ private XContentBuilder getExpandedMapping(String dim, String metric) throws IOE b.field("name", "field"); b.startArray("stats"); b.value("sum"); - b.value("count"); // TODO : THIS TEST FAILS. 
+ b.value("value_count"); b.endArray(); b.endObject(); b.endArray(); diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MetricAggregatorInfoTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MetricAggregatorInfoTests.java index 73e6aeb44cfd7..62671ffa03b82 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MetricAggregatorInfoTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/MetricAggregatorInfoTests.java @@ -27,12 +27,12 @@ public void testConstructor() { public void testCountStarConstructor() { MetricAggregatorInfo pair = new MetricAggregatorInfo( - MetricStat.COUNT, + MetricStat.VALUE_COUNT, "anything", "star_tree_field", IndexNumericFieldData.NumericType.DOUBLE ); - assertEquals(MetricStat.COUNT, pair.getMetricStat()); + assertEquals(MetricStat.VALUE_COUNT, pair.getMetricStat()); assertEquals("anything", pair.getField()); } @@ -62,7 +62,7 @@ public void testEquals() { assertEquals(pair1, pair2); assertNotEquals( pair1, - new MetricAggregatorInfo(MetricStat.COUNT, "column1", "star_tree_field", IndexNumericFieldData.NumericType.DOUBLE) + new MetricAggregatorInfo(MetricStat.VALUE_COUNT, "column1", "star_tree_field", IndexNumericFieldData.NumericType.DOUBLE) ); assertNotEquals( pair1, @@ -100,7 +100,7 @@ public void testCompareTo() { IndexNumericFieldData.NumericType.DOUBLE ); MetricAggregatorInfo pair3 = new MetricAggregatorInfo( - MetricStat.COUNT, + MetricStat.VALUE_COUNT, "column1", "star_tree_field", IndexNumericFieldData.NumericType.DOUBLE diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactoryTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactoryTests.java index 5e0bedf5e06a5..6572d75d5b738 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/aggregators/ValueAggregatorFactoryTests.java @@ -33,7 +33,7 @@ public void testGetValueAggregatorForMaxType() { } public void testGetValueAggregatorForCountType() { - ValueAggregator aggregator = ValueAggregatorFactory.getValueAggregator(MetricStat.COUNT, StarTreeNumericType.LONG); + ValueAggregator aggregator = ValueAggregatorFactory.getValueAggregator(MetricStat.VALUE_COUNT, StarTreeNumericType.LONG); assertNotNull(aggregator); assertEquals(CountValueAggregator.class, aggregator.getClass()); } diff --git a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java index 20982359a4db6..389b6cb34f085 100644 --- a/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/compositeindex/datacube/startree/builder/AbstractStarTreeBuilderTests.java @@ -92,7 +92,7 @@ public void setup() throws IOException { metrics = List.of( new Metric("field2", List.of(MetricStat.SUM)), new Metric("field4", List.of(MetricStat.SUM)), - new Metric("field6", List.of(MetricStat.COUNT)), + new Metric("field6", List.of(MetricStat.VALUE_COUNT)), new Metric("field9", List.of(MetricStat.MIN)), new 
Metric("field10", List.of(MetricStat.MAX)) ); @@ -1496,7 +1496,7 @@ private static StarTreeField getStarTreeFieldWithMultipleMetrics() { Dimension d1 = new NumericDimension("field1"); Dimension d2 = new NumericDimension("field3"); Metric m1 = new Metric("field2", List.of(MetricStat.SUM)); - Metric m2 = new Metric("field2", List.of(MetricStat.COUNT)); + Metric m2 = new Metric("field2", List.of(MetricStat.VALUE_COUNT)); List dims = List.of(d1, d2); List metrics = List.of(m1, m2); StarTreeFieldConfiguration c = new StarTreeFieldConfiguration( @@ -1614,7 +1614,7 @@ public void testMergeFlowWithCount() throws IOException { List metricsList = List.of(0L, 1L, 2L, 3L, 4L, 5L, 6L); List metricsWithField = List.of(0, 1, 2, 3, 4, 5, 6); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -1694,7 +1694,7 @@ public void testMergeFlowWithDifferentDocsFromSegments() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -1756,7 +1756,7 @@ public void testMergeFlowNumSegmentsDocs() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -1816,7 +1816,7 @@ public void testMergeFlowWithMissingDocs() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -1878,7 +1878,7 @@ public void testMergeFlowWithMissingDocsWithZero() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -1941,7 +1941,7 @@ public void testMergeFlowWithMissingDocsWithZeroComplexCase() throws IOException List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -2008,7 +2008,7 @@ public void testMergeFlowWithMissingDocsInSecondDim() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField 
sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -2071,7 +2071,7 @@ public void testMergeFlowWithDocsMissingAtTheEnd() throws IOException { List metricsList2 = List.of(5L, 6L, 7L, 8L, 9L); List metricsWithField2 = List.of(0, 1, 2, 3, 4); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), @@ -2125,7 +2125,7 @@ public void testMergeFlowWithEmptyFieldsInOneSegment() throws IOException { List metricsList = List.of(0L, 1L, 2L, 3L, 4L, 5L, 6L); List metricsWithField = List.of(0, 1, 2, 3, 4, 5, 6); - StarTreeField sf = getStarTreeField(MetricStat.COUNT); + StarTreeField sf = getStarTreeField(MetricStat.VALUE_COUNT); StarTreeValues starTreeValues = getStarTreeValues( getSortedNumericMock(dimList, docsWithField), getSortedNumericMock(dimList2, docsWithField2), diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index 132d2ff5a566a..3fa97825cdfc6 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -94,7 +94,7 @@ public void testValidStarTreeDefaults() throws IOException { assertEquals("status", starTreeFieldType.getMetrics().get(0).getField()); List expectedMetrics = Arrays.asList( MetricStat.AVG, - MetricStat.COUNT, + MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.MAX, MetricStat.MIN @@ -223,11 +223,11 @@ public void testMetric() { assertEquals(metric1, metric2); List m2 = new ArrayList<>(); m2.add(MetricStat.MAX); - m2.add(MetricStat.COUNT); + m2.add(MetricStat.VALUE_COUNT); metric2 = new Metric("name", m2); assertNotEquals(metric1, metric2); - assertEquals(MetricStat.COUNT, MetricStat.fromTypeName("count")); + assertEquals(MetricStat.VALUE_COUNT, MetricStat.fromTypeName("value_count")); assertEquals(MetricStat.MAX, MetricStat.fromTypeName("max")); assertEquals(MetricStat.MIN, MetricStat.fromTypeName("min")); assertEquals(MetricStat.SUM, MetricStat.fromTypeName("sum")); From 096c354dcc7c15d1a248241bccf992fe63482435 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 13:01:19 -0400 Subject: [PATCH 06/16] Bump org.apache.kerby:kerb-admin from 2.0.3 to 2.1.0 in /test/fixtures/hdfs-fixture (#15301) * Bump org.apache.kerby:kerb-admin in /test/fixtures/hdfs-fixture Bumps org.apache.kerby:kerb-admin from 2.0.3 to 2.1.0. --- updated-dependencies: - dependency-name: org.apache.kerby:kerb-admin dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: Andriy Redko * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Signed-off-by: Andriy Redko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: reta --- .github/workflows/dependabot_pr.yml | 7 +++++++ CHANGELOG.md | 1 + test/fixtures/hdfs-fixture/build.gradle | 2 +- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/dependabot_pr.yml b/.github/workflows/dependabot_pr.yml index cfd3ac81c5bd3..25abd99cadb96 100644 --- a/.github/workflows/dependabot_pr.yml +++ b/.github/workflows/dependabot_pr.yml @@ -22,6 +22,13 @@ jobs: with: token: ${{ steps.github_app_token.outputs.token }} + # See https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: 17 + distribution: temurin + - name: Update Gradle SHAs run: | ./gradlew updateSHAs diff --git a/CHANGELOG.md b/CHANGELOG.md index 5103d978b361a..2c854b6b96480 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,6 +36,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.azure:azure-xml` from 1.0.0 to 1.1.0 ([#15206](https://github.com/opensearch-project/OpenSearch/pull/15206)) - Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) - Bump `reactor-netty` from 1.1.21 to 1.1.22 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) +- Bump `org.apache.kerby:kerb-admin` from 2.0.3 to 2.1.0 ([#15301](https://github.com/opensearch-project/OpenSearch/pull/15301)) ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index adf420cf84663..411509dfe5acc 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -80,7 +80,7 @@ dependencies { api 'org.jline:jline:3.26.3' api 'org.apache.commons:commons-configuration2:2.11.0' api 'com.nimbusds:nimbus-jose-jwt:9.40' - api ('org.apache.kerby:kerb-admin:2.0.3') { + api ('org.apache.kerby:kerb-admin:2.1.0') { exclude group: "org.jboss.xnio" exclude group: "org.jline" } From 093c06030cc07d471feeec42bcdf9c58bd06661c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 14:01:11 -0400 Subject: [PATCH 07/16] Bump com.azure:azure-core-http-netty from 1.15.1 to 1.15.3 in /plugins/repository-azure (#15300) * Bump com.azure:azure-core-http-netty in /plugins/repository-azure Bumps [com.azure:azure-core-http-netty](https://github.com/Azure/azure-sdk-for-java) from 1.15.1 to 1.15.3. - [Release notes](https://github.com/Azure/azure-sdk-for-java/releases) - [Commits](https://github.com/Azure/azure-sdk-for-java/compare/azure-core-http-netty_1.15.1...azure-core-http-netty_1.15.3) --- updated-dependencies: - dependency-name: com.azure:azure-core-http-netty dependency-type: direct:production update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-azure/build.gradle | 2 +- .../licenses/azure-core-http-netty-1.15.1.jar.sha1 | 1 - .../licenses/azure-core-http-netty-1.15.3.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.15.3.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c854b6b96480..663ae586a46a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `reactor` from 3.5.19 to 3.5.20 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) - Bump `reactor-netty` from 1.1.21 to 1.1.22 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) - Bump `org.apache.kerby:kerb-admin` from 2.0.3 to 2.1.0 ([#15301](https://github.com/opensearch-project/OpenSearch/pull/15301)) +- Bump `com.azure:azure-core-http-netty` from 1.15.1 to 1.15.3 ([#15300](https://github.com/opensearch-project/OpenSearch/pull/15300)) ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 6b63311cb3125..6844311927db0 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -48,7 +48,7 @@ dependencies { api 'com.azure:azure-json:1.1.0' api 'com.azure:azure-xml:1.1.0' api 'com.azure:azure-storage-common:12.25.1' - api 'com.azure:azure-core-http-netty:1.15.1' + api 'com.azure:azure-core-http-netty:1.15.3' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" api "io.netty:netty-codec-http2:${versions.netty}" diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1 deleted file mode 100644 index 3a0747a0daacb..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -036f7466a521aa99c79a491a9cf20444667df78b \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.15.3.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.3.jar.sha1 new file mode 100644 index 0000000000000..3cea52ba67ce5 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.15.3.jar.sha1 @@ -0,0 +1 @@ +03b5bd5f5c16eea71f130119dbfb1fe5239f806a \ No newline at end of file From cb4a90a31aee878e513a7298a4f49e1fb0836eed Mon Sep 17 00:00:00 2001 From: Bukhtawar Khan Date: Tue, 20 Aug 2024 04:50:16 +0530 Subject: [PATCH 08/16] Adding @jainankitk as a Maintainer (#15304) * Refactor remote writeable entity and store to make it more reusable Signed-off-by: Bukhtawar Khan * Java doc Signed-off-by: Bukhtawar Khan * Adding @jainankitk as a Maintainer Signed-off-by: Bukhtawar Khan * Re-ordering changes Signed-off-by: Bukhtawar Khan --------- Signed-off-by: Bukhtawar Khan --- .github/CODEOWNERS | 2 +- MAINTAINERS.md | 1 + 2 files 
changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e75e22f30431d..18a310862dfbb 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,7 +11,7 @@ # 3. Use the command palette to run the CODEOWNERS: Show owners of current file command, which will display all code owners for the current file. # Default ownership for all repo files -* @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @kotwanikunal @linuxpi @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah +* @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jainankitk @kotwanikunal @linuxpi @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah /modules/lang-painless/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah /modules/parent-join/ @anasalkouz @andrross @ashking94 @Bukhtawar @CEHENKLE @dblock @dbwiddis @gbbafna @jed326 @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @sachinpkale @saratvemulapalli @shwetathareja @sohami @VachaShah diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 7be19fbc877d7..296c2c30aca49 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -9,6 +9,7 @@ This document contains a list of maintainers in this repo. See [opensearch-proje | Anas Alkouz | [anasalkouz](https://github.com/anasalkouz) | Amazon | | Andrew Ross | [andrross](https://github.com/andrross) | Amazon | | Andriy Redko | [reta](https://github.com/reta) | Aiven | +| Ankit Jain | [jainankitk](https://github.com/jainankitk) | Amazon | | Ashish Singh | [ashking94](https://github.com/ashking94) | Amazon | | Bukhtawar Khan | [Bukhtawar](https://github.com/Bukhtawar) | Amazon | | Charlotte Henkle | [CEHENKLE](https://github.com/CEHENKLE) | Amazon | From adf36607823021f7e7fd56f4d170a02856a1fc4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Aug 2024 20:44:26 -0400 Subject: [PATCH 09/16] Bump com.gradle.develocity from 3.17.6 to 3.18 (#15297) * Bump com.gradle.develocity from 3.17.6 to 3.18 Bumps com.gradle.develocity from 3.17.6 to 3.18. --- updated-dependencies: - dependency-name: com.gradle.develocity dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + settings.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 663ae586a46a9..2afcdcae48693 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `reactor-netty` from 1.1.21 to 1.1.22 ([#15262](https://github.com/opensearch-project/OpenSearch/pull/15262)) - Bump `org.apache.kerby:kerb-admin` from 2.0.3 to 2.1.0 ([#15301](https://github.com/opensearch-project/OpenSearch/pull/15301)) - Bump `com.azure:azure-core-http-netty` from 1.15.1 to 1.15.3 ([#15300](https://github.com/opensearch-project/OpenSearch/pull/15300)) +- Bump `com.gradle.develocity` from 3.17.6 to 3.18 ([#15297](https://github.com/opensearch-project/OpenSearch/pull/15297)) ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) diff --git a/settings.gradle b/settings.gradle index ae9f5384be592..b79c2aee135fc 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,7 +10,7 @@ */ plugins { - id "com.gradle.develocity" version "3.17.6" + id "com.gradle.develocity" version "3.18" } ext.disableBuildCache = hasProperty('DISABLE_BUILD_CACHE') || System.getenv().containsKey('DISABLE_BUILD_CACHE') From 52ecbe91b77c495c7937f9aa8ebf343f0d8fd499 Mon Sep 17 00:00:00 2001 From: bowenlan-amzn Date: Mon, 19 Aug 2024 18:16:36 -0700 Subject: [PATCH 10/16] Support Filtering on Large List encoded by Bitmap (#14774) --------- Signed-off-by: bowenlan-amzn Signed-off-by: Michael Froh Co-authored-by: Michael Froh --- CHANGELOG.md | 1 + .../test/search/370_bitmap_filtering.yml | 184 ++++++++++++++++ server/build.gradle | 3 + server/licenses/RoaringBitmap-1.1.0.jar.sha1 | 1 + server/licenses/RoaringBitmap-LICENSE.txt | 202 ++++++++++++++++++ server/licenses/RoaringBitmap-NOTICE.txt | 0 .../search/query/SearchQueryIT.java | 40 ++++ .../index/mapper/NumberFieldMapper.java | 69 ++++++ .../index/query/TermsQueryBuilder.java | 109 +++++++++- .../org/opensearch/indices/TermsLookup.java | 26 ++- .../search/query/BitmapDocValuesQuery.java | 152 +++++++++++++ .../index/mapper/NumberFieldTypeTests.java | 34 +++ .../index/query/TermsQueryBuilderTests.java | 67 +++++- .../opensearch/indices/TermsLookupTests.java | 5 +- .../query/BitmapDocValuesQueryTests.java | 149 +++++++++++++ 15 files changed, 1028 insertions(+), 14 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/370_bitmap_filtering.yml create mode 100644 server/licenses/RoaringBitmap-1.1.0.jar.sha1 create mode 100644 server/licenses/RoaringBitmap-LICENSE.txt create mode 100644 server/licenses/RoaringBitmap-NOTICE.txt create mode 100644 server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java create mode 100644 server/src/test/java/org/opensearch/search/query/BitmapDocValuesQueryTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 2afcdcae48693..12c909cfd46aa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add took time to request nodes stats 
([#15054](https://github.com/opensearch-project/OpenSearch/pull/15054)) - [Workload Management] Add Get QueryGroup API Logic ([14709](https://github.com/opensearch-project/OpenSearch/pull/14709)) - [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897)) +- Support filtering on a large list encoded by bitmap ([#14774](https://github.com/opensearch-project/OpenSearch/pull/14774)) - Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153)) ### Dependencies diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/370_bitmap_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/370_bitmap_filtering.yml new file mode 100644 index 0000000000000..d728070adb188 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/370_bitmap_filtering.yml @@ -0,0 +1,184 @@ +--- +setup: + - skip: + version: " - 2.99.99" + reason: The bitmap filtering feature is available in 2.17 and later. + - do: + indices.create: + index: students + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + student_id: + type: integer + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "students", "_id": "1" } } + - { "name": "Jane Doe", "student_id": 111 } + - { "index": { "_index": "students", "_id": "2" } } + - { "name": "Mary Major", "student_id": 222 } + - { "index": { "_index": "students", "_id": "3" } } + - { "name": "John Doe", "student_id": 333 } + - do: + indices.create: + index: classes + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + enrolled: + type: binary + store: true + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "classes", "_id": "101" } } + - { "enrolled": "OjAAAAEAAAAAAAEAEAAAAG8A3gA=" } # 111,222 + - { "index": { "_index": "classes", "_id": "102" } } + - { "enrolled": "OjAAAAEAAAAAAAAAEAAAAG8A" } # 111 + - { "index": { "_index": "classes", "_id": "103" } } + - { "enrolled": "OjAAAAEAAAAAAAAAEAAAAE0B" } # 333 + - { "index": { "_index": "classes", "_id": "104" } } + - { "enrolled": "OjAAAAEAAAAAAAEAEAAAAN4ATQE=" } # 222,333 + - do: + cluster.health: + wait_for_status: green + +--- +"Terms lookup on a binary field with bitmap": + - do: + search: + rest_total_hits_as_int: true + index: students + body: { + "query": { + "terms": { + "student_id": { + "index": "classes", + "id": "101", + "path": "enrolled", + "store": true + }, + "value_type": "bitmap" + } + } + } + - match: { hits.total: 2 } + - match: { hits.hits.0._source.name: Jane Doe } + - match: { hits.hits.0._source.student_id: 111 } + - match: { hits.hits.1._source.name: Mary Major } + - match: { hits.hits.1._source.student_id: 222 } + +--- +"Terms query accepting bitmap as value": + - do: + search: + rest_total_hits_as_int: true + index: students + body: { + "query": { + "terms": { + "student_id": ["OjAAAAEAAAAAAAEAEAAAAG8A3gA="], + "value_type": "bitmap" + } + } + } + - match: { hits.total: 2 } + - match: { hits.hits.0._source.name: Jane Doe } + - match: { hits.hits.0._source.student_id: 111 } + - match: { hits.hits.1._source.name: Mary Major } + - match: { hits.hits.1._source.student_id: 222 } + +--- +"Boolean must bitmap filtering": + - do: + search: + rest_total_hits_as_int: true + index: students + body: { + "query": { + "bool": { + "must": [ + { + "terms": { + "student_id": { + 
"index": "classes", + "id": "101", + "path": "enrolled", + "store": true + }, + "value_type": "bitmap" + } + } + ], + "must_not": [ + { + "terms": { + "student_id": { + "index": "classes", + "id": "104", + "path": "enrolled", + "store": true + }, + "value_type": "bitmap" + } + } + ] + } + } + } + - match: { hits.total: 1 } + - match: { hits.hits.0._source.name: Jane Doe } + - match: { hits.hits.0._source.student_id: 111 } + +--- +"Boolean should bitmap filtering": + - do: + search: + rest_total_hits_as_int: true + index: students + body: { + "query": { + "bool": { + "should": [ + { + "terms": { + "student_id": { + "index": "classes", + "id": "101", + "path": "enrolled", + "store": true + }, + "value_type": "bitmap" + } + }, + { + "terms": { + "student_id": { + "index": "classes", + "id": "104", + "path": "enrolled", + "store": true + }, + "value_type": "bitmap" + } + } + ] + } + } + } + - match: { hits.total: 3 } + - match: { hits.hits.0._source.name: Mary Major } + - match: { hits.hits.0._source.student_id: 222 } + - match: { hits.hits.1._source.name: Jane Doe } + - match: { hits.hits.1._source.student_id: 111 } + - match: { hits.hits.2._source.name: John Doe } + - match: { hits.hits.2._source.student_id: 333 } diff --git a/server/build.gradle b/server/build.gradle index 5facc73dff968..d655796674001 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -126,6 +126,9 @@ dependencies { api "com.google.protobuf:protobuf-java:${versions.protobuf}" api "jakarta.annotation:jakarta.annotation-api:${versions.jakarta_annotation}" + // https://mvnrepository.com/artifact/org.roaringbitmap/RoaringBitmap + implementation 'org.roaringbitmap:RoaringBitmap:1.1.0' + testImplementation(project(":test:framework")) { // tests use the locally compiled version of server exclude group: 'org.opensearch', module: 'server' diff --git a/server/licenses/RoaringBitmap-1.1.0.jar.sha1 b/server/licenses/RoaringBitmap-1.1.0.jar.sha1 new file mode 100644 index 0000000000000..bf34e11b92710 --- /dev/null +++ b/server/licenses/RoaringBitmap-1.1.0.jar.sha1 @@ -0,0 +1 @@ +9607213861158ae7060234d93ee9c9cb19f494d1 \ No newline at end of file diff --git a/server/licenses/RoaringBitmap-LICENSE.txt b/server/licenses/RoaringBitmap-LICENSE.txt new file mode 100644 index 0000000000000..3bdd0036295a6 --- /dev/null +++ b/server/licenses/RoaringBitmap-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2013-2016 the RoaringBitmap authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
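The diffs that follow wire RoaringBitmap into the terms query: a client serializes a bitmap of numeric terms, base64-encodes it, and sends it as the single value of a terms query with the new value_type: bitmap option (see the fromXContent changes in TermsQueryBuilder below). As a minimal client-side sketch — this helper class and its names are illustrative, not part of the patch — the payload could be built like this:

import java.nio.ByteBuffer;
import java.util.Base64;

import org.roaringbitmap.RoaringBitmap;

// Hypothetical helper (not in this patch): produce the base64 payload expected by
// {"terms": {"<field>": ["<base64 bitmap>"], "value_type": "bitmap"}}
public final class BitmapTermsPayload {

    // Serialize with RoaringBitmap's portable format, then base64-encode.
    public static String toBase64(RoaringBitmap bitmap) {
        byte[] array = new byte[bitmap.serializedSizeInBytes()];
        bitmap.serialize(ByteBuffer.wrap(array));
        return Base64.getEncoder().encodeToString(array);
    }

    public static void main(String[] args) {
        RoaringBitmap filter = new RoaringBitmap();
        filter.add(111);
        filter.add(333);
        System.out.println(toBase64(filter)); // paste into the terms query body
    }
}

On the server side, fromXContent below base64-decodes that single-element array back into a BytesArray, and NumberFieldMapper turns it into a point-set query, a BitmapDocValuesQuery, or an IndexOrDocValuesQuery over both, depending on whether the field is indexed, has doc values, or both.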
diff --git a/server/licenses/RoaringBitmap-NOTICE.txt b/server/licenses/RoaringBitmap-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index 01ad06757640c..3cf63e2f19a16 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -52,6 +52,7 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -66,6 +67,7 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.RangeQueryBuilder; import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.index.query.WrapperQueryBuilder; import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; @@ -84,6 +86,7 @@ import java.io.IOException; import java.io.Reader; +import java.nio.ByteBuffer; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; @@ -98,6 +101,8 @@ import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; +import org.roaringbitmap.RoaringBitmap; + import static java.util.Collections.singletonMap; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; @@ -1157,6 +1162,41 @@ public void testTermsQuery() throws Exception { assertHitCount(searchResponse, 0L); } + public void testTermsQueryWithBitmapDocValuesQuery() throws Exception { + assertAcked( + prepareCreate("products").setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("product") + .field("type", "integer") + .field("index", false) + .endObject() + .endObject() + .endObject() + ) + ); + indexRandom( + true, + client().prepareIndex("products").setId("1").setSource("product", 1), + client().prepareIndex("products").setId("2").setSource("product", 2), + client().prepareIndex("products").setId("3").setSource("product", new int[] { 1, 3 }), + client().prepareIndex("products").setId("4").setSource("product", 4) + ); + + RoaringBitmap r = new RoaringBitmap(); + r.add(1); + r.add(4); + byte[] array = new byte[r.serializedSizeInBytes()]; + r.serialize(ByteBuffer.wrap(array)); + BytesArray bitmap = new BytesArray(array); + // directly building the terms query builder, so pass in the bitmap value as BytesArray + SearchResponse searchResponse = client().prepareSearch("products") + .setQuery(constantScoreQuery(termsQuery("product", bitmap).valueType(TermsQueryBuilder.ValueType.BITMAP))) + .get(); + assertHitCount(searchResponse, 3L); + assertSearchHits(searchResponse, "1", "3", "4"); + } + public void testTermsLookupFilter() throws Exception { assertAcked(prepareCreate("lookup").setMapping("terms", "type=text", "other", "type=text")); indexRandomForConcurrentSearch("lookup"); diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java 
b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java index 27e62c3746a8e..9286b5c64b5f2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java @@ -46,8 +46,10 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; @@ -58,6 +60,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.core.xcontent.XContentParser.Token; import org.opensearch.index.document.SortedUnsignedLongDocValuesRangeQuery; @@ -68,13 +71,16 @@ import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; import org.opensearch.search.lookup.SearchLookup; +import org.opensearch.search.query.BitmapDocValuesQuery; import java.io.IOException; import java.math.BigInteger; +import java.nio.ByteBuffer; import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -82,6 +88,8 @@ import java.util.function.Function; import java.util.function.Supplier; +import org.roaringbitmap.RoaringBitmap; + /** * A {@link FieldMapper} for numeric types: byte, short, int, long, float and double. 
* @@ -822,6 +830,24 @@ public Query termsQuery(String field, List values, boolean hasDocValues, return IntPoint.newSetQuery(field, v); } + @Override + public Query bitmapQuery(String field, BytesArray bitmapArray, boolean isSearchable, boolean hasDocValues) { + RoaringBitmap bitmap = new RoaringBitmap(); + try { + bitmap.deserialize(ByteBuffer.wrap(bitmapArray.array())); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to deserialize the bitmap.", e); + } + + if (isSearchable && hasDocValues) { + return new IndexOrDocValuesQuery(bitmapIndexQuery(field, bitmap), new BitmapDocValuesQuery(field, bitmap)); + } + if (isSearchable) { + return bitmapIndexQuery(field, bitmap); + } + return new BitmapDocValuesQuery(field, bitmap); + } + @Override public Query rangeQuery( String field, @@ -1174,6 +1200,10 @@ public final TypeParser parser() { public abstract Query termsQuery(String field, List values, boolean hasDocValues, boolean isSearchable); + public Query bitmapQuery(String field, BytesArray bitmap, boolean isSearchable, boolean hasDocValues) { + throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support bitmap queries"); + } + public abstract Query rangeQuery( String field, Object lowerTerm, @@ -1415,6 +1445,40 @@ public static Query unsignedLongRangeQuery( } return builder.apply(l, u); } + + static PointInSetQuery bitmapIndexQuery(String field, RoaringBitmap bitmap) { + final BytesRef encoded = new BytesRef(new byte[Integer.BYTES]); + return new PointInSetQuery(field, 1, Integer.BYTES, new PointInSetQuery.Stream() { + + final Iterator iterator = bitmap.iterator(); + + @Override + public BytesRef next() { + int value; + if (iterator.hasNext()) { + value = iterator.next(); + } else { + return null; + } + IntPoint.encodeDimension(value, encoded.bytes, 0); + return encoded; + } + }) { + @Override + public Query rewrite(IndexSearcher indexSearcher) throws IOException { + if (bitmap.isEmpty()) { + return new MatchNoDocsQuery(); + } + return super.rewrite(indexSearcher); + } + + @Override + protected String toString(byte[] value) { + assert value.length == Integer.BYTES; + return Integer.toString(IntPoint.decodeDimension(value, 0)); + } + }; + } } /** @@ -1495,6 +1559,11 @@ public Query termsQuery(List values, QueryShardContext context) { return query; } + public Query bitmapQuery(BytesArray bitmap) { + failIfNotIndexedAndNoDocValues(); + return type.bitmapQuery(name(), bitmap, isSearchable(), hasDocValues()); + } + @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { failIfNotIndexedAndNoDocValues(); diff --git a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java index ac0ca3919ea38..21ff90f2bf534 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java @@ -37,14 +37,17 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.opensearch.Version; import org.opensearch.action.get.GetRequest; import org.opensearch.client.Client; import org.opensearch.common.SetOnce; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.ParseField; import 
org.opensearch.core.action.ActionListener; import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.Strings; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; @@ -53,6 +56,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.ConstantFieldType; import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.indices.TermsLookup; import java.io.IOException; @@ -60,6 +64,7 @@ import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -82,6 +87,39 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { private final TermsLookup termsLookup; private final Supplier> supplier; + private static final ParseField VALUE_TYPE_FIELD = new ParseField("value_type"); + private ValueType valueType = ValueType.DEFAULT; + + /** + * Terms query may accept different types of value + *

+ * This flag is used to decide how to parse the value and build query upon later + */ + public enum ValueType { + DEFAULT("default"), + BITMAP("bitmap"); + + private final String type; + + ValueType(String type) { + this.type = type; + } + + static ValueType fromString(String type) { + for (ValueType valueType : ValueType.values()) { + if (valueType.type.equalsIgnoreCase(type)) { + return valueType; + } + } + throw new IllegalArgumentException(type + " is not valid " + VALUE_TYPE_FIELD); + } + } + + public TermsQueryBuilder valueType(ValueType valueType) { + this.valueType = valueType; + return this; + } + public TermsQueryBuilder(String fieldName, TermsLookup termsLookup) { this(fieldName, null, termsLookup); } @@ -187,6 +225,11 @@ public TermsQueryBuilder(String fieldName, Iterable values) { this.supplier = null; } + private TermsQueryBuilder(String fieldName, Iterable values, ValueType valueType) { + this(fieldName, values); + this.valueType = valueType; + } + private TermsQueryBuilder(String fieldName, Supplier> supplier) { this.fieldName = fieldName; this.values = null; @@ -194,6 +237,11 @@ private TermsQueryBuilder(String fieldName, Supplier> supplier) { this.supplier = supplier; } + private TermsQueryBuilder(String fieldName, Supplier> supplier, ValueType valueType) { + this(fieldName, supplier); + this.valueType = valueType; + } + /** * Read from a stream. */ @@ -203,6 +251,9 @@ public TermsQueryBuilder(StreamInput in) throws IOException { termsLookup = in.readOptionalWriteable(TermsLookup::new); values = (List) in.readGenericValue(); this.supplier = null; + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + valueType = in.readEnum(ValueType.class); + } } @Override @@ -213,6 +264,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeOptionalWriteable(termsLookup); out.writeGenericValue(values); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeEnum(valueType); + } } public String fieldName() { @@ -360,6 +414,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.field(fieldName, convertBack(values)); } printBoostAndQueryName(builder); + if (valueType != ValueType.DEFAULT) { + builder.field(VALUE_TYPE_FIELD.getPreferredName(), valueType.type); + } builder.endObject(); } @@ -371,6 +428,8 @@ public static TermsQueryBuilder fromXContent(XContentParser parser) throws IOExc String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; + String valueTypeStr = ValueType.DEFAULT.type; + XContentParser.Token token; String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -406,6 +465,8 @@ public static TermsQueryBuilder fromXContent(XContentParser parser) throws IOExc boost = parser.floatValue(); } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { queryName = parser.text(); + } else if (VALUE_TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + valueTypeStr = parser.text(); } else { throw new ParsingException( parser.getTokenLocation(), @@ -430,7 +491,18 @@ public static TermsQueryBuilder fromXContent(XContentParser parser) throws IOExc ); } - return new TermsQueryBuilder(fieldName, values, termsLookup).boost(boost).queryName(queryName); + ValueType valueType = ValueType.fromString(valueTypeStr); + if (valueType == ValueType.BITMAP) { + if (values != null && values.size() == 1 && values.get(0) instanceof BytesRef) { + values.set(0, new 
BytesArray(Base64.getDecoder().decode(((BytesRef) values.get(0)).utf8ToString()))); + } else if (termsLookup == null) { + throw new IllegalArgumentException( + "Invalid value for bitmap type: Expected a single-element array with a base64 encoded serialized bitmap." + ); + } + } + + return new TermsQueryBuilder(fieldName, values, termsLookup).boost(boost).queryName(queryName).valueType(valueType); } static List parseValues(XContentParser parser) throws IOException { @@ -473,17 +545,37 @@ protected Query doToQuery(QueryShardContext context) throws IOException { if (fieldType == null) { throw new IllegalStateException("Rewrite first"); } + if (valueType == ValueType.BITMAP) { + if (values.size() == 1 && values.get(0) instanceof BytesArray) { + if (fieldType instanceof NumberFieldMapper.NumberFieldType) { + return ((NumberFieldMapper.NumberFieldType) fieldType).bitmapQuery((BytesArray) values.get(0)); + } + } + } return fieldType.termsQuery(values, context); } private void fetch(TermsLookup termsLookup, Client client, ActionListener> actionListener) { GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.id()); getRequest.preference("_local").routing(termsLookup.routing()); + if (termsLookup.store()) { + getRequest.storedFields(termsLookup.path()); + } client.get(getRequest, ActionListener.delegateFailure(actionListener, (delegatedListener, getResponse) -> { List terms = new ArrayList<>(); - if (getResponse.isSourceEmpty() == false) { // extract terms only if the doc source exists - List extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap()); - terms.addAll(extractedValues); + if (termsLookup.store()) { + List values = getResponse.getField(termsLookup.path()).getValues(); + if (values.size() != 1 && valueType == ValueType.BITMAP) { + throw new IllegalArgumentException( + "Invalid value for bitmap type: Expected a single base64 encoded serialized bitmap." 
+ ); + } + terms.addAll(values); + } else { + if (getResponse.isSourceEmpty() == false) { // extract terms only if the doc source exists + List extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap()); + terms.addAll(extractedValues); + } } delegatedListener.onResponse(terms); })); @@ -491,7 +583,7 @@ private void fetch(TermsLookup termsLookup, Client client, ActionListener> supplier = new SetOnce<>(); queryRewriteContext.registerAsyncAction((client, listener) -> fetch(termsLookup, client, ActionListener.map(listener, list -> { supplier.set(list); return null; }))); - return new TermsQueryBuilder(this.fieldName, supplier::get); + return new TermsQueryBuilder(this.fieldName, supplier::get, valueType); } if (values == null || values.isEmpty()) { diff --git a/server/src/main/java/org/opensearch/indices/TermsLookup.java b/server/src/main/java/org/opensearch/indices/TermsLookup.java index 37533c0809d7a..6b83b713c05c2 100644 --- a/server/src/main/java/org/opensearch/indices/TermsLookup.java +++ b/server/src/main/java/org/opensearch/indices/TermsLookup.java @@ -87,6 +87,9 @@ public TermsLookup(StreamInput in) throws IOException { path = in.readString(); index = in.readString(); routing = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + store = in.readBoolean(); + } } @Override @@ -98,6 +101,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(path); out.writeString(index); out.writeOptionalString(routing); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeBoolean(store); + } } public String index() { @@ -121,6 +127,17 @@ public TermsLookup routing(String routing) { return this; } + private boolean store; + + public boolean store() { + return store; + } + + public TermsLookup store(boolean store) { + this.store = store; + return this; + } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("terms_lookup", args -> { String index = (String) args[0]; String id = (String) args[1]; @@ -132,6 +149,7 @@ public TermsLookup routing(String routing) { PARSER.declareString(constructorArg(), new ParseField("id")); PARSER.declareString(constructorArg(), new ParseField("path")); PARSER.declareString(TermsLookup::routing, new ParseField("routing")); + PARSER.declareBoolean(TermsLookup::store, new ParseField("store")); } public static TermsLookup parseTermsLookup(XContentParser parser) throws IOException { @@ -151,12 +169,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (routing != null) { builder.field("routing", routing); } + if (store) { + builder.field("store", true); + } return builder; } @Override public int hashCode() { - return Objects.hash(index, id, path, routing); + return Objects.hash(index, id, path, routing, store); } @Override @@ -171,6 +192,7 @@ public boolean equals(Object obj) { return Objects.equals(index, other.index) && Objects.equals(id, other.id) && Objects.equals(path, other.path) - && Objects.equals(routing, other.routing); + && Objects.equals(routing, other.routing) + && Objects.equals(store, other.store); } } diff --git a/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java b/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java new file mode 100644 index 0000000000000..dfa5fc4567f80 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java @@ -0,0 +1,152 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The 
OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.query; + +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; + +import java.io.IOException; +import java.util.Objects; + +import org.roaringbitmap.RoaringBitmap; + +/** + * Filter with bitmap + *

+ * Similar to Lucene SortedNumericDocValuesSetQuery + */ +public class BitmapDocValuesQuery extends Query implements Accountable { + + final String field; + final RoaringBitmap bitmap; + final long min; + final long max; + + public BitmapDocValuesQuery(String field, RoaringBitmap bitmap) { + this.field = field; + this.bitmap = bitmap; + if (!bitmap.isEmpty()) { + min = bitmap.first(); + max = bitmap.last(); + } else { + min = 0; // final field + max = 0; + } + } + + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return new ConstantScoreWeight(this, boost) { + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), field); + final NumericDocValues singleton = DocValues.unwrapSingleton(values); + final TwoPhaseIterator iterator; + if (singleton != null) { + iterator = new TwoPhaseIterator(singleton) { + @Override + public boolean matches() throws IOException { + long value = singleton.longValue(); + return value >= min && value <= max && bitmap.contains((int) value); + } + + @Override + public float matchCost() { + return 5; // 2 comparisons, possible lookup in the bitmap + } + }; + } else { + iterator = new TwoPhaseIterator(values) { + @Override + public boolean matches() throws IOException { + int count = values.docValueCount(); + for (int i = 0; i < count; i++) { + final long value = values.nextValue(); + if (value < min) { + continue; + } else if (value > max) { + return false; // values are sorted, terminate + } else if (bitmap.contains((int) value)) { + return true; + } + } + return false; + } + + @Override + public float matchCost() { + return 5; // 2 comparisons, possible lookup in the bitmap + } + }; + } + return new ConstantScoreScorer(this, score(), scoreMode, iterator); + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return DocValues.isCacheable(ctx, field); + } + }; + } + + @Override + public String toString(String field) { + // bitmap may contain high cardinality, so choose to not show the actual values in it + return field + " cardinality: " + bitmap.getLongCardinality(); + } + + @Override + public Query rewrite(IndexSearcher indexSearcher) throws IOException { + if (bitmap.isEmpty()) { + return new MatchNoDocsQuery(); + } + return super.rewrite(indexSearcher); + } + + @Override + public boolean equals(Object other) { + if (sameClassAs(other) == false) { + return false; + } + BitmapDocValuesQuery that = (BitmapDocValuesQuery) other; + return field.equals(that.field) && bitmap.equals(that.bitmap); + } + + @Override + public int hashCode() { + return Objects.hash(classHash(), field, bitmap); + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.shallowSizeOfInstance(BitmapDocValuesQuery.class) + RamUsageEstimator.sizeOfObject(field) + + RamUsageEstimator.sizeOfObject(bitmap); + } + + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(field)) { + visitor.visitLeaf(this); + } + } +} diff --git a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java index 96487db6dd512..b27ef49303205 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java @@ -62,6 +62,7 @@ import org.opensearch.common.settings.Settings; import 
org.opensearch.common.util.BigArrays; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; @@ -73,18 +74,22 @@ import org.opensearch.index.mapper.NumberFieldMapper.NumberType; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.MultiValueMode; +import org.opensearch.search.query.BitmapDocValuesQuery; import org.junit.Before; import java.io.ByteArrayInputStream; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Supplier; +import org.roaringbitmap.RoaringBitmap; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -947,4 +952,33 @@ public void testFetchSourceValue() throws IOException { assertEquals(Collections.singletonList(2.71f), fetchSourceValue(nullValueMapper, "")); assertEquals(Collections.singletonList(2.71f), fetchSourceValue(nullValueMapper, null)); } + + public void testBitmapQuery() throws IOException { + RoaringBitmap r = new RoaringBitmap(); + byte[] array = new byte[r.serializedSizeInBytes()]; + r.serialize(ByteBuffer.wrap(array)); + BytesArray bitmap = new BytesArray(array); + + NumberFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER); + assertEquals( + new IndexOrDocValuesQuery(NumberType.bitmapIndexQuery("field", r), new BitmapDocValuesQuery("field", r)), + ft.bitmapQuery(bitmap) + ); + + ft = new NumberFieldType("field", NumberType.INTEGER, false, false, true, true, null, Collections.emptyMap()); + assertEquals(new BitmapDocValuesQuery("field", r), ft.bitmapQuery(bitmap)); + + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig()); + DirectoryReader reader = DirectoryReader.open(w); + assertEquals(new MatchNoDocsQuery(), ft.bitmapQuery(bitmap).rewrite(newSearcher(reader))); + reader.close(); + w.close(); + dir.close(); + + NumberType type = randomValueOtherThan(NumberType.INTEGER, () -> randomFrom(NumberType.values())); + ft = new NumberFieldMapper.NumberFieldType("field", type); + NumberFieldType finalFt = ft; + assertThrows(IllegalArgumentException.class, () -> finalFt.bitmapQuery(bitmap)); + } } diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index 97f372dc04a1b..cfe9fed3bc97c 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -46,6 +46,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; +import org.opensearch.common.document.DocumentField; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.ParsingException; @@ -59,15 +60,20 @@ import org.junit.Before; import java.io.IOException; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import 
java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import org.roaringbitmap.RoaringBitmap; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.instanceOf; @@ -120,10 +126,9 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { } private TermsLookup randomTermsLookup() { - // Randomly choose between a typeless terms lookup and one with an explicit type to make sure we are TermsLookup lookup = new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); - // testing both cases. lookup.routing(randomBoolean() ? randomAlphaOfLength(10) : null); + lookup.store(randomBoolean()); return lookup; } @@ -245,7 +250,17 @@ public GetResponse executeGet(GetRequest getRequest) { } catch (IOException ex) { throw new OpenSearchException("boom", ex); } - return new GetResponse(new GetResult(getRequest.index(), getRequest.id(), 0, 1, 0, true, new BytesArray(json), null, null)); + Map documentField = new HashMap<>(); + List nonNullTerms = new ArrayList<>(); + for (Object obj : randomTerms) { + if (obj != null) { + nonNullTerms.add(obj); + } + } + documentField.put(termsPath, new DocumentField(termsPath, nonNullTerms)); + return new GetResponse( + new GetResult(getRequest.index(), getRequest.id(), 0, 1, 0, true, new BytesArray(json), documentField, null) + ); } public void testNumeric() throws IOException { @@ -388,4 +403,50 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { } } + public void testFromJsonWithValueType() throws IOException { + String json = "{\n" + + " \"terms\": {\n" + + " \"student_id\": [\"OjAAAAEAAAAAAAEAEAAAAG8A3gA=\"],\n" + + " \"boost\" : 1.0,\n" + + " \"value_type\": \"bitmap\"\n" + + " }\n" + + "}"; + + TermsQueryBuilder parsed = (TermsQueryBuilder) parseQuery(json); + checkGeneratedJson(json, parsed); + assertEquals(json, 1, parsed.values().size()); + } + + public void testFromJsonWithValueTypeFail() { + String json = "{\n" + + " \"terms\": {\n" + + " \"student_id\": [\"OjAAAAEAAAAAAAEAEAAAAG8A3gA=\", \"2\"],\n" + + " \"boost\" : 1.0,\n" + + " \"value_type\": \"bitmap\"\n" + + " }\n" + + "}"; + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); + assertEquals( + "Invalid value for bitmap type: Expected a single-element array with a base64 encoded serialized bitmap.", + e.getMessage() + ); + } + + public void testTermsLookupBitmap() throws IOException { + RoaringBitmap bitmap = new RoaringBitmap(); + bitmap.add(111); + bitmap.add(333); + byte[] array = new byte[bitmap.serializedSizeInBytes()]; + bitmap.serialize(ByteBuffer.wrap(array)); + randomTerms = List.of(new BytesArray(array)); // this will be fetched back by terms lookup + + TermsQueryBuilder query = new TermsQueryBuilder(INT_FIELD_NAME, randomTermsLookup().store(true)).valueType( + TermsQueryBuilder.ValueType.BITMAP + ); + QueryShardContext context = createShardContext(); + QueryBuilder rewritten = rewriteQuery(query, new QueryShardContext(context)); + Query luceneQuery = rewritten.toQuery(context); + assertTrue(luceneQuery instanceof IndexOrDocValuesQuery); + } } diff --git a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java index 8a7867729f2c1..3f45bc86104dd 100644 --- a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java +++ 
b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java @@ -48,12 +48,15 @@ public void testTermsLookup() { String id = randomAlphaOfLengthBetween(1, 10); String path = randomAlphaOfLengthBetween(1, 10); String routing = randomAlphaOfLengthBetween(1, 10); + boolean store = randomBoolean(); TermsLookup termsLookup = new TermsLookup(index, id, path); termsLookup.routing(routing); + termsLookup.store(store); assertEquals(index, termsLookup.index()); assertEquals(id, termsLookup.id()); assertEquals(path, termsLookup.path()); assertEquals(routing, termsLookup.routing()); + assertEquals(store, termsLookup.store()); } public void testIllegalArguments() { @@ -109,6 +112,6 @@ public void testXContentParsing() throws IOException { public static TermsLookup randomTermsLookup() { return new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10).replace('.', '_')).routing( randomBoolean() ? randomAlphaOfLength(10) : null - ); + ).store(randomBoolean()); } } diff --git a/server/src/test/java/org/opensearch/search/query/BitmapDocValuesQueryTests.java b/server/src/test/java/org/opensearch/search/query/BitmapDocValuesQueryTests.java new file mode 100644 index 0000000000000..6e293d1ec69fd --- /dev/null +++ b/server/src/test/java/org/opensearch/search/query/BitmapDocValuesQueryTests.java @@ -0,0 +1,149 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.query; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.IntField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.Directory; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.roaringbitmap.RoaringBitmap; + +public class BitmapDocValuesQueryTests extends OpenSearchTestCase { + private Directory dir; + private IndexWriter w; + private DirectoryReader reader; + private IndexSearcher searcher; + + @Before + public void initSearcher() throws IOException { + dir = newDirectory(); + w = new IndexWriter(dir, newIndexWriterConfig()); + } + + @After + public void closeAllTheReaders() throws IOException { + reader.close(); + w.close(); + dir.close(); + } + + public void testScore() throws IOException { + Document d = new Document(); + d.add(new IntField("product_id", 1, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 2, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 3, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 4, Field.Store.NO)); + w.addDocument(d); + + w.commit(); + reader = DirectoryReader.open(w); + searcher = newSearcher(reader); + + RoaringBitmap bitmap = new 
RoaringBitmap(); + bitmap.add(1); + bitmap.add(4); + BitmapDocValuesQuery query = new BitmapDocValuesQuery("product_id", bitmap); + + Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f); + + List actual = new LinkedList<>(); + for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) { + // use doc values to get the actual value of the matching docs and assert + // cannot directly check the docId because test can randomize segment numbers + SortedNumericDocValues dv = DocValues.getSortedNumeric(leaf.reader(), "product_id"); + Scorer scorer = weight.scorer(leaf); + DocIdSetIterator disi = scorer.iterator(); + int docId; + while ((docId = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + dv.advanceExact(docId); + for (int count = 0; count < dv.docValueCount(); ++count) { + actual.add((int) dv.nextValue()); + } + } + } + List expected = List.of(1, 4); + assertEquals(expected, actual); + } + + public void testScoreMultiValues() throws IOException { + Document d = new Document(); + d.add(new IntField("product_id", 1, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 2, Field.Store.NO)); + d.add(new IntField("product_id", 3, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 3, Field.Store.NO)); + w.addDocument(d); + + d = new Document(); + d.add(new IntField("product_id", 4, Field.Store.NO)); + w.addDocument(d); + + w.commit(); + reader = DirectoryReader.open(w); + searcher = newSearcher(reader); + + RoaringBitmap bitmap = new RoaringBitmap(); + bitmap.add(3); + BitmapDocValuesQuery query = new BitmapDocValuesQuery("product_id", bitmap); + + Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f); + + Set actual = new HashSet<>(); + for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) { + // use doc values to get the actual value of the matching docs and assert + // cannot directly check the docId because test can randomize segment numbers + SortedNumericDocValues dv = DocValues.getSortedNumeric(leaf.reader(), "product_id"); + Scorer scorer = weight.scorer(leaf); + DocIdSetIterator disi = scorer.iterator(); + int docId; + while ((docId = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + dv.advanceExact(docId); + for (int count = 0; count < dv.docValueCount(); ++count) { + actual.add((int) dv.nextValue()); + } + } + } + Set expected = Set.of(2, 3); + assertEquals(expected, actual); + } +} From b2420f24ae5fe6748e89675eebd20560a5473603 Mon Sep 17 00:00:00 2001 From: Sachin Kale Date: Tue, 20 Aug 2024 10:53:51 +0530 Subject: [PATCH 11/16] Move timestamp pinning settings to RemoteStoreSettings (#15294) --------- Signed-off-by: Sachin Kale Co-authored-by: Sachin Kale --- .../RemoteStorePinnedTimestampsIT.java | 8 ++-- .../common/settings/ClusterSettings.java | 6 +-- .../indices/RemoteStoreSettings.java | 34 ++++++++++++++ .../RemoteStorePinnedTimestampService.java | 45 ++++--------------- 4 files changed, 49 insertions(+), 44 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java index 0bb53309f7a78..05ff738d2df0b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStorePinnedTimestampsIT.java @@
-54,7 +54,7 @@ public void testTimestampPinUnpin() throws Exception { remoteStorePinnedTimestampService.pinTimestamp(timestamp2, "ss3", noOpActionListener); remoteStorePinnedTimestampService.pinTimestamp(timestamp3, "ss4", noOpActionListener); - remoteStorePinnedTimestampService.setPinnedTimestampsSchedulerInterval(TimeValue.timeValueSeconds(1)); + remoteStorePinnedTimestampService.rescheduleAsyncUpdatePinnedTimestampTask(TimeValue.timeValueSeconds(1)); assertBusy(() -> { Tuple> pinnedTimestampWithFetchTimestamp_2 = RemoteStorePinnedTimestampService.getPinnedTimestamps(); @@ -63,7 +63,7 @@ public void testTimestampPinUnpin() throws Exception { assertEquals(Set.of(timestamp1, timestamp2, timestamp3), pinnedTimestampWithFetchTimestamp_2.v2()); }); - remoteStorePinnedTimestampService.setPinnedTimestampsSchedulerInterval(TimeValue.timeValueMinutes(3)); + remoteStorePinnedTimestampService.rescheduleAsyncUpdatePinnedTimestampTask(TimeValue.timeValueMinutes(3)); // This should be a no-op as pinning entity is different remoteStorePinnedTimestampService.unpinTimestamp(timestamp1, "no-snapshot", noOpActionListener); @@ -72,7 +72,7 @@ public void testTimestampPinUnpin() throws Exception { // Adding different entity to already pinned timestamp remoteStorePinnedTimestampService.pinTimestamp(timestamp3, "ss5", noOpActionListener); - remoteStorePinnedTimestampService.setPinnedTimestampsSchedulerInterval(TimeValue.timeValueSeconds(1)); + remoteStorePinnedTimestampService.rescheduleAsyncUpdatePinnedTimestampTask(TimeValue.timeValueSeconds(1)); assertBusy(() -> { Tuple> pinnedTimestampWithFetchTimestamp_3 = RemoteStorePinnedTimestampService.getPinnedTimestamps(); @@ -81,6 +81,6 @@ public void testTimestampPinUnpin() throws Exception { assertEquals(Set.of(timestamp1, timestamp3), pinnedTimestampWithFetchTimestamp_3.v2()); }); - remoteStorePinnedTimestampService.setPinnedTimestampsSchedulerInterval(TimeValue.timeValueMinutes(3)); + remoteStorePinnedTimestampService.rescheduleAsyncUpdatePinnedTimestampTask(TimeValue.timeValueMinutes(3)); } } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 7baae17dd77cd..d5e8e90458390 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -142,7 +142,6 @@ import org.opensearch.node.Node.DiscoverySettings; import org.opensearch.node.NodeRoleSettings; import org.opensearch.node.remotestore.RemoteStoreNodeService; -import org.opensearch.node.remotestore.RemoteStorePinnedTimestampService; import org.opensearch.node.resource.tracker.ResourceTrackerSettings; import org.opensearch.persistent.PersistentTasksClusterService; import org.opensearch.persistent.decider.EnableAssignmentDecider; @@ -759,9 +758,10 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING, RemoteStoreSettings.CLUSTER_REMOTE_MAX_TRANSLOG_READERS, RemoteStoreSettings.CLUSTER_REMOTE_STORE_TRANSLOG_METADATA, - SearchService.CLUSTER_ALLOW_DERIVED_FIELD_SETTING, + RemoteStoreSettings.CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL, + RemoteStoreSettings.CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_LOOKBACK_INTERVAL, - RemoteStorePinnedTimestampService.CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL, + SearchService.CLUSTER_ALLOW_DERIVED_FIELD_SETTING, // Composite index settings 
CompositeIndexSettings.STAR_TREE_INDEX_ENABLED_SETTING, diff --git a/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java b/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java index 8cb482c8d8681..495288626627b 100644 --- a/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java +++ b/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java @@ -134,6 +134,27 @@ public class RemoteStoreSettings { Property.Dynamic ); + /** + * Controls pinned timestamp scheduler interval + */ + public static final Setting CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL = Setting.timeSetting( + "cluster.remote_store.pinned_timestamps.scheduler_interval", + TimeValue.timeValueMinutes(3), + TimeValue.timeValueMinutes(1), + Setting.Property.NodeScope + ); + + /** + * Controls allowed timestamp values to be pinned from the past + */ + public static final Setting CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_LOOKBACK_INTERVAL = Setting.timeSetting( + "cluster.remote_store.pinned_timestamps.lookback_interval", + TimeValue.timeValueMinutes(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope + ); + private volatile TimeValue clusterRemoteTranslogBufferInterval; private volatile int minRemoteSegmentMetadataFiles; private volatile TimeValue clusterRemoteTranslogTransferTimeout; @@ -142,6 +163,8 @@ public class RemoteStoreSettings { private volatile RemoteStoreEnums.PathHashAlgorithm pathHashAlgorithm; private volatile int maxRemoteTranslogReaders; private volatile boolean isTranslogMetadataEnabled; + private static volatile TimeValue pinnedTimestampsSchedulerInterval; + private static volatile TimeValue pinnedTimestampsLookbackInterval; public RemoteStoreSettings(Settings settings, ClusterSettings clusterSettings) { clusterRemoteTranslogBufferInterval = CLUSTER_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING.get(settings); @@ -179,6 +202,9 @@ public RemoteStoreSettings(Settings settings, ClusterSettings clusterSettings) { CLUSTER_REMOTE_SEGMENT_TRANSFER_TIMEOUT_SETTING, this::setClusterRemoteSegmentTransferTimeout ); + + pinnedTimestampsSchedulerInterval = CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL.get(settings); + pinnedTimestampsLookbackInterval = CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_LOOKBACK_INTERVAL.get(settings); } public TimeValue getClusterRemoteTranslogBufferInterval() { @@ -246,4 +272,12 @@ public int getMaxRemoteTranslogReaders() { private void setMaxRemoteTranslogReaders(int maxRemoteTranslogReaders) { this.maxRemoteTranslogReaders = maxRemoteTranslogReaders; } + + public static TimeValue getPinnedTimestampsSchedulerInterval() { + return pinnedTimestampsSchedulerInterval; + } + + public static TimeValue getPinnedTimestampsLookbackInterval() { + return pinnedTimestampsLookbackInterval; + } } diff --git a/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java b/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java index 35730a75a8142..c37db618c2522 100644 --- a/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java +++ b/server/src/main/java/org/opensearch/node/remotestore/RemoteStorePinnedTimestampService.java @@ -15,7 +15,6 @@ import org.opensearch.common.blobstore.BlobMetadata; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import
org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractAsyncTask; @@ -24,6 +23,7 @@ import org.opensearch.gateway.remote.model.RemotePinnedTimestamps.PinnedTimestamps; import org.opensearch.gateway.remote.model.RemoteStorePinnedTimestampsBlobStore; import org.opensearch.index.translog.transfer.BlobStoreTransferService; +import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.node.Node; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; @@ -61,30 +61,8 @@ public class RemoteStorePinnedTimestampService implements Closeable { private BlobStoreTransferService blobStoreTransferService; private RemoteStorePinnedTimestampsBlobStore pinnedTimestampsBlobStore; private AsyncUpdatePinnedTimestampTask asyncUpdatePinnedTimestampTask; - private volatile TimeValue pinnedTimestampsSchedulerInterval; private final Semaphore updateTimetampPinningSemaphore = new Semaphore(1); - /** - * Controls pinned timestamp scheduler interval - */ - public static final Setting CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL = Setting.timeSetting( - "cluster.remote_store.pinned_timestamps.scheduler_interval", - TimeValue.timeValueMinutes(3), - TimeValue.timeValueMinutes(1), - Setting.Property.NodeScope - ); - - /** - * Controls allowed timestamp values to be pinned from past - */ - public static final Setting CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_LOOKBACK_INTERVAL = Setting.timeSetting( - "cluster.remote_store.pinned_timestamps.lookback_interval", - TimeValue.timeValueMinutes(1), - TimeValue.timeValueMinutes(1), - TimeValue.timeValueMinutes(5), - Setting.Property.NodeScope - ); - public RemoteStorePinnedTimestampService( Supplier repositoriesService, Settings settings, @@ -95,8 +73,6 @@ public RemoteStorePinnedTimestampService( this.settings = settings; this.threadPool = threadPool; this.clusterService = clusterService; - - pinnedTimestampsSchedulerInterval = CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_SCHEDULER_INTERVAL.get(settings); } /** @@ -107,7 +83,7 @@ public RemoteStorePinnedTimestampService( public void start() { validateRemoteStoreConfiguration(); initializeComponents(); - startAsyncUpdateTask(); + startAsyncUpdateTask(RemoteStoreSettings.getPinnedTimestampsSchedulerInterval()); } private void validateRemoteStoreConfiguration() { @@ -132,7 +108,7 @@ private void initializeComponents() { ); } - private void startAsyncUpdateTask() { + private void startAsyncUpdateTask(TimeValue pinnedTimestampsSchedulerInterval) { asyncUpdatePinnedTimestampTask = new AsyncUpdatePinnedTimestampTask(logger, threadPool, pinnedTimestampsSchedulerInterval, true); } @@ -147,8 +123,8 @@ private void startAsyncUpdateTask() { public void pinTimestamp(long timestamp, String pinningEntity, ActionListener listener) { // If a caller uses current system time to pin the timestamp, following check will almost always fail. 
// So, we allow pinning timestamp in the past up to some buffer - long lookbackIntervalInMills = CLUSTER_REMOTE_STORE_PINNED_TIMESTAMP_LOOKBACK_INTERVAL.get(settings).millis(); - if (timestamp < TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - lookbackIntervalInMills) { + long lookbackIntervalInMills = RemoteStoreSettings.getPinnedTimestampsLookbackInterval().millis(); + if (timestamp < (System.currentTimeMillis() - lookbackIntervalInMills)) { throw new IllegalArgumentException( "Timestamp to be pinned is less than current timestamp - value of cluster.remote_store.pinned_timestamps.lookback_interval" ); @@ -256,17 +232,12 @@ public void close() throws IOException { asyncUpdatePinnedTimestampTask.close(); } - // Visible for testing - public void setPinnedTimestampsSchedulerInterval(TimeValue pinnedTimestampsSchedulerInterval) { - this.pinnedTimestampsSchedulerInterval = pinnedTimestampsSchedulerInterval; - rescheduleAsyncUpdatePinnedTimestampTask(); - } - - private void rescheduleAsyncUpdatePinnedTimestampTask() { + // Used in integ tests + public void rescheduleAsyncUpdatePinnedTimestampTask(TimeValue pinnedTimestampsSchedulerInterval) { if (pinnedTimestampsSchedulerInterval != null) { pinnedTimestampsSet = new Tuple<>(-1L, Set.of()); asyncUpdatePinnedTimestampTask.close(); - startAsyncUpdateTask(); + startAsyncUpdateTask(pinnedTimestampsSchedulerInterval); } } From 073cb5b3b09b826f8ccb0446dc64fc553207c1c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 08:42:37 -0400 Subject: [PATCH 12/16] Bump commons-cli:commons-cli from 1.8.0 to 1.9.0 in /plugins/repository-hdfs (#15298) * Bump commons-cli:commons-cli in /plugins/repository-hdfs Bumps commons-cli:commons-cli from 1.8.0 to 1.9.0. --- updated-dependencies: - dependency-name: commons-cli:commons-cli dependency-type: direct:production update-type: version-update:semver-minor ...
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/commons-cli-1.8.0.jar.sha1 | 1 - plugins/repository-hdfs/licenses/commons-cli-1.9.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/commons-cli-1.8.0.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-cli-1.9.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 12c909cfd46aa..808ab50c85f20 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,6 +40,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.kerby:kerb-admin` from 2.0.3 to 2.1.0 ([#15301](https://github.com/opensearch-project/OpenSearch/pull/15301)) - Bump `com.azure:azure-core-http-netty` from 1.15.1 to 1.15.3 ([#15300](https://github.com/opensearch-project/OpenSearch/pull/15300)) - Bump `com.gradle.develocity` from 3.17.6 to 3.18 ([#15297](https://github.com/opensearch-project/OpenSearch/pull/15297)) +- Bump `commons-cli:commons-cli` from 1.8.0 to 1.9.0 ([#15298](https://github.com/opensearch-project/OpenSearch/pull/15298)) ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index e1e5f422f3e07..eeb5e2cd88317 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -70,7 +70,7 @@ dependencies { api 'com.google.code.gson:gson:2.11.0' runtimeOnly "com.google.guava:guava:${versions.guava}" api "commons-logging:commons-logging:${versions.commonslogging}" - api 'commons-cli:commons-cli:1.8.0' + api 'commons-cli:commons-cli:1.9.0' api "commons-codec:commons-codec:${versions.commonscodec}" api 'commons-collections:commons-collections:3.2.2' api "org.apache.commons:commons-compress:${versions.commonscompress}" diff --git a/plugins/repository-hdfs/licenses/commons-cli-1.8.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-cli-1.8.0.jar.sha1 deleted file mode 100644 index 65102052409ea..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-cli-1.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -41a4bff12057eecb6daaf9c7f36c237815be3da1 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-cli-1.9.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-cli-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..9a97a11dbe8d5 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-cli-1.9.0.jar.sha1 @@ -0,0 +1 @@ +e1cdfa8bf40ccbb7440b2d1232f9f45bb20a1844 \ No newline at end of file From f94a99e749521b0510a2f59243b5d3c647497705 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 08:45:37 -0400 Subject: [PATCH 13/16] Bump org.bouncycastle:bcpg-fips from 2.0.8 to 2.0.9 in /distribution/tools/plugin-cli (#15299) * Bump org.bouncycastle:bcpg-fips in /distribution/tools/plugin-cli Bumps org.bouncycastle:bcpg-fips from 2.0.8 to 2.0.9. 
--- updated-dependencies: - dependency-name: org.bouncycastle:bcpg-fips dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 2 +- distribution/tools/plugin-cli/build.gradle | 2 +- distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.8.jar.sha1 | 1 - distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.8.jar.sha1 create mode 100644 distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 808ab50c85f20..9411393661838 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,7 +31,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.tukaani:xz` from 1.9 to 1.10 ([#15110](https://github.com/opensearch-project/OpenSearch/pull/15110)) - Bump `actions/setup-java` from 1 to 4 ([#15104](https://github.com/opensearch-project/OpenSearch/pull/15104)) - Bump `org.apache.avro:avro` from 1.11.3 to 1.12.0 in /plugins/repository-hdfs ([#15119](https://github.com/opensearch-project/OpenSearch/pull/15119)) -- Bump `org.bouncycastle:bcpg-fips` from 1.0.7.1 to 2.0.8 and `org.bouncycastle:bc-fips` from 1.0.2.5 to 2.0.0 in /distribution/tools/plugin-cli ([#15103](https://github.com/opensearch-project/OpenSearch/pull/15103)) +- Bump `org.bouncycastle:bcpg-fips` from 1.0.7.1 to 2.0.9 ([#15103](https://github.com/opensearch-project/OpenSearch/pull/15103), [#15299](https://github.com/opensearch-project/OpenSearch/pull/15299)) - Bump `com.azure:azure-core` from 1.49.1 to 1.51.0 ([#15111](https://github.com/opensearch-project/OpenSearch/pull/15111)) - Bump `org.xerial.snappy:snappy-java` from 1.1.10.5 to 1.1.10.6 ([#15207](https://github.com/opensearch-project/OpenSearch/pull/15207)) - Bump `com.azure:azure-xml` from 1.0.0 to 1.1.0 ([#15206](https://github.com/opensearch-project/OpenSearch/pull/15206)) diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index a619ba1acf6a7..784cdc457a1a9 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -37,7 +37,7 @@ base { dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") - api "org.bouncycastle:bcpg-fips:2.0.8" + api "org.bouncycastle:bcpg-fips:2.0.9" api "org.bouncycastle:bc-fips:2.0.0" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.3.0' diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.8.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.8.jar.sha1 deleted file mode 100644 index 758ee2fdf9de6..0000000000000 --- a/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -51c2f633e0c32d10de1ebab4c86f93310ff820f8 \ No newline at end of file diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..20cdbf6dc8aa8 --- /dev/null +++ b/distribution/tools/plugin-cli/licenses/bcpg-fips-2.0.9.jar.sha1 
@@ -0,0 +1 @@ +f69719ef8dbf34d5f906ce480496446b2fd2ae27 \ No newline at end of file From 13163aba31fd3fef078897068040b0fc4c35b251 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Tue, 20 Aug 2024 10:07:05 -0700 Subject: [PATCH 14/16] Optimize global ordinal includes/excludes for prefix matching (#14371) * Optimize global ordinal includes/excludes for prefix matching If an aggregation specifies includes or excludes based on a regular expression, and the regular expression has a finite expansion followed by .*, then we can optimize the global ordinal filter. Specifically, in this case, we can expand the matching prefixes, then include/exclude the range of global ordinals that start with each prefix. Signed-off-by: Michael Froh * Add unit test Signed-off-by: Michael Froh * Add changelog entry Signed-off-by: Michael Froh * Improve test coverage Updated the unit test to be functionally equivalent, but it covers more of the regex logic. Signed-off-by: Michael Froh * Improve test coverage Signed-off-by: Michael Froh * Fix bug in exclude-only case with no doc values in segment Signed-off-by: Michael Froh * Address comments from @mch2 Signed-off-by: Michael Froh --------- Signed-off-by: Michael Froh --- CHANGELOG.md | 1 + .../bucket/terms/IncludeExclude.java | 189 ++++++++++++- .../terms}/IncludeExcludeTests.java | 252 +++++++++++++++++- 3 files changed, 439 insertions(+), 3 deletions(-) rename server/src/test/java/org/opensearch/search/aggregations/{support => bucket/terms}/IncludeExcludeTests.java (58%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9411393661838..c49f506f94bf1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Changed - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979)) +- Optimize regexp-based include/exclude on aggregations when pattern matches prefixes ([#14371](https://github.com/opensearch-project/OpenSearch/pull/14371)) - Replace and block usages of org.apache.logging.log4j.util.Strings ([#15238](https://github.com/opensearch-project/OpenSearch/pull/15238)) ### Deprecated diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java index f8061bcaca50f..20f294bc7199b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -57,7 +57,11 @@ import org.opensearch.search.DocValueFormat; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.SortedSet; @@ -86,6 +90,10 @@ public class IncludeExclude implements Writeable, ToXContentFragment { * https://github.com/opensearch-project/OpenSearch/issues/2858 */ private static final int DEFAULT_MAX_REGEX_LENGTH = 1000; + /** + * The maximum number of prefixes to extract from a regex in tryCreatePrefixOrdinalsFilter + */ + private static final int MAX_PREFIXES = 1000; // for parsing purposes only // TODO: move all aggs to the same package so that this stuff could be pkg-private @@ -393,6 +401,92 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro } + /** + 
* An ordinals filter that includes/excludes all ordinals corresponding to terms starting with the given prefixes + */ + static class PrefixBackedOrdinalsFilter extends OrdinalsFilter { + + private final SortedSet<BytesRef> includePrefixes, excludePrefixes; + + private PrefixBackedOrdinalsFilter(SortedSet<BytesRef> includePrefixes, SortedSet<BytesRef> excludePrefixes) { + this.includePrefixes = includePrefixes; + this.excludePrefixes = excludePrefixes; + } + + private static BytesRef nextBytesRef(BytesRef bytesRef) { + BytesRef next = BytesRef.deepCopyOf(bytesRef); + int pos = next.offset + next.length - 1; + while (pos >= next.offset && next.bytes[pos] == -1) { + next.bytes[pos] = 0; + pos--; + } + if (pos >= next.offset) { + next.bytes[pos]++; + } else { + // Every byte in our prefix had value 0xFF. We must match all subsequent ordinals. + return null; + } + return next; + } + + private interface LongBiConsumer { + void accept(long a, long b); + } + + private static void process(SortedSetDocValues globalOrdinals, long length, SortedSet<BytesRef> prefixes, LongBiConsumer consumer) + throws IOException { + for (BytesRef prefix : prefixes) { + long startOrd = globalOrdinals.lookupTerm(prefix); + if (startOrd < 0) { + // The prefix is not an exact match in the ordinals (can skip equal length below) + startOrd = -1 - startOrd; + // Make sure that the term at startOrd starts with prefix + BytesRef startTerm = globalOrdinals.lookupOrd(startOrd); + if (startTerm.length <= prefix.length + || !Arrays.equals( + startTerm.bytes, + startTerm.offset, + startTerm.offset + prefix.length, + prefix.bytes, + prefix.offset, + prefix.offset + prefix.length + )) { + continue; + } + } + if (startOrd >= length) { + continue; + } + BytesRef next = nextBytesRef(prefix); + if (next == null) { + consumer.accept(startOrd, length); + } else { + long endOrd = globalOrdinals.lookupTerm(next); + if (endOrd < 0) { + endOrd = -1 - endOrd; + } + if (startOrd < endOrd) { + consumer.accept(startOrd, endOrd); + } + } + } + + } + + @Override + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { + LongBitSet accept = new LongBitSet(globalOrdinals.getValueCount()); + if (!includePrefixes.isEmpty()) { + process(globalOrdinals, accept.length(), includePrefixes, accept::set); + } else if (accept.length() > 0) { + // Exclude-only + accept.set(0, accept.length()); + } + process(globalOrdinals, accept.length(), excludePrefixes, accept::clear); + return accept; + } + } + private final String include, exclude; private final SortedSet<BytesRef> includeValues, excludeValues; private final int incZeroBasedPartition; @@ -709,8 +803,13 @@ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { } public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, int maxRegexLength) { - if (isRegexBased()) { + if ((include == null || include.endsWith(".*")) && (exclude == null || exclude.endsWith(".*"))) { + PrefixBackedOrdinalsFilter prefixBackedOrdinalsFilter = tryCreatePrefixOrdinalsFilter(maxRegexLength); + if (prefixBackedOrdinalsFilter != null) { + return prefixBackedOrdinalsFilter; + } + } return new AutomatonBackedOrdinalsFilter(toAutomaton(maxRegexLength)); } if (isPartitionBased()) { @@ -720,6 +819,94 @@ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, int maxRege return new TermListBackedOrdinalsFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format)); } + private static List<String> expandRegexp(RegExp regExp, int maxPrefixes) { + switch (regExp.kind) { + case REGEXP_UNION: + 
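+ // Lucene's RegExp represents a multi-way union like (a|b|c) as nested binary REGEXP_UNION nodes; + // the loop below walks the exp1 chain collecting each exp2 alternative, then expands every branch, + // giving up (returning null) once the combined number of candidate prefixes would exceed maxPrefixes.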
List<RegExp> alternatives = new ArrayList<>(); + while (regExp.exp1.kind == RegExp.Kind.REGEXP_UNION) { + alternatives.add(regExp.exp2); + regExp = regExp.exp1; + } + alternatives.add(regExp.exp2); + alternatives.add(regExp.exp1); + List<String> output = new ArrayList<>(); + for (RegExp leaf : alternatives) { + List<String> leafExpansions = expandRegexp(leaf, maxPrefixes); + if (leafExpansions == null) { + return null; + } else { + if (output.size() + leafExpansions.size() > maxPrefixes) { + return null; + } + output.addAll(leafExpansions); + } + } + return output; + case REGEXP_CONCATENATION: + List<String> prefixes = expandRegexp(regExp.exp1, maxPrefixes); + if (prefixes == null) { + return null; + } + List<String> suffixes = expandRegexp(regExp.exp2, maxPrefixes); + if (suffixes == null) { + return null; + } + if (prefixes.size() * suffixes.size() > maxPrefixes) { + return null; + } + List<String> out = new ArrayList<>(); + StringBuilder stringBuilder = new StringBuilder(); + for (String prefix : prefixes) { + for (String suffix : suffixes) { + stringBuilder.setLength(0); + stringBuilder.append(prefix).append(suffix); + out.add(stringBuilder.toString()); + } + } + return out; + case REGEXP_CHAR: + return List.of(Character.toString(regExp.c)); + case REGEXP_STRING: + return List.of(regExp.s); + default: + return null; + } + } + + static SortedSet<BytesRef> extractPrefixes(String pattern, int maxRegexLength) { + if (pattern == null) { + return Collections.emptySortedSet(); + } + SortedSet<BytesRef> prefixSet = null; + validateRegExpStringLength(pattern, maxRegexLength); + RegExp regExp = new RegExp(pattern); + if (regExp.kind == RegExp.Kind.REGEXP_CONCATENATION && regExp.exp2.kind == RegExp.Kind.REGEXP_REPEAT) { + RegExp tail = regExp.exp2.exp1; + if (tail.kind == RegExp.Kind.REGEXP_ANYCHAR || tail.kind == RegExp.Kind.REGEXP_ANYSTRING) { + List<String> prefixes = expandRegexp(regExp.exp1, MAX_PREFIXES); + if (prefixes != null) { + prefixSet = new TreeSet<>(); + for (String prefix : prefixes) { + prefixSet.add(new BytesRef(prefix)); + } + } + } + } + return prefixSet; + } + + private PrefixBackedOrdinalsFilter tryCreatePrefixOrdinalsFilter(int maxRegexLength) { + SortedSet<BytesRef> includeSet = extractPrefixes(include, maxRegexLength); + if (includeSet == null) { + return null; + } + SortedSet<BytesRef> excludeSet = extractPrefixes(exclude, maxRegexLength); + if (excludeSet == null) { + return null; + } + return new PrefixBackedOrdinalsFilter(includeSet, excludeSet); + } + public LongFilter convertToLongFilter(DocValueFormat format) { if (isPartitionBased()) { diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java similarity index 58% rename from server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java rename to server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java index f223648de6ef4..2356e2dc6d855 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.opensearch.search.aggregations.support; +package org.opensearch.search.aggregations.bucket.terms; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.SortedSetDocValues; @@ -44,13 +44,14 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; import org.opensearch.search.DocValueFormat; -import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude.OrdinalsFilter; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; import java.util.Collections; +import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.atomic.LongAdder; public class IncludeExcludeTests extends OpenSearchTestCase { @@ -297,4 +298,251 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep } } + private static BytesRef[] toBytesRefArray(String... values) { + BytesRef[] bytesRefs = new BytesRef[values.length]; + for (int i = 0; i < values.length; i++) { + bytesRefs[i] = new BytesRef(values[i]); + } + return bytesRefs; + } + + public void testPrefixOrds() throws IOException { + IncludeExclude includeExclude = new IncludeExclude("(color|length|size):.*", "color:g.*"); + + OrdinalsFilter ordinalsFilter = includeExclude.convertToOrdinalsFilter(DocValueFormat.RAW); + + // Which of the following match the filter or not? + BytesRef[] bytesRefs = toBytesRefArray( + "color:blue", // true + "color:crimson", // true + "color:green", // false (excluded) + "color:gray", // false (excluded) + "depth:10in", // false + "depth:12in", // false + "depth:17in", // false + "length:long", // true + "length:medium", // true + "length:short", // true + "material:cotton", // false + "material:linen", // false + "material:polyester", // false + "size:large", // true + "size:medium", // true + "size:small", // true + "width:13in", // false + "width:27in", // false + "width:55in" // false + ); + boolean[] expectedFilter = new boolean[] { + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true, + true, + false, + false, + false }; + + LongAdder lookupCount = new LongAdder(); + SortedSetDocValues sortedSetDocValues = new AbstractSortedSetDocValues() { + @Override + public boolean advanceExact(int target) { + return false; + } + + @Override + public long nextOrd() { + return 0; + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public BytesRef lookupOrd(long ord) { + lookupCount.increment(); + int ordIndex = Math.toIntExact(ord); + return bytesRefs[ordIndex]; + } + + @Override + public long getValueCount() { + return bytesRefs.length; + } + }; + + LongBitSet acceptedOrds = ordinalsFilter.acceptedGlobalOrdinals(sortedSetDocValues); + long prefixLookupCount = lookupCount.longValue(); + assertEquals(expectedFilter.length, acceptedOrds.length()); + for (int i = 0; i < expectedFilter.length; i++) { + assertEquals(expectedFilter[i], acceptedOrds.get(i)); + } + + // Now repeat, but this time, the prefix optimization won't work (because of the .+ on the exclude filter) + includeExclude = new IncludeExclude("(color|length|size):.*", "color:g.+"); + ordinalsFilter = includeExclude.convertToOrdinalsFilter(DocValueFormat.RAW); + acceptedOrds = ordinalsFilter.acceptedGlobalOrdinals(sortedSetDocValues); + long regexpLookupCount = lookupCount.longValue() - prefixLookupCount; + + // The filter should be functionally the 
same + assertEquals(expectedFilter.length, acceptedOrds.length()); + for (int i = 0; i < expectedFilter.length; i++) { + assertEquals(expectedFilter[i], acceptedOrds.get(i)); + } + + // But the full regexp requires more lookups + assertTrue(regexpLookupCount > prefixLookupCount); + } + + public void testExtractPrefixes() { + // Positive tests + assertEquals(bytesRefs("a"), IncludeExclude.extractPrefixes("a.*", 1000)); + assertEquals(bytesRefs("a", "b"), IncludeExclude.extractPrefixes("(a|b).*", 1000)); + assertEquals(bytesRefs("ab"), IncludeExclude.extractPrefixes("a(b).*", 1000)); + assertEquals(bytesRefs("ab", "ac"), IncludeExclude.extractPrefixes("a(b|c).*", 1000)); + assertEquals(bytesRefs("aabb", "aacc"), IncludeExclude.extractPrefixes("aa(bb|cc).*", 1000)); + + // No regex means no prefixes + assertEquals(bytesRefs(), IncludeExclude.extractPrefixes(null, 1000)); + + // Negative tests + assertNull(IncludeExclude.extractPrefixes(".*", 1000)); // Literal match-all has no prefixes + assertNull(IncludeExclude.extractPrefixes("ab?.*", 1000)); // Don't support optionals yet + // The following cover various cases involving infinite possible prefixes + assertNull(IncludeExclude.extractPrefixes("ab*.*", 1000)); + assertNull(IncludeExclude.extractPrefixes("a*(b|c).*", 1000)); + assertNull(IncludeExclude.extractPrefixes("a(b*|c)d.*", 1000)); + assertNull(IncludeExclude.extractPrefixes("a(b|c*)d.*", 1000)); + assertNull(IncludeExclude.extractPrefixes("a(b|c*)d*.*", 1000)); + + // Test with too many possible prefixes -- 10 * 10 * 10 + 1 > 1000 + assertNull( + IncludeExclude.extractPrefixes( + "((a1|a2|a3|a4|a5|a6|a7|a8|a9|a10)" + "(b1|b2|b3|b4|b5|b6|b7|b8|b9|b10)" + "(c1|c2|c3|c4|c5|c6|c7|c8|c9|c10)|x).*", + 1000 + ) + ); + // Test with too many possible prefixes -- 10 * 10 * 11 > 1000 + assertNull( + IncludeExclude.extractPrefixes( + "((a1|a2|a3|a4|a5|a6|a7|a8|a9|a10)" + "(b1|b2|b3|b4|b5|b6|b7|b8|b9|b10)" + "(c1|c2|c3|c4|c5|c6|c7|c8|c9|c10|c11)).*", + 1000 + ) + ); + } + + private static SortedSet<BytesRef> bytesRefs(String... strings) { + SortedSet<BytesRef> bytesRefs = new TreeSet<>(); + for (String string : strings) { + bytesRefs.add(new BytesRef(string)); + } + return bytesRefs; + } + + private static SortedSetDocValues toDocValues(BytesRef[] bytesRefs) { + return new AbstractSortedSetDocValues() { + @Override + public boolean advanceExact(int target) { + return false; + } + + @Override + public long nextOrd() { + return 0; + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public BytesRef lookupOrd(long ord) { + int ordIndex = Math.toIntExact(ord); + return bytesRefs[ordIndex]; + } + + @Override + public long getValueCount() { + return bytesRefs.length; + } + }; + } + + public void testOnlyIncludeExcludePrefix() throws IOException { + String incExcString = "(color:g|width:).*"; + IncludeExclude excludeOnly = new IncludeExclude(null, incExcString); + + OrdinalsFilter ordinalsFilter = excludeOnly.convertToOrdinalsFilter(DocValueFormat.RAW); + // Which of the following match the filter or not? 
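+ // Exclude-only case: acceptedGlobalOrdinals first marks every ordinal as accepted, then clears the + // ordinal ranges covered by the exclude prefixes 'color:g' and 'width:'.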
+ BytesRef[] bytesRefs = toBytesRefArray( + "color:blue", // true + "color:crimson", // true + "color:green", // false (excluded) + "color:gray", // false (excluded) + "depth:10in", // true + "depth:12in", // true + "depth:17in", // true + "length:long", // true + "length:medium", // true + "length:short", // true + "material:cotton", // true + "material:linen", // true + "material:polyester", // true + "size:large", // true + "size:medium", // true + "size:small", // true + "width:13in", // false + "width:27in", // false + "width:55in" // false + ); + boolean[] expectedFilter = new boolean[] { + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false }; + LongBitSet longBitSet = ordinalsFilter.acceptedGlobalOrdinals(toDocValues(bytesRefs)); + + for (int i = 0; i < expectedFilter.length; i++) { + assertEquals(expectedFilter[i], longBitSet.get(i)); + } + + // Now repeat, but this time include only. + IncludeExclude includeOnly = new IncludeExclude(incExcString, null); + ordinalsFilter = includeOnly.convertToOrdinalsFilter(DocValueFormat.RAW); + longBitSet = ordinalsFilter.acceptedGlobalOrdinals(toDocValues(bytesRefs)); + + // The previously excluded ordinals should be included and vice versa. + for (int i = 0; i < expectedFilter.length; i++) { + assertEquals(!expectedFilter[i], longBitSet.get(i)); + } + } } From ce64facbd316b1bfd0fe429a2896d564e46977c3 Mon Sep 17 00:00:00 2001 From: Mohammad Hasnain Mohsin Rajan Date: Wed, 21 Aug 2024 10:46:59 +0530 Subject: [PATCH 15/16] =?UTF-8?q?Adding=20access=20to=20noSubMatches=20and?= =?UTF-8?q?=20noOverlappingMatches=20in=20Hyphenation=E2=80=A6=20(#13895)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Adding access to noSubMatches and noOverlappingMatches in HyphenationCompoundWordTokenFilter Signed-off-by: Evan Kielley * Add Changelog Entry Signed-off-by: Mohammad Hasnain Mohsin Rajan * test: add hyphenation decompounder tests Signed-off-by: Mohammad Hasnain * test: refactor tests Signed-off-by: Mohammad Hasnain * test: reformat test files Signed-off-by: Mohammad Hasnain * chore: add changelog entry for 2.X Signed-off-by: Mohammad Hasnain * chore: remove 3.x changelog Signed-off-by: Mohammad Hasnain * chore: commonify settingsarr Signed-off-by: Mohammad Hasnain * chore: commonify settingsarr Signed-off-by: Mohammad Hasnain * chore: linting Signed-off-by: Mohammad Hasnain --------- Signed-off-by: Evan Kielley Signed-off-by: Mohammad Hasnain Mohsin Rajan Signed-off-by: Mohammad Hasnain Co-authored-by: Evan Kielley --- CHANGELOG.md | 1 + ...enationCompoundWordTokenFilterFactory.java | 9 +- .../common/CompoundAnalysisTests.java | 63 +- .../opensearch/analysis/common/da_UTF8.xml | 1208 +++++++++++++++++ .../org/opensearch/analysis/common/test1.json | 30 +- .../org/opensearch/analysis/common/test1.yml | 18 + 6 files changed, 1313 insertions(+), 16 deletions(-) create mode 100644 modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml diff --git a/CHANGELOG.md b/CHANGELOG.md index c49f506f94bf1..9fcbc3833ca8a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897)) - Support filtering on a large list encoded by bitmap 
([#14774](https://github.com/opensearch-project/OpenSearch/pull/14774)) - Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153)) +- Adding access to noSubMatches and noOverlappingMatches in Hyphenation ([#13895](https://github.com/opensearch-project/OpenSearch/pull/13895)) ### Dependencies - Bump `netty` from 4.1.111.Final to 4.1.112.Final ([#15081](https://github.com/opensearch-project/OpenSearch/pull/15081)) diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index 8d29a347caeb8..181ebe5500ee5 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -54,11 +54,16 @@ */ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundWordTokenFilterFactory { + private final boolean noSubMatches; + private final boolean noOverlappingMatches; private final HyphenationTree hyphenationTree; HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, env, name, settings); + noSubMatches = settings.getAsBoolean("no_sub_matches", false); + noOverlappingMatches = settings.getAsBoolean("no_overlapping_matches", false); + String hyphenationPatternsPath = settings.get("hyphenation_patterns_path", null); if (hyphenationPatternsPath == null) { throw new IllegalArgumentException("hyphenation_patterns_path is a required setting."); @@ -85,7 +90,9 @@ public TokenStream create(TokenStream tokenStream) { minWordSize, minSubwordSize, maxSubwordSize, - onlyLongestMatch + onlyLongestMatch, + noSubMatches, + noOverlappingMatches ); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java index a681d9a104ecf..bfe30da50c055 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java @@ -50,8 +50,12 @@ import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.MatcherAssert; +import org.junit.Before; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -63,17 +67,27 @@ import static org.hamcrest.Matchers.instanceOf; public class CompoundAnalysisTests extends OpenSearchTestCase { + + Settings[] settingsArr; + + @Before + public void initialize() throws IOException { + final Path home = createTempDir(); + copyHyphenationPatternsFile(home); + this.settingsArr = new Settings[] { getJsonSettings(home), getYamlSettings(home) }; + } + public void testDefaultsCompoundAnalysis() throws Exception { - Settings settings = getJsonSettings(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisModule analysisModule = createAnalysisModule(settings); - TokenFilterFactory filterFactory = 
analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec"); - MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); + for (Settings settings : this.settingsArr) { + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); + AnalysisModule analysisModule = createAnalysisModule(settings); + TokenFilterFactory filterFactory = analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec"); + MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); + } } public void testDictionaryDecompounder() throws Exception { - Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; - for (Settings settings : settingsArr) { + for (Settings settings : this.settingsArr) { List terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe"); MatcherAssert.assertThat(terms.size(), equalTo(8)); MatcherAssert.assertThat( @@ -83,6 +97,26 @@ public void testDictionaryDecompounder() throws Exception { } } + // Hyphenation Decompounder tests mimic the behavior of lucene tests + // lucene/analysis/common/src/test/org/apache/lucene/analysis/compound/TestHyphenationCompoundWordTokenFilterFactory.java + public void testHyphenationDecompounder() throws Exception { + for (Settings settings : this.settingsArr) { + List terms = analyze(settings, "hyphenationAnalyzer", "min veninde som er lidt af en læsehest"); + MatcherAssert.assertThat(terms.size(), equalTo(10)); + MatcherAssert.assertThat(terms, hasItems("min", "veninde", "som", "er", "lidt", "af", "en", "læsehest", "læse", "hest")); + } + } + + // Hyphenation Decompounder tests mimic the behavior of lucene tests + // lucene/analysis/common/src/test/org/apache/lucene/analysis/compound/TestHyphenationCompoundWordTokenFilterFactory.java + public void testHyphenationDecompounderNoSubMatches() throws Exception { + for (Settings settings : this.settingsArr) { + List terms = analyze(settings, "hyphenationAnalyzerNoSubMatches", "basketballkurv"); + MatcherAssert.assertThat(terms.size(), equalTo(3)); + MatcherAssert.assertThat(terms, hasItems("basketballkurv", "basketball", "kurv")); + } + } + private List analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisModule analysisModule = createAnalysisModule(settings); @@ -111,21 +145,28 @@ public Map> getTokenFilters() { })); } - private Settings getJsonSettings() throws IOException { + private void copyHyphenationPatternsFile(Path home) throws IOException { + InputStream hyphenation_patterns_path = getClass().getResourceAsStream("da_UTF8.xml"); + Path config = home.resolve("config"); + Files.createDirectory(config); + Files.copy(hyphenation_patterns_path, config.resolve("da_UTF8.xml")); + } + + private Settings getJsonSettings(Path home) throws IOException { String json = "/org/opensearch/analysis/common/test1.json"; return Settings.builder() .loadFromStream(json, getClass().getResourceAsStream(json), false) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } - private Settings getYamlSettings() throws IOException { + private Settings getYamlSettings(Path home) throws IOException { String yaml = 
"/org/opensearch/analysis/common/test1.yml"; return Settings.builder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } } diff --git a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml new file mode 100644 index 0000000000000..2c8d203be6881 --- /dev/null +++ b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml @@ -0,0 +1,1208 @@ + + + + + + + + + + +aA +bB +cC +dD +eE +fF +gG +hH +iI +jJ +kK +lL +mM +nN +oO +pP +qQ +rR +sS +tT +uU +vV +wW +xX +yY +zZ +æÆ +øØ +åÅ + + + +.ae3 +.an3k +.an1s +.be5la +.be1t +.bi4tr +.der3i +.diagno5 +.her3 +.hoved3 +.ne4t5 +.om1 +.ove4 +.po1 +.til3 +.yd5r +ab5le +3abst +a3c +ade5la +5adg +a1e +5afg +5a4f1l +af3r +af4ri +5afs +a4gef +a4gi +ag5in +ag5si +3agti +a4gy +a3h +ais5t +a3j +a5ka +a3ke +a5kr +aku5 +a3la +a1le +a1li +al3k +4alkv +a1lo +al5si +a3lu +a1ly +am4pa +3analy +an4k5r +a3nu +3anv +a5o +a5pe +a3pi +a5po +a1ra +ar5af +1arb +a1re +5arg +a1ri +a3ro +a3sa +a3sc +a1si +a3sk +a3so +3a3sp +a3ste +a3sti +a1ta1 +a1te +a1ti +a4t5in +a1to +ato5v +a5tr +a1tu +a5va +a1ve +a5z +1ba +ba4ti +4bd +1be +be1k +be3ro +be5ru +be1s4 +be1tr +1bi +bi5sk +b1j +4b1n +1bo +bo4gr +bo3ra +bo5re +1br4 +4bs +bs5k +b3so +b1st +b5t +3bu +bu4s5tr +b5w +1by +by5s +4c1c +1ce +ce5ro +3ch +4ch. +ci4o +ck3 +5cy +3da +4d3af +d5anta +da4s +d1b +d1d4 +1de +de5d +4de4lem +der5eri +de4rig +de5sk +d1f +d1g +d3h +1di +di1e +di5l +d3j +d1k +d1l +d1m +4d1n +3do +4dop +d5ov +d1p +4drett +5d4reve +3drif +3driv +d5ros +d5ru +ds5an +ds5in +d1ski +d4sm +d4su +dsu5l +ds5vi +d3ta +d1te +dt5o +d5tr +dt5u +1du +dub5 +d1v +3dy +e5ad +e3af +e5ag +e3ak +e1al +ea4la +e3an +e5ap +e3at +e3bl +ebs3 +e1ci +ed5ar +edde4 +eddel5 +e4do +ed5ra +ed3re +ed3rin +ed4str +e3e +3eff +e3fr +3eft +e3gu +e1h +e3in +ei5s +e3je +e4j5el +e1ka +e3ke +e3kl +4e1ko +e5kr +ek5sa +3eksem +3eksp +e3ku +e1kv +e5ky +e3lad +el3ak +el3ar +e1las +e3le +e4lek +3elem +e1li +5elim +e3lo +el5sa +e5lu +e3ly +e4mad +em4p5le +em1s +en5ak +e4nan +4enn +e4no +en3so +e5nu +e5ol +e3op +e1or +e3ov +epi3 +e1pr +e3ra +er3af +e4rag +e4rak +e1re +e4ref +er5ege +5erhv +e1ri +e4rib +er1k +ero5d +er5ov +er3s +er5tr +e3rum +er5un +e5ry +e1ta +e1te +etek4s +e1ti +e3tj +e1to +e3tr +e3tu +e1ty +e3um +e3un +3eur +e1va +e3ve +e4v3erf +e1vi +e5x +1fa +fa4ce +fags3 +f1b +f1d +1fe +fej4 +fejl1 +f1f +f1g +f1h +1fi +f1k +3fl +1fo +for1en +fo4ri +f1p +f1s4 +4ft +f3ta +f1te +f1ti +f5to +f5tvi +1fu +f1v +3fy +1ga +g3art +g1b +g1d +1ge +4g5enden +ger3in +ge3s +g3f +g1g +g1h +1gi +gi4b +gi3st +5gj +g3k +g1l +g1m +3go +4g5om +g5ov +g3p +1gr +gs1a +gsde4len +g4se +gsha4 +g5sla +gs3or +gs1p +g5s4tide +g4str +gs1v +g3ta +g1te +g1ti +g5to +g3tr +gt4s +g3ud +gun5 +g3v +1gy +g5yd +4ha. +heds3 +he5s +4het +hi4e +hi4n5 +hi3s +ho5ko +ho5ve +4h3t +hun4 +hund3 +hvo4 +i1a +i3b +i4ble +i1c +i3dr +ids5k +i1el +i1en +i3er +i3et. +if3r +i3gu +i3h +i5i +i5j +i1ka +i1ke +ik1l +i5ko +ik3re +ik5ri +iks5t +ik4tu +i3ku +ik3v +i3lag +il3eg +il5ej +il5el +i3li +i4l5id +il3k +i1lo +il5u +i3mu +ind3t +5inf +ings1 +in3s +in4sv +inter1 +i3nu +i3od +i3og +i5ok +i3ol +ion4 +ions1 +i5o5r +i3ot +i5pi +i3pli +i5pr +i3re +i3ri +ir5t +i3sc +i3si +i4sm +is3p +i1ster +i3sti +i5sua +i1ta +i1te +i1ti +i3to +i3tr +it5re. 
+i1tu +i3ty +i1u +i1va +i1ve +i1vi +j3ag +jde4rer +jds1 +jek4to +4j5en. +j5k +j3le +j3li +jlmeld5 +jlmel4di +j3r +jre5 +ju3s +5kap +k5au +5kav +k5b +kel5s +ke3sk +ke5st +ke4t5a +k3h +ki3e +ki3st +k1k +k5lak +k1le +3klu +k4ny +5kod +1kon +ko3ra +3kort +ko3v +1kra +5kry +ks3an +k1si +ks3k +ks1p +k3ste +k5stu +ks5v +k1t +k4tar +k4terh +kti4e +kt5re +kt5s +3kur +1kus +3kut +k4vo +k4vu +5lab +lad3r +5lagd +la4g3r +5lam +1lat +l1b +ldiagnos5 +l3dr +ld3st +1le. +5led +4lele +le4mo +3len +1ler +1les +4leu +l1f +lfin4 +lfind5 +l1go1 +l3h +li4ga +4l5ins +4l3int +li5o +l3j +l1ke +l1ko +l3ky +l1l +l5mu +lo4du +l3op +4l5or +3lov +4l3p +l4ps +l3r +4ls +lses1 +ls5in +l5sj +l1ta +l4taf +l1te +l4t5erf +l3ti +lt3o +l3tr +l3tu +lu5l +l3ve +l3vi +1ma +m1b +m3d +1me +4m5ej +m3f +m1g +m3h +1mi +mi3k +m5ing +mi4o +mi5sty +m3k +m1l +m1m +mmen5 +m1n +3mo +mo4da +4mop +4m5ov +m1pe +m3pi +m3pl +m1po +m3pr +m1r +mse5s +ms5in +m5sk +ms3p +m3ste +ms5v +m3ta +m3te +m3ti +m3tr +m1ud +1mul +mu1li +3my +3na +4nak +1nal +n1b +n1c +4nd +n3dr +nd5si +nd5sk +nd5sp +1ne +ne5a +ne4da +nemen4 +nement5e +neo4 +n3erk +n5erl +ne5sl +ne5st +n1f +n4go +4n1h +1ni +4nim +ni5o +ni3st +n1ke +n1ko +n3kr +n3ku +n5kv +4n1l +n1m +n1n +1no +n3ord +n5p +n3r +4ns +n3si +n1sku +ns3po +n1sta +n5sti +n1ta +nta4le +n1te +n1ti +ntiali4 +n3to +n1tr +nt4s5t +nt4su +n3tu +n3ty +4n1v +3ny +n3z +o3a +o4as +ob3li +o1c +o4din +od5ri +od5s +od5un +o1e +of5r +o4gek +o4gel +o4g5o +og5re +og5sk +o5h +o5in +oi6s5e +o1j +o3ka +o1ke +o3ku +o3la +o3le +o1li +o1lo +o3lu +o5ly +1omr +on3k +ook5 +o3or +o5ov +o3pi +op3l +op3r +op3s +3opta +4or. +or1an +3ordn +ord5s +o3re. +o3reg +o3rek +o3rer +o3re3s +o3ret +o3ri +3orient +or5im +o4r5in +or3k +or5o +or3sl +or3st +o3si +o3so +o3t +o1te +o5un +ov4s +3pa +pa5gh +p5anl +p3d +4pec +3pen +1per +pe1ra +pe5s +pe3u +p3f +4p5h +1pla +p4lan +4ple. +4pler +4ples +p3m +p3n +5pok +4po3re +3pot +4p5p4 +p4ro +1proc +p3sk +p5so +ps4p +p3st +p1t +1pu +pu5b +p5ule +p5v +5py3 +qu4 +4raf +ra5is +4rarb +r1b +r4d5ar +r3dr +rd4s3 +4reks +1rel +re5la +r5enss +5rese +re5spo +4ress +re3st +re5s4u +5rett +r1f +r1gu +r1h +ri1e +ri5la +4rimo +r4ing +ringse4 +ringso4r +4rinp +4rint +r3ka +r1ke +r1ki +rk3so +r3ku +r1l +rmo4 +r5mu +r1n +ro1b +ro3p +r3or +r3p +r1r +rre5s +rro4n5 +r1sa +r1si +r5skr +r4sk5v +rs4n +r3sp +r5stu +r5su +r3sv +r5tal +r1te +r4teli +r1ti +r3to +r4t5or +rt5rat +rt3re +r5tri +r5tro +rt3s +r5ty +r3ud +run4da +5rut +r3va +r1ve +r3vi +ry4s +s3af +1sam +sa4ma +s3ap +s1ar +1sat +4s1b +s1d +sdy4 +1se +s4ed +5s4er +se4se +s1f +4s1g4 +4s3h +si4bl +1sig +s5int +5sis +5sit +5siu +s5ju +4sk. +1skab +1ske +s3kl +sk5s4 +5sky +s1le +s1li +slo3 +5slu +s5ly +s1m +s4my +4snin +s4nit +so5k +5sol +5som. +3somm +s5oms +5somt +3son +4s1op +sp4 +3spec +4sper +3s4pi +s1pl +3sprog. +s5r4 +s1s4 +4st. +5s4tam +1stan +st5as +3stat +1stav +1ste. +1sted +3stel +5stemo +1sten +5step +3ster. +3stes +5stet +5stj +3sto +st5om +1str +s1ud +3sul +s3un +3sur +s3ve +3s4y +1sy1s +5ta. 
+1tag +tands3 +4tanv +4tb +tede4l +teds5 +3teg +5tekn +teo1 +5term +te5ro +4t1f +6t3g +t1h +tialis5t +3tid +ti4en +ti3st +4t3k +4t1l +tli4s5 +t1m +t1n +to5ra +to1re +to1ri +tor4m +4t3p +t4ra +4tres +tro5v +1try +4ts +t3si +ts4pa +ts5pr +t3st +ts5ul +4t1t +t5uds +5tur +t5ve +1typ +u1a +5udl +ud5r +ud3s +3udv +u1e +ue4t5 +uge4ri +ugs3 +u5gu +u3i +u5kl +uk4ta +uk4tr +u1la +u1le +u5ly +u5pe +up5l +u5q +u3ra +u3re +u4r3eg +u1rer +u3ro +us5a +u3si +u5ska +u5so +us5v +u1te +u1ti +u1to +ut5r +ut5s4 +5u5v +va5d +3varm +1ved +ve4l5e +ve4reg +ve3s +5vet +v5h +vi4l3in +1vis +v5j +v5k +vl4 +v3le +v5li +vls1 +1vo +4v5om +v5p +v5re +v3st +v5su +v5t +3vu +y3a +y5dr +y3e +y3ke +y5ki +yk3li +y3ko +yk4s5 +y3kv +y5li +y5lo +y5mu +yns5 +y5o +y1pe +y3pi +y3re +yr3ek +y3ri +y3si +y3ti +y5t3r +y5ve +zi5o + +.så3 +.ær5i +.øv3r +a3tø +a5væ +brød3 +5bæ +5drøv +dstå4 +3dæ +3dø +e3læ +e3lø +e3rø +er5øn +e5tæ +e5tø +e1væ +e3æ +e5å +3fæ +3fø +fø4r5en +giø4 +g4sø +g5så +3gæ +3gø1 +3gå +i5tæ +i3ø +3kø +3kå +lingeniø4 +l3væ +5løs +m5tå +1mæ +3mø +3må +n3kæ +n5tæ +3næ +4n5æb +5nø +o5læ +or3ø +o5å +5præ +5pæd +på3 +r5kæ +r5tæ +r5tø +r3væ +r5æl +4røn +5rør +3råd +r5år +s4kå +3slå +s4næ +5stø +1stå +1sæ +4s5æn +1sø +s5øk +så4r5 +ti4ø +3træk. +t4sø +t5så +t3væ +u3læ +3værd +1værk +5vå +y5væ +æb3l +æ3c +æ3e +æg5a +æ4gek +æ4g5r +ægs5 +æ5i +æ5kv +ælle4 +æn1dr +æ5o +æ1re +ær4g5r +æ3ri +ær4ma +ær4mo +ær5s +æ5si +æ3so +æ3ste +æ3ve +øde5 +ø3e +ø1je +ø3ke +ø3le +øms5 +øn3st +øn4t3 +ø1re +ø3ri +ørne3 +ør5o +ø1ve +å1d +å1e +å5h +å3l +å3re +års5t +å5sk +å3t + + diff --git a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json index e69c2db6ff400..18e30ec6f87d5 100644 --- a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json +++ b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json @@ -19,8 +19,22 @@ "type":"myfilter" }, "dict_dec":{ - "type":"dictionary_decompounder", - "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"] + "type":"dictionary_decompounder", + "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"] + }, + "hyphen_dec":{ + "type":"hyphenation_decompounder", + "word_list":["læse", "hest"], + "hyphenation_patterns_path": "da_UTF8.xml" + }, + "hyphen_dec_no_sub_matches":{ + "type":"hyphenation_decompounder", + "word_list":["basketball", "basket", "ball", "kurv"], + "hyphenation_patterns_path": "da_UTF8.xml", + "only_longest_match": false, + "no_sub_matches": true, + "no_overlapping_matches": false + } }, "analyzer":{ @@ -45,8 +59,16 @@ "filter":["lowercase", "stop", "czech_stem"] }, "decompoundingAnalyzer":{ - "tokenizer":"standard", - "filter":["dict_dec"] + "tokenizer":"standard", + "filter":["dict_dec"] + }, + "hyphenationAnalyzer":{ + "tokenizer":"standard", + "filter":["hyphen_dec"] + }, + "hyphenationAnalyzerNoSubMatches":{ + "tokenizer":"standard", + "filter":["hyphen_dec_no_sub_matches"] } } } diff --git a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml index 82f933296a314..30862a9d0be8e 100644 --- a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml @@ -15,6 +15,18 @@ index : dict_dec : type : dictionary_decompounder word_list : [donau, dampf, schiff, spargel, creme, suppe] + hyphen_dec : + type : hyphenation_decompounder + word_list : 
[læse, hest] + hyphenation_patterns_path: "da_UTF8.xml" + hyphen_dec_no_sub_matches : + type : hyphenation_decompounder + word_list : [basketball, basket, ball, kurv] + hyphenation_patterns_path: "da_UTF8.xml" + only_longest_match: False + no_sub_matches: True + no_overlapping_matches: False + analyzer : standard : type : standard @@ -37,3 +49,9 @@ index : decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] + hyphenationAnalyzer : + tokenizer : standard + filter : [hyphen_dec] + hyphenationAnalyzerNoSubMatches : + tokenizer : standard + filter : [hyphen_dec_no_sub_matches] From 01184decb897dcdfc1b1d36980c4fc4eeddb8e54 Mon Sep 17 00:00:00 2001 From: Ruirui Zhang Date: Tue, 20 Aug 2024 22:19:17 -0700 Subject: [PATCH 16/16] Add Settings related to Workload Management feature (#15028) * add QeryGroup Service tests Signed-off-by: Ruirui Zhang * add PR to changelog Signed-off-by: Ruirui Zhang * change the test directory Signed-off-by: Ruirui Zhang * modify comments to be more specific Signed-off-by: Ruirui Zhang * add test coverage Signed-off-by: Ruirui Zhang * remove QUERY_GROUP_RUN_INTERVAL_SETTING as we'll define it in QueryGroupService Signed-off-by: Ruirui Zhang * address comments Signed-off-by: Ruirui Zhang --- CHANGELOG.md | 1 + .../common/settings/ClusterSettings.java | 9 +- .../wlm/WorkloadManagementSettings.java | 256 ++++++++++++++++++ .../wlm/WorkloadManagementSettingsTests.java | 241 +++++++++++++++++ 4 files changed, 506 insertions(+), 1 deletion(-) create mode 100644 server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java create mode 100644 server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fcbc3833ca8a..d6943e538fdcc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add `rangeQuery` and `regexpQuery` for `constant_keyword` field type ([#14711](https://github.com/opensearch-project/OpenSearch/pull/14711)) - Add took time to request nodes stats ([#15054](https://github.com/opensearch-project/OpenSearch/pull/15054)) - [Workload Management] Add Get QueryGroup API Logic ([14709](https://github.com/opensearch-project/OpenSearch/pull/14709)) +- [Workload Management] Add Settings for Workload Management feature ([#15028](https://github.com/opensearch-project/OpenSearch/pull/15028)) - [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897)) - Support filtering on a large list encoded by bitmap ([#14774](https://github.com/opensearch-project/OpenSearch/pull/14774)) - Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153)) diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index d5e8e90458390..49ef87838ed2e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -173,6 +173,7 @@ import org.opensearch.transport.SniffConnectionStrategy; import org.opensearch.transport.TransportSettings; import org.opensearch.watcher.ResourceWatcherService; +import org.opensearch.wlm.WorkloadManagementSettings; import java.util.Arrays; import java.util.Collections; @@ -766,7 +767,13 @@ 
public void apply(Settings value, Settings current, Settings previous) { // Composite index settings CompositeIndexSettings.STAR_TREE_INDEX_ENABLED_SETTING, - SystemTemplatesService.SETTING_APPLICATION_BASED_CONFIGURATION_TEMPLATES_ENABLED + SystemTemplatesService.SETTING_APPLICATION_BASED_CONFIGURATION_TEMPLATES_ENABLED, + + // WorkloadManagement settings + WorkloadManagementSettings.NODE_LEVEL_CPU_REJECTION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD ) ) ); diff --git a/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java b/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java new file mode 100644 index 0000000000000..b104925df77b3 --- /dev/null +++ b/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java @@ -0,0 +1,256 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.wlm; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; + +/** + * Main class to declare Workload Management related settings + */ +public class WorkloadManagementSettings { + private static final Double DEFAULT_NODE_LEVEL_MEMORY_REJECTION_THRESHOLD = 0.8; + private static final Double DEFAULT_NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD = 0.9; + private static final Double DEFAULT_NODE_LEVEL_CPU_REJECTION_THRESHOLD = 0.8; + private static final Double DEFAULT_NODE_LEVEL_CPU_CANCELLATION_THRESHOLD = 0.9; + public static final double NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD_MAX_VALUE = 0.95; + public static final double NODE_LEVEL_MEMORY_REJECTION_THRESHOLD_MAX_VALUE = 0.9; + public static final double NODE_LEVEL_CPU_CANCELLATION_THRESHOLD_MAX_VALUE = 0.95; + public static final double NODE_LEVEL_CPU_REJECTION_THRESHOLD_MAX_VALUE = 0.9; + + private Double nodeLevelMemoryCancellationThreshold; + private Double nodeLevelMemoryRejectionThreshold; + private Double nodeLevelCpuCancellationThreshold; + private Double nodeLevelCpuRejectionThreshold; + + /** + * Setting name for node level memory based rejection threshold for QueryGroup service + */ + public static final String NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.memory_rejection_threshold"; + /** + * Setting to control the memory based rejection threshold + */ + public static final Setting NODE_LEVEL_MEMORY_REJECTION_THRESHOLD = Setting.doubleSetting( + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level cpu based rejection threshold for QueryGroup service + */ + public static final String NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.cpu_rejection_threshold"; + /** + * Setting to control the cpu based rejection threshold + */ + public static final Setting NODE_LEVEL_CPU_REJECTION_THRESHOLD = Setting.doubleSetting( + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_CPU_REJECTION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level memory based cancellation threshold for QueryGroup service + */ + public static 
final String NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.memory_cancellation_threshold"; + /** + * Setting to control the memory based cancellation threshold + */ + public static final Setting NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD = Setting.doubleSetting( + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level cpu based cancellation threshold for QueryGroup service + */ + public static final String NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.cpu_cancellation_threshold"; + /** + * Setting to control the cpu based cancellation threshold + */ + public static final Setting NODE_LEVEL_CPU_CANCELLATION_THRESHOLD = Setting.doubleSetting( + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + + /** + * QueryGroup service settings constructor + * @param settings - QueryGroup service settings + * @param clusterSettings - QueryGroup cluster settings + */ + public WorkloadManagementSettings(Settings settings, ClusterSettings clusterSettings) { + nodeLevelMemoryCancellationThreshold = NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD.get(settings); + nodeLevelMemoryRejectionThreshold = NODE_LEVEL_MEMORY_REJECTION_THRESHOLD.get(settings); + nodeLevelCpuCancellationThreshold = NODE_LEVEL_CPU_CANCELLATION_THRESHOLD.get(settings); + nodeLevelCpuRejectionThreshold = NODE_LEVEL_CPU_REJECTION_THRESHOLD.get(settings); + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD, this::setNodeLevelMemoryCancellationThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, this::setNodeLevelMemoryRejectionThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, this::setNodeLevelCpuCancellationThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_CPU_REJECTION_THRESHOLD, this::setNodeLevelCpuRejectionThreshold); + } + + /** + * Method to get the node level memory based cancellation threshold + * @return current node level memory based cancellation threshold + */ + public Double getNodeLevelMemoryCancellationThreshold() { + return nodeLevelMemoryCancellationThreshold; + } + + /** + * Method to set the node level memory based cancellation threshold + * @param nodeLevelMemoryCancellationThreshold sets the new node level memory based cancellation threshold + * @throws IllegalArgumentException if the value is > 0.95 and cancellation < rejection threshold + */ + public void setNodeLevelMemoryCancellationThreshold(Double nodeLevelMemoryCancellationThreshold) { + if (Double.compare(nodeLevelMemoryCancellationThreshold, NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.95 as it can result in a node drop" + ); + } + + 
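+ // Validate the new cancellation value against the currently configured rejection threshold + // (rejection must stay at or below cancellation) before storing it.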
ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelMemoryCancellationThreshold = nodeLevelMemoryCancellationThreshold; + } + + /** + * Method to get the node level cpu based cancellation threshold + * @return current node level cpu based cancellation threshold + */ + public Double getNodeLevelCpuCancellationThreshold() { + return nodeLevelCpuCancellationThreshold; + } + + /** + * Method to set the node level cpu based cancellation threshold + * @param nodeLevelCpuCancellationThreshold sets the new node level cpu based cancellation threshold + * @throws IllegalArgumentException if the value is > 0.95 and cancellation < rejection threshold + */ + public void setNodeLevelCpuCancellationThreshold(Double nodeLevelCpuCancellationThreshold) { + if (Double.compare(nodeLevelCpuCancellationThreshold, NODE_LEVEL_CPU_CANCELLATION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.95 as it can result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelCpuCancellationThreshold = nodeLevelCpuCancellationThreshold; + } + + /** + * Method to get the memory based node level rejection threshold + * @return the current memory based node level rejection threshold + */ + public Double getNodeLevelMemoryRejectionThreshold() { + return nodeLevelMemoryRejectionThreshold; + } + + /** + * Method to set the node level memory based rejection threshold + * @param nodeLevelMemoryRejectionThreshold sets the new memory based rejection threshold + * @throws IllegalArgumentException if rejection > 0.90 and rejection < cancellation threshold + */ + public void setNodeLevelMemoryRejectionThreshold(Double nodeLevelMemoryRejectionThreshold) { + if (Double.compare(nodeLevelMemoryRejectionThreshold, NODE_LEVEL_MEMORY_REJECTION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.90 as it can result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelMemoryRejectionThreshold = nodeLevelMemoryRejectionThreshold; + } + + /** + * Method to get the cpu based node level rejection threshold + * @return the current cpu based node level rejection threshold + */ + public Double getNodeLevelCpuRejectionThreshold() { + return nodeLevelCpuRejectionThreshold; + } + + /** + * Method to set the node level cpu based rejection threshold + * @param nodeLevelCpuRejectionThreshold sets the new cpu based rejection threshold + * @throws IllegalArgumentException if rejection > 0.90 and rejection < cancellation threshold + */ + public void setNodeLevelCpuRejectionThreshold(Double nodeLevelCpuRejectionThreshold) { + if (Double.compare(nodeLevelCpuRejectionThreshold, NODE_LEVEL_CPU_REJECTION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.90 as it can 
result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelCpuRejectionThreshold = nodeLevelCpuRejectionThreshold; + } + + /** + * Method to validate that the cancellation threshold is greater than or equal to rejection threshold + * @param nodeLevelRejectionThreshold rejection threshold to be compared + * @param nodeLevelCancellationThreshold cancellation threshold to be compared + * @param rejectionThresholdSettingName name of the rejection threshold setting + * @param cancellationThresholdSettingName name of the cancellation threshold setting + * @throws IllegalArgumentException if cancellation threshold is less than rejection threshold + */ + private void ensureRejectionThresholdIsLessThanCancellation( + Double nodeLevelRejectionThreshold, + Double nodeLevelCancellationThreshold, + String rejectionThresholdSettingName, + String cancellationThresholdSettingName + ) { + if (Double.compare(nodeLevelCancellationThreshold, nodeLevelRejectionThreshold) < 0) { + throw new IllegalArgumentException( + cancellationThresholdSettingName + " value should not be less than " + rejectionThresholdSettingName + ); + } + } +} diff --git a/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java b/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java new file mode 100644 index 0000000000000..0f183555781d3 --- /dev/null +++ b/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java @@ -0,0 +1,241 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.wlm; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import static org.opensearch.wlm.WorkloadManagementSettings.NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME; +import static org.opensearch.wlm.WorkloadManagementSettings.NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME; +import static org.opensearch.wlm.WorkloadManagementSettings.NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME; +import static org.opensearch.wlm.WorkloadManagementSettings.NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME; + +public class WorkloadManagementSettingsTests extends OpenSearchTestCase { + + /** + * Tests the invalid value for {@code wlm.query_group.node.memory_rejection_threshold} + * When the value is set more than {@code wlm.query_group.node.memory_cancellation_threshold} accidentally during + * new feature development. 
This test is to ensure that {@link WorkloadManagementSettings} holds the + * invariant {@code nodeLevelRejectionThreshold < nodeLevelCancellationThreshold} + */ + public void testInvalidMemoryInstantiationOfWorkloadManagementSettings() { + Settings settings = Settings.builder() + .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.8) + .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.7) + .build(); + ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + + assertThrows(IllegalArgumentException.class, () -> new WorkloadManagementSettings(settings, cs)); + } + + /** + * Tests the invalid value for {@code wlm.query_group.node.cpu_rejection_threshold} + * When the value is set more than {@code wlm.query_group.node.cpu_cancellation_threshold} accidentally during + * new feature development. This test is to ensure that {@link WorkloadManagementSettings} holds the + * invariant {@code nodeLevelRejectionThreshold < nodeLevelCancellationThreshold} + */ + public void testInvalidCpuInstantiationOfWorkloadManagementSettings() { + Settings settings = Settings.builder() + .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8) + .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.7) + .build(); + ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + + assertThrows(IllegalArgumentException.class, () -> new WorkloadManagementSettings(settings, cs)); + } + + /** + * Tests the valid value for {@code wlm.query_group.node.cpu_rejection_threshold} + * Using setNodeLevelCpuRejectionThreshold function + */ + public void testValidNodeLevelCpuRejectionThresholdCase1() { + Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build(); + ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs); + workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.7); + assertEquals(0.7, workloadManagementSettings.getNodeLevelCpuRejectionThreshold(), 1e-9); + } + + /** + * Tests the valid value for {@code wlm.query_group.node.cpu_rejection_threshold} + */ + public void testValidNodeLevelCpuRejectionThresholdCase2() { + Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.79).build(); + ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs); + assertEquals(0.79, workloadManagementSettings.getNodeLevelCpuRejectionThreshold(), 1e-9); + } + + /** + * Tests the invalid value for {@code wlm.query_group.node.cpu_rejection_threshold} + * When the value is set more than {@literal 0.9} + */ + public void testInvalidNodeLevelCpuRejectionThresholdCase1() { + Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build(); + ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs); + assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.95)); + } + + /** + * Tests the invalid value for {@code wlm.query_group.node.cpu_rejection_threshold} + * When the value is set more than {@code wlm.query_group.node.cpu_cancellation_threshold} + */ 
+    public void testInvalidNodeLevelCpuRejectionThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.85));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_cancellation_threshold}
+     */
+    public void testValidNodeLevelCpuCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.8, workloadManagementSettings.getNodeLevelCpuCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_cancellation_threshold}
+     * applied via the {@code setNodeLevelCpuCancellationThreshold} method
+     */
+    public void testValidNodeLevelCpuCancellationThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.83);
+        assertEquals(0.83, workloadManagementSettings.getNodeLevelCpuCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_cancellation_threshold},
+     * i.e. when the value is set higher than {@literal 0.95}
+     */
+    public void testInvalidNodeLevelCpuCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.96));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_cancellation_threshold},
+     * i.e. when the value is set lower than {@code wlm.query_group.node.cpu_rejection_threshold}
+     */
+    public void testInvalidNodeLevelCpuCancellationThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.65));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_cancellation_threshold}
+     */
+    public void testValidNodeLevelMemoryCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.8, workloadManagementSettings.getNodeLevelMemoryCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_cancellation_threshold}
+     * applied via the {@code setNodeLevelMemoryCancellationThreshold} method
+     */
+    public void testValidNodeLevelMemoryCancellationThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.83);
+        assertEquals(0.83, workloadManagementSettings.getNodeLevelMemoryCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_cancellation_threshold},
+     * i.e. when the value is set higher than {@literal 0.95}
+     */
+    public void testInvalidNodeLevelMemoryCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.96));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_cancellation_threshold},
+     * i.e. when the value is set lower than {@code wlm.query_group.node.memory_rejection_threshold}
+     */
+    public void testInvalidNodeLevelMemoryCancellationThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.65));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_rejection_threshold}
+     */
+    public void testValidNodeLevelMemoryRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.79).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.79, workloadManagementSettings.getNodeLevelMemoryRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_rejection_threshold}
+     * applied via the {@code setNodeLevelMemoryRejectionThreshold} method
+     */
+    public void testValidNodeLevelMemoryRejectionThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.86);
+        assertEquals(0.86, workloadManagementSettings.getNodeLevelMemoryRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_rejection_threshold},
+     * i.e. when the value is set higher than {@literal 0.9}
+     */
+    public void testInvalidNodeLevelMemoryRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.92));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_rejection_threshold},
+     * i.e. when the value is set higher than {@code wlm.query_group.node.memory_cancellation_threshold}
+     */
+    public void testInvalidNodeLevelMemoryRejectionThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.85));
+    }
+}
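
For reference, the invariant the tests above pin down can be sketched in isolation as follows. This is a hypothetical, simplified stand-in, not OpenSearch's actual WorkloadManagementSettings: the ThresholdPair class and all of its members are invented for illustration, the 0.9/0.95 caps are inferred from the values the test cases probe (0.95 is rejected for rejection, 0.96 for cancellation), and the strictness of the comparisons at exact equality is an assumption the tests do not exercise.

// Hypothetical sketch of the threshold invariant; not the actual OpenSearch class.
final class ThresholdPair {
    // Caps inferred from the test values above (assumptions, not confirmed constants).
    private static final double MAX_REJECTION = 0.9;
    private static final double MAX_CANCELLATION = 0.95;

    private double rejection;
    private double cancellation;

    ThresholdPair(double rejection, double cancellation) {
        if (rejection > MAX_REJECTION || cancellation > MAX_CANCELLATION || rejection >= cancellation) {
            throw new IllegalArgumentException("rejection threshold must stay below the cancellation threshold and within the caps");
        }
        this.rejection = rejection;
        this.cancellation = cancellation;
    }

    // Analogous to setNodeLevel*RejectionThreshold: reject values above the cap
    // or at/above the current cancellation threshold.
    void setRejection(double value) {
        if (value > MAX_REJECTION || value >= cancellation) {
            throw new IllegalArgumentException("rejection threshold " + value + " breaks the invariant");
        }
        this.rejection = value;
    }

    // Analogous to setNodeLevel*CancellationThreshold: reject values above the cap
    // or at/below the current rejection threshold.
    void setCancellation(double value) {
        if (value > MAX_CANCELLATION || value <= rejection) {
            throw new IllegalArgumentException("cancellation threshold " + value + " breaks the invariant");
        }
        this.cancellation = value;
    }
}

Under this sketch, new ThresholdPair(0.8, 0.7) throws, mirroring testInvalidCpuInstantiationOfWorkloadManagementSettings, and setRejection(0.95) throws against the 0.9 cap, mirroring testInvalidNodeLevelCpuRejectionThresholdCase1.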