From 48ac4f78fdea29cd3a8ba6797eb37e6882f679be Mon Sep 17 00:00:00 2001
From: Sarat Vemulapalli
Date: Fri, 4 Dec 2020 10:00:31 -0800
Subject: [PATCH 01/13] Moving common-utils to 1.12.0.2 (#323)

---
 build.gradle | 2 +-
 .../amazon/opendistroforelasticsearch/ad/util/ParseUtils.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/build.gradle b/build.gradle
index c82f7b5b..dcd95596 100644
--- a/build.gradle
+++ b/build.gradle
@@ -346,7 +346,7 @@ dependencies {
     compile "org.elasticsearch:elasticsearch:${es_version}"
     compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}"
     compileOnly "com.amazon.opendistroforelasticsearch:opendistro-job-scheduler-spi:1.12.0.0"
-    compile "com.amazon.opendistroforelasticsearch:common-utils:1.12.0.0"
+    compile "com.amazon.opendistroforelasticsearch:common-utils:1.12.0.2"
     compile group: 'com.google.guava', name: 'guava', version:'29.0-jre'
     compile group: 'org.apache.commons', name: 'commons-math3', version: '3.6.1'
     compile group: 'com.google.code.gson', name: 'gson', version: '2.8.6'
diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java
index 9e3a99f3..e3c4d584 100644
--- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java
+++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java
@@ -445,7 +445,7 @@ public static SearchSourceBuilder addUserBackendRolesFilter(User user, SearchSou
     }
 
     public static User getUserContext(Client client) {
-        String userStr = client.threadPool().getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER_AND_ROLES);
+        String userStr = client.threadPool().getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER_INFO_THREAD_CONTEXT);
         logger.debug("Filtering result by " + userStr);
         return User.parse(userStr);
     }

From c25e630562daf44b0c9c505fb7bef463884adc6c Mon Sep 17 00:00:00 2001
From: Weicong Sun <61702346+weicongs-amazon@users.noreply.github.com>
Date: Fri, 4 Dec 2020 10:35:00 -0800
Subject: [PATCH 02/13] update release notes for 1.12.0.0 (#324)

---
 ...ticsearch-anomaly-detection.release-notes-1.12.0.0.md | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/release-notes/opendistro-for-elasticsearch-anomaly-detection.release-notes-1.12.0.0.md b/release-notes/opendistro-for-elasticsearch-anomaly-detection.release-notes-1.12.0.0.md
index 036e1a96..85d220ef 100644
--- a/release-notes/opendistro-for-elasticsearch-anomaly-detection.release-notes-1.12.0.0.md
+++ b/release-notes/opendistro-for-elasticsearch-anomaly-detection.release-notes-1.12.0.0.md
@@ -19,3 +19,12 @@ Compatible with Elasticsearch 7.10.0
 * Fix for upgrading mapping ([#309](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/309))
 * fix double nan error when parse to json ([#310](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/310))
 * Fix issue where data hole exists for Preview API ([#312](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/312))
+* fix delete running detector bug ([#320](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/320))
+* fix detector and feature serialization ([#322](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/322))
+* Moving common-utils to 1.12.0.2 ([#323](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/323))
+
+### Infrastructure
+* Add multi node integration testing into CI workflow ([#318](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/318))
+
+### Maintenance
+* Support ES 7.10.0 ([#313](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/313))

From f442efdcd292184e9d62696d21b94c648d22435d Mon Sep 17 00:00:00 2001
From: Sarat Vemulapalli
Date: Tue, 8 Dec 2020 15:40:12 -0800
Subject: [PATCH 03/13] Adding unit tests for Transport Actions (#327)

---
 build.gradle | 6 -
 .../ad/TestHelpers.java | 5 +
 ...etAnomalyDetectorTransportActionTests.java | 107 +++++++++++++++++-
 .../IndexAnomalyDetectorActionTests.java | 45 +++++---
 .../ad/transport/StopDetectorActionTests.java | 36 ++++++
 5 files changed, 173 insertions(+), 26 deletions(-)
 create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StopDetectorActionTests.java

diff --git a/build.gradle b/build.gradle
index dcd95596..1ca22ef7 100644
--- a/build.gradle
+++ b/build.gradle
@@ -275,7 +275,6 @@ List jacocoExclusions = [
     'com.amazon.opendistroforelasticsearch.ad.common.exception.AnomalyDetectionException',
     'com.amazon.opendistroforelasticsearch.ad.util.ClientUtil',
 
-    'com.amazon.opendistroforelasticsearch.ad.transport.StopDetectorAction',
     'com.amazon.opendistroforelasticsearch.ad.transport.StopDetectorRequest',
     'com.amazon.opendistroforelasticsearch.ad.transport.StopDetectorResponse',
     'com.amazon.opendistroforelasticsearch.ad.transport.StopDetectorTransportAction',
@@ -291,13 +290,8 @@ List jacocoExclusions = [
     'com.amazon.opendistroforelasticsearch.ad.transport.DeleteAnomalyDetectorTransportAction*',
     'com.amazon.opendistroforelasticsearch.ad.transport.SearchAnomalyDetectorTransportAction*',
     'com.amazon.opendistroforelasticsearch.ad.transport.GetAnomalyDetectorTransportAction*',
-    'com.amazon.opendistroforelasticsearch.ad.transport.GetAnomalyDetectorResponse',
-    'com.amazon.opendistroforelasticsearch.ad.transport.IndexAnomalyDetectorRequest',
     'com.amazon.opendistroforelasticsearch.ad.transport.SearchAnomalyResultTransportAction*',
     'com.amazon.opendistroforelasticsearch.ad.transport.SearchAnomalyDetectorInfoTransportAction*',
-    'com.amazon.opendistroforelasticsearch.ad.transport.GetAnomalyDetectorRequest',
-    'com.amazon.opendistroforelasticsearch.ad.transport.IndexAnomalyDetectorResponse',
-    'com.amazon.opendistroforelasticsearch.ad.transport.IndexAnomalyDetectorTransportAction',
 
     // TODO: hc caused coverage to drop
     'com.amazon.opendistroforelasticsearch.ad.NodeStateManager',
diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java
index d079cc92..cf21f4d8 100644
--- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java
+++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java
@@ -82,6 +82,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.get.GetResult;
@@ -192,6 +193,10 @@ public static XContentParser parser(String xc) throws IOException {
         return parser;
     }
 
+    public static Map XContentBuilderToMap(XContentBuilder builder) {
+        return XContentHelper.convertToMap(BytesReference.bytes(builder), false,
builder.contentType()).v2(); + } + public static NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); return new NamedXContentRegistry(searchModule.getNamedXContents()); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java index 573cd223..d4254729 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java @@ -16,20 +16,35 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import java.io.IOException; +import java.time.Instant; +import java.util.Map; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.transport.TransportService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; +import com.amazon.opendistroforelasticsearch.ad.model.EntityProfile; import com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; +import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import com.google.common.collect.ImmutableMap; -public class GetAnomalyDetectorTransportActionTests extends ESIntegTestCase { +public class GetAnomalyDetectorTransportActionTests extends ESSingleNodeTestCase { private GetAnomalyDetectorTransportAction action; private Task task; private ActionListener response; @@ -58,6 +73,11 @@ public void onFailure(Exception e) {} }; } + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); + } + @Test public void testGetTransportAction() throws IOException { GetAnomalyDetectorRequest getAnomalyDetectorRequest = new GetAnomalyDetectorRequest( @@ -83,4 +103,87 @@ public void testGetAction() { Assert.assertNotNull(GetAnomalyDetectorAction.INSTANCE.name()); Assert.assertEquals(GetAnomalyDetectorAction.INSTANCE.name(), GetAnomalyDetectorAction.NAME); } + + @Test + public void testGetAnomalyDetectorRequest() throws IOException { + GetAnomalyDetectorRequest request = new GetAnomalyDetectorRequest("1234", 4321, true, "", "abcd", false, "value"); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput input = out.bytes().streamInput(); + GetAnomalyDetectorRequest newRequest = new GetAnomalyDetectorRequest(input); + Assert.assertEquals(request.getDetectorID(), newRequest.getDetectorID()); + Assert.assertEquals(request.getRawPath(), 
newRequest.getRawPath()); + Assert.assertNull(newRequest.validate()); + } + + @Test + public void testGetAnomalyDetectorRequestNoEntityValue() throws IOException { + GetAnomalyDetectorRequest request = new GetAnomalyDetectorRequest("1234", 4321, true, "", "abcd", false, null); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput input = out.bytes().streamInput(); + GetAnomalyDetectorRequest newRequest = new GetAnomalyDetectorRequest(input); + Assert.assertNull(newRequest.getEntityValue()); + } + + @SuppressWarnings("unchecked") + @Test + public void testGetAnomalyDetectorResponse() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); + AnomalyDetectorJob adJob = TestHelpers.randomAnomalyDetectorJob(); + GetAnomalyDetectorResponse response = new GetAnomalyDetectorResponse( + 4321, + "1234", + 5678, + 9867, + detector, + adJob, + false, + RestStatus.OK, + null, + null, + false + ); + response.writeTo(out); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry()); + GetAnomalyDetectorResponse newResponse = new GetAnomalyDetectorResponse(input); + XContentBuilder builder = TestHelpers.builder(); + Assert.assertNotNull(newResponse.toXContent(builder, ToXContent.EMPTY_PARAMS)); + + Map map = TestHelpers.XContentBuilderToMap(builder); + Assert.assertTrue(map.get(RestHandlerUtils.ANOMALY_DETECTOR) instanceof Map); + Map map1 = (Map) map.get(RestHandlerUtils.ANOMALY_DETECTOR); + Assert.assertEquals(map1.get("name"), detector.getName()); + } + + @Test + public void testGetAnomalyDetectorProfileResponse() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); + AnomalyDetectorJob adJob = TestHelpers.randomAnomalyDetectorJob(); + EntityProfile entityProfile = new EntityProfile.Builder("catField", "app-0").build(); + GetAnomalyDetectorResponse response = new GetAnomalyDetectorResponse( + 4321, + "1234", + 5678, + 9867, + detector, + adJob, + false, + RestStatus.OK, + null, + entityProfile, + true + ); + response.writeTo(out); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry()); + GetAnomalyDetectorResponse newResponse = new GetAnomalyDetectorResponse(input); + XContentBuilder builder = TestHelpers.builder(); + Assert.assertNotNull(newResponse.toXContent(builder, ToXContent.EMPTY_PARAMS)); + + Map map = TestHelpers.XContentBuilderToMap(builder); + Assert.assertEquals(map.get(EntityProfile.CATEGORY_FIELD), "catField"); + Assert.assertEquals(map.get(EntityProfile.ENTITY_VALUE), "app-0"); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorActionTests.java index a376f1c9..dacf1c66 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorActionTests.java @@ -15,36 +15,43 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import java.time.Instant; +import java.util.Map; + import org.elasticsearch.action.support.WriteRequest; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import com.google.common.collect.ImmutableMap; -@RunWith(PowerMockRunner.class) -@PrepareForTest({ IndexAnomalyDetectorRequest.class, IndexAnomalyDetectorResponse.class }) -public class IndexAnomalyDetectorActionTests { +public class IndexAnomalyDetectorActionTests extends ESSingleNodeTestCase { @Before public void setUp() throws Exception { + super.setUp(); + } + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); } @Test public void testIndexRequest() throws Exception { BytesStreamOutput out = new BytesStreamOutput(); - AnomalyDetector detector = Mockito.mock(AnomalyDetector.class); - Mockito.doNothing().when(detector).writeTo(out); + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); IndexAnomalyDetectorRequest request = new IndexAnomalyDetectorRequest( "1234", 4321, @@ -58,23 +65,25 @@ public void testIndexRequest() throws Exception { 5 ); request.writeTo(out); - StreamInput input = out.bytes().streamInput(); - PowerMockito.whenNew(AnomalyDetector.class).withAnyArguments().thenReturn(detector); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry()); IndexAnomalyDetectorRequest newRequest = new IndexAnomalyDetectorRequest(input); Assert.assertEquals(request.getDetectorID(), newRequest.getDetectorID()); - + Assert.assertNull(newRequest.validate()); } @Test public void testIndexResponse() throws Exception { BytesStreamOutput out = new BytesStreamOutput(); - AnomalyDetector detector = Mockito.mock(AnomalyDetector.class); - Mockito.doNothing().when(detector).writeTo(out); + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); IndexAnomalyDetectorResponse response = new IndexAnomalyDetectorResponse("1234", 56, 78, 90, detector, RestStatus.OK); response.writeTo(out); - StreamInput input = out.bytes().streamInput(); - PowerMockito.whenNew(AnomalyDetector.class).withAnyArguments().thenReturn(detector); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry()); IndexAnomalyDetectorResponse newResponse = new IndexAnomalyDetectorResponse(input); Assert.assertEquals(response.getId(), newResponse.getId()); + XContentBuilder builder = TestHelpers.builder(); + Assert.assertNotNull(newResponse.toXContent(builder, 
ToXContent.EMPTY_PARAMS)); + + Map map = TestHelpers.XContentBuilderToMap(builder); + Assert.assertEquals(map.get(RestHandlerUtils._ID), "1234"); } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StopDetectorActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StopDetectorActionTests.java new file mode 100644 index 00000000..cd8ab923 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StopDetectorActionTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.transport; + +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +public class StopDetectorActionTests extends ESIntegTestCase { + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @Test + public void testStopDetectorAction() { + Assert.assertNotNull(StopDetectorAction.INSTANCE.name()); + Assert.assertEquals(StopDetectorAction.INSTANCE.name(), StopDetectorAction.NAME); + } +} From c949011e7d19ce464252c1c0e4ce335dfb63a2f5 Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli Date: Fri, 11 Dec 2020 13:43:28 -0800 Subject: [PATCH 04/13] Adding role based filtering for rest of APIs (#325) * Adding role based filtering on rest of the APIs * Adding exception handling * Moving common code to ParseUtils * Updating exception handling * Adding tests for creating anomaly detector transport action * Adding additional checks to security util functions --- .../AnomalyDetectorJobTransportAction.java | 63 +++++--- .../DeleteAnomalyDetectorTransportAction.java | 25 +++- .../GetAnomalyDetectorTransportAction.java | 35 ++++- .../IndexAnomalyDetectorTransportAction.java | 64 +++++++-- .../ad/util/ParseUtils.java | 134 ++++++++++++++++++ .../AnomalyDetectorJobActionTests.java | 31 +++- .../DeleteAnomalyDetectorActionTests.java | 18 ++- .../ad/transport/GetAnomalyDetectorTests.java | 21 ++- ...etAnomalyDetectorTransportActionTests.java | 18 +++ ...exAnomalyDetectorTransportActionTests.java | 72 +++++++++- 10 files changed, 443 insertions(+), 38 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobTransportAction.java index 1fb661d9..7787def6 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobTransportAction.java @@ -15,9 +15,10 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; - -import 
java.io.IOException; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.getUserContext; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.resolveUserAndExecute; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -25,6 +26,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -35,61 +37,86 @@ import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; import com.amazon.opendistroforelasticsearch.ad.rest.handler.IndexAnomalyDetectorJobActionHandler; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import com.amazon.opendistroforelasticsearch.commons.authuser.User; public class AnomalyDetectorJobTransportAction extends HandledTransportAction { private final Logger logger = LogManager.getLogger(AnomalyDetectorJobTransportAction.class); private final Client client; + private final ClusterService clusterService; private final Settings settings; private final AnomalyDetectionIndices anomalyDetectionIndices; private final NamedXContentRegistry xContentRegistry; + private volatile Boolean filterByEnabled; @Inject public AnomalyDetectorJobTransportAction( TransportService transportService, ActionFilters actionFilters, Client client, + ClusterService clusterService, Settings settings, AnomalyDetectionIndices anomalyDetectionIndices, NamedXContentRegistry xContentRegistry ) { super(AnomalyDetectorJobAction.NAME, transportService, actionFilters, AnomalyDetectorJobRequest::new); this.client = client; + this.clusterService = clusterService; this.settings = settings; this.anomalyDetectionIndices = anomalyDetectionIndices; this.xContentRegistry = xContentRegistry; + filterByEnabled = AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FILTER_BY_BACKEND_ROLES, it -> filterByEnabled = it); } @Override protected void doExecute(Task task, AnomalyDetectorJobRequest request, ActionListener listener) { String detectorId = request.getDetectorID(); - long seqNo = request.getSeqNo(); - long primaryTerm = request.getPrimaryTerm(); - String rawPath = request.getRawPath(); - TimeValue requestTimeout = REQUEST_TIMEOUT.get(settings); - // By the time request reaches here, the user permissions are validated by Security plugin. - // Since the detectorID is provided, this can only happen if User is part of a role which has access - // to the detector. This is filtered by our Search Detector API. 
- + User user = getUserContext(client); try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { - IndexAnomalyDetectorJobActionHandler handler = new IndexAnomalyDetectorJobActionHandler( - client, - listener, - anomalyDetectionIndices, + resolveUserAndExecute( + user, detectorId, - seqNo, - primaryTerm, - requestTimeout, + filterByEnabled, + listener, + () -> adJobExecute(request, listener), + client, + clusterService, xContentRegistry ); + } catch (Exception e) { + logger.error(e); + listener.onFailure(e); + } + } + + private void adJobExecute(AnomalyDetectorJobRequest request, ActionListener listener) { + String detectorId = request.getDetectorID(); + long seqNo = request.getSeqNo(); + long primaryTerm = request.getPrimaryTerm(); + String rawPath = request.getRawPath(); + TimeValue requestTimeout = REQUEST_TIMEOUT.get(settings); + + IndexAnomalyDetectorJobActionHandler handler = new IndexAnomalyDetectorJobActionHandler( + client, + listener, + anomalyDetectionIndices, + detectorId, + seqNo, + primaryTerm, + requestTimeout, + xContentRegistry + ); + try { if (rawPath.endsWith(RestHandlerUtils.START_JOB)) { handler.startAnomalyDetectorJob(); } else if (rawPath.endsWith(RestHandlerUtils.STOP_JOB)) { handler.stopAnomalyDetectorJob(detectorId); } - } catch (IOException e) { + } catch (Exception e) { logger.error(e); listener.onFailure(e); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorTransportAction.java index b7a84109..eec34603 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorTransportAction.java @@ -16,6 +16,9 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.getUserContext; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.resolveUserAndExecute; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import java.io.IOException; @@ -35,6 +38,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,7 +51,9 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; import com.amazon.opendistroforelasticsearch.ad.model.DetectorInternalState; import com.amazon.opendistroforelasticsearch.ad.rest.handler.AnomalyDetectorFunction; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import com.amazon.opendistroforelasticsearch.commons.authuser.User; public class DeleteAnomalyDetectorTransportAction extends HandledTransportAction { @@ -55,6 +61,7 @@ public class DeleteAnomalyDetectorTransportAction extends HandledTransportAction 
private final Client client; private final ClusterService clusterService; private NamedXContentRegistry xContentRegistry; + private volatile Boolean filterByEnabled; @Inject public DeleteAnomalyDetectorTransportAction( @@ -62,24 +69,34 @@ public DeleteAnomalyDetectorTransportAction( ActionFilters actionFilters, Client client, ClusterService clusterService, + Settings settings, NamedXContentRegistry xContentRegistry ) { super(DeleteAnomalyDetectorAction.NAME, transportService, actionFilters, DeleteAnomalyDetectorRequest::new); this.client = client; this.clusterService = clusterService; this.xContentRegistry = xContentRegistry; + filterByEnabled = AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FILTER_BY_BACKEND_ROLES, it -> filterByEnabled = it); } @Override protected void doExecute(Task task, DeleteAnomalyDetectorRequest request, ActionListener listener) { String detectorId = request.getDetectorID(); LOG.info("Delete anomaly detector job {}", detectorId); - + User user = getUserContext(client); // By the time request reaches here, the user permissions are validated by Security plugin. - // Since the detectorID is provided, this can only happen if User is part of a role which has access - // to the detector. This is filtered by our Search Detector API. try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { - getDetectorJob(detectorId, listener, () -> deleteAnomalyDetectorJobDoc(detectorId, listener)); + resolveUserAndExecute( + user, + detectorId, + filterByEnabled, + listener, + () -> getDetectorJob(detectorId, listener, () -> deleteAnomalyDetectorJobDoc(detectorId, listener)), + client, + clusterService, + xContentRegistry + ); } catch (Exception e) { LOG.error(e); listener.onFailure(e); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportAction.java index 056d1a4d..d3aa09fb 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportAction.java @@ -17,6 +17,9 @@ import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.getUserContext; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.resolveUserAndExecute; import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.PROFILE; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; @@ -37,9 +40,11 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -58,12 +63,14 @@ import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import com.amazon.opendistroforelasticsearch.commons.authuser.User; import com.google.common.collect.Sets; public class GetAnomalyDetectorTransportAction extends HandledTransportAction { private static final Logger LOG = LogManager.getLogger(GetAnomalyDetectorTransportAction.class); + private final ClusterService clusterService; private final Client client; private final Set allProfileTypeStrs; @@ -74,16 +81,20 @@ public class GetAnomalyDetectorTransportAction extends HandledTransportAction defaultEntityProfileTypes; private final NamedXContentRegistry xContentRegistry; private final DiscoveryNodeFilterer nodeFilter; + private volatile Boolean filterByEnabled; @Inject public GetAnomalyDetectorTransportAction( TransportService transportService, DiscoveryNodeFilterer nodeFilter, ActionFilters actionFilters, + ClusterService clusterService, Client client, + Settings settings, NamedXContentRegistry xContentRegistry ) { super(GetAnomalyDetectorAction.NAME, transportService, actionFilters, GetAnomalyDetectorRequest::new); + this.clusterService = clusterService; this.client = client; List allProfiles = Arrays.asList(DetectorProfileName.values()); @@ -100,10 +111,32 @@ public GetAnomalyDetectorTransportAction( this.xContentRegistry = xContentRegistry; this.nodeFilter = nodeFilter; + filterByEnabled = AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FILTER_BY_BACKEND_ROLES, it -> filterByEnabled = it); } @Override protected void doExecute(Task task, GetAnomalyDetectorRequest request, ActionListener listener) { + String detectorID = request.getDetectorID(); + User user = getUserContext(client); + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + resolveUserAndExecute( + user, + detectorID, + filterByEnabled, + listener, + () -> getExecute(request, listener), + client, + clusterService, + xContentRegistry + ); + } catch (Exception e) { + LOG.error(e); + listener.onFailure(e); + } + } + + protected void getExecute(GetAnomalyDetectorRequest request, ActionListener listener) { String detectorID = request.getDetectorID(); Long version = request.getVersion(); String typesStr = request.getTypeStr(); @@ -112,7 +145,7 @@ protected void doExecute(Task task, GetAnomalyDetectorRequest request, ActionLis boolean all = request.isAll(); boolean returnJob = request.isReturnJob(); - try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + try { if (!Strings.isEmpty(typesStr) || rawPath.endsWith(PROFILE) || rawPath.endsWith(PROFILE + "/")) { if (entityValue != null) { Set entityProfilesToCollect = getEntityProfilesToCollect(typesStr, all); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportAction.java index c44ea317..64e9aac0 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportAction.java +++ 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportAction.java @@ -15,6 +15,9 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.checkFilterByBackendRoles; +import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.getDetector; import static com.amazon.opendistroforelasticsearch.ad.util.ParseUtils.getUserContext; import java.io.IOException; @@ -44,6 +47,7 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.rest.handler.AnomalyDetectorFunction; import com.amazon.opendistroforelasticsearch.ad.rest.handler.IndexAnomalyDetectorActionHandler; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.commons.authuser.User; public class IndexAnomalyDetectorTransportAction extends HandledTransportAction { @@ -52,6 +56,7 @@ public class IndexAnomalyDetectorTransportAction extends HandledTransportAction< private final AnomalyDetectionIndices anomalyDetectionIndices; private final ClusterService clusterService; private final NamedXContentRegistry xContentRegistry; + private volatile Boolean filterByEnabled; @Inject public IndexAnomalyDetectorTransportAction( @@ -68,11 +73,59 @@ public IndexAnomalyDetectorTransportAction( this.clusterService = clusterService; this.anomalyDetectionIndices = anomalyDetectionIndices; this.xContentRegistry = xContentRegistry; + filterByEnabled = AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(FILTER_BY_BACKEND_ROLES, it -> filterByEnabled = it); } @Override protected void doExecute(Task task, IndexAnomalyDetectorRequest request, ActionListener listener) { User user = getUserContext(client); + String detectorId = request.getDetectorID(); + RestRequest.Method method = request.getMethod(); + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + resolveUserAndExecute(user, detectorId, method, listener, () -> adExecute(request, user, listener)); + } catch (Exception e) { + LOG.error(e); + listener.onFailure(e); + } + } + + private void resolveUserAndExecute( + User requestedUser, + String detectorId, + RestRequest.Method method, + ActionListener listener, + AnomalyDetectorFunction function + ) { + if (requestedUser == null) { + // Security is disabled or user is superadmin + function.execute(); + } else if (!filterByEnabled) { + // security is enabled and filterby is disabled. + function.execute(); + } else { + // security is enabled and filterby is enabled. + try { + // Check if user has backend roles + // When filter by is enabled, block users creating/updating detectors who do not have backend roles. 
+ if (!checkFilterByBackendRoles(requestedUser, listener)) { + return; + } + if (method == RestRequest.Method.PUT) { + // Update detector request, check if user has permissions to update the detector + // Get detector and verify backend roles + getDetector(requestedUser, detectorId, listener, function, client, clusterService, xContentRegistry); + } else { + // Create Detector + function.execute(); + } + } catch (Exception e) { + listener.onFailure(e); + } + } + } + + protected void adExecute(IndexAnomalyDetectorRequest request, User user, ActionListener listener) { anomalyDetectionIndices.updateMappingIfNecessary(); String detectorId = request.getDetectorID(); long seqNo = request.getSeqNo(); @@ -86,7 +139,7 @@ protected void doExecute(Task task, IndexAnomalyDetectorRequest request, ActionL Integer maxAnomalyFeatures = request.getMaxAnomalyFeatures(); checkIndicesAndExecute(detector.getIndices(), () -> { - try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + try { IndexAnomalyDetectorActionHandler indexAnomalyDetectorActionHandler = new IndexAnomalyDetectorActionHandler( clusterService, client, @@ -127,14 +180,7 @@ private void checkIndicesAndExecute( SearchRequest searchRequest = new SearchRequest() .indices(indices.toArray(new String[0])) .source(new SearchSourceBuilder().size(1).query(QueryBuilders.matchAllQuery())); - client.search(searchRequest, ActionListener.wrap(r -> { - try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { - function.execute(); - } catch (Exception e) { - LOG.error(e); - listener.onFailure(e); - } - }, e -> { + client.search(searchRequest, ActionListener.wrap(r -> { function.execute(); }, e -> { // Due to below issue with security plugin, we get security_exception when invalid index name is mentioned. 
// https://github.com/opendistro-for-elasticsearch/security/issues/718 LOG.error(e); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java index e3c4d584..68e515de 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/ParseUtils.java @@ -17,6 +17,7 @@ import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.QUERY_PARAM_PERIOD_END; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.QUERY_PARAM_PERIOD_START; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; import static org.elasticsearch.search.aggregations.AggregatorFactories.VALID_AGG_NAME; @@ -33,8 +34,13 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -60,6 +66,8 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.Feature; import com.amazon.opendistroforelasticsearch.ad.model.FeatureData; +import com.amazon.opendistroforelasticsearch.ad.rest.handler.AnomalyDetectorFunction; +import com.amazon.opendistroforelasticsearch.ad.transport.GetAnomalyDetectorResponse; import com.amazon.opendistroforelasticsearch.commons.ConfigConstants; import com.amazon.opendistroforelasticsearch.commons.authuser.User; @@ -450,6 +458,132 @@ public static User getUserContext(Client client) { return User.parse(userStr); } + public static void resolveUserAndExecute( + User requestedUser, + String detectorId, + boolean filterByEnabled, + ActionListener listener, + AnomalyDetectorFunction function, + Client client, + ClusterService clusterService, + NamedXContentRegistry xContentRegistry + ) { + if (requestedUser == null) { + // Security is disabled or user is superadmin + function.execute(); + } else if (!filterByEnabled) { + // security is enabled and filterby is disabled. + function.execute(); + } else { + // security is enabled and filterby is enabled. 
+ // Get detector and check if the user has permissions to access the detector + try { + getDetector(requestedUser, detectorId, listener, function, client, clusterService, xContentRegistry); + } catch (Exception e) { + listener.onFailure(e); + } + } + } + + public static void getDetector( + User requestUser, + String detectorId, + ActionListener listener, + AnomalyDetectorFunction function, + Client client, + ClusterService clusterService, + NamedXContentRegistry xContentRegistry + ) { + if (clusterService.state().metadata().indices().containsKey(AnomalyDetector.ANOMALY_DETECTORS_INDEX)) { + GetRequest request = new GetRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX).id(detectorId); + client + .get( + request, + ActionListener + .wrap( + response -> onGetAdResponse(response, requestUser, detectorId, listener, function, xContentRegistry), + exception -> { + logger.error("Failed to get anomaly detector: " + detectorId, exception); + listener.onFailure(exception); + } + ) + ); + } else { + listener + .onFailure( + new ResourceNotFoundException("Failed to find anomaly detector index: " + AnomalyDetector.ANOMALY_DETECTORS_INDEX) + ); + } + } + + public static void onGetAdResponse( + GetResponse response, + User requestUser, + String detectorId, + ActionListener listener, + AnomalyDetectorFunction function, + NamedXContentRegistry xContentRegistry + ) { + if (response.isExists()) { + try ( + XContentParser parser = RestHandlerUtils.createXContentParserFromRegistry(xContentRegistry, response.getSourceAsBytesRef()) + ) { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + AnomalyDetector detector = AnomalyDetector.parse(parser); + User resourceUser = detector.getUser(); + + if (checkUserPermissions(requestUser, resourceUser, detectorId)) { + function.execute(); + } else { + logger.debug("User: " + requestUser.getName() + " does not have permissions to access detector: " + detectorId); + listener.onFailure(new ElasticsearchException("User does not have permissions to access detector: " + detectorId)); + } + } catch (Exception e) { + listener.onFailure(new ElasticsearchException("Unable to get user information from detector " + detectorId)); + } + } else { + listener.onFailure(new ResourceNotFoundException("Could not find detector " + detectorId)); + } + } + + private static boolean checkUserPermissions(User requestedUser, User resourceUser, String detectorId) throws Exception { + if (resourceUser.getBackendRoles() == null || requestedUser.getBackendRoles() == null) { + return false; + } + // Check if requested user has backend role required to access the resource + for (String backendRole : requestedUser.getBackendRoles()) { + if (resourceUser.getBackendRoles().contains(backendRole)) { + logger + .debug( + "User: " + + requestedUser.getName() + + " has backend role: " + + backendRole + + " permissions to access detector: " + + detectorId + ); + return true; + } + } + return false; + } + + public static boolean checkFilterByBackendRoles(User requestedUser, ActionListener listener) { + if (requestedUser == null) { + return false; + } + if (requestedUser.getBackendRoles().isEmpty()) { + listener + .onFailure( + new ElasticsearchException( + "Filter by backend roles is enabled and User " + requestedUser.getName() + " does not have backend roles configured" + ) + ); + return false; + } + return true; + } + /** * Parse max timestamp aggregation named CommonName.AGG_NAME_MAX * @param searchResponse Search response diff --git 
a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobActionTests.java index 26ac5859..9c519f8b 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyDetectorJobActionTests.java @@ -16,22 +16,34 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; +import com.amazon.opendistroforelasticsearch.commons.ConfigConstants; public class AnomalyDetectorJobActionTests extends ESIntegTestCase { private AnomalyDetectorJobTransportAction action; @@ -43,11 +55,26 @@ public class AnomalyDetectorJobActionTests extends ESIntegTestCase { @Before public void setUp() throws Exception { super.setUp(); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES))) + ); + + Settings build = Settings.builder().build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putTransient(ConfigConstants.OPENDISTRO_SECURITY_USER_INFO_THREAD_CONTEXT, "alice|odfe,aes|engineering,operations"); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + Client client = mock(Client.class); + org.elasticsearch.threadpool.ThreadPool mockThreadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(mockThreadPool); + when(mockThreadPool.getThreadContext()).thenReturn(threadContext); action = new AnomalyDetectorJobTransportAction( mock(TransportService.class), mock(ActionFilters.class), - client(), + client, + clusterService, indexSettings(), mock(AnomalyDetectionIndices.class), xContentRegistry() @@ -58,7 +85,7 @@ public void setUp() throws Exception { @Override public void onResponse(AnomalyDetectorJobResponse adResponse) { // Will not be called as there is no detector - Assert.assertTrue(true); + Assert.assertTrue(false); } @Override diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorActionTests.java index 6a3ab3cb..6e7a1e61 100644 --- 
a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/DeleteAnomalyDetectorActionTests.java @@ -16,15 +16,22 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; @@ -32,6 +39,8 @@ import org.junit.Before; import org.junit.Test; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; + public class DeleteAnomalyDetectorActionTests extends ESIntegTestCase { private DeleteAnomalyDetectorTransportAction action; private ActionListener response; @@ -40,11 +49,18 @@ public class DeleteAnomalyDetectorActionTests extends ESIntegTestCase { @Before public void setUp() throws Exception { super.setUp(); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES))) + ); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); action = new DeleteAnomalyDetectorTransportAction( mock(TransportService.class), mock(ActionFilters.class), client(), - clusterService(), + clusterService, + Settings.EMPTY, xContentRegistry() ); response = new ActionListener() { diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTests.java index c07ba8a0..9a96e17d 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTests.java @@ -23,7 +23,9 @@ import java.io.IOException; import java.security.InvalidParameterException; +import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; @@ -31,6 +33,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; @@ -39,6 +43,7 @@ import com.amazon.opendistroforelasticsearch.ad.AbstractADTest; import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import 
com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; public class GetAnomalyDetectorTests extends AbstractADTest { @@ -67,6 +72,12 @@ public static void tearDownAfterClass() { @Override public void setUp() throws Exception { super.setUp(); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES))) + ); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); transportService = new TransportService( Settings.EMPTY, @@ -85,7 +96,15 @@ public void setUp() throws Exception { client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); - action = new GetAnomalyDetectorTransportAction(transportService, nodeFilter, actionFilters, client, xContentRegistry()); + action = new GetAnomalyDetectorTransportAction( + transportService, + nodeFilter, + actionFilters, + clusterService, + client, + Settings.EMPTY, + xContentRegistry() + ); } public void testInvalidRequest() throws IOException { diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java index d4254729..7608e2f9 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/GetAnomalyDetectorTransportActionTests.java @@ -15,16 +15,25 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import java.io.IOException; import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.Map; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestStatus; @@ -40,6 +49,7 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; import com.amazon.opendistroforelasticsearch.ad.model.EntityProfile; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; import com.google.common.collect.ImmutableMap; @@ -53,11 +63,19 @@ public class GetAnomalyDetectorTransportActionTests extends ESSingleNodeTestCase @Before public void setUp() throws Exception { super.setUp(); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES))) + ); + 
when(clusterService.getClusterSettings()).thenReturn(clusterSettings); action = new GetAnomalyDetectorTransportAction( Mockito.mock(TransportService.class), Mockito.mock(DiscoveryNodeFilterer.class), Mockito.mock(ActionFilters.class), + clusterService, client(), + Settings.EMPTY, xContentRegistry() ); task = Mockito.mock(Task.class); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java index c2e4b644..18f01e73 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java @@ -16,14 +16,25 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.junit.Assert; import org.junit.Before; @@ -31,23 +42,33 @@ import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; +import com.amazon.opendistroforelasticsearch.commons.ConfigConstants; public class IndexAnomalyDetectorTransportActionTests extends ESIntegTestCase { private IndexAnomalyDetectorTransportAction action; private Task task; private IndexAnomalyDetectorRequest request; private ActionListener response; + private ClusterService clusterService; + private ClusterSettings clusterSettings; @Override @Before public void setUp() throws Exception { super.setUp(); + clusterService = mock(ClusterService.class); + clusterSettings = new ClusterSettings( + Settings.EMPTY, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES))) + ); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); action = new IndexAnomalyDetectorTransportAction( mock(TransportService.class), mock(ActionFilters.class), client(), - clusterService(), + clusterService, indexSettings(), mock(AnomalyDetectionIndices.class), xContentRegistry() @@ -68,7 +89,8 @@ public void setUp() throws Exception { response = new ActionListener() { @Override public void onResponse(IndexAnomalyDetectorResponse indexResponse) { - Assert.assertTrue(true); + // onResponse will not be called as we do not have the AD index + Assert.assertTrue(false); } @Override @@ -83,6 +105,52 @@ public void testIndexTransportAction() { action.doExecute(task, request, response); } + @Test + public void testIndexTransportActionWithUserAndFilterOn() { 
+ Settings settings = Settings.builder().put(AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES.getKey(), true).build(); + ThreadContext threadContext = new ThreadContext(settings); + threadContext.putTransient(ConfigConstants.OPENDISTRO_SECURITY_USER_INFO_THREAD_CONTEXT, "alice|odfe,aes|engineering,operations"); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + Client client = mock(Client.class); + org.elasticsearch.threadpool.ThreadPool mockThreadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(mockThreadPool); + when(mockThreadPool.getThreadContext()).thenReturn(threadContext); + + IndexAnomalyDetectorTransportAction transportAction = new IndexAnomalyDetectorTransportAction( + mock(TransportService.class), + mock(ActionFilters.class), + client, + clusterService, + settings, + mock(AnomalyDetectionIndices.class), + xContentRegistry() + ); + transportAction.doExecute(task, request, response); + } + + @Test + public void testIndexTransportActionWithUserAndFilterOff() { + Settings settings = Settings.builder().build(); + ThreadContext threadContext = new ThreadContext(settings); + threadContext.putTransient(ConfigConstants.OPENDISTRO_SECURITY_USER_INFO_THREAD_CONTEXT, "alice|odfe,aes|engineering,operations"); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + Client client = mock(Client.class); + org.elasticsearch.threadpool.ThreadPool mockThreadPool = mock(ThreadPool.class); + when(client.threadPool()).thenReturn(mockThreadPool); + when(mockThreadPool.getThreadContext()).thenReturn(threadContext); + + IndexAnomalyDetectorTransportAction transportAction = new IndexAnomalyDetectorTransportAction( + mock(TransportService.class), + mock(ActionFilters.class), + client, + clusterService, + settings, + mock(AnomalyDetectionIndices.class), + xContentRegistry() + ); + transportAction.doExecute(task, request, response); + } + @Test public void testIndexDetectorAction() { Assert.assertNotNull(IndexAnomalyDetectorAction.INSTANCE.name()); From 9c3b972e7252ec95d9ee9997019386b4bf4e78e7 Mon Sep 17 00:00:00 2001 From: Yaliang <49084640+ylwu-amzn@users.noreply.github.com> Date: Wed, 16 Dec 2020 01:26:18 +0000 Subject: [PATCH 05/13] add AD task and tune detector&AD result data model (#329) * add AD task and tune detector&AD result data model * remove lines which commented out * fix checkstyle * fix null point bug in AD result; add more test cases * add more checking * fix typo * add java doc for task state --- .../ad/model/ADTask.java | 521 ++++++++++++++++++ .../ad/model/ADTaskState.java | 59 ++ .../ad/model/ADTaskType.java | 21 + .../ad/model/AnomalyDetector.java | 151 +++-- .../ad/model/AnomalyDetectorType.java | 23 + .../ad/model/AnomalyResult.java | 79 ++- .../ad/model/DetectionDateRange.java | 131 +++++ .../mappings/anomaly-detection-state.json | 203 ++++++- .../resources/mappings/anomaly-detectors.json | 17 +- .../resources/mappings/anomaly-results.json | 5 +- .../ad/TestHelpers.java | 90 ++- .../ad/feature/SearchFeatureDaoTests.java | 1 - .../ad/model/ADTaskTests.java | 99 ++++ .../AnomalyDetectorSerializationTests.java | 20 +- .../ad/model/AnomalyDetectorTests.java | 120 ++++ .../ad/model/AnomalyResultTests.java | 171 +++++- .../ad/model/DetectionDateRangeTests.java | 68 +++ 17 files changed, 1704 insertions(+), 75 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTask.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskState.java create mode 100644 
src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskType.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorType.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRange.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskTests.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRangeTests.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTask.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTask.java new file mode 100644 index 00000000..c83836ab --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTask.java @@ -0,0 +1,521 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +import java.io.IOException; +import java.time.Instant; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import com.amazon.opendistroforelasticsearch.ad.annotation.Generated; +import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; +import com.google.common.base.Objects; + +/** + * One anomaly detection task means one detector starts to run until stopped. 
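+ * <p>
+ * Every field is optional and instances are created through the nested {@code Builder}. A
+ * minimal usage sketch (the values below are illustrative placeholders, not required inputs):
+ * <pre>
+ * ADTask task = ADTask.builder()
+ *     .taskId("task-id")
+ *     .detectorId("detector-id")
+ *     .taskType(ADTaskType.HISTORICAL.name())
+ *     .state(ADTaskState.CREATED.name())
+ *     .isLatest(true)
+ *     .lastUpdateTime(Instant.now())
+ *     .build();
+ * </pre>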
+ */ +public class ADTask implements ToXContentObject, Writeable { + + public static final String DETECTOR_STATE_INDEX = ".opendistro-anomaly-detection-state"; + + public static final String TASK_ID_FIELD = "task_id"; + public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; + public static final String STARTED_BY_FIELD = "started_by"; + public static final String STOPPED_BY_FIELD = "stopped_by"; + public static final String ERROR_FIELD = "error"; + public static final String STATE_FIELD = "state"; + public static final String DETECTOR_ID_FIELD = "detector_id"; + public static final String TASK_PROGRESS_FIELD = "task_progress"; + public static final String INIT_PROGRESS_FIELD = "init_progress"; + public static final String CURRENT_PIECE_FIELD = "current_piece"; + public static final String EXECUTION_START_TIME_FIELD = "execution_start_time"; + public static final String EXECUTION_END_TIME_FIELD = "execution_end_time"; + public static final String IS_LATEST_FIELD = "is_latest"; + public static final String TASK_TYPE_FIELD = "task_type"; + public static final String CHECKPOINT_ID_FIELD = "checkpoint_id"; + public static final String DETECTOR_FIELD = "detector"; + + private String taskId = null; + private Instant lastUpdateTime = null; + private String startedBy = null; + private String stoppedBy = null; + private String error = null; + private String state = null; + private String detectorId = null; + private Float taskProgress = null; + private Float initProgress = null; + private Instant currentPiece = null; + private Instant executionStartTime = null; + private Instant executionEndTime = null; + private Boolean isLatest = null; + private String taskType = null; + private String checkpointId = null; + private AnomalyDetector detector = null; + + private ADTask() {} + + public ADTask(StreamInput input) throws IOException { + this.taskId = input.readOptionalString(); + this.taskType = input.readOptionalString(); + this.detectorId = input.readOptionalString(); + if (input.readBoolean()) { + this.detector = new AnomalyDetector(input); + } else { + this.detector = null; + } + this.state = input.readOptionalString(); + this.taskProgress = input.readOptionalFloat(); + this.initProgress = input.readOptionalFloat(); + this.currentPiece = input.readOptionalInstant(); + this.executionStartTime = input.readOptionalInstant(); + this.executionEndTime = input.readOptionalInstant(); + this.isLatest = input.readOptionalBoolean(); + this.error = input.readOptionalString(); + this.checkpointId = input.readOptionalString(); + this.lastUpdateTime = input.readOptionalInstant(); + this.startedBy = input.readOptionalString(); + this.stoppedBy = input.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(taskId); + out.writeOptionalString(taskType); + out.writeOptionalString(detectorId); + if (detector != null) { + out.writeBoolean(true); + detector.writeTo(out); + } else { + out.writeBoolean(false); + } + out.writeOptionalString(state); + out.writeOptionalFloat(taskProgress); + out.writeOptionalFloat(initProgress); + out.writeOptionalInstant(currentPiece); + out.writeOptionalInstant(executionStartTime); + out.writeOptionalInstant(executionEndTime); + out.writeOptionalBoolean(isLatest); + out.writeOptionalString(error); + out.writeOptionalString(checkpointId); + out.writeOptionalInstant(lastUpdateTime); + out.writeOptionalString(startedBy); + out.writeOptionalString(stoppedBy); + } + + public static Builder builder() { + 
return new Builder(); + } + + public static class Builder { + private String taskId = null; + private String taskType = null; + private String detectorId = null; + private AnomalyDetector detector = null; + private String state = null; + private Float taskProgress = null; + private Float initProgress = null; + private Instant currentPiece = null; + private Instant executionStartTime = null; + private Instant executionEndTime = null; + private Boolean isLatest = null; + private String error = null; + private String checkpointId = null; + private Instant lastUpdateTime = null; + private String startedBy = null; + private String stoppedBy = null; + + public Builder() {} + + public Builder taskId(String taskId) { + this.taskId = taskId; + return this; + } + + public Builder lastUpdateTime(Instant lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + return this; + } + + public Builder startedBy(String startedBy) { + this.startedBy = startedBy; + return this; + } + + public Builder stoppedBy(String stoppedBy) { + this.stoppedBy = stoppedBy; + return this; + } + + public Builder error(String error) { + this.error = error; + return this; + } + + public Builder state(String state) { + this.state = state; + return this; + } + + public Builder detectorId(String detectorId) { + this.detectorId = detectorId; + return this; + } + + public Builder taskProgress(Float taskProgress) { + this.taskProgress = taskProgress; + return this; + } + + public Builder initProgress(Float initProgress) { + this.initProgress = initProgress; + return this; + } + + public Builder currentPiece(Instant currentPiece) { + this.currentPiece = currentPiece; + return this; + } + + public Builder executionStartTime(Instant executionStartTime) { + this.executionStartTime = executionStartTime; + return this; + } + + public Builder executionEndTime(Instant executionEndTime) { + this.executionEndTime = executionEndTime; + return this; + } + + public Builder isLatest(Boolean isLatest) { + this.isLatest = isLatest; + return this; + } + + public Builder taskType(String taskType) { + this.taskType = taskType; + return this; + } + + public Builder checkpointId(String checkpointId) { + this.checkpointId = checkpointId; + return this; + } + + public Builder detector(AnomalyDetector detector) { + this.detector = detector; + return this; + } + + public ADTask build() { + ADTask adTask = new ADTask(); + adTask.taskId = this.taskId; + adTask.lastUpdateTime = this.lastUpdateTime; + adTask.error = this.error; + adTask.state = this.state; + adTask.detectorId = this.detectorId; + adTask.taskProgress = this.taskProgress; + adTask.initProgress = this.initProgress; + adTask.currentPiece = this.currentPiece; + adTask.executionStartTime = this.executionStartTime; + adTask.executionEndTime = this.executionEndTime; + adTask.isLatest = this.isLatest; + adTask.taskType = this.taskType; + adTask.checkpointId = this.checkpointId; + adTask.detector = this.detector; + adTask.startedBy = this.startedBy; + adTask.stoppedBy = this.stoppedBy; + + return adTask; + } + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + XContentBuilder xContentBuilder = builder.startObject(); + if (taskId != null) { + xContentBuilder.field(TASK_ID_FIELD, taskId); + } + if (lastUpdateTime != null) { + xContentBuilder.field(LAST_UPDATE_TIME_FIELD, lastUpdateTime.toEpochMilli()); + } + if (startedBy != null) { + xContentBuilder.field(STARTED_BY_FIELD, startedBy); + } + if (stoppedBy != null) { + 
xContentBuilder.field(STOPPED_BY_FIELD, stoppedBy); + } + if (error != null) { + xContentBuilder.field(ERROR_FIELD, error); + } + if (state != null) { + xContentBuilder.field(STATE_FIELD, state); + } + if (detectorId != null) { + xContentBuilder.field(DETECTOR_ID_FIELD, detectorId); + } + if (taskProgress != null) { + xContentBuilder.field(TASK_PROGRESS_FIELD, taskProgress); + } + if (initProgress != null) { + xContentBuilder.field(INIT_PROGRESS_FIELD, initProgress); + } + if (currentPiece != null) { + xContentBuilder.field(CURRENT_PIECE_FIELD, currentPiece.toEpochMilli()); + } + if (executionStartTime != null) { + xContentBuilder.field(EXECUTION_START_TIME_FIELD, executionStartTime.toEpochMilli()); + } + if (executionEndTime != null) { + xContentBuilder.field(EXECUTION_END_TIME_FIELD, executionEndTime.toEpochMilli()); + } + if (isLatest != null) { + xContentBuilder.field(IS_LATEST_FIELD, isLatest); + } + if (taskType != null) { + xContentBuilder.field(TASK_TYPE_FIELD, taskType); + } + if (checkpointId != null) { + xContentBuilder.field(CHECKPOINT_ID_FIELD, checkpointId); + } + if (detector != null) { + xContentBuilder.field(DETECTOR_FIELD, detector); + } + return xContentBuilder.endObject(); + } + + public static ADTask parse(XContentParser parser) throws IOException { + return parse(parser, null); + } + + public static ADTask parse(XContentParser parser, String taskId) throws IOException { + Instant lastUpdateTime = null; + String startedBy = null; + String stoppedBy = null; + String error = null; + String state = null; + String detectorId = null; + Float taskProgress = null; + Float initProgress = null; + Instant currentPiece = null; + Instant executionStartTime = null; + Instant executionEndTime = null; + Boolean isLatest = null; + String taskType = null; + String checkpointId = null; + AnomalyDetector detector = null; + + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = parser.currentName(); + parser.nextToken(); + + switch (fieldName) { + case LAST_UPDATE_TIME_FIELD: + lastUpdateTime = ParseUtils.toInstant(parser); + break; + case STARTED_BY_FIELD: + startedBy = parser.text(); + break; + case STOPPED_BY_FIELD: + stoppedBy = parser.text(); + break; + case ERROR_FIELD: + error = parser.text(); + break; + case STATE_FIELD: + state = parser.text(); + break; + case DETECTOR_ID_FIELD: + detectorId = parser.text(); + break; + case TASK_PROGRESS_FIELD: + taskProgress = parser.floatValue(); + break; + case INIT_PROGRESS_FIELD: + initProgress = parser.floatValue(); + break; + case CURRENT_PIECE_FIELD: + currentPiece = ParseUtils.toInstant(parser); + break; + case EXECUTION_START_TIME_FIELD: + executionStartTime = ParseUtils.toInstant(parser); + break; + case EXECUTION_END_TIME_FIELD: + executionEndTime = ParseUtils.toInstant(parser); + break; + case IS_LATEST_FIELD: + isLatest = parser.booleanValue(); + break; + case TASK_TYPE_FIELD: + taskType = parser.text(); + break; + case CHECKPOINT_ID_FIELD: + checkpointId = parser.text(); + break; + case DETECTOR_FIELD: + detector = AnomalyDetector.parse(parser); + break; + default: + parser.skipChildren(); + break; + } + } + return new Builder() + .taskId(taskId) + .lastUpdateTime(lastUpdateTime) + .startedBy(startedBy) + .stoppedBy(stoppedBy) + .error(error) + .state(state) + .detectorId(detectorId) + .taskProgress(taskProgress) + .initProgress(initProgress) + .currentPiece(currentPiece) + .executionStartTime(executionStartTime) 
+ .executionEndTime(executionEndTime) + .isLatest(isLatest) + .taskType(taskType) + .checkpointId(checkpointId) + .detector(detector) + .build(); + } + + @Generated + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + ADTask that = (ADTask) o; + return Objects.equal(getTaskId(), that.getTaskId()) + && Objects.equal(getLastUpdateTime(), that.getLastUpdateTime()) + && Objects.equal(getStartedBy(), that.getStartedBy()) + && Objects.equal(getStoppedBy(), that.getStoppedBy()) + && Objects.equal(getError(), that.getError()) + && Objects.equal(getState(), that.getState()) + && Objects.equal(getDetectorId(), that.getDetectorId()) + && Objects.equal(getTaskProgress(), that.getTaskProgress()) + && Objects.equal(getInitProgress(), that.getInitProgress()) + && Objects.equal(getCurrentPiece(), that.getCurrentPiece()) + && Objects.equal(getExecutionStartTime(), that.getExecutionStartTime()) + && Objects.equal(getExecutionEndTime(), that.getExecutionEndTime()) + && Objects.equal(getLatest(), that.getLatest()) + && Objects.equal(getTaskType(), that.getTaskType()) + && Objects.equal(getCheckpointId(), that.getCheckpointId()) + && Objects.equal(getDetector(), that.getDetector()); + } + + @Generated + @Override + public int hashCode() { + return Objects + .hashCode( + taskId, + lastUpdateTime, + startedBy, + stoppedBy, + error, + state, + detectorId, + taskProgress, + initProgress, + currentPiece, + executionStartTime, + executionEndTime, + isLatest, + taskType, + checkpointId, + detector + ); + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + public Instant getLastUpdateTime() { + return lastUpdateTime; + } + + public String getStartedBy() { + return startedBy; + } + + public String getStoppedBy() { + return stoppedBy; + } + + public String getError() { + return error; + } + + public String getState() { + return state; + } + + public String getDetectorId() { + return detectorId; + } + + public Float getTaskProgress() { + return taskProgress; + } + + public Float getInitProgress() { + return initProgress; + } + + public Instant getCurrentPiece() { + return currentPiece; + } + + public Instant getExecutionStartTime() { + return executionStartTime; + } + + public Instant getExecutionEndTime() { + return executionEndTime; + } + + public Boolean getLatest() { + return isLatest; + } + + public String getTaskType() { + return taskType; + } + + public String getCheckpointId() { + return checkpointId; + } + + public AnomalyDetector getDetector() { + return detector; + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskState.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskState.java new file mode 100644 index 00000000..56ee6c0b --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskState.java @@ -0,0 +1,59 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package com.amazon.opendistroforelasticsearch.ad.model;
+
+/**
+ * AD task states.
+ * <ul>
+ * <li><code>CREATED</code>:
+ *     When a user starts a historical detector, we create one task to track the detector
+ *     execution and set its state as CREATED.
+ *
+ * <li><code>INIT</code>:
+ *     After the task is created, the coordinating node gathers the state of all eligible nodes
+ *     and dispatches the task to the worker node with the lowest load. When the worker node
+ *     receives the request, it sets the task state to INIT immediately, then runs cold start to
+ *     train the RCF model. We track the initialization progress in the task:
+ *     Init_Progress=ModelUpdates/MinSampleSize
+ *
+ * <li><code>RUNNING</code>:
+ *     Once the RCF model gets enough data points and passes training, it starts to detect data
+ *     normally and to output positive anomaly scores. As soon as the RCF model outputs a
+ *     positive anomaly score, we set the task state to RUNNING and the init progress to 100%.
+ *     We track the running progress in the task: Task_Progress=DetectedPieces/AllPieces
+ *
+ * <li><code>FINISHED</code>:
+ *     When all historical data has been detected, we set the task state to FINISHED and the
+ *     task progress to 100%.
+ *
+ * <li><code>STOPPED</code>:
+ *     A user can cancel a running task by stopping the detector, for example to tune a feature
+ *     and rerun it without keeping the current task running. When a historical detector is
+ *     stopped, we mark the task's cancelled flag as true; before running the next piece we check
+ *     this flag and stop the task, then set its state as STOPPED.
+ *
+ * <li><code>FAILED</code>:
+ *     If any exception happens, we set the task state as FAILED.
+ * </ul>
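+ *
+ * A typical lifecycle (sketch): CREATED -> INIT -> RUNNING -> FINISHED, with STOPPED reached
+ * when the user cancels the task and FAILED reached when an exception occurs.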
+ */ +public enum ADTaskState { + CREATED, + INIT, + RUNNING, + FAILED, + STOPPED, + FINISHED +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskType.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskType.java new file mode 100644 index 00000000..e55d94ab --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskType.java @@ -0,0 +1,21 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +public enum ADTaskType { + REALTIME, + HISTORICAL +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index f0e2c9f1..ade2c310 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.builder.SearchSourceBuilder; import com.amazon.opendistroforelasticsearch.ad.annotation.Generated; import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; @@ -83,6 +82,8 @@ public class AnomalyDetector implements Writeable, ToXContentObject { public static final String UI_METADATA_FIELD = "ui_metadata"; public static final String CATEGORY_FIELD = "category_field"; public static final String USER_FIELD = "user"; + public static final String DETECTOR_TYPE_FIELD = "detector_type"; + public static final String DETECTION_DATE_RANGE_FIELD = "detection_date_range"; private final String detectorId; private final Long version; @@ -100,6 +101,8 @@ public class AnomalyDetector implements Writeable, ToXContentObject { private final Instant lastUpdateTime; private final List categoryFields; private User user; + private String detectorType; + private DetectionDateRange detectionDateRange; /** * Constructor function. 
@@ -138,6 +141,48 @@ public AnomalyDetector( Instant lastUpdateTime, List categoryFields, User user + ) { + this( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + null, + null + ); + } + + public AnomalyDetector( + String detectorId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration detectionInterval, + TimeConfiguration windowDelay, + Integer shingleSize, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + List categoryFields, + User user, + String detectorType, + DetectionDateRange detectionDateRange ) { if (Strings.isBlank(name)) { throw new IllegalArgumentException("Detector name should be set"); @@ -157,6 +202,9 @@ public AnomalyDetector( if (categoryFields != null && categoryFields.size() > CATEGORY_FIELD_LIMIT) { throw new IllegalArgumentException(CommonErrorMessages.CATEGORICAL_FIELD_NUMBER_SURPASSED + CATEGORY_FIELD_LIMIT); } + if (((IntervalTimeConfiguration) detectionInterval).getInterval() <= 0) { + throw new IllegalArgumentException("Detection interval must be a positive integer"); + } this.detectorId = detectorId; this.version = version; this.name = name; @@ -173,36 +221,22 @@ public AnomalyDetector( this.lastUpdateTime = lastUpdateTime; this.categoryFields = categoryFields; this.user = user; + this.detectorType = detectorType; + this.detectionDateRange = detectionDateRange; } public AnomalyDetector(StreamInput input) throws IOException { detectorId = input.readString(); version = input.readLong(); - String name = input.readString(); - if (Strings.isBlank(name)) { - throw new IllegalArgumentException("Detector name should be set"); - } - this.name = name; + name = input.readString(); description = input.readString(); - String timeField = input.readString(); - if (timeField == null) { - throw new IllegalArgumentException("Time field should be set"); - } - this.timeField = timeField; - List indices = input.readStringList(); - if (indices == null || indices.isEmpty()) { - throw new IllegalArgumentException("Indices should be set"); - } - this.indices = indices; + timeField = input.readString(); + indices = input.readStringList(); featureAttributes = input.readList(Feature::new); filterQuery = input.readNamedWriteable(QueryBuilder.class); detectionInterval = IntervalTimeConfiguration.readFrom(input); windowDelay = IntervalTimeConfiguration.readFrom(input); - Integer shingleSize = input.readInt(); - if (shingleSize != null && shingleSize < 1) { - throw new IllegalArgumentException("Shingle size must be a positive integer"); - } - this.shingleSize = shingleSize; + shingleSize = input.readInt(); schemaVersion = input.readInt(); this.categoryFields = input.readOptionalStringList(); lastUpdateTime = input.readInstant(); @@ -211,6 +245,12 @@ public AnomalyDetector(StreamInput input) throws IOException { } else { user = null; } + if (input.readBoolean()) { + detectionDateRange = new DetectionDateRange(input); + } else { + detectionDateRange = null; + } + detectorType = input.readOptionalString(); if (input.readBoolean()) { this.uiMetadata = input.readMap(); } else { @@ -244,6 +284,13 @@ public void writeTo(StreamOutput output) throws IOException { } else { output.writeBoolean(false); // user does not exist } + if (detectionDateRange != null) { + output.writeBoolean(true); // 
detectionDateRange exists + detectionDateRange.writeTo(output); + } else { + output.writeBoolean(false); // detectionDateRange does not exist + } + output.writeOptionalString(detectorType); if (uiMetadata != null) { output.writeBoolean(true); output.writeMap(uiMetadata); @@ -282,6 +329,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (user != null) { xContentBuilder.field(USER_FIELD, user); } + if (detectorType != null) { + xContentBuilder.field(DETECTOR_TYPE_FIELD, detectorType); + } + if (detectionDateRange != null) { + xContentBuilder.field(DETECTION_DATE_RANGE_FIELD, detectionDateRange); + } return xContentBuilder.endObject(); } @@ -348,6 +401,7 @@ public static AnomalyDetector parse( Map uiMetadata = null; Instant lastUpdateTime = null; User user = null; + DetectionDateRange detectionDateRange = null; List categoryField = null; @@ -412,11 +466,24 @@ public static AnomalyDetector parse( case USER_FIELD: user = User.parse(parser); break; + case DETECTION_DATE_RANGE_FIELD: + detectionDateRange = DetectionDateRange.parse(parser); + break; default: parser.skipChildren(); break; } } + String detectorType; + if (AnomalyDetector.isRealTimeDetector(detectionDateRange)) { + detectorType = AnomalyDetector.isMultientityDetector(categoryField) + ? AnomalyDetectorType.REALTIME_MULTI_ENTITY.name() + : AnomalyDetectorType.REALTIME_SINGLE_ENTITY.name(); + } else { + detectorType = AnomalyDetector.isMultientityDetector(categoryField) + ? AnomalyDetectorType.HISTORICAL_MULTI_ENTITY.name() + : AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(); + } return new AnomalyDetector( detectorId, version, @@ -433,18 +500,12 @@ public static AnomalyDetector parse( schemaVersion, lastUpdateTime, categoryField, - user + user, + detectorType, + detectionDateRange ); } - public SearchSourceBuilder generateFeatureQuery() { - SearchSourceBuilder generatedFeatureQuery = new SearchSourceBuilder().query(filterQuery); - if (this.getFeatureAttributes() != null) { - this.getFeatureAttributes().stream().forEach(feature -> generatedFeatureQuery.aggregation(feature.getAggregation())); - } - return generatedFeatureQuery; - } - @Generated @Override public boolean equals(Object o) { @@ -463,7 +524,8 @@ public boolean equals(Object o) { && Objects.equal(getWindowDelay(), detector.getWindowDelay()) && Objects.equal(getShingleSize(), detector.getShingleSize()) && Objects.equal(getCategoryField(), detector.getCategoryField()) - && Objects.equal(getUser(), detector.getUser()); + && Objects.equal(getUser(), detector.getUser()) + && Objects.equal(getDetectionDateRange(), detector.getDetectionDateRange()); } @Generated @@ -482,7 +544,10 @@ public int hashCode() { shingleSize, uiMetadata, schemaVersion, - lastUpdateTime + lastUpdateTime, + user, + detectorType, + detectionDateRange ); } @@ -596,7 +661,27 @@ public void setUser(User user) { this.user = user; } + public String getDetectorType() { + return detectorType; + } + + public DetectionDateRange getDetectionDateRange() { + return detectionDateRange; + } + public boolean isMultientityDetector() { - return getCategoryField() != null && getCategoryField().size() > 0; + return AnomalyDetector.isMultientityDetector(getCategoryField()); + } + + private static boolean isMultientityDetector(List categoryFields) { + return categoryFields != null && categoryFields.size() > 0; + } + + public boolean isRealTimeDetector() { + return AnomalyDetector.isRealTimeDetector(getDetectionDateRange()); + } + + private static boolean 
isRealTimeDetector(DetectionDateRange detectionDateRange) { + return detectionDateRange == null || detectionDateRange.getEndTime() == null; } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorType.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorType.java new file mode 100644 index 00000000..900e7f95 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorType.java @@ -0,0 +1,23 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +public enum AnomalyDetectorType { + REALTIME_SINGLE_ENTITY, + REALTIME_MULTI_ENTITY, + HISTORICAL_SINGLE_ENTITY, + HISTORICAL_MULTI_ENTITY +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResult.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResult.java index 927f1a5b..e459b335 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResult.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResult.java @@ -64,8 +64,10 @@ public class AnomalyResult implements ToXContentObject, Writeable { public static final String ERROR_FIELD = "error"; public static final String ENTITY_FIELD = "entity"; public static final String USER_FIELD = "user"; + public static final String TASK_ID_FIELD = "task_id"; private final String detectorId; + private final String taskId; private final Double anomalyScore; private final Double anomalyGrade; private final Double confidence; @@ -124,8 +126,43 @@ public AnomalyResult( List entity, User user, Integer schemaVersion + ) { + this( + detectorId, + null, + anomalyScore, + anomalyGrade, + confidence, + featureData, + dataStartTime, + dataEndTime, + executionStartTime, + executionEndTime, + error, + entity, + user, + schemaVersion + ); + } + + public AnomalyResult( + String detectorId, + String taskId, + Double anomalyScore, + Double anomalyGrade, + Double confidence, + List featureData, + Instant dataStartTime, + Instant dataEndTime, + Instant executionStartTime, + Instant executionEndTime, + String error, + List entity, + User user, + Integer schemaVersion ) { this.detectorId = detectorId; + this.taskId = taskId; this.anomalyScore = anomalyScore; this.anomalyGrade = anomalyGrade; this.confidence = confidence; @@ -146,7 +183,7 @@ public AnomalyResult(StreamInput input) throws IOException { this.anomalyGrade = input.readDouble(); this.confidence = input.readDouble(); int featureSize = input.readVInt(); - this.featureData = new ArrayList(featureSize); + this.featureData = new ArrayList<>(featureSize); for (int i = 0; i < featureSize; i++) { featureData.add(new FeatureData(input)); } @@ -155,10 +192,14 @@ public AnomalyResult(StreamInput input) throws IOException { this.executionStartTime = input.readInstant(); this.executionEndTime = input.readInstant(); this.error = input.readOptionalString(); - int entitySize = 
input.readVInt(); - this.entity = new ArrayList(entitySize); - for (int i = 0; i < entitySize; i++) { - entity.add(new Entity(input)); + if (input.readBoolean()) { + int entitySize = input.readVInt(); + this.entity = new ArrayList<>(entitySize); + for (int i = 0; i < entitySize; i++) { + entity.add(new Entity(input)); + } + } else { + this.entity = null; } if (input.readBoolean()) { this.user = new User(input); @@ -166,6 +207,7 @@ public AnomalyResult(StreamInput input) throws IOException { user = null; } this.schemaVersion = input.readInt(); + this.taskId = input.readOptionalString(); } @Override @@ -206,6 +248,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (user != null) { xContentBuilder.field(USER_FIELD, user); } + if (taskId != null) { + xContentBuilder.field(TASK_ID_FIELD, taskId); + } return xContentBuilder.endObject(); } @@ -223,6 +268,7 @@ public static AnomalyResult parse(XContentParser parser) throws IOException { List entityList = null; User user = null; Integer schemaVersion = CommonValue.NO_SCHEMA_VERSION; + String taskId = null; ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -276,6 +322,9 @@ public static AnomalyResult parse(XContentParser parser) throws IOException { case CommonName.SCHEMA_VERSION_FIELD: schemaVersion = parser.intValue(); break; + case TASK_ID_FIELD: + taskId = parser.text(); + break; default: parser.skipChildren(); break; @@ -283,6 +332,7 @@ public static AnomalyResult parse(XContentParser parser) throws IOException { } return new AnomalyResult( detectorId, + taskId, anomalyScore, anomalyGrade, confidence, @@ -307,6 +357,7 @@ public boolean equals(Object o) { return false; AnomalyResult that = (AnomalyResult) o; return Objects.equal(getDetectorId(), that.getDetectorId()) + && Objects.equal(getTaskId(), that.getTaskId()) && Objects.equal(getAnomalyScore(), that.getAnomalyScore()) && Objects.equal(getAnomalyGrade(), that.getAnomalyGrade()) && Objects.equal(getConfidence(), that.getConfidence()) @@ -325,6 +376,7 @@ public int hashCode() { return Objects .hashCode( getDetectorId(), + getTaskId(), getAnomalyScore(), getAnomalyGrade(), getConfidence(), @@ -343,6 +395,7 @@ public int hashCode() { public String toString() { return new ToStringBuilder(this) .append("detectorId", detectorId) + .append("taskId", taskId) .append("anomalyScore", anomalyScore) .append("anomalyGrade", anomalyGrade) .append("confidence", confidence) @@ -360,6 +413,10 @@ public String getDetectorId() { return detectorId; } + public String getTaskId() { + return taskId; + } + public Double getAnomalyScore() { return anomalyScore; } @@ -415,9 +472,14 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInstant(executionStartTime); out.writeInstant(executionEndTime); out.writeOptionalString(error); - out.writeVInt(entity.size()); - for (Entity entityItem : entity) { - entityItem.writeTo(out); + if (entity != null) { + out.writeBoolean(true); + out.writeVInt(entity.size()); + for (Entity entityItem : entity) { + entityItem.writeTo(out); + } + } else { + out.writeBoolean(false); } if (user != null) { out.writeBoolean(true); // user exists @@ -426,5 +488,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); // user does not exist } out.writeInt(schemaVersion); + out.writeOptionalString(taskId); } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRange.java 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRange.java new file mode 100644 index 00000000..e6076fc8 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRange.java @@ -0,0 +1,131 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +import java.io.IOException; +import java.time.Instant; + +import org.apache.commons.lang.builder.ToStringBuilder; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import com.amazon.opendistroforelasticsearch.ad.annotation.Generated; +import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; +import com.google.common.base.Objects; + +public class DetectionDateRange implements ToXContentObject, Writeable { + + public static final String START_TIME_FIELD = "start_time"; + public static final String END_TIME_FIELD = "end_time"; + + private final Instant startTime; + private final Instant endTime; + + public DetectionDateRange(Instant startTime, Instant endTime) { + this.startTime = startTime; + this.endTime = endTime; + if (startTime == null) { + throw new IllegalArgumentException("Detection data range's start time must not be null"); + } + if (endTime == null) { + throw new IllegalArgumentException("Detection data range's end time must not be null"); + } + } + + public DetectionDateRange(StreamInput in) throws IOException { + this.startTime = in.readInstant(); + this.endTime = in.readInstant(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + XContentBuilder xContentBuilder = builder.startObject(); + if (startTime != null) { + xContentBuilder.field(START_TIME_FIELD, startTime.toEpochMilli()); + } + if (endTime != null) { + xContentBuilder.field(END_TIME_FIELD, endTime.toEpochMilli()); + } + return xContentBuilder.endObject(); + } + + public static DetectionDateRange parse(XContentParser parser) throws IOException { + Instant startTime = null; + Instant endTime = null; + + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = parser.currentName(); + parser.nextToken(); + + switch (fieldName) { + case START_TIME_FIELD: + startTime = ParseUtils.toInstant(parser); + break; + case END_TIME_FIELD: + endTime = ParseUtils.toInstant(parser); + break; + default: + parser.skipChildren(); + break; + } + } + return new DetectionDateRange(startTime, endTime); + } + + @Generated + @Override + public boolean equals(Object o) { + 
if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + DetectionDateRange that = (DetectionDateRange) o; + return Objects.equal(getStartTime(), that.getStartTime()) && Objects.equal(getEndTime(), that.getEndTime()); + } + + @Generated + @Override + public int hashCode() { + return Objects.hashCode(getStartTime(), getEndTime()); + } + + @Generated + @Override + public String toString() { + return new ToStringBuilder(this).append("startTime", startTime).append("endTime", endTime).toString(); + } + + public Instant getStartTime() { + return startTime; + } + + public Instant getEndTime() { + return endTime; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInstant(startTime); + out.writeInstant(endTime); + } +} diff --git a/src/main/resources/mappings/anomaly-detection-state.json b/src/main/resources/mappings/anomaly-detection-state.json index dcb0f7c0..f618e1b4 100644 --- a/src/main/resources/mappings/anomaly-detection-state.json +++ b/src/main/resources/mappings/anomaly-detection-state.json @@ -1,7 +1,7 @@ { "dynamic": false, "_meta": { - "schema_version": 1 + "schema_version": 2 }, "properties": { "schema_version": { @@ -13,6 +13,205 @@ }, "error": { "type": "text" + }, + "started_by": { + "type": "keyword" + }, + "stopped_by": { + "type": "keyword" + }, + "detector_id": { + "type": "keyword" + }, + "state": { + "type": "keyword" + }, + "task_progress": { + "type": "float" + }, + "init_progress": { + "type": "float" + }, + "current_piece": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "execution_start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "execution_end_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "is_latest": { + "type": "boolean" + }, + "task_type": { + "type": "keyword" + }, + "checkpoint_id": { + "type": "keyword" + }, + "detector": { + "properties": { + "schema_version": { + "type": "integer" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "description": { + "type": "text" + }, + "time_field": { + "type": "keyword" + }, + "indices": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "filter_query": { + "type": "object", + "enabled": false + }, + "feature_attributes": { + "type": "nested", + "properties": { + "feature_id": { + "type": "keyword", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "feature_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "feature_enabled": { + "type": "boolean" + }, + "aggregation_query": { + "type": "object", + "enabled": false + } + } + }, + "detection_interval": { + "properties": { + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + }, + "window_delay": { + "properties": { + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + }, + "shingle_size": { + "type": "integer" + }, + "last_update_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "ui_metadata": { + "type": "object", + "enabled": false + }, + "user": { + "type": "nested", + "properties": { + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "backend_roles": { + 
"type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "roles": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + }, + "custom_attribute_names": { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword" + } + } + } + } + }, + "category_field": { + "type": "keyword" + }, + "detector_type": { + "type": "keyword" + }, + "detection_date_range": { + "properties": { + "start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "end_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + } + } + } + } } } -} \ No newline at end of file +} diff --git a/src/main/resources/mappings/anomaly-detectors.json b/src/main/resources/mappings/anomaly-detectors.json index 94483c14..538682f8 100644 --- a/src/main/resources/mappings/anomaly-detectors.json +++ b/src/main/resources/mappings/anomaly-detectors.json @@ -1,7 +1,7 @@ { "dynamic": false, "_meta": { - "schema_version": 2 + "schema_version": 3 }, "properties": { "schema_version": { @@ -144,6 +144,21 @@ }, "category_field": { "type": "keyword" + }, + "detector_type": { + "type": "keyword" + }, + "detection_date_range": { + "properties": { + "start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "end_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + } + } } } } diff --git a/src/main/resources/mappings/anomaly-results.json b/src/main/resources/mappings/anomaly-results.json index 5ec7a42a..9fcaab83 100644 --- a/src/main/resources/mappings/anomaly-results.json +++ b/src/main/resources/mappings/anomaly-results.json @@ -1,7 +1,7 @@ { "dynamic": false, "_meta": { - "schema_version": 2 + "schema_version": 3 }, "properties": { "detector_id": { @@ -100,6 +100,9 @@ }, "schema_version": { "type": "integer" + }, + "task_id": { + "type": "keyword" } } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index cf21f4d8..dc3f7a5b 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -105,10 +105,14 @@ import com.amazon.opendistroforelasticsearch.ad.constant.CommonName; import com.amazon.opendistroforelasticsearch.ad.constant.CommonValue; +import com.amazon.opendistroforelasticsearch.ad.model.ADTask; +import com.amazon.opendistroforelasticsearch.ad.model.ADTaskState; +import com.amazon.opendistroforelasticsearch.ad.model.ADTaskType; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorExecutionInput; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyResult; +import com.amazon.opendistroforelasticsearch.ad.model.DetectionDateRange; import com.amazon.opendistroforelasticsearch.ad.model.DetectorInternalState; import com.amazon.opendistroforelasticsearch.ad.model.Entity; import com.amazon.opendistroforelasticsearch.ad.model.Feature; @@ -203,16 +207,38 @@ public static NamedXContentRegistry xContentRegistry() { } public static AnomalyDetector randomAnomalyDetector(Map uiMetadata, Instant lastUpdateTime) throws IOException { - return randomAnomalyDetector(ImmutableList.of(randomFeature()), uiMetadata, lastUpdateTime); + return randomAnomalyDetector(ImmutableList.of(randomFeature()), uiMetadata, lastUpdateTime, null, 
null); } public static AnomalyDetector randomAnomalyDetector(Map uiMetadata, Instant lastUpdateTime, boolean featureEnabled) throws IOException { - return randomAnomalyDetector(ImmutableList.of(randomFeature(featureEnabled)), uiMetadata, lastUpdateTime); + return randomAnomalyDetector(ImmutableList.of(randomFeature(featureEnabled)), uiMetadata, lastUpdateTime, null, null); } public static AnomalyDetector randomAnomalyDetector(List features, Map uiMetadata, Instant lastUpdateTime) throws IOException { + return randomAnomalyDetector(features, uiMetadata, lastUpdateTime, null, null); + } + + public static AnomalyDetector randomAnomalyDetector( + List features, + Map uiMetadata, + Instant lastUpdateTime, + String detectorType, + DetectionDateRange dateRange + ) throws IOException { + return randomAnomalyDetector(features, uiMetadata, lastUpdateTime, detectorType, dateRange, true); + } + + public static AnomalyDetector randomAnomalyDetector( + List features, + Map uiMetadata, + Instant lastUpdateTime, + String detectorType, + DetectionDateRange dateRange, + boolean withUser + ) throws IOException { + User user = withUser ? randomUser() : null; return new AnomalyDetector( randomAlphaOfLength(10), randomLong(), @@ -229,7 +255,16 @@ public static AnomalyDetector randomAnomalyDetector(List features, Map< randomInt(), lastUpdateTime, null, - randomUser() + user, + detectorType, + dateRange + ); + } + + public static DetectionDateRange randomDetectionDateRange() { + return new DetectionDateRange( + Instant.now().truncatedTo(ChronoUnit.SECONDS).minus(10, ChronoUnit.DAYS), + Instant.now().truncatedTo(ChronoUnit.SECONDS) ); } @@ -432,20 +467,26 @@ public static FeatureData randomFeatureData() { } public static AnomalyResult randomAnomalyDetectResult() { - return randomAnomalyDetectResult(randomDouble(), randomAlphaOfLength(5)); + return randomAnomalyDetectResult(randomDouble(), randomAlphaOfLength(5), null); } public static AnomalyResult randomAnomalyDetectResult(double score) { - return randomAnomalyDetectResult(randomDouble(), null); + return randomAnomalyDetectResult(randomDouble(), null, null); } public static AnomalyResult randomAnomalyDetectResult(String error) { - return randomAnomalyDetectResult(Double.NaN, error); + return randomAnomalyDetectResult(Double.NaN, error, null); } - public static AnomalyResult randomAnomalyDetectResult(double score, String error) { + public static AnomalyResult randomAnomalyDetectResult(double score, String error, String taskId) { + return randomAnomalyDetectResult(score, error, taskId, true); + } + + public static AnomalyResult randomAnomalyDetectResult(double score, String error, String taskId, boolean withUser) { + User user = withUser ? 
randomUser() : null; return new AnomalyResult( randomAlphaOfLength(5), + taskId, score, randomDouble(), randomDouble(), @@ -455,7 +496,8 @@ public static AnomalyResult randomAnomalyDetectResult(double score, String error Instant.now().truncatedTo(ChronoUnit.SECONDS), Instant.now().truncatedTo(ChronoUnit.SECONDS), error, - randomUser(), + null, + user, CommonValue.NO_SCHEMA_VERSION ); } @@ -662,10 +704,6 @@ public static SearchResponse createEmptySearchResponse() throws IOException { ); } - public static AnomalyResult randomDetectState() { - return randomAnomalyDetectResult(randomDouble(), randomAlphaOfLength(5)); - } - public static DetectorInternalState randomDetectState(String error) { return randomDetectState(error, Instant.now()); } @@ -691,4 +729,32 @@ public static Map>> create mappings.put(index, Collections.singletonMap(CommonName.MAPPING_TYPE, Collections.singletonMap(fieldName, fieldMappingMetadata))); return mappings; } + + public static ADTask randomAdTask(String taskId, ADTaskState state, Instant executionEndTime, String stoppedBy, boolean withDetector) + throws IOException { + AnomalyDetector detector = withDetector + ? randomAnomalyDetector(ImmutableMap.of(), Instant.now().truncatedTo(ChronoUnit.SECONDS)) + : null; + executionEndTime = executionEndTime == null ? null : executionEndTime.truncatedTo(ChronoUnit.SECONDS); + ADTask task = ADTask + .builder() + .taskId(taskId) + .taskType(ADTaskType.HISTORICAL.name()) + .detectorId(randomAlphaOfLength(5)) + .detector(detector) + .state(state.name()) + .taskProgress(0.5f) + .initProgress(1.0f) + .currentPiece(Instant.now().truncatedTo(ChronoUnit.SECONDS).minus(randomIntBetween(1, 100), ChronoUnit.MINUTES)) + .executionStartTime(Instant.now().truncatedTo(ChronoUnit.SECONDS).minus(100, ChronoUnit.MINUTES)) + .executionEndTime(executionEndTime) + .isLatest(true) + .error(randomAlphaOfLength(5)) + .checkpointId(randomAlphaOfLength(5)) + .lastUpdateTime(Instant.now().truncatedTo(ChronoUnit.SECONDS)) + .startedBy(randomAlphaOfLength(5)) + .stoppedBy(stoppedBy) + .build(); + return task; + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java index 44eb6a1b..d7219510 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java @@ -226,7 +226,6 @@ public void setup() throws Exception { detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); when(detector.getTimeField()).thenReturn("testTimeField"); when(detector.getIndices()).thenReturn(Arrays.asList("testIndices")); - when(detector.generateFeatureQuery()).thenReturn(featureQuery); when(detector.getDetectionInterval()).thenReturn(detectionInterval); when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery()); when(detector.getCategoryField()).thenReturn(Collections.singletonList("a")); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskTests.java new file mode 100644 index 00000000..a4439a76 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ADTaskTests.java @@ -0,0 +1,99 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). 
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Collection; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; + +public class ADTaskTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class, AnomalyDetectorPlugin.class); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); + } + + public void testAdTaskSerialization() throws IOException { + ADTask adTask = TestHelpers.randomAdTask(randomAlphaOfLength(5), ADTaskState.STOPPED, Instant.now(), randomAlphaOfLength(5), true); + BytesStreamOutput output = new BytesStreamOutput(); + adTask.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + ADTask parsedADTask = new ADTask(input); + assertEquals("AD task serialization doesn't work", adTask, parsedADTask); + } + + public void testAdTaskSerializationWithNullDetector() throws IOException { + ADTask adTask = TestHelpers.randomAdTask(randomAlphaOfLength(5), ADTaskState.STOPPED, Instant.now(), randomAlphaOfLength(5), false); + BytesStreamOutput output = new BytesStreamOutput(); + adTask.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + ADTask parsedADTask = new ADTask(input); + assertEquals("AD task serialization doesn't work", adTask, parsedADTask); + } + + public void testParseADTask() throws IOException { + ADTask adTask = TestHelpers + .randomAdTask(null, ADTaskState.STOPPED, Instant.now().truncatedTo(ChronoUnit.SECONDS), randomAlphaOfLength(5), true); + String taskId = randomAlphaOfLength(5); + adTask.setTaskId(taskId); + String adTaskString = TestHelpers.xContentBuilderToString(adTask.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + ADTask parsedADTask = ADTask.parse(TestHelpers.parser(adTaskString), adTask.getTaskId()); + assertEquals("Parsing AD task doesn't work", adTask, parsedADTask); + } + + public void testParseADTaskWithoutTaskId() throws IOException { + String taskId = null; + ADTask adTask = TestHelpers + .randomAdTask(taskId, ADTaskState.STOPPED, Instant.now().truncatedTo(ChronoUnit.SECONDS), randomAlphaOfLength(5), true); + String adTaskString = TestHelpers.xContentBuilderToString(adTask.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + ADTask parsedADTask = 
ADTask.parse(TestHelpers.parser(adTaskString)); + assertEquals("Parsing AD task doesn't work", adTask, parsedADTask); + } + + public void testParseADTaskWithNullDetector() throws IOException { + String taskId = randomAlphaOfLength(5); + ADTask adTask = TestHelpers + .randomAdTask(taskId, ADTaskState.STOPPED, Instant.now().truncatedTo(ChronoUnit.SECONDS), randomAlphaOfLength(5), false); + String adTaskString = TestHelpers.xContentBuilderToString(adTask.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + ADTask parsedADTask = ADTask.parse(TestHelpers.parser(adTaskString), taskId); + assertEquals("Parsing AD task doesn't work", adTask, parsedADTask); + } + + public void testParseNullableFields() throws IOException { + ADTask adTask = ADTask.builder().build(); + String adTaskString = TestHelpers.xContentBuilderToString(adTask.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + ADTask parsedADTask = ADTask.parse(TestHelpers.parser(adTaskString)); + assertEquals("Parsing AD task doesn't work", adTask, parsedADTask); + } + +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorSerializationTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorSerializationTests.java index 4cde2f87..4b442d80 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorSerializationTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorSerializationTests.java @@ -45,7 +45,6 @@ protected NamedWriteableRegistry writableRegistry() { public void testDetectorWithUiMetadata() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); BytesStreamOutput output = new BytesStreamOutput(); - System.out.println(detector.toString()); detector.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); AnomalyDetector parsedDetector = new AnomalyDetector(input); @@ -55,7 +54,6 @@ public void testDetectorWithUiMetadata() throws IOException { public void testDetectorWithoutUiMetadata() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(null, Instant.now()); BytesStreamOutput output = new BytesStreamOutput(); - System.out.println(detector.toString()); detector.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); AnomalyDetector parsedDetector = new AnomalyDetector(input); @@ -65,7 +63,6 @@ public void testDetectorWithoutUiMetadata() throws IOException { public void testHCDetector() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields("testId", ImmutableList.of("category_field")); BytesStreamOutput output = new BytesStreamOutput(); - System.out.println(detector.toString()); detector.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); AnomalyDetector parsedDetector = new AnomalyDetector(input); @@ -76,7 +73,22 @@ public void testWithoutUser() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields("testId", ImmutableList.of("category_field")); detector.setUser(null); BytesStreamOutput output = new BytesStreamOutput(); - System.out.println(detector.toString()); + detector.writeTo(output); + NamedWriteableAwareStreamInput 
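
The serialization tests in this patch all follow the same write-then-read round trip, and they wrap the stream in a NamedWriteableAwareStreamInput backed by the node's NamedWriteableRegistry because the detector model embeds named writeables (for example its filter query); a bare StreamInput could not resolve those. A condensed sketch of the pattern, assuming only the ESSingleNodeTestCase setup already shown in these test classes:

// Requires the stream imports already present in these tests plus
// org.elasticsearch.common.io.stream.Writeable and StreamInput.
private <T extends Writeable> T roundTrip(T original, Writeable.Reader<T> reader) throws IOException {
    try (BytesStreamOutput output = new BytesStreamOutput()) {
        original.writeTo(output);
        try (StreamInput input =
            new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) {
            return reader.read(input);
        }
    }
}

// Usage inside one of the tests above:
// ADTask copy = roundTrip(adTask, ADTask::new);
// assertEquals(adTask, copy);
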
input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + AnomalyDetector parsedDetector = new AnomalyDetector(input); + assertTrue(parsedDetector.equals(detector)); + } + + public void testHistoricalDetector() throws IOException { + AnomalyDetector detector = TestHelpers + .randomAnomalyDetector( + ImmutableList.of(TestHelpers.randomFeature()), + ImmutableMap.of(randomAlphaOfLength(5), randomAlphaOfLength(5)), + Instant.now(), + AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), + TestHelpers.randomDetectionDateRange() + ); + BytesStreamOutput output = new BytesStreamOutput(); detector.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); AnomalyDetector parsedDetector = new AnomalyDetector(input); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java index 42994ee8..eb9359ea 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java @@ -19,8 +19,10 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.Locale; +import java.util.concurrent.TimeUnit; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -42,6 +44,70 @@ public void testParseAnomalyDetector() throws IOException { assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); } + public void testParseAnomalyDetectorWithoutParams() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder())); + LOG.info(detectorString); + detectorString = detectorString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyDetector parsedDetector = AnomalyDetector.parse(TestHelpers.parser(detectorString)); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + + public void testParseAnomalyDetectorWithCustomDetectionDelay() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder())); + LOG.info(detectorString); + TimeValue detectionInterval = new TimeValue(1, TimeUnit.MINUTES); + TimeValue detectionWindowDelay = new TimeValue(10, TimeUnit.MINUTES); + detectorString = detectorString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyDetector parsedDetector = AnomalyDetector + .parse( + TestHelpers.parser(detectorString), + detector.getDetectorId(), + detector.getVersion(), + detectionInterval, + detectionWindowDelay + ); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + + public void testParseHistoricalAnomalyDetector() throws IOException { + AnomalyDetector detector = TestHelpers + .randomAnomalyDetector( + ImmutableList.of(TestHelpers.randomFeature()), + 
TestHelpers.randomUiMetadata(), + Instant.now(), + AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), + TestHelpers.randomDetectionDateRange() + ); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(detectorString); + detectorString = detectorString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyDetector parsedDetector = AnomalyDetector.parse(TestHelpers.parser(detectorString)); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + + public void testParseHistoricalAnomalyDetectorWithoutUser() throws IOException { + AnomalyDetector detector = TestHelpers + .randomAnomalyDetector( + ImmutableList.of(TestHelpers.randomFeature()), + TestHelpers.randomUiMetadata(), + Instant.now(), + AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), + TestHelpers.randomDetectionDateRange(), + false + ); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(detectorString); + detectorString = detectorString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyDetector parsedDetector = AnomalyDetector.parse(TestHelpers.parser(detectorString)); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + public void testParseAnomalyDetectorWithNullFilterQuery() throws IOException { String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," @@ -296,6 +362,60 @@ public void testNullDetectionInterval() throws Exception { ); } + public void testInvalidDetectionInterval() { + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> new AnomalyDetector( + randomAlphaOfLength(10), + randomLong(), + randomAlphaOfLength(20), + randomAlphaOfLength(30), + randomAlphaOfLength(5), + ImmutableList.of(randomAlphaOfLength(10).toLowerCase()), + ImmutableList.of(TestHelpers.randomFeature()), + TestHelpers.randomQuery(), + new IntervalTimeConfiguration(0, ChronoUnit.MINUTES), + TestHelpers.randomIntervalTimeConfiguration(), + randomIntBetween(1, 2000), + null, + randomInt(), + Instant.now(), + null, + null, + null, + null + ) + ); + assertEquals("Detection interval must be a positive integer", exception.getMessage()); + } + + public void testInvalidWindowDelay() { + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> new AnomalyDetector( + randomAlphaOfLength(10), + randomLong(), + randomAlphaOfLength(20), + randomAlphaOfLength(30), + randomAlphaOfLength(5), + ImmutableList.of(randomAlphaOfLength(10).toLowerCase()), + ImmutableList.of(TestHelpers.randomFeature()), + TestHelpers.randomQuery(), + new IntervalTimeConfiguration(1, ChronoUnit.MINUTES), + new IntervalTimeConfiguration(-1, ChronoUnit.MINUTES), + randomIntBetween(1, 2000), + null, + randomInt(), + Instant.now(), + null, + null, + null, + null + ) + ); + assertEquals("Interval -1 should be non-negative", exception.getMessage()); + } + public void testNullFeatures() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(null, null, Instant.now().truncatedTo(ChronoUnit.SECONDS)); String detectorString = 
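
Several parse tests above mutate the serialized JSON with replaceFirst("\\{", ...) before re-parsing. The intent is a forward-compatibility check: an extra, unrecognized field is injected at the front of the object and the parser is expected to skip it rather than fail. The same injection step, pulled out as a self-contained helper (the class and method names here are illustrative, not part of the patch):

import java.util.Locale;

public final class UnknownFieldInjector {

    private UnknownFieldInjector() {}

    /**
     * Prepends one unrecognized field to a serialized JSON object, e.g.
     * {"name":"d1"} becomes {"x1":"junk","name":"d1"}; parsers under test
     * should ignore the extra field and still rebuild an equal object.
     */
    public static String withUnknownField(String json, String fieldName, String fieldValue) {
        return json.replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", fieldName, fieldValue));
    }
}
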
TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResultTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResultTests.java index 9911bb1d..7a0e36f3 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResultTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyResultTests.java @@ -16,32 +16,177 @@ package com.amazon.opendistroforelasticsearch.ad.model; import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Collection; import java.util.Locale; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonValue; +import com.google.common.base.Objects; +import com.google.common.collect.ImmutableList; -public class AnomalyResultTests extends ESTestCase { +public class AnomalyResultTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class, AnomalyDetectorPlugin.class); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); + } public void testParseAnomalyDetector() throws IOException { - AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(); + AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(0.8, randomAlphaOfLength(5), null); + String detectResultString = TestHelpers + .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + detectResultString = detectResultString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); + assertEquals("Parsing anomaly detect result doesn't work", detectResult, parsedDetectResult); + } + + public void testParseAnomalyDetectorWithoutUser() throws IOException { + AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(0.8, randomAlphaOfLength(5), randomAlphaOfLength(5), false); + String detectResultString = TestHelpers + .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + detectResultString = detectResultString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); + assertEquals("Parsing anomaly detect result doesn't work", detectResult, parsedDetectResult); + } + + public void testParseAnomalyDetectorWithoutNormalResult() throws IOException { + AnomalyResult detectResult = new AnomalyResult( + randomAlphaOfLength(5), + randomAlphaOfLength(5), + null, + null, + null, + null, + 
Instant.now().truncatedTo(ChronoUnit.SECONDS), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + null, + null, + randomAlphaOfLength(5), + null, + TestHelpers.randomUser(), + CommonValue.NO_SCHEMA_VERSION + ); + String detectResultString = TestHelpers + .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + detectResultString = detectResultString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); + assertTrue(parsedDetectResult.getFeatureData().size() == 0); + assertTrue( + Objects.equal(detectResult.getDetectorId(), parsedDetectResult.getDetectorId()) + && Objects.equal(detectResult.getTaskId(), parsedDetectResult.getTaskId()) + && Objects.equal(detectResult.getAnomalyScore(), parsedDetectResult.getAnomalyScore()) + && Objects.equal(detectResult.getAnomalyGrade(), parsedDetectResult.getAnomalyGrade()) + && Objects.equal(detectResult.getConfidence(), parsedDetectResult.getConfidence()) + && Objects.equal(detectResult.getDataStartTime(), parsedDetectResult.getDataStartTime()) + && Objects.equal(detectResult.getDataEndTime(), parsedDetectResult.getDataEndTime()) + && Objects.equal(detectResult.getExecutionStartTime(), parsedDetectResult.getExecutionStartTime()) + && Objects.equal(detectResult.getExecutionEndTime(), parsedDetectResult.getExecutionEndTime()) + && Objects.equal(detectResult.getError(), parsedDetectResult.getError()) + && Objects.equal(detectResult.getEntity(), parsedDetectResult.getEntity()) + ); + } + + public void testParseAnomalyDetectorWithNanAnomalyResult() throws IOException { + AnomalyResult detectResult = new AnomalyResult( + randomAlphaOfLength(5), + randomAlphaOfLength(5), + Double.NaN, + Double.NaN, + Double.NaN, + ImmutableList.of(), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + null, + null, + randomAlphaOfLength(5), + null, + null, + CommonValue.NO_SCHEMA_VERSION + ); String detectResultString = TestHelpers .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); detectResultString = detectResultString .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); - assertEquals( - "Parsing anomaly detect result doesn't work", - // String.format( - // Locale.ROOT, - // "\"Parsing anomaly detect result doesn't work\". 
Expected %s, but get %s", - // detectResult, - // parsedDetectResult - // ), - detectResult, - parsedDetectResult + assertNull(parsedDetectResult.getAnomalyGrade()); + assertNull(parsedDetectResult.getAnomalyScore()); + assertNull(parsedDetectResult.getConfidence()); + assertTrue( + Objects.equal(detectResult.getDetectorId(), parsedDetectResult.getDetectorId()) + && Objects.equal(detectResult.getTaskId(), parsedDetectResult.getTaskId()) + && Objects.equal(detectResult.getFeatureData(), parsedDetectResult.getFeatureData()) + && Objects.equal(detectResult.getDataStartTime(), parsedDetectResult.getDataStartTime()) + && Objects.equal(detectResult.getDataEndTime(), parsedDetectResult.getDataEndTime()) + && Objects.equal(detectResult.getExecutionStartTime(), parsedDetectResult.getExecutionStartTime()) + && Objects.equal(detectResult.getExecutionEndTime(), parsedDetectResult.getExecutionEndTime()) + && Objects.equal(detectResult.getError(), parsedDetectResult.getError()) + && Objects.equal(detectResult.getEntity(), parsedDetectResult.getEntity()) ); } + + public void testParseAnomalyDetectorWithTaskId() throws IOException { + AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(0.8, randomAlphaOfLength(5), randomAlphaOfLength(5)); + String detectResultString = TestHelpers + .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + detectResultString = detectResultString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); + assertEquals("Parsing anomaly detect result doesn't work", detectResult, parsedDetectResult); + } + + public void testParseAnomalyDetectorWithEntity() throws IOException { + AnomalyResult detectResult = TestHelpers.randomMultiEntityAnomalyDetectResult(0.8, 0.5); + String detectResultString = TestHelpers + .xContentBuilderToString(detectResult.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + detectResultString = detectResultString + .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); + AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); + assertEquals("Parsing anomaly detect result doesn't work", detectResult, parsedDetectResult); + } + + public void testSerializeAnomalyResult() throws IOException { + AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(0.8, randomAlphaOfLength(5), randomAlphaOfLength(5)); + BytesStreamOutput output = new BytesStreamOutput(); + detectResult.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + AnomalyResult parsedDetectResult = new AnomalyResult(input); + assertTrue(parsedDetectResult.equals(detectResult)); + } + + public void testSerializeAnomalyResultWithoutUser() throws IOException { + AnomalyResult detectResult = TestHelpers.randomAnomalyDetectResult(0.8, randomAlphaOfLength(5), randomAlphaOfLength(5), false); + BytesStreamOutput output = new BytesStreamOutput(); + detectResult.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + AnomalyResult parsedDetectResult = new AnomalyResult(input); + assertTrue(parsedDetectResult.equals(detectResult)); + } + + public void testSerializeAnomalyResultWithEntity() throws 
IOException { + AnomalyResult detectResult = TestHelpers.randomMultiEntityAnomalyDetectResult(0.8, 0.5); + BytesStreamOutput output = new BytesStreamOutput(); + detectResult.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + AnomalyResult parsedDetectResult = new AnomalyResult(input); + assertTrue(parsedDetectResult.equals(detectResult)); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRangeTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRangeTests.java new file mode 100644 index 00000000..1cc6d501 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/DetectionDateRangeTests.java @@ -0,0 +1,68 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import java.io.IOException; +import java.time.Instant; +import java.util.Collection; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; + +public class DetectionDateRangeTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class, AnomalyDetectorPlugin.class); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); + } + + public void testParseDetectionDateRangeWithNullStartTime() { + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> new DetectionDateRange(null, Instant.now()) + ); + assertEquals("Detection data range's start time must not be null", exception.getMessage()); + } + + public void testParseDetectionDateRangeWithNullEndTime() { + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> new DetectionDateRange(Instant.now(), null) + ); + assertEquals("Detection data range's end time must not be null", exception.getMessage()); + } + + public void testSerializeDetectoinDateRange() throws IOException { + DetectionDateRange dateRange = TestHelpers.randomDetectionDateRange(); + BytesStreamOutput output = new BytesStreamOutput(); + dateRange.writeTo(output); + NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + DetectionDateRange parsedDateRange = new DetectionDateRange(input); + assertTrue(parsedDateRange.equals(dateRange)); + } +} From e0dcaec6a8ba704f92be1dbc404bbf10bcc38881 Mon Sep 17 00:00:00 
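
The tests above pin down the DetectionDateRange contract introduced for historical detectors: both bounds are mandatory and the range survives a transport round trip. For orientation, this is how a caller would build such a range (a sketch; the 30-day window is an arbitrary example, and the imports are the java.time ones already used in these tests):

// A detection window covering the last 30 days (timestamps truncated to seconds, as elsewhere in these tests).
Instant end = Instant.now().truncatedTo(ChronoUnit.SECONDS);
DetectionDateRange last30Days = new DetectionDateRange(end.minus(30, ChronoUnit.DAYS), end);
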
2001 From: Yaliang <49084640+ylwu-amzn@users.noreply.github.com> Date: Thu, 17 Dec 2020 04:52:15 +0000 Subject: [PATCH 06/13] add ad task stats (#332) * add ad task stats * change historical detector stats name to historical single entity detector;change getAdStatsResponse as protected method * move jvm heap stats to internal stats --- .../ad/AnomalyDetectorPlugin.java | 5 + .../ad/stats/InternalStatNames.java | 39 +++++ .../ad/stats/StatNames.java | 10 +- .../ADStatsNodesTransportAction.java | 12 +- .../StatsAnomalyDetectorResponse.java | 4 + .../StatsAnomalyDetectorTransportAction.java | 50 ++++-- .../ad/TestHelpers.java | 22 +++ .../ADStatsNodesTransportActionTests.java | 37 ++++- ...tsAnomalyDetectorTransportActionTests.java | 151 ++++++++++++++++++ 9 files changed, 311 insertions(+), 19 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/InternalStatNames.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index 5961be1b..bc4e9fda 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -460,6 +460,11 @@ public Collection createComponents( new ADStat<>(true, new IndexStatusSupplier(indexUtils, DetectorInternalState.DETECTOR_STATE_INDEX)) ) .put(StatNames.DETECTOR_COUNT.getName(), new ADStat<>(true, new SettableSupplier())) + .put(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName(), new ADStat<>(true, new SettableSupplier())) + .put(StatNames.AD_EXECUTING_BATCH_TASK_COUNT.getName(), new ADStat<>(false, new CounterSupplier())) + .put(StatNames.AD_CANCELED_BATCH_TASK_COUNT.getName(), new ADStat<>(false, new CounterSupplier())) + .put(StatNames.AD_TOTAL_BATCH_TASK_EXECUTION_COUNT.getName(), new ADStat<>(false, new CounterSupplier())) + .put(StatNames.AD_BATCH_TASK_FAILURE_COUNT.getName(), new ADStat<>(false, new CounterSupplier())) .build(); adStats = new ADStats(indexUtils, modelManager, stats); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/InternalStatNames.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/InternalStatNames.java new file mode 100644 index 00000000..c3de7ad6 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/InternalStatNames.java @@ -0,0 +1,39 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.stats; + +/** + * Enum containing names of all internal stats which will not be returned + * in AD stats REST API. 
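
The stats wired up above come in two flavors: counter-backed stats (CounterSupplier) that task execution code bumps as work happens, and settable stats (SettableSupplier) such as the detector counts, whose values are computed elsewhere and pushed in. InternalStatNames, added here, covers node-level numbers like JVM heap usage that are collected the same way but are not exposed through the AD stats REST API. A rough sketch of how the two flavors get updated; setValue mirrors usage later in this patch, while increment is assumed to be the counter accessor and is named here only for illustration:

import com.amazon.opendistroforelasticsearch.ad.stats.ADStats;
import com.amazon.opendistroforelasticsearch.ad.stats.StatNames;

// Sketch only: adStats is the ADStats instance assembled in createComponents above.
class AdStatsUsageSketch {

    void recordBatchTaskStarted(ADStats adStats) {
        // Counter-backed stats are incremented in place as batch tasks run.
        adStats.getStat(StatNames.AD_EXECUTING_BATCH_TASK_COUNT.getName()).increment();
        adStats.getStat(StatNames.AD_TOTAL_BATCH_TASK_EXECUTION_COUNT.getName()).increment();
    }

    void publishDetectorCounts(ADStats adStats, long totalDetectors, long historicalSingleEntityDetectors) {
        // Settable stats are overwritten with values computed from a query.
        adStats.getStat(StatNames.DETECTOR_COUNT.getName()).setValue(totalDetectors);
        adStats.getStat(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName()).setValue(historicalSingleEntityDetectors);
    }
}
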
+ */ +public enum InternalStatNames { + JVM_HEAP_USAGE("jvm_heap_usage"); + + private String name; + + InternalStatNames(String name) { + this.name = name; + } + + /** + * Get internal stat name + * + * @return name + */ + public String getName() { + return name; + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/StatNames.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/StatNames.java index 18371452..79a5124a 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/StatNames.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/stats/StatNames.java @@ -19,7 +19,8 @@ import java.util.Set; /** - * Enum containing names of all stats + * Enum containing names of all external stats which will be returned in + * AD stats REST API. */ public enum StatNames { AD_EXECUTE_REQUEST_COUNT("ad_execute_request_count"), @@ -32,7 +33,12 @@ public enum StatNames { MODELS_CHECKPOINT_INDEX_STATUS("models_checkpoint_index_status"), ANOMALY_DETECTION_JOB_INDEX_STATUS("anomaly_detection_job_index_status"), ANOMALY_DETECTION_STATE_STATUS("anomaly_detection_state_status"), - MODEL_INFORMATION("models"); + MODEL_INFORMATION("models"), + HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT("historical_single_entity_detector_count"), + AD_EXECUTING_BATCH_TASK_COUNT("ad_executing_batch_task_count"), + AD_CANCELED_BATCH_TASK_COUNT("ad_canceled_batch_task_count"), + AD_TOTAL_BATCH_TASK_EXECUTION_COUNT("ad_total_batch_task_execution_count"), + AD_BATCH_TASK_FAILURE_COUNT("ad_batch_task_failure_count"); private String name; diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java index 433a753e..bec6b026 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java @@ -27,10 +27,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.monitor.jvm.JvmService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import com.amazon.opendistroforelasticsearch.ad.stats.ADStats; +import com.amazon.opendistroforelasticsearch.ad.stats.InternalStatNames; /** * ADStatsNodesTransportAction contains the logic to extract the stats from the nodes @@ -39,6 +41,7 @@ public class ADStatsNodesTransportAction extends TransportNodesAction { private ADStats adStats; + private final JvmService jvmService; /** * Constructor @@ -55,7 +58,8 @@ public ADStatsNodesTransportAction( ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - ADStats adStats + ADStats adStats, + JvmService jvmService ) { super( ADStatsNodesAction.NAME, @@ -69,6 +73,7 @@ public ADStatsNodesTransportAction( ADStatsNodeResponse.class ); this.adStats = adStats; + this.jvmService = jvmService; } @Override @@ -99,6 +104,11 @@ private ADStatsNodeResponse createADStatsNodeResponse(ADStatsRequest adStatsRequ Map statValues = new HashMap<>(); Set statsToBeRetrieved = adStatsRequest.getStatsToBeRetrieved(); + if (statsToBeRetrieved.contains(InternalStatNames.JVM_HEAP_USAGE.getName())) { + long heapUsedPercent = jvmService.stats().getMem().getHeapUsedPercent(); + 
statValues.put(InternalStatNames.JVM_HEAP_USAGE.getName(), heapUsedPercent); + } + for (String statName : adStats.getNodeStats().keySet()) { if (statsToBeRetrieved.contains(statName)) { statValues.put(statName, adStats.getStats().get(statName).getValue()); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorResponse.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorResponse.java index 03ff351a..b8514edc 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorResponse.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorResponse.java @@ -47,4 +47,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws adStatsResponse.toXContent(builder, params); return builder; } + + protected ADStatsResponse getAdStatsResponse() { + return adStatsResponse; + } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportAction.java index 463b6d1b..46d1eb3a 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportAction.java @@ -16,6 +16,7 @@ package com.amazon.opendistroforelasticsearch.ad.transport; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; @@ -31,16 +32,22 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorType; import com.amazon.opendistroforelasticsearch.ad.stats.ADStats; import com.amazon.opendistroforelasticsearch.ad.stats.ADStatsResponse; import com.amazon.opendistroforelasticsearch.ad.stats.StatNames; import com.amazon.opendistroforelasticsearch.ad.util.MultiResponsesDelegateActionListener; public class StatsAnomalyDetectorTransportAction extends HandledTransportAction { + public static final String DETECTOR_TYPE_AGG = "detector_type_agg"; private final Logger logger = LogManager.getLogger(StatsAnomalyDetectorTransportAction.class); private final Client client; @@ -120,23 +127,36 @@ private void getClusterStats( ADStatsRequest adStatsRequest ) { ADStatsResponse adStatsResponse = new ADStatsResponse(); - if (adStatsRequest.getStatsToBeRetrieved().contains(StatNames.DETECTOR_COUNT.getName())) { - if (clusterService.state().getRoutingTable().hasIndex(AnomalyDetector.ANOMALY_DETECTORS_INDEX)) { - final SearchRequest request = client - .prepareSearch(AnomalyDetector.ANOMALY_DETECTORS_INDEX) - .setSize(0) - .setTrackTotalHits(true) - .request(); - client.search(request, ActionListener.wrap(indicesStatsResponse -> { - adStats.getStat(StatNames.DETECTOR_COUNT.getName()).setValue(indicesStatsResponse.getHits().getTotalHits().value); - 
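
Because jvm_heap_usage lives in InternalStatNames rather than StatNames, it is returned only when a caller explicitly adds it to an ADStatsRequest; the public stats REST path never asks for it. Condensed from the integration tests added later in this patch, fetching it for one node looks roughly like this (assumes an ESIntegTestCase in the same transport package, since getAdStatsResponse is protected):

public void testRequestJvmHeapUsageFromLocalNode() {
    ADStatsRequest request = new ADStatsRequest(clusterService().localNode());
    request.clear();                                              // start from an empty set of requested stats
    request.addStat(InternalStatNames.JVM_HEAP_USAGE.getName());  // internal-only stat

    StatsAnomalyDetectorResponse response =
        client().execute(StatsAnomalyDetectorAction.INSTANCE, request).actionGet(5_000);
    Map<String, Object> nodeStats =
        response.getAdStatsResponse().getADStatsNodesResponse().getNodes().get(0).getStatsMap();
    assertTrue(nodeStats.containsKey(InternalStatNames.JVM_HEAP_USAGE.getName()));
}
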
adStatsResponse.setClusterStats(getClusterStatsMap(adStatsRequest)); - listener.onResponse(adStatsResponse); - }, e -> listener.onFailure(new RuntimeException("Failed to get AD cluster stats", e)))); - } else { - adStats.getStat(StatNames.DETECTOR_COUNT.getName()).setValue(0L); + if ((adStatsRequest.getStatsToBeRetrieved().contains(StatNames.DETECTOR_COUNT.getName()) + || adStatsRequest.getStatsToBeRetrieved().contains(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName())) + && clusterService.state().getRoutingTable().hasIndex(AnomalyDetector.ANOMALY_DETECTORS_INDEX)) { + + TermsAggregationBuilder termsAgg = AggregationBuilders.terms(DETECTOR_TYPE_AGG).field(AnomalyDetector.DETECTOR_TYPE_FIELD); + SearchRequest request = new SearchRequest() + .indices(AnomalyDetector.ANOMALY_DETECTORS_INDEX) + .source(new SearchSourceBuilder().aggregation(termsAgg).size(0).trackTotalHits(true)); + + client.search(request, ActionListener.wrap(r -> { + StringTerms aggregation = r.getAggregations().get(DETECTOR_TYPE_AGG); + List buckets = aggregation.getBuckets(); + long totalDetectors = r.getHits().getTotalHits().value; + long totalHistoricalSingleEntityDetectors = 0; + for (StringTerms.Bucket b : buckets) { + if (AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name().equals(b.getKeyAsString())) { + totalHistoricalSingleEntityDetectors += b.getDocCount(); + } + } + if (adStatsRequest.getStatsToBeRetrieved().contains(StatNames.DETECTOR_COUNT.getName())) { + adStats.getStat(StatNames.DETECTOR_COUNT.getName()).setValue(totalDetectors); + } + if (adStatsRequest.getStatsToBeRetrieved().contains(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName())) { + adStats + .getStat(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName()) + .setValue(totalHistoricalSingleEntityDetectors); + } adStatsResponse.setClusterStats(getClusterStatsMap(adStatsRequest)); listener.onResponse(adStatsResponse); - } + }, e -> listener.onFailure(e))); } else { adStatsResponse.setClusterStats(getClusterStatsMap(adStatsRequest)); listener.onResponse(adStatsResponse); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index dc3f7a5b..c084aa08 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -15,6 +15,7 @@ package com.amazon.opendistroforelasticsearch.ad; +import static org.apache.http.entity.ContentType.APPLICATION_JSON; import static org.elasticsearch.cluster.node.DiscoveryNodeRole.BUILT_IN_ROLES; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -41,6 +42,7 @@ import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.apache.http.nio.entity.NStringEntity; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -49,11 +51,13 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import 
org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -627,6 +631,11 @@ public static ThreadPool createThreadPool() { return pool; } + public static CreateIndexResponse createIndex(AdminClient adminClient, String indexName, String indexMapping) { + CreateIndexRequest request = new CreateIndexRequest(indexName).mapping(AnomalyDetector.TYPE, indexMapping, XContentType.JSON); + return adminClient.indices().create(request).actionGet(5_000); + } + public static void createIndex(RestClient client, String indexName, HttpEntity data) throws IOException { TestHelpers .makeRequest( @@ -757,4 +766,17 @@ public static ADTask randomAdTask(String taskId, ADTaskState state, Instant exec .build(); return task; } + + public static HttpEntity toHttpEntity(ToXContentObject object) throws IOException { + return new StringEntity(toJsonString(object), APPLICATION_JSON); + } + + public static HttpEntity toHttpEntity(String jsonString) throws IOException { + return new StringEntity(jsonString, APPLICATION_JSON); + } + + public static String toJsonString(ToXContentObject object) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + return TestHelpers.xContentBuilderToString(object.toXContent(builder, ToXContent.EMPTY_PARAMS)); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportActionTests.java index 40d8f607..21bc59af 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportActionTests.java @@ -29,6 +29,8 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.monitor.jvm.JvmService; +import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -40,9 +42,11 @@ import com.amazon.opendistroforelasticsearch.ad.ml.ModelManager; import com.amazon.opendistroforelasticsearch.ad.stats.ADStat; import com.amazon.opendistroforelasticsearch.ad.stats.ADStats; +import com.amazon.opendistroforelasticsearch.ad.stats.InternalStatNames; import com.amazon.opendistroforelasticsearch.ad.stats.suppliers.CounterSupplier; import com.amazon.opendistroforelasticsearch.ad.stats.suppliers.IndexStatusSupplier; import com.amazon.opendistroforelasticsearch.ad.stats.suppliers.ModelsOnNodeSupplier; +import com.amazon.opendistroforelasticsearch.ad.stats.suppliers.SettableSupplier; import com.amazon.opendistroforelasticsearch.ad.util.ClientUtil; import com.amazon.opendistroforelasticsearch.ad.util.IndexUtils; import com.amazon.opendistroforelasticsearch.ad.util.Throttler; @@ -87,17 +91,26 @@ public void setUp() throws Exception { put(nodeStatName2, new ADStat<>(false, new ModelsOnNodeSupplier(modelManager, cacheProvider))); put(clusterStatName1, new ADStat<>(true, new IndexStatusSupplier(indexUtils, "index1"))); put(clusterStatName2, new ADStat<>(true, new 
IndexStatusSupplier(indexUtils, "index2"))); + put(InternalStatNames.JVM_HEAP_USAGE.getName(), new ADStat<>(true, new SettableSupplier())); } }; adStats = new ADStats(indexUtils, modelManager, statsMap); + JvmService jvmService = mock(JvmService.class); + JvmStats jvmStats = mock(JvmStats.class); + JvmStats.Mem mem = mock(JvmStats.Mem.class); + + when(jvmService.stats()).thenReturn(jvmStats); + when(jvmStats.getMem()).thenReturn(mem); + when(mem.getHeapUsedPercent()).thenReturn(randomShort()); action = new ADStatsNodesTransportAction( client().threadPool(), clusterService(), mock(TransportService.class), mock(ActionFilters.class), - adStats + adStats, + jvmService ); } @@ -133,4 +146,26 @@ public void testNodeOperation() { assertTrue(statsToBeRetrieved.contains(statName)); } } + + @Test + public void testNodeOperationWithJvmHeapUsage() { + String nodeId = clusterService().localNode().getId(); + ADStatsRequest adStatsRequest = new ADStatsRequest((nodeId)); + adStatsRequest.clear(); + + Set statsToBeRetrieved = new HashSet<>(Arrays.asList(nodeStatName1, InternalStatNames.JVM_HEAP_USAGE.getName())); + + for (String stat : statsToBeRetrieved) { + adStatsRequest.addStat(stat); + } + + ADStatsNodeResponse response = action.nodeOperation(new ADStatsNodeRequest(adStatsRequest)); + + Map stats = response.getStatsMap(); + + assertEquals(statsToBeRetrieved.size(), stats.size()); + for (String statName : stats.keySet()) { + assertTrue(statsToBeRetrieved.contains(statName)); + } + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java new file mode 100644 index 00000000..55ffa9e6 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java @@ -0,0 +1,151 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
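
The unit test setup above stubs each link of jvmService.stats().getMem().getHeapUsedPercent() because that is the only chain the transport action touches; returning a fixed short keeps the node-level assertion deterministic. Since all three types are plainly mockable here, the same stubbing can also be written with Mockito deep stubs, shown below purely as an equivalent shorthand:

// Equivalent to the explicit three-mock chain above; only the leaf call needs a value.
JvmService jvmService = mock(JvmService.class, org.mockito.Mockito.RETURNS_DEEP_STUBS);
when(jvmService.stats().getMem().getHeapUsedPercent()).thenReturn((short) 37);
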
+ */ + +package com.amazon.opendistroforelasticsearch.ad.transport; + +import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.XCONTENT_WITH_TYPE; + +import java.io.IOException; +import java.time.Instant; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorType; +import com.amazon.opendistroforelasticsearch.ad.stats.InternalStatNames; +import com.amazon.opendistroforelasticsearch.ad.stats.StatNames; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +public class StatsAnomalyDetectorTransportActionTests extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(AnomalyDetectorPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return Collections.singletonList(AnomalyDetectorPlugin.class); + } + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + createTestDetector(); + } + + private void createTestDetector() throws IOException { + CreateIndexResponse createIndexResponse = TestHelpers + .createIndex(admin(), AnomalyDetector.ANOMALY_DETECTORS_INDEX, AnomalyDetectionIndices.getAnomalyDetectorMappings()); + assertEquals(true, createIndexResponse.isAcknowledged()); + + IndexRequest indexRequest = new IndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + TestHelpers + .randomAnomalyDetector(ImmutableMap.of(), Instant.now()) + .toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE) + ); + IndexResponse indexResponse = client().index(indexRequest).actionGet(5_000); + assertEquals(RestStatus.CREATED, indexResponse.status()); + + indexRequest = new IndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + TestHelpers + .randomAnomalyDetector( + ImmutableList.of(TestHelpers.randomFeature()), + ImmutableMap.of(), + Instant.now(), + AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), + TestHelpers.randomDetectionDateRange(), + true + ) + .toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE) + ); + indexResponse = client().index(indexRequest).actionGet(5_000); + assertEquals(RestStatus.CREATED, indexResponse.status()); + } + + public void testStatsAnomalyDetectorWithNodeLevelStats() { + ADStatsRequest adStatsRequest = new ADStatsRequest(clusterService().localNode()); + adStatsRequest.addStat(InternalStatNames.JVM_HEAP_USAGE.getName()); + StatsAnomalyDetectorResponse response = client().execute(StatsAnomalyDetectorAction.INSTANCE, adStatsRequest).actionGet(5_000); + assertEquals(1, 
response.getAdStatsResponse().getADStatsNodesResponse().getNodes().size()); + assertTrue( + response + .getAdStatsResponse() + .getADStatsNodesResponse() + .getNodes() + .get(0) + .getStatsMap() + .containsKey(InternalStatNames.JVM_HEAP_USAGE.getName()) + ); + } + + public void testStatsAnomalyDetectorWithClusterLevelStats() throws IOException { + ADStatsRequest adStatsRequest = new ADStatsRequest(clusterService().localNode()); + adStatsRequest.addStat(StatNames.DETECTOR_COUNT.getName()); + adStatsRequest.addStat(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName()); + StatsAnomalyDetectorResponse response = client().execute(StatsAnomalyDetectorAction.INSTANCE, adStatsRequest).actionGet(5_000); + assertEquals(1, response.getAdStatsResponse().getADStatsNodesResponse().getNodes().size()); + Map statsMap = response.getAdStatsResponse().getADStatsNodesResponse().getNodes().get(0).getStatsMap(); + Map clusterStats = response.getAdStatsResponse().getClusterStats(); + assertEquals(0, statsMap.size()); + assertEquals(2L, clusterStats.get(StatNames.DETECTOR_COUNT.getName())); + assertEquals(1L, clusterStats.get(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName())); + } + + public void testStatsAnomalyDetectorWithDetectorCount() throws IOException { + ADStatsRequest adStatsRequest = new ADStatsRequest(clusterService().localNode()); + adStatsRequest.addStat(StatNames.DETECTOR_COUNT.getName()); + StatsAnomalyDetectorResponse response = client().execute(StatsAnomalyDetectorAction.INSTANCE, adStatsRequest).actionGet(5_000); + assertEquals(1, response.getAdStatsResponse().getADStatsNodesResponse().getNodes().size()); + Map statsMap = response.getAdStatsResponse().getADStatsNodesResponse().getNodes().get(0).getStatsMap(); + Map clusterStats = response.getAdStatsResponse().getClusterStats(); + assertEquals(0, statsMap.size()); + assertEquals(2L, clusterStats.get(StatNames.DETECTOR_COUNT.getName())); + assertFalse(clusterStats.containsKey(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName())); + } + + public void testStatsAnomalyDetectorWithHistoricalDetectorCount() throws IOException { + ADStatsRequest adStatsRequest = new ADStatsRequest(clusterService().localNode()); + adStatsRequest.addStat(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName()); + StatsAnomalyDetectorResponse response = client().execute(StatsAnomalyDetectorAction.INSTANCE, adStatsRequest).actionGet(5_000); + assertEquals(1, response.getAdStatsResponse().getADStatsNodesResponse().getNodes().size()); + Map statsMap = response.getAdStatsResponse().getADStatsNodesResponse().getNodes().get(0).getStatsMap(); + Map clusterStats = response.getAdStatsResponse().getClusterStats(); + assertEquals(0, statsMap.size()); + assertEquals(1L, clusterStats.get(StatNames.HISTORICAL_SINGLE_ENTITY_DETECTOR_COUNT.getName())); + assertFalse(clusterStats.containsKey(StatNames.DETECTOR_COUNT.getName())); + } + +} From 4ef7e308e15ec67fc86614c8340802f1f71da182 Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli Date: Thu, 17 Dec 2020 11:47:19 -0800 Subject: [PATCH 07/13] Adding support for Security Test Framework (#331) * Adding support for Security Test Framework * Excluding secure tests when not running against security plugin * Updating Delete Detector test to expect exception --- build.gradle | 6 + .../ad/AnomalyDetectorRestTestCase.java | 194 ++++++++++++++++-- .../ad/rest/AnomalyDetectorRestApiIT.java | 90 ++++---- .../ad/rest/SecureADRestIT.java | 183 +++++++++++++++++ 4 files changed, 415 insertions(+), 58 deletions(-) create mode 
100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/SecureADRestIT.java diff --git a/build.gradle b/build.gradle index 1ca22ef7..f287a432 100644 --- a/build.gradle +++ b/build.gradle @@ -156,6 +156,12 @@ integTest { } } + if (System.getProperty("https") == null) { + filter { + excludeTestsMatching "com.amazon.opendistroforelasticsearch.ad.rest.SecureADRestIT" + } + } + // The 'doFirst' delays till execution time. doFirst { // Tell the test JVM if the cluster JVM is running under a debugger so that tests can diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java index 20abd433..c76a20b1 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.Map; import org.apache.http.HttpEntity; @@ -28,6 +29,7 @@ import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -47,6 +49,7 @@ import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonArray; public abstract class AnomalyDetectorRestTestCase extends ODFERestTestCase { @@ -60,7 +63,7 @@ protected Settings restClientSettings() { return super.restClientSettings(); } - protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean withMetadata) throws IOException { + protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean withMetadata, RestClient client) throws IOException { Map uiMetadata = null; if (withMetadata) { uiMetadata = TestHelpers.randomUiMetadata(); @@ -69,7 +72,7 @@ protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean w String indexName = detector.getIndices().get(0); TestHelpers .makeRequest( - client(), + client, "POST", "/" + indexName + "/_doc/" + randomAlphaOfLength(5) + "?refresh=true", ImmutableMap.of(), @@ -77,17 +80,17 @@ protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean w null, false ); - AnomalyDetector createdDetector = createAnomalyDetector(detector, refresh); + AnomalyDetector createdDetector = createAnomalyDetector(detector, refresh, client); if (withMetadata) { - return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")); + return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"), client); } - return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json")); + return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), client); } - protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolean refresh) throws IOException { + protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolean refresh, RestClient client) throws IOException { Response response = TestHelpers - 
.makeRequest(client(), "POST", TestHelpers.AD_BASE_DETECTORS_URI, ImmutableMap.of(), toHttpEntity(detector), null); + .makeRequest(client, "POST", TestHelpers.AD_BASE_DETECTORS_URI, ImmutableMap.of(), toHttpEntity(detector), null); assertEquals("Create anomaly detector failed", RestStatus.CREATED, restStatus(response)); Map detectorJson = jsonXContent @@ -113,23 +116,38 @@ protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolea ); } - public AnomalyDetector getAnomalyDetector(String detectorId) throws IOException { - return (AnomalyDetector) getAnomalyDetector(detectorId, false)[0]; + protected Response startAnomalyDetector(String detectorId, RestClient client) throws IOException { + return TestHelpers + .makeRequest(client, "POST", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detectorId + "/_start", ImmutableMap.of(), "", null); + } + + protected Response stopAnomalyDetector(String detectorId, RestClient client) throws IOException { + return TestHelpers + .makeRequest(client, "POST", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detectorId + "/_stop", ImmutableMap.of(), "", null); + } + + protected Response deleteAnomalyDetector(String detectorId, RestClient client) throws IOException { + return TestHelpers.makeRequest(client, "DELETE", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detectorId, ImmutableMap.of(), "", null); + } + + public AnomalyDetector getAnomalyDetector(String detectorId, RestClient client) throws IOException { + return (AnomalyDetector) getAnomalyDetector(detectorId, false, client)[0]; } - public AnomalyDetector getAnomalyDetector(String detectorId, BasicHeader header) throws IOException { - return (AnomalyDetector) getAnomalyDetector(detectorId, header, false)[0]; + public AnomalyDetector getAnomalyDetector(String detectorId, BasicHeader header, RestClient client) throws IOException { + return (AnomalyDetector) getAnomalyDetector(detectorId, header, false, client)[0]; } - public ToXContentObject[] getAnomalyDetector(String detectorId, boolean returnJob) throws IOException { + public ToXContentObject[] getAnomalyDetector(String detectorId, boolean returnJob, RestClient client) throws IOException { BasicHeader header = new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"); - return getAnomalyDetector(detectorId, header, returnJob); + return getAnomalyDetector(detectorId, header, returnJob, client); } - public ToXContentObject[] getAnomalyDetector(String detectorId, BasicHeader header, boolean returnJob) throws IOException { + public ToXContentObject[] getAnomalyDetector(String detectorId, BasicHeader header, boolean returnJob, RestClient client) + throws IOException { Response response = TestHelpers .makeRequest( - client(), + client, "GET", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detectorId + "?job=" + returnJob, null, @@ -221,10 +239,10 @@ public void updateClusterSettings(String settingKey, Object value) throws Except assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } - public Response getDetectorProfile(String detectorId, boolean all, String customizedProfile) throws IOException { + public Response getDetectorProfile(String detectorId, boolean all, String customizedProfile, RestClient client) throws IOException { return TestHelpers .makeRequest( - client(), + client, "GET", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detectorId + "/" + RestHandlerUtils.PROFILE + customizedProfile + "?_all=" + all, null, @@ -234,11 +252,11 @@ public Response getDetectorProfile(String detectorId, boolean all, String 
custom } public Response getDetectorProfile(String detectorId) throws IOException { - return getDetectorProfile(detectorId, false, ""); + return getDetectorProfile(detectorId, false, "", client()); } public Response getDetectorProfile(String detectorId, boolean all) throws IOException { - return getDetectorProfile(detectorId, all, ""); + return getDetectorProfile(detectorId, all, "", client()); } public Response getSearchDetectorCount() throws IOException { @@ -264,4 +282,142 @@ public Response getSearchDetectorMatch(String name) throws IOException { ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) ); } + + public Response createUser(String name, String password, ArrayList backendRoles) throws IOException { + JsonArray backendRolesString = new JsonArray(); + for (int i = 0; i < backendRoles.size(); i++) { + backendRolesString.add(backendRoles.get(i)); + } + return TestHelpers + .makeRequest( + client(), + "PUT", + "/_opendistro/_security/api/internalusers/" + name, + null, + toHttpEntity( + " {\n" + + "\"password\": \"" + + password + + "\",\n" + + "\"backend_roles\": " + + backendRolesString + + ",\n" + + "\"attributes\": {\n" + + "}} " + ), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response createRoleMapping(String role, ArrayList users) throws IOException { + JsonArray usersString = new JsonArray(); + for (int i = 0; i < users.size(); i++) { + usersString.add(users.get(i)); + } + return TestHelpers + .makeRequest( + client(), + "PUT", + "/_opendistro/_security/api/rolesmapping/" + role, + null, + toHttpEntity( + "{\n" + " \"backend_roles\" : [ ],\n" + " \"hosts\" : [ ],\n" + " \"users\" : " + usersString + "\n" + "}" + ), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response createIndexRole(String role, String index) throws IOException { + return TestHelpers + .makeRequest( + client(), + "PUT", + "/_opendistro/_security/api/roles/" + role, + null, + toHttpEntity( + "{\n" + + "\"cluster_permissions\": [\n" + + "],\n" + + "\"index_permissions\": [\n" + + "{\n" + + "\"index_patterns\": [\n" + + "\"" + + index + + "\"\n" + + "],\n" + + "\"dls\": \"\",\n" + + "\"fls\": [],\n" + + "\"masked_fields\": [],\n" + + "\"allowed_actions\": [\n" + + "\"crud\",\n" + + "\"indices:admin/create\"\n" + + "]\n" + + "}\n" + + "],\n" + + "\"tenant_permissions\": []\n" + + "}" + ), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response deleteUser(String user) throws IOException { + return TestHelpers + .makeRequest( + client(), + "DELETE", + "/_opendistro/_security/api/internalusers/" + user, + null, + "", + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response deleteRoleMapping(String user) throws IOException { + return TestHelpers + .makeRequest( + client(), + "DELETE", + "/_opendistro/_security/api/rolesmapping/" + user, + null, + "", + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response enableFilterBy() throws IOException { + return TestHelpers + .makeRequest( + client(), + "PUT", + "_cluster/settings", + null, + toHttpEntity( + "{\n" + + " \"persistent\": {\n" + + " \"opendistro.anomaly_detection.filter_by_backend_roles\" : \"true\"\n" + + " }\n" + + "}" + ), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } + + public Response disableFilterBy() throws IOException { + return TestHelpers + .makeRequest( + client(), + "PUT", + 
"_cluster/settings", + null, + toHttpEntity( + "{\n" + + " \"persistent\": {\n" + + " \"opendistro.anomaly_detection.filter_by_backend_roles\" : \"false\"\n" + + " }\n" + + "}" + ), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana")) + ); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java index f6cf1256..736e1975 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.junit.Assert; import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorRestTestCase; @@ -78,7 +79,7 @@ public void testCreateAnomalyDetectorWithEmptyIndices() throws Exception { } public void testCreateAnomalyDetectorWithDuplicateName() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetector detectorDuplicateName = new AnomalyDetector( AnomalyDetector.NO_ID, @@ -141,26 +142,26 @@ public void testCreateAnomalyDetector() throws Exception { } public void testGetAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, false); - Exception ex = expectThrows(ResponseException.class, () -> getAnomalyDetector(detector.getDetectorId())); + Exception ex = expectThrows(ResponseException.class, () -> getAnomalyDetector(detector.getDetectorId(), client())); assertThat(ex.getMessage(), containsString(CommonErrorMessages.DISABLED_ERR_MSG)); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, true); - AnomalyDetector createdDetector = getAnomalyDetector(detector.getDetectorId()); + AnomalyDetector createdDetector = getAnomalyDetector(detector.getDetectorId(), client()); assertEquals("Incorrect Location header", detector, createdDetector); } public void testGetNotExistingAnomalyDetector() throws Exception { - createRandomAnomalyDetector(true, true); - TestHelpers.assertFailWith(ResponseException.class, null, () -> getAnomalyDetector(randomAlphaOfLength(5))); + createRandomAnomalyDetector(true, true, client()); + TestHelpers.assertFailWith(ResponseException.class, null, () -> getAnomalyDetector(randomAlphaOfLength(5), client())); } public void testUpdateAnomalyDetectorA() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); String newDescription = randomAlphaOfLength(5); @@ -216,15 +217,15 @@ public void testUpdateAnomalyDetectorA() throws Exception { assertEquals("Updated anomaly detector id doesn't match", detector.getDetectorId(), responseBody.get("_id")); assertEquals("Version not incremented", (detector.getVersion().intValue() + 1), (int) responseBody.get("_version")); - AnomalyDetector updatedDetector = getAnomalyDetector(detector.getDetectorId()); + AnomalyDetector updatedDetector = getAnomalyDetector(detector.getDetectorId(), client()); 
assertNotEquals("Anomaly detector last update time not changed", updatedDetector.getLastUpdateTime(), detector.getLastUpdateTime()); assertEquals("Anomaly detector description not updated", newDescription, updatedDetector.getDescription()); } public void testUpdateAnomalyDetectorNameToExisting() throws Exception { - AnomalyDetector detector1 = createRandomAnomalyDetector(true, true); + AnomalyDetector detector1 = createRandomAnomalyDetector(true, true, client()); - AnomalyDetector detector2 = createRandomAnomalyDetector(true, true); + AnomalyDetector detector2 = createRandomAnomalyDetector(true, true, client()); AnomalyDetector newDetector1WithDetector2Name = new AnomalyDetector( detector1.getDetectorId(), @@ -262,7 +263,7 @@ public void testUpdateAnomalyDetectorNameToExisting() throws Exception { } public void testUpdateAnomalyDetectorNameToNew() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetector detectorWithNewName = new AnomalyDetector( detector.getDetectorId(), @@ -293,7 +294,7 @@ public void testUpdateAnomalyDetectorNameToNew() throws Exception { null ); - AnomalyDetector resultDetector = getAnomalyDetector(detectorWithNewName.getDetectorId()); + AnomalyDetector resultDetector = getAnomalyDetector(detectorWithNewName.getDetectorId(), client()); assertEquals("Detector name updating failed", detectorWithNewName.getName(), resultDetector.getName()); assertEquals("Updated anomaly detector id doesn't match", detectorWithNewName.getDetectorId(), resultDetector.getDetectorId()); assertNotEquals( @@ -304,7 +305,7 @@ public void testUpdateAnomalyDetectorNameToNew() throws Exception { } public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); String newDescription = randomAlphaOfLength(5); @@ -346,7 +347,7 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { } public void testSearchAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); SearchSourceBuilder search = (new SearchSourceBuilder()).query(QueryBuilders.termQuery("_id", detector.getDetectorId())); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, false); @@ -396,7 +397,7 @@ public void testStatsAnomalyDetector() throws Exception { } public void testPreviewAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( detector.getDetectorId(), Instant.now().minusSeconds(60 * 10), @@ -435,7 +436,7 @@ public void testPreviewAnomalyDetector() throws Exception { } public void testPreviewAnomalyDetectorWhichNotExist() throws Exception { - createRandomAnomalyDetector(true, false); + createRandomAnomalyDetector(true, false, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( randomAlphaOfLength(5), Instant.now().minusSeconds(60 * 10), @@ -480,7 +481,7 @@ public void testExecuteAnomalyDetectorWithNullDetectorId() throws Exception { } public void testPreviewAnomalyDetectorWithDetector() throws Exception { - AnomalyDetector detector = 
createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( detector.getDetectorId(), Instant.now().minusSeconds(60 * 10), @@ -501,7 +502,7 @@ public void testPreviewAnomalyDetectorWithDetector() throws Exception { } public void testPreviewAnomalyDetectorWithDetectorAndNoFeatures() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( detector.getDetectorId(), Instant.now().minusSeconds(60 * 10), @@ -584,7 +585,7 @@ public void testSearchAnomalyResult() throws Exception { } public void testDeleteAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, false); @@ -633,7 +634,7 @@ public void testDeleteAnomalyDetectorWhichNotExist() throws Exception { } public void testDeleteAnomalyDetectorWithNoAdJob() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response response = TestHelpers .makeRequest( client(), @@ -647,7 +648,7 @@ public void testDeleteAnomalyDetectorWithNoAdJob() throws Exception { } public void testDeleteAnomalyDetectorWithRunningAdJob() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( @@ -678,7 +679,7 @@ public void testDeleteAnomalyDetectorWithRunningAdJob() throws Exception { } public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( @@ -730,7 +731,7 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { } public void testGetDetectorWithAdJob() throws IOException { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( @@ -744,18 +745,18 @@ public void testGetDetectorWithAdJob() throws IOException { assertEquals("Fail to start AD job", RestStatus.OK, restStatus(startAdJobResponse)); - ToXContentObject[] results = getAnomalyDetector(detector.getDetectorId(), true); + ToXContentObject[] results = getAnomalyDetector(detector.getDetectorId(), true, client()); assertEquals("Incorrect Location header", detector, results[0]); assertEquals("Incorrect detector job name", detector.getDetectorId(), ((AnomalyDetectorJob) results[1]).getName()); assertTrue(((AnomalyDetectorJob) results[1]).isEnabled()); - results = getAnomalyDetector(detector.getDetectorId(), false); + results = getAnomalyDetector(detector.getDetectorId(), false, client()); assertEquals("Incorrect Location header", detector, results[0]); assertEquals("Should not return detector job", null, results[1]); } public void testStartAdJobWithExistingDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, 
false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, false); @@ -818,7 +819,7 @@ public void testStartAdJobWithNonexistingDetectorIndex() throws Exception { } public void testStartAdJobWithNonexistingDetector() throws Exception { - createRandomAnomalyDetector(true, false); + createRandomAnomalyDetector(true, false, client()); TestHelpers .assertFailWith( ResponseException.class, @@ -837,7 +838,7 @@ public void testStartAdJobWithNonexistingDetector() throws Exception { public void testStopAdJob() throws Exception { updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, true); - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( client(), @@ -908,7 +909,7 @@ public void testStopNonExistingAdJobIndex() throws Exception { } public void testStopNonExistingAdJob() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( client(), @@ -937,7 +938,7 @@ public void testStopNonExistingAdJob() throws Exception { } public void testStartDisabledAdjob() throws IOException { - AnomalyDetector detector = createRandomAnomalyDetector(true, false); + AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response startAdJobResponse = TestHelpers .makeRequest( client(), @@ -977,7 +978,7 @@ public void testStartAdjobWithNullFeatures() throws Exception { AnomalyDetector detectorWithoutFeature = TestHelpers.randomAnomalyDetector(null, null, Instant.now()); String indexName = detectorWithoutFeature.getIndices().get(0); TestHelpers.createIndex(client(), indexName, toHttpEntity("{\"name\": \"test\"}")); - AnomalyDetector detector = createAnomalyDetector(detectorWithoutFeature, true); + AnomalyDetector detector = createAnomalyDetector(detectorWithoutFeature, true, client()); TestHelpers .assertFailWith( ResponseException.class, @@ -998,7 +999,7 @@ public void testStartAdjobWithEmptyFeatures() throws Exception { AnomalyDetector detectorWithoutFeature = TestHelpers.randomAnomalyDetector(ImmutableList.of(), null, Instant.now()); String indexName = detectorWithoutFeature.getIndices().get(0); TestHelpers.createIndex(client(), indexName, toHttpEntity("{\"name\": \"test\"}")); - AnomalyDetector detector = createAnomalyDetector(detectorWithoutFeature, true); + AnomalyDetector detector = createAnomalyDetector(detectorWithoutFeature, true, client()); TestHelpers .assertFailWith( ResponseException.class, @@ -1016,7 +1017,7 @@ public void testStartAdjobWithEmptyFeatures() throws Exception { } public void testDefaultProfileAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); updateClusterSettings(EnabledSetting.AD_PLUGIN_ENABLED, false); @@ -1030,16 +1031,16 @@ public void testDefaultProfileAnomalyDetector() throws Exception { } public void testAllProfileAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); Response profileResponse = getDetectorProfile(detector.getDetectorId(), true); assertEquals("Incorrect profile status", 
RestStatus.OK, restStatus(profileResponse)); } public void testCustomizedProfileAnomalyDetector() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); - Response profileResponse = getDetectorProfile(detector.getDetectorId(), true, "/models/"); + Response profileResponse = getDetectorProfile(detector.getDetectorId(), true, "/models/", client()); assertEquals("Incorrect profile status", RestStatus.OK, restStatus(profileResponse)); } @@ -1051,7 +1052,7 @@ public void testSearchAnomalyDetectorCountNoIndex() throws Exception { } public void testSearchAnomalyDetectorCount() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); Response countResponse = getSearchDetectorCount(); Map responseMap = entityAsMap(countResponse); Integer count = (Integer) responseMap.get("count"); @@ -1066,7 +1067,7 @@ public void testSearchAnomalyDetectorMatchNoIndex() throws Exception { } public void testSearchAnomalyDetectorNoMatch() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); Response matchResponse = getSearchDetectorMatch(detector.getName()); Map responseMap = entityAsMap(matchResponse); boolean nameExists = (boolean) responseMap.get("match"); @@ -1074,10 +1075,21 @@ public void testSearchAnomalyDetectorNoMatch() throws Exception { } public void testSearchAnomalyDetectorMatch() throws Exception { - AnomalyDetector detector = createRandomAnomalyDetector(true, true); + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); Response matchResponse = getSearchDetectorMatch(detector.getName() + "newDetector"); Map responseMap = entityAsMap(matchResponse); boolean nameExists = (boolean) responseMap.get("match"); assertEquals(nameExists, false); } + + public void testDeleteAnomalyDetectorWhileRunning() throws Exception { + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); + Assert.assertNotNull(detector.getDetectorId()); + Response response = startAnomalyDetector(detector.getDetectorId(), client()); + Assert.assertEquals(response.getStatusLine().toString(), "HTTP/1.1 200 OK"); + + // Deleting detector should fail while its running + Exception exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(detector.getDetectorId(), client()); }); + Assert.assertTrue(exception.getMessage().contains("Detector job is running")); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/SecureADRestIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/SecureADRestIT.java new file mode 100644 index 00000000..c61ee08a --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/SecureADRestIT.java @@ -0,0 +1,183 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.rest; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorRestTestCase; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.commons.rest.SecureRestClientBuilder; + +public class SecureADRestIT extends AnomalyDetectorRestTestCase { + String aliceUser = "alice"; + RestClient aliceClient; + String bobUser = "bob"; + RestClient bobClient; + String catUser = "cat"; + RestClient catClient; + String dogUser = "dog"; + RestClient dogClient; + + @Before + public void setupSecureTests() throws IOException { + if (!isHttps()) + throw new IllegalArgumentException("Secure Tests are running but HTTPS is not set"); + createIndexRole("index_all_access", "*"); + createUser(aliceUser, aliceUser, new ArrayList<>(Arrays.asList("odfe"))); + aliceClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[0]), isHttps(), aliceUser, aliceUser) + .setSocketTimeout(60000) + .build(); + + createUser(bobUser, bobUser, new ArrayList<>(Arrays.asList("odfe"))); + bobClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[0]), isHttps(), bobUser, bobUser) + .setSocketTimeout(60000) + .build(); + + createUser(catUser, catUser, new ArrayList<>(Arrays.asList("aes"))); + catClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[0]), isHttps(), catUser, catUser) + .setSocketTimeout(60000) + .build(); + + createUser(dogUser, dogUser, new ArrayList<>(Arrays.asList())); + dogClient = new SecureRestClientBuilder(getClusterHosts().toArray(new HttpHost[0]), isHttps(), dogUser, dogUser) + .setSocketTimeout(60000) + .build(); + + createRoleMapping("anomaly_read_access", new ArrayList<>(Arrays.asList(bobUser))); + createRoleMapping("anomaly_full_access", new ArrayList<>(Arrays.asList(aliceUser, catUser, dogUser))); + createRoleMapping("index_all_access", new ArrayList<>(Arrays.asList(aliceUser, bobUser, catUser, dogUser))); + } + + @After + public void deleteUserSetup() throws IOException { + aliceClient.close(); + bobClient.close(); + catClient.close(); + dogClient.close(); + deleteUser(aliceUser); + deleteUser(bobUser); + deleteUser(catUser); + deleteUser(dogUser); + } + + public void testCreateAnomalyDetectorWithWriteAccess() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + Assert.assertNotNull("User alice could not create detector", aliceDetector.getDetectorId()); + } + + public void testCreateAnomalyDetectorWithReadAccess() { + // User Bob has AD read access, should not be able to create a detector + Exception exception = expectThrows(IOException.class, () -> { createRandomAnomalyDetector(false, false, bobClient); }); + Assert.assertTrue(exception.getMessage().contains("no permissions for [cluster:admin/opendistro/ad/detector/write]")); + } + + public void testStartDetectorWithReadAccess() throws IOException { + // User Bob has AD read access, should not be able to modify a detector + AnomalyDetector aliceDetector = 
createRandomAnomalyDetector(false, false, aliceClient); + Assert.assertNotNull(aliceDetector.getDetectorId()); + Exception exception = expectThrows(IOException.class, () -> { startAnomalyDetector(aliceDetector.getDetectorId(), bobClient); }); + Assert.assertTrue(exception.getMessage().contains("no permissions for [cluster:admin/opendistro/ad/detector/jobmanagement]")); + } + + public void testStartDetectorForWriteUser() throws IOException { + // User Alice has AD full access, should be able to modify a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + Assert.assertNotNull(aliceDetector.getDetectorId()); + Response response = startAnomalyDetector(aliceDetector.getDetectorId(), aliceClient); + Assert.assertEquals(response.getStatusLine().toString(), "HTTP/1.1 200 OK"); + } + + public void testFilterByDisabled() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + // User Cat has AD full access, should be able to get a detector + AnomalyDetector detector = getAnomalyDetector(aliceDetector.getDetectorId(), catClient); + Assert.assertEquals(aliceDetector.getDetectorId(), detector.getDetectorId()); + } + + public void testGetApiFilterByEnabled() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + enableFilterBy(); + // User Cat has AD full access, but is part of different backend role so Cat should not be able to access + // Alice detector + Exception exception = expectThrows(IOException.class, () -> { getAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); + Assert + .assertTrue( + exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) + ); + } + + public void testStartApiFilterByEnabled() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + enableFilterBy(); + // User Cat has AD full access, but is part of different backend role so Cat should not be able to access + // Alice detector + Exception exception = expectThrows(IOException.class, () -> { startAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); + Assert + .assertTrue( + exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) + ); + } + + public void testStopApiFilterByEnabled() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + enableFilterBy(); + // User Cat has AD full access, but is part of different backend role so Cat should not be able to access + // Alice detector + Exception exception = expectThrows(IOException.class, () -> { stopAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); + Assert + .assertTrue( + exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) + ); + } + + public void testDeleteApiFilterByEnabled() throws IOException { + // User Alice has AD full access, should be able to create a detector + AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); + enableFilterBy(); + // User Cat has AD 
full access, but is part of different backend role so Cat should not be able to access + // Alice detector + Exception exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); + Assert + .assertTrue( + exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) + ); + } + + public void testCreateAnomalyDetectorWithNoBackendRole() throws IOException { + enableFilterBy(); + // User Dog has AD full access, but has no backend role + // When filter by is enabled, we block creating Detectors + Exception exception = expectThrows(IOException.class, () -> { createRandomAnomalyDetector(false, false, dogClient); }); + Assert + .assertTrue( + exception.getMessage().contains("Filter by backend roles is enabled and User dog does not have backend roles configured") + ); + } +} From 2ae77edbf9c85e02abf4ca987342c33fd2ebbc5c Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli Date: Thu, 17 Dec 2020 13:41:06 -0800 Subject: [PATCH 08/13] Updating Readme to include Secure tests (#334) --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7ea3ff58..8d35dc12 100644 --- a/README.md +++ b/README.md @@ -49,8 +49,9 @@ Currently we just put RCF jar in lib as dependency. Plan to publish to Maven and 1. `./gradlew build` builds and tests 1. `./gradlew :run` launches a single node cluster with the AD (and job-scheduler) plugin installed -1. `./gradlew :integTest` launches a single node cluster with the AD (and job-scheduler) plugin installed and runs all integration tests +1. `./gradlew :integTest` launches a single node cluster with the AD (and job-scheduler) plugin installed and runs all integration tests except security 1. ` ./gradlew :integTest --tests="**.test execute foo"` runs a single integration test class or method +1. `./gradlew integTest -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster" -Dhttps=true -Duser=admin -Dpassword=admin` launches integration tests against a local cluster and run tests with security 1. `./gradlew spotlessApply` formats code. And/or import formatting rules in `.eclipseformat.xml` with IDE. When launching a cluster using one of the above commands logs are placed in `/build/cluster/run node0/elasticsearch-/logs`. Though the logs are teed to the console, in practices it's best to check the actual log file. 
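The secure tests in SecureADRestIT and the helpers above toggle the `opendistro.anomaly_detection.filter_by_backend_roles` persistent cluster setting, and the README command above shows how to point the secure suite at a local HTTPS cluster. As a minimal sketch only, assuming an admin-privileged low-level `RestClient` (the `adminClient` parameter and the `FilterBySettingHelper` class name are illustrative and not part of these patches), the same toggle could be issued directly:

```java
import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Illustrative helper, not part of the patch series above: flips the AD
// filter-by setting using the same persistent cluster setting the secure
// test helpers enableFilterBy()/disableFilterBy() send.
public class FilterBySettingHelper {

    // adminClient is assumed to be a RestClient authenticated as a user that
    // may update cluster settings (for example the admin user from the README
    // secure-test command).
    public static Response setFilterByBackendRoles(RestClient adminClient, boolean enabled) throws IOException {
        Request request = new Request("PUT", "/_cluster/settings");
        // Persistent setting name matches the body used by the test helpers above.
        request.setJsonEntity(
            "{ \"persistent\": { \"opendistro.anomaly_detection.filter_by_backend_roles\": \"" + enabled + "\" } }"
        );
        return adminClient.performRequest(request);
    }
}
```

A caller could enable the setting before exercising the filter-by test cases and disable it again in teardown, mirroring what `enableFilterBy()` and `disableFilterBy()` do through `TestHelpers.makeRequest`.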
From d5683f6f66d141612068f1704997056855189494 Mon Sep 17 00:00:00 2001 From: Yaliang <49084640+ylwu-amzn@users.noreply.github.com> Date: Wed, 23 Dec 2020 11:35:10 -0800 Subject: [PATCH 09/13] add AD task cache (#337) * add AD task cache * add java doc for exception * change to reserved memory * fix shingle memory calculation;store threshold model training data in double array * address comments --- .../ad/AnomalyDetectorPlugin.java | 3 +- .../ad/MemoryTracker.java | 3 +- .../exception/LimitExceededException.java | 9 + .../ad/settings/AnomalyDetectorSettings.java | 14 + .../ad/task/ADBatchTaskCache.java | 140 ++++++++ .../ad/task/ADTaskCacheManager.java | 329 ++++++++++++++++++ .../ADStatsNodesTransportAction.java | 1 + .../ad/TestHelpers.java | 53 ++- .../ad/task/ADTaskCacheManagerTests.java | 172 +++++++++ 9 files changed, 713 insertions(+), 11 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADBatchTaskCache.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManager.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManagerTests.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index bc4e9fda..62a4d503 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -570,7 +570,8 @@ public List> getSettings() { AnomalyDetectorSettings.INDEX_PRESSURE_SOFT_LIMIT, AnomalyDetectorSettings.MAX_PRIMARY_SHARDS, AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES, - AnomalyDetectorSettings.MAX_CACHE_MISS_HANDLING_PER_SECOND + AnomalyDetectorSettings.MAX_CACHE_MISS_HANDLING_PER_SECOND, + AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE ); return unmodifiableList(Stream.concat(enabledSetting.stream(), systemSetting.stream()).collect(Collectors.toList())); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/MemoryTracker.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/MemoryTracker.java index dfd167c9..3c906117 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/MemoryTracker.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/MemoryTracker.java @@ -38,7 +38,8 @@ public class MemoryTracker { public enum Origin { SINGLE_ENTITY_DETECTOR, - MULTI_ENTITY_DETECTOR + MULTI_ENTITY_DETECTOR, + HISTORICAL_SINGLE_ENTITY_DETECTOR, } // memory tracker for total consumption of bytes diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java index 1ee0f28e..038133df 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java @@ -30,6 +30,15 @@ public LimitExceededException(String anomalyDetectorId, String message) { super(anomalyDetectorId, message, true); } + /** + * Constructor with error message. + * + * @param message explanation for the limit + */ + public LimitExceededException(String message) { + super(null, message, true); + } + /** * Constructor with an anomaly detector ID and an explanation, and a flag for stopping. 
* diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/settings/AnomalyDetectorSettings.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/settings/AnomalyDetectorSettings.java index 2c0e7fbd..7077b3a6 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/settings/AnomalyDetectorSettings.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/settings/AnomalyDetectorSettings.java @@ -321,4 +321,18 @@ private AnomalyDetectorSettings() {} Setting.Property.NodeScope, Setting.Property.Dynamic ); + + // Maximum number of batch tasks running on one node. + // TODO: performance test and tune the setting. + public static final Setting MAX_BATCH_TASK_PER_NODE = Setting + .intSetting( + "opendistro.anomaly_detection.max_batch_task_per_node", + 2, + 1, + 100, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static int THRESHOLD_MODEL_TRAINING_SIZE = 1000; } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADBatchTaskCache.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADBatchTaskCache.java new file mode 100644 index 00000000..a301370a --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADBatchTaskCache.java @@ -0,0 +1,140 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.task; + +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.NUM_MIN_SAMPLES; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.NUM_TREES; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.THRESHOLD_MODEL_TRAINING_SIZE; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.TIME_DECAY; + +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +import com.amazon.opendistroforelasticsearch.ad.ml.HybridThresholdingModel; +import com.amazon.opendistroforelasticsearch.ad.ml.ThresholdingModel; +import com.amazon.opendistroforelasticsearch.ad.model.ADTask; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; +import com.amazon.randomcutforest.RandomCutForest; + +/** + * AD batch task cache which will hold RCF, threshold model, shingle and training data. 
+ */ +public class ADBatchTaskCache { + private final String detectorId; + private RandomCutForest rcfModel; + private ThresholdingModel thresholdModel; + private boolean thresholdModelTrained; + private Deque>> shingle; + private AtomicInteger thresholdModelTrainingDataSize = new AtomicInteger(0); + private double[] thresholdModelTrainingData; + private AtomicBoolean cancelled = new AtomicBoolean(false); + private AtomicLong cacheMemorySize = new AtomicLong(0); + private String cancelReason; + private String cancelledBy; + + protected ADBatchTaskCache(ADTask adTask) { + this.detectorId = adTask.getDetectorId(); + + AnomalyDetector detector = adTask.getDetector(); + rcfModel = RandomCutForest + .builder() + .dimensions(detector.getShingleSize() * detector.getEnabledFeatureIds().size()) + .numberOfTrees(NUM_TREES) + .lambda(TIME_DECAY) + .sampleSize(NUM_SAMPLES_PER_TREE) + .outputAfter(NUM_MIN_SAMPLES) + .parallelExecutionEnabled(false) + .build(); + + this.thresholdModel = new HybridThresholdingModel( + AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE, + AnomalyDetectorSettings.THRESHOLD_MAX_RANK_ERROR, + AnomalyDetectorSettings.THRESHOLD_MAX_SCORE, + AnomalyDetectorSettings.THRESHOLD_NUM_LOGNORMAL_QUANTILES, + AnomalyDetectorSettings.THRESHOLD_DOWNSAMPLES, + AnomalyDetectorSettings.THRESHOLD_MAX_SAMPLES + ); + this.thresholdModelTrainingData = new double[THRESHOLD_MODEL_TRAINING_SIZE]; + this.thresholdModelTrained = false; + this.shingle = new ArrayDeque<>(detector.getShingleSize()); + } + + protected String getDetectorId() { + return detectorId; + } + + protected RandomCutForest getRcfModel() { + return rcfModel; + } + + protected Deque>> getShingle() { + return shingle; + } + + protected ThresholdingModel getThresholdModel() { + return thresholdModel; + } + + protected void setThresholdModelTrained(boolean thresholdModelTrained) { + this.thresholdModelTrained = thresholdModelTrained; + } + + protected boolean isThresholdModelTrained() { + return thresholdModelTrained; + } + + protected double[] getThresholdModelTrainingData() { + return thresholdModelTrainingData; + } + + protected void clearTrainingData() { + this.thresholdModelTrainingData = null; + this.thresholdModelTrainingDataSize.set(0); + } + + public AtomicInteger getThresholdModelTrainingDataSize() { + return thresholdModelTrainingDataSize; + } + + protected AtomicLong getCacheMemorySize() { + return cacheMemorySize; + } + + protected boolean isCancelled() { + return cancelled.get(); + } + + protected String getCancelReason() { + return cancelReason; + } + + protected String getCancelledBy() { + return cancelledBy; + } + + protected void cancel(String reason, String userName) { + this.cancelled.compareAndSet(false, true); + this.cancelReason = reason; + this.cancelledBy = userName; + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManager.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManager.java new file mode 100644 index 00000000..8b203b68 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManager.java @@ -0,0 +1,329 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.task; + +import static com.amazon.opendistroforelasticsearch.ad.MemoryTracker.Origin.HISTORICAL_SINGLE_ENTITY_DETECTOR; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.NUM_TREES; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.THRESHOLD_MODEL_TRAINING_SIZE; + +import java.util.Deque; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; + +import com.amazon.opendistroforelasticsearch.ad.MemoryTracker; +import com.amazon.opendistroforelasticsearch.ad.common.exception.LimitExceededException; +import com.amazon.opendistroforelasticsearch.ad.ml.ThresholdingModel; +import com.amazon.opendistroforelasticsearch.ad.model.ADTask; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.randomcutforest.RandomCutForest; + +public class ADTaskCacheManager { + + private final Map taskCaches; + private volatile Integer maxAdBatchTaskPerNode; + private final MemoryTracker memoryTracker; + private final int numberSize = 8; + + /** + * Constructor to create AD task cache manager. + * + * @param settings ES settings + * @param clusterService ES cluster service + * @param memoryTracker AD memory tracker + */ + public ADTaskCacheManager(Settings settings, ClusterService clusterService, MemoryTracker memoryTracker) { + this.maxAdBatchTaskPerNode = MAX_BATCH_TASK_PER_NODE.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_BATCH_TASK_PER_NODE, it -> maxAdBatchTaskPerNode = it); + taskCaches = new ConcurrentHashMap<>(); + this.memoryTracker = memoryTracker; + } + + /** + * Put AD task into cache. + * If AD task is already in cache, will throw {@link IllegalArgumentException} + * If there is one AD task in cache for detector, will throw {@link IllegalArgumentException} + * If there is no enough memory for this AD task, will throw {@link LimitExceededException} + * + * @param adTask AD task + */ + public synchronized void put(ADTask adTask) { + String taskId = adTask.getTaskId(); + if (contains(taskId)) { + throw new IllegalArgumentException("AD task is already running"); + } + if (containsTaskOfDetector(adTask.getDetectorId())) { + throw new IllegalArgumentException("There is one task executing for detector"); + } + checkRunningTaskLimit(); + long neededCacheSize = calculateADTaskCacheSize(adTask); + if (!memoryTracker.canAllocateReserved(adTask.getDetectorId(), neededCacheSize)) { + throw new LimitExceededException("No enough memory to run detector"); + } + memoryTracker.consumeMemory(neededCacheSize, true, HISTORICAL_SINGLE_ENTITY_DETECTOR); + ADBatchTaskCache taskCache = new ADBatchTaskCache(adTask); + taskCache.getCacheMemorySize().set(neededCacheSize); + taskCaches.put(taskId, taskCache); + } + + /** + * check if current running batch task on current node exceeds + * max running task limitation. 
+ * If executing task count exceeds limitation, will throw + * {@link LimitExceededException} + */ + public void checkRunningTaskLimit() { + if (size() >= maxAdBatchTaskPerNode) { + String error = "Can't run more than " + maxAdBatchTaskPerNode + " historical detectors per data node"; + throw new LimitExceededException(error); + } + } + + /** + * Get task RCF model. + * If task doesn't exist in cache, will throw {@link java.lang.IllegalArgumentException}. + * + * @param taskId AD task id + * @return RCF model + */ + public RandomCutForest getRcfModel(String taskId) { + return getBatchTaskCache(taskId).getRcfModel(); + } + + /** + * Get task threshold model. + * If task doesn't exist in cache, will throw {@link java.lang.IllegalArgumentException}. + * + * @param taskId AD task id + * @return threshold model + */ + public ThresholdingModel getThresholdModel(String taskId) { + return getBatchTaskCache(taskId).getThresholdModel(); + } + + /** + * Get threshold model training data. + * If task doesn't exist in cache, will throw {@link java.lang.IllegalArgumentException}. + * + * @param taskId AD task id + * @return threshold model training data + */ + public double[] getThresholdModelTrainingData(String taskId) { + return getBatchTaskCache(taskId).getThresholdModelTrainingData(); + } + + public int addThresholdModelTrainingData(String taskId, double... data) { + ADBatchTaskCache taskCache = getBatchTaskCache(taskId); + double[] thresholdModelTrainingData = taskCache.getThresholdModelTrainingData(); + AtomicInteger size = taskCache.getThresholdModelTrainingDataSize(); + int dataPointsAdded = Math.min(data.length, THRESHOLD_MODEL_TRAINING_SIZE - size.get()); + System.arraycopy(data, 0, thresholdModelTrainingData, size.get(), dataPointsAdded); + return size.addAndGet(dataPointsAdded); + } + + /** + * Threshold model trained or not. + * If task doesn't exist in cache, will throw {@link java.lang.IllegalArgumentException}. + * + * @param taskId AD task id + * @return true if threshold model trained; otherwise, return false + */ + public boolean isThresholdModelTrained(String taskId) { + return getBatchTaskCache(taskId).isThresholdModelTrained(); + } + + /** + * Set threshold model trained or not. + * + * @param taskId task id + * @param trained threshold model trained or not + */ + protected void setThresholdModelTrained(String taskId, boolean trained) { + ADBatchTaskCache taskCache = getBatchTaskCache(taskId); + taskCache.setThresholdModelTrained(trained); + if (trained) { + int size = taskCache.getThresholdModelTrainingDataSize().get(); + long cacheSize = trainingDataMemorySize(size); + taskCache.clearTrainingData(); + taskCache.getCacheMemorySize().getAndAdd(-cacheSize); + memoryTracker.releaseMemory(cacheSize, true, HISTORICAL_SINGLE_ENTITY_DETECTOR); + } + } + + /** + * Get shingle data. + * + * @param taskId AD task id + * @return shingle data + */ + public Deque>> getShingle(String taskId) { + return getBatchTaskCache(taskId).getShingle(); + } + + /** + * Check if task exists in cache. + * + * @param taskId task id + * @return true if task exists in cache; otherwise, return false. + */ + public boolean contains(String taskId) { + return taskCaches.containsKey(taskId); + } + + /** + * Check if there is task in cache for detector. 
+ * + * @param detectorId detector id + * @return true if there is task in cache; otherwise return false + */ + public boolean containsTaskOfDetector(String detectorId) { + return taskCaches.values().stream().filter(v -> Objects.equals(detectorId, v.getDetectorId())).findAny().isPresent(); + } + + /** + * Get batch task cache. If task doesn't exist in cache, will throw + * {@link java.lang.IllegalArgumentException} + * We throw exception rather than return {@code Optional.empty} or null + * here, so don't need to check task existence by writing duplicate null + * checking code. All AD task exceptions will be handled in AD task manager. + * + * @param taskId task id + * @return AD batch task cache + */ + private ADBatchTaskCache getBatchTaskCache(String taskId) { + if (!contains(taskId)) { + throw new IllegalArgumentException("AD task not in cache"); + } + return taskCaches.get(taskId); + } + + /** + * Calculate AD task cache memory usage. + * + * @param adTask AD task + * @return how many bytes will consume + */ + private long calculateADTaskCacheSize(ADTask adTask) { + AnomalyDetector detector = adTask.getDetector(); + return memoryTracker.estimateModelSize(detector, NUM_TREES) + trainingDataMemorySize(THRESHOLD_MODEL_TRAINING_SIZE) + + shingleMemorySize(detector.getShingleSize(), detector.getEnabledFeatureIds().size()); + } + + /** + * Remove task from cache. + * + * @param taskId AD task id + */ + public void remove(String taskId) { + if (contains(taskId)) { + memoryTracker.releaseMemory(getBatchTaskCache(taskId).getCacheMemorySize().get(), true, HISTORICAL_SINGLE_ENTITY_DETECTOR); + taskCaches.remove(taskId); + } + } + + /** + * Cancel AD task. + * + * @param taskId AD task id + * @param reason why need to cancel task + * @param userName user name + */ + public void cancel(String taskId, String reason, String userName) { + getBatchTaskCache(taskId).cancel(reason, userName); + } + + /** + * Task is cancelled or not. + * + * @param taskId AD task id + * @return true if task is cancelled; otherwise return false + */ + public boolean isCancelled(String taskId) { + ADBatchTaskCache taskCache = getBatchTaskCache(taskId); + return taskCache.isCancelled(); + } + + /** + * Get why task cancelled. + * + * @param taskId AD task id + * @return task cancellation reason + */ + public String getCancelReason(String taskId) { + return getBatchTaskCache(taskId).getCancelReason(); + } + + /** + * Get task cancelled by which user. + * + * @param taskId AD task id + * @return user name + */ + public String getCancelledBy(String taskId) { + return getBatchTaskCache(taskId).getCancelledBy(); + } + + /** + * Get current task count in cache. + * + * @return task count + */ + public int size() { + return taskCaches.size(); + } + + /** + * Clear all tasks. + */ + public void clear() { + taskCaches.clear(); + } + + /** + * Estimate max memory usage of model training data. + * The training data is double and will cache in double array. + * One double consumes 8 bytes. + * + * @param size training data point count + * @return how many bytes will consume + */ + public long trainingDataMemorySize(int size) { + return numberSize * size; + } + + /** + * Estimate max memory usage of shingle data. + * One feature aggregated data point(double) consumes 8 bytes. + * The shingle data is stored in {@link java.util.Deque}. From testing, + * other parts except feature data consume 80 bytes. 
+ * + * Check {@link ADBatchTaskCache#getShingle()} + * + * @param shingleSize shingle data point count + * @param enabledFeatureSize enabled feature count + * @return how many bytes will consume + */ + public long shingleMemorySize(int shingleSize, int enabledFeatureSize) { + return (80 + numberSize * enabledFeatureSize) * shingleSize; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java index bec6b026..5abd6183 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/ADStatsNodesTransportAction.java @@ -51,6 +51,7 @@ public class ADStatsNodesTransportAction extends * @param transportService TransportService * @param actionFilters Action Filters * @param adStats ADStats object + * @param jvmService ES JVM Service */ @Inject public ADStatsNodesTransportAction( diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index c084aa08..f844a61b 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -20,6 +20,7 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomDouble; import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomIntBetween; @@ -415,14 +416,7 @@ public static Feature randomFeature() { } public static Feature randomFeature(String featureName, String aggregationName) { - AggregationBuilder testAggregation = null; - try { - testAggregation = randomAggregation(aggregationName); - } catch (IOException e) { - logger.error("Fail to generate test aggregation"); - throw new RuntimeException(); - } - return new Feature(randomAlphaOfLength(5), featureName, ESRestTestCase.randomBoolean(), testAggregation); + return randomFeature(featureName, aggregationName, randomBoolean()); } public static Feature randomFeature(boolean enabled) { @@ -739,10 +733,51 @@ public static Map>> create return mappings; } + public static ADTask randomAdTask() throws IOException { + return randomAdTask( + randomAlphaOfLength(5), + ADTaskState.RUNNING, + Instant.now().truncatedTo(ChronoUnit.SECONDS), + randomAlphaOfLength(5), + true + ); + } + + public static ADTask randomAdTask( + String taskId, + ADTaskState state, + Instant executionEndTime, + String stoppedBy, + String detectorId, + AnomalyDetector detector + ) { + executionEndTime = executionEndTime == null ? 
null : executionEndTime.truncatedTo(ChronoUnit.SECONDS); + ADTask task = ADTask + .builder() + .taskId(taskId) + .taskType(ADTaskType.HISTORICAL.name()) + .detectorId(detectorId) + .detector(detector) + .state(state.name()) + .taskProgress(0.5f) + .initProgress(1.0f) + .currentPiece(Instant.now().truncatedTo(ChronoUnit.SECONDS).minus(randomIntBetween(1, 100), ChronoUnit.MINUTES)) + .executionStartTime(Instant.now().truncatedTo(ChronoUnit.SECONDS).minus(100, ChronoUnit.MINUTES)) + .executionEndTime(executionEndTime) + .isLatest(true) + .error(randomAlphaOfLength(5)) + .checkpointId(randomAlphaOfLength(5)) + .lastUpdateTime(Instant.now().truncatedTo(ChronoUnit.SECONDS)) + .startedBy(randomAlphaOfLength(5)) + .stoppedBy(stoppedBy) + .build(); + return task; + } + public static ADTask randomAdTask(String taskId, ADTaskState state, Instant executionEndTime, String stoppedBy, boolean withDetector) throws IOException { AnomalyDetector detector = withDetector - ? randomAnomalyDetector(ImmutableMap.of(), Instant.now().truncatedTo(ChronoUnit.SECONDS)) + ? randomAnomalyDetector(ImmutableMap.of(), Instant.now().truncatedTo(ChronoUnit.SECONDS), true) : null; executionEndTime = executionEndTime == null ? null : executionEndTime.truncatedTo(ChronoUnit.SECONDS); ADTask task = ADTask diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManagerTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManagerTests.java new file mode 100644 index 00000000..1a7db42d --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/task/ADTaskCacheManagerTests.java @@ -0,0 +1,172 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad.task; + +import static com.amazon.opendistroforelasticsearch.ad.MemoryTracker.Origin.HISTORICAL_SINGLE_ENTITY_DETECTOR; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import com.amazon.opendistroforelasticsearch.ad.MemoryTracker; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.common.exception.LimitExceededException; +import com.amazon.opendistroforelasticsearch.ad.model.ADTask; +import com.amazon.opendistroforelasticsearch.ad.model.ADTaskState; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; + +public class ADTaskCacheManagerTests extends ESTestCase { + private MemoryTracker memoryTracker; + private ADTaskCacheManager adTaskCacheManager; + private ClusterService clusterService; + private Settings settings; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + settings = Settings.builder().put(AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE.getKey(), 2).build(); + + clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings( + settings, + Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE))) + ); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + memoryTracker = mock(MemoryTracker.class); + adTaskCacheManager = new ADTaskCacheManager(settings, clusterService, memoryTracker); + } + + @Override + @After + public void tearDown() throws Exception { + super.tearDown(); + adTaskCacheManager.clear(); + } + + public void testPutTask() throws IOException { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(true); + ADTask adTask = TestHelpers.randomAdTask(); + adTaskCacheManager.put(adTask); + assertEquals(1, adTaskCacheManager.size()); + assertTrue(adTaskCacheManager.contains(adTask.getTaskId())); + assertTrue(adTaskCacheManager.containsTaskOfDetector(adTask.getDetectorId())); + assertNotNull(adTaskCacheManager.getRcfModel(adTask.getTaskId())); + assertNotNull(adTaskCacheManager.getShingle(adTask.getTaskId())); + assertNotNull(adTaskCacheManager.getThresholdModel(adTask.getTaskId())); + assertNotNull(adTaskCacheManager.getThresholdModelTrainingData(adTask.getTaskId())); + assertFalse(adTaskCacheManager.isThresholdModelTrained(adTask.getTaskId())); + adTaskCacheManager.remove(adTask.getTaskId()); + assertEquals(0, adTaskCacheManager.size()); + } + + public void testPutDuplicateTask() throws IOException { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(true); + ADTask adTask1 = TestHelpers.randomAdTask(); + adTaskCacheManager.put(adTask1); + assertEquals(1, adTaskCacheManager.size()); + 
IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> adTaskCacheManager.put(adTask1)); + assertEquals("AD task is already running", e1.getMessage()); + + ADTask adTask2 = TestHelpers + .randomAdTask( + randomAlphaOfLength(5), + ADTaskState.INIT, + adTask1.getExecutionEndTime(), + adTask1.getStoppedBy(), + adTask1.getDetectorId(), + adTask1.getDetector() + ); + IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> adTaskCacheManager.put(adTask2)); + assertEquals("There is one task executing for detector", e2.getMessage()); + } + + public void testPutTaskWithMemoryExceedLimit() { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(false); + LimitExceededException exception = expectThrows( + LimitExceededException.class, + () -> adTaskCacheManager.put(TestHelpers.randomAdTask()) + ); + assertEquals("No enough memory to run detector", exception.getMessage()); + } + + public void testThresholdModelTrained() throws IOException { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(true); + ADTask adTask = TestHelpers.randomAdTask(); + adTaskCacheManager.put(adTask); + assertEquals(1, adTaskCacheManager.size()); + int size = adTaskCacheManager.addThresholdModelTrainingData(adTask.getTaskId(), randomDouble(), randomDouble()); + long cacheSize = adTaskCacheManager.trainingDataMemorySize(size); + adTaskCacheManager.setThresholdModelTrained(adTask.getTaskId(), false); + verify(memoryTracker, never()).releaseMemory(anyLong(), anyBoolean(), eq(HISTORICAL_SINGLE_ENTITY_DETECTOR)); + adTaskCacheManager.setThresholdModelTrained(adTask.getTaskId(), true); + verify(memoryTracker, times(1)).releaseMemory(eq(cacheSize), eq(true), eq(HISTORICAL_SINGLE_ENTITY_DETECTOR)); + } + + public void testCancel() throws IOException { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(true); + ADTask adTask = TestHelpers.randomAdTask(); + adTaskCacheManager.put(adTask); + assertEquals(1, adTaskCacheManager.size()); + assertEquals(false, adTaskCacheManager.isCancelled(adTask.getTaskId())); + String cancelReason = randomAlphaOfLength(10); + String userName = randomAlphaOfLength(5); + adTaskCacheManager.cancel(adTask.getTaskId(), cancelReason, userName); + assertEquals(true, adTaskCacheManager.isCancelled(adTask.getTaskId())); + assertEquals(cancelReason, adTaskCacheManager.getCancelReason(adTask.getTaskId())); + assertEquals(userName, adTaskCacheManager.getCancelledBy(adTask.getTaskId())); + } + + public void testTaskNotExist() { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> adTaskCacheManager.getRcfModel(randomAlphaOfLength(5)) + ); + assertEquals("AD task not in cache", e.getMessage()); + } + + public void testRemoveTaskWhichNotExist() { + adTaskCacheManager.remove(randomAlphaOfLength(5)); + verify(memoryTracker, never()).releaseMemory(anyLong(), anyBoolean(), eq(HISTORICAL_SINGLE_ENTITY_DETECTOR)); + } + + public void testExceedRunningTaskLimit() throws IOException { + when(memoryTracker.canAllocateReserved(anyString(), anyLong())).thenReturn(true); + adTaskCacheManager.put(TestHelpers.randomAdTask()); + adTaskCacheManager.put(TestHelpers.randomAdTask()); + assertEquals(2, adTaskCacheManager.size()); + LimitExceededException e = expectThrows(LimitExceededException.class, () -> adTaskCacheManager.put(TestHelpers.randomAdTask())); + assertEquals("Can't run more than 2 historical detectors per data node", e.getMessage()); + } +} From 
0654957718dc6eb97aee2a6bc3479d5ce1f8e63d Mon Sep 17 00:00:00 2001 From: Kaituo Li Date: Mon, 28 Dec 2020 12:51:21 -0800 Subject: [PATCH 10/13] Fix the profile API returns prematurely. (#340) * Fix the profile API returns prematurely. MultiResponsesDelegateActionListener helps send multiple requests asynchronously and return one final response altogether. While waiting for all inflight requests, the method respondImmediately and failImmediately can stop waiting and return immediately. While these two methods are convenient, it is easy to misuse them and cause bugs (see https://github.com/opendistro-for-elasticsearch/anomaly-detection/issues/339 for example). This PR removes the method respondImmediately and failImmediately and refactor profile runner to avoid using them. This PR also stops printing out the unknown entity state since it is not useful. Testing done: 1. Added unit tests to verify the bug fix. 2. Manual tests to run profile calls for single-stream and multi-entity detectors for different phases of the detector lifecycle (disabled, init, running). Verified profile results make sense. --- .../ad/AnomalyDetectorProfileRunner.java | 100 ++++--- .../ad/EntityProfileRunner.java | 65 +++-- .../ad/constant/CommonErrorMessages.java | 1 + .../ad/model/EntityProfile.java | 6 +- .../MultiResponsesDelegateActionListener.java | 16 -- .../ad/AbstractProfileRunnerTests.java | 158 +++++++++++ .../ad/AnomalyDetectorProfileRunnerTests.java | 138 +++------ .../ad/TestHelpers.java | 30 +- .../ad/model/EntityProfileTests.java | 59 ++++ ...ndexAnomalyDetectorActionHandlerTests.java | 31 +-- .../metrics/CardinalityProfileTests.java | 261 ++++++++++++++++++ 11 files changed, 631 insertions(+), 234 deletions(-) create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/AbstractProfileRunnerTests.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfileTests.java create mode 100644 src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java index 331687f5..535deb68 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java @@ -99,7 +99,6 @@ public void profile(String detectorId, ActionListener listener, listener.onFailure(new InvalidParameterException(CommonErrorMessages.EMPTY_PROFILES_COLLECT)); return; } - calculateTotalResponsesToWait(detectorId, profilesToCollect, listener); } @@ -118,10 +117,38 @@ private void calculateTotalResponsesToWait( ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, xContentParser.nextToken(), xContentParser); AnomalyDetector detector = AnomalyDetector.parse(xContentParser, detectorId); + + prepareProfile(detector, listener, profilesToCollect); + } catch (Exception e) { + listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId, e)); + } + } else { + listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId)); + } + }, exception -> listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId, exception)))); + } + + private void prepareProfile( + AnomalyDetector detector, + ActionListener listener, + Set profilesToCollect + ) { + 
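/*
 * Minimal illustrative sketch (not part of this patch) of the counting pattern the real
 * MultiResponsesDelegateActionListener relies on, as described in the commit message above:
 * the caller declares up front how many responses it expects, every branch reports exactly
 * once, and the delegate is notified only after the last in-flight call comes back. All
 * names below are assumptions; the real class merges the saved responses into a single
 * response object instead of handing back a List.
 */
final class CountingDelegateListenerSketch<T> {
    interface Delegate<T> {
        void onResponse(java.util.List<T> consolidated);

        void onFailure(Exception e);
    }

    private final Delegate<T> delegate;
    private final int maxResponses;
    private final java.util.concurrent.atomic.AtomicInteger received = new java.util.concurrent.atomic.AtomicInteger();
    private final java.util.List<T> savedResponses = java.util.Collections.synchronizedList(new java.util.ArrayList<>());
    private final java.util.List<String> exceptions = java.util.Collections.synchronizedList(new java.util.ArrayList<>());

    CountingDelegateListenerSketch(Delegate<T> delegate, int maxResponses) {
        this.delegate = delegate;
        this.maxResponses = maxResponses;
    }

    void onResponse(T response) {
        savedResponses.add(response);
        maybeFinish();
    }

    void onFailure(Exception e) {
        // Failures are recorded but we still wait for the remaining in-flight calls;
        // returning to the caller right here is what the removed failImmediately and
        // respondImmediately methods allowed, and what made them easy to misuse.
        exceptions.add(e.getMessage());
        maybeFinish();
    }

    private void maybeFinish() {
        if (received.incrementAndGet() != maxResponses) {
            return; // still waiting for other in-flight responses
        }
        if (exceptions.isEmpty()) {
            delegate.onResponse(savedResponses);
        } else {
            delegate.onFailure(new RuntimeException("Exceptions: " + exceptions));
        }
    }
}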
String detectorId = detector.getDetectorId(); + GetRequest getRequest = new GetRequest(ANOMALY_DETECTOR_JOB_INDEX, detectorId); + client.get(getRequest, ActionListener.wrap(getResponse -> { + if (getResponse != null && getResponse.isExists()) { + try ( + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString()) + ) { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + long enabledTimeMs = job.getEnabledTime().toEpochMilli(); + boolean isMultiEntityDetector = detector.isMultientityDetector(); int totalResponsesToWait = 0; - if (profilesToCollect.contains(DetectorProfileName.ERROR)) { totalResponsesToWait++; } @@ -158,50 +185,20 @@ private void calculateTotalResponsesToWait( new MultiResponsesDelegateActionListener( listener, totalResponsesToWait, - "Fail to fetch profile for " + detectorId, + CommonErrorMessages.FAIL_FETCH_ERR_MSG + detectorId, false ); - prepareProfile(detector, delegateListener, profilesToCollect); - } catch (Exception e) { - listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId, e)); - } - } else { - listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId)); - } - }, exception -> listener.onFailure(new RuntimeException(CommonErrorMessages.FAIL_TO_FIND_DETECTOR_MSG + detectorId, exception)))); - } - - private void prepareProfile( - AnomalyDetector detector, - MultiResponsesDelegateActionListener listener, - Set profilesToCollect - ) { - String detectorId = detector.getDetectorId(); - GetRequest getRequest = new GetRequest(ANOMALY_DETECTOR_JOB_INDEX, detectorId); - client.get(getRequest, ActionListener.wrap(getResponse -> { - if (getResponse != null && getResponse.isExists()) { - try ( - XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString()) - ) { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); - long enabledTimeMs = job.getEnabledTime().toEpochMilli(); - if (profilesToCollect.contains(DetectorProfileName.ERROR)) { GetRequest getStateRequest = new GetRequest(DetectorInternalState.DETECTOR_STATE_INDEX, detectorId); - client.get(getStateRequest, onGetDetectorState(listener, detectorId, enabledTimeMs)); + client.get(getStateRequest, onGetDetectorState(delegateListener, detectorId, enabledTimeMs)); } - boolean isMultiEntityDetector = detector.isMultientityDetector(); - // total number of listeners we need to define. 
Needed by MultiResponsesDelegateActionListener to decide // when to consolidate results and return to users if (isMultiEntityDetector) { if (profilesToCollect.contains(DetectorProfileName.TOTAL_ENTITIES)) { - profileEntityStats(listener, detector); + profileEntityStats(delegateListener, detector); } if (profilesToCollect.contains(DetectorProfileName.COORDINATING_NODE) || profilesToCollect.contains(DetectorProfileName.SHINGLE_SIZE) @@ -210,24 +207,24 @@ private void prepareProfile( || profilesToCollect.contains(DetectorProfileName.ACTIVE_ENTITIES) || profilesToCollect.contains(DetectorProfileName.INIT_PROGRESS) || profilesToCollect.contains(DetectorProfileName.STATE)) { - profileModels(detector, profilesToCollect, job, true, listener); + profileModels(detector, profilesToCollect, job, true, delegateListener); } } else { if (profilesToCollect.contains(DetectorProfileName.STATE) || profilesToCollect.contains(DetectorProfileName.INIT_PROGRESS)) { - profileStateRelated(detector, listener, job.isEnabled(), profilesToCollect); + profileStateRelated(detector, delegateListener, job.isEnabled(), profilesToCollect); } if (profilesToCollect.contains(DetectorProfileName.COORDINATING_NODE) || profilesToCollect.contains(DetectorProfileName.SHINGLE_SIZE) || profilesToCollect.contains(DetectorProfileName.TOTAL_SIZE_IN_BYTES) || profilesToCollect.contains(DetectorProfileName.MODELS)) { - profileModels(detector, profilesToCollect, job, false, listener); + profileModels(detector, profilesToCollect, job, false, delegateListener); } } - } catch (IOException | XContentParseException | NullPointerException e) { - logger.error(e); - listener.failImmediately(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + } catch (Exception e) { + logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + listener.onFailure(e); } } else { onGetDetectorForPrepare(listener, profilesToCollect); @@ -261,20 +258,19 @@ private void profileEntityStats(MultiResponsesDelegateActionListener { listener.failImmediately(CommonErrorMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getDetectorId()); }) - ); + }, searchException -> { + logger.warn(CommonErrorMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getDetectorId()); + listener.onFailure(searchException); + })); } } - private void onGetDetectorForPrepare( - MultiResponsesDelegateActionListener listener, - Set profiles - ) { + private void onGetDetectorForPrepare(ActionListener listener, Set profiles) { DetectorProfile.Builder profileBuilder = new DetectorProfile.Builder(); if (profiles.contains(DetectorProfileName.STATE)) { profileBuilder.state(DetectorState.DISABLED); } - listener.respondImmediately(profileBuilder.build()); + listener.onResponse(profileBuilder.build()); } /** @@ -340,8 +336,8 @@ private ActionListener onGetDetectorState( listener.onResponse(profileBuilder.build()); } catch (IOException | XContentParseException | NullPointerException e) { - logger.error(e); - listener.failImmediately(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + listener.onFailure(e); } } else { // detector state for this detector does not exist @@ -475,7 +471,7 @@ private ActionListener onInittedEver( "Fail to find any anomaly result with anomaly score larger than 0 after AD job enabled time for detector {}", detector.getDetectorId() ); - listener.failImmediately(new RuntimeException("Fail to find detector state: " + detector.getDetectorId(), exception)); + listener.onFailure(exception); } }); } @@ -523,7 +519,7 @@ private 
ActionListener onPollRCFUpdates( new ParameterizedMessage("Fail to get init progress through messaging for {}", detector.getDetectorId()), exception ); - listener.failImmediately(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG + detector.getDetectorId(), exception); + listener.onFailure(exception); } }); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/EntityProfileRunner.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/EntityProfileRunner.java index 97e82924..3f03ebfc 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/EntityProfileRunner.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/EntityProfileRunner.java @@ -20,7 +20,6 @@ import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.CATEGORY_FIELD_LIMIT; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import java.io.IOException; import java.security.InvalidParameterException; import java.util.List; import java.util.Optional; @@ -35,7 +34,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; @@ -113,25 +111,7 @@ public void profile( new InvalidParameterException(CommonErrorMessages.CATEGORICAL_FIELD_NUMBER_SURPASSED + CATEGORY_FIELD_LIMIT) ); } else { - int totalResponsesToWait = 0; - if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS) - || profilesToCollect.contains(EntityProfileName.STATE)) { - totalResponsesToWait++; - } - if (profilesToCollect.contains(EntityProfileName.ENTITY_INFO)) { - totalResponsesToWait++; - } - if (profilesToCollect.contains(EntityProfileName.MODELS)) { - totalResponsesToWait++; - } - MultiResponsesDelegateActionListener delegateListener = - new MultiResponsesDelegateActionListener( - listener, - totalResponsesToWait, - "Fail to fetch profile for " + entityValue + " of detector " + detectorId, - false - ); - prepareEntityProfile(delegateListener, detectorId, entityValue, profilesToCollect, detector, categoryField.get(0)); + prepareEntityProfile(listener, detectorId, entityValue, profilesToCollect, detector, categoryField.get(0)); } } catch (Exception t) { listener.onFailure(t); @@ -143,7 +123,7 @@ public void profile( } private void prepareEntityProfile( - MultiResponsesDelegateActionListener delegateListener, + ActionListener listener, String detectorId, String entityValue, Set profilesToCollect, @@ -158,8 +138,8 @@ private void prepareEntityProfile( request, ActionListener .wrap( - r -> getJob(detectorId, categoryField, entityValue, profilesToCollect, detector, r, delegateListener), - delegateListener::failImmediately + r -> getJob(detectorId, categoryField, entityValue, profilesToCollect, detector, r, listener), + listener::onFailure ) ); } @@ -171,7 +151,7 @@ private void getJob( Set profilesToCollect, AnomalyDetector detector, EntityProfileResponse entityProfileResponse, - MultiResponsesDelegateActionListener delegateListener + ActionListener listener ) { GetRequest getRequest = new GetRequest(ANOMALY_DETECTOR_JOB_INDEX, detectorId); client.get(getRequest, ActionListener.wrap(getResponse -> { @@ -184,6 +164,25 @@ private void getJob( ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), 
parser); AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + int totalResponsesToWait = 0; + if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS) + || profilesToCollect.contains(EntityProfileName.STATE)) { + totalResponsesToWait++; + } + if (profilesToCollect.contains(EntityProfileName.ENTITY_INFO)) { + totalResponsesToWait++; + } + if (profilesToCollect.contains(EntityProfileName.MODELS)) { + totalResponsesToWait++; + } + MultiResponsesDelegateActionListener delegateListener = + new MultiResponsesDelegateActionListener( + listener, + totalResponsesToWait, + CommonErrorMessages.FAIL_FETCH_ERR_MSG + entityValue + " of detector " + detectorId, + false + ); + if (profilesToCollect.contains(EntityProfileName.MODELS)) { EntityProfile.Builder builder = new EntityProfile.Builder(categoryField, entityValue); if (false == job.isEnabled()) { @@ -233,20 +232,20 @@ private void getJob( delegateListener.onResponse(builder.build()); })); } - } catch (IOException | XContentParseException | NullPointerException e) { - logger.error(e); - delegateListener.failImmediately(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + } catch (Exception e) { + logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG, e); + listener.onFailure(e); } } else { - sendUnknownState(profilesToCollect, categoryField, entityValue, true, delegateListener); + sendUnknownState(profilesToCollect, categoryField, entityValue, true, listener); } }, exception -> { if (exception instanceof IndexNotFoundException) { logger.info(exception.getMessage()); - sendUnknownState(profilesToCollect, categoryField, entityValue, true, delegateListener); + sendUnknownState(profilesToCollect, categoryField, entityValue, true, listener); } else { logger.error(CommonErrorMessages.FAIL_TO_GET_PROFILE_MSG + detectorId, exception); - delegateListener.failImmediately(exception); + listener.onFailure(exception); } })); } @@ -285,14 +284,14 @@ private void sendUnknownState( String categoryField, String entityValue, boolean immediate, - MultiResponsesDelegateActionListener delegateListener + ActionListener delegateListener ) { EntityProfile.Builder builder = new EntityProfile.Builder(categoryField, entityValue); if (profilesToCollect.contains(EntityProfileName.STATE)) { builder.state(EntityState.UNKNOWN); } if (immediate) { - delegateListener.respondImmediately(builder.build()); + delegateListener.onResponse(builder.build()); } else { delegateListener.onResponse(builder.build()); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/constant/CommonErrorMessages.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/constant/CommonErrorMessages.java index e5446ac3..253c6b30 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/constant/CommonErrorMessages.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/constant/CommonErrorMessages.java @@ -34,4 +34,5 @@ public class CommonErrorMessages { public static String FAIL_TO_GET_TOTAL_ENTITIES = "Failed to get total entities for detector "; public static String CATEGORICAL_FIELD_NUMBER_SURPASSED = "We don't support categorical fields more than "; public static String EMPTY_PROFILES_COLLECT = "profiles to collect are missing or invalid"; + public static String FAIL_FETCH_ERR_MSG = "Fail to fetch profile for "; } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfile.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfile.java index a74070be..ae30f506 100644 --- 
a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfile.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfile.java @@ -214,7 +214,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (modelProfile != null) { builder.field(CommonName.MODEL, modelProfile); } - if (state != null) { + if (state != null && state != EntityState.UNKNOWN) { builder.field(CommonName.STATE, state); } builder.endObject(); @@ -263,7 +263,7 @@ public String toString() { if (modelProfile != null) { builder.append(CommonName.MODELS, modelProfile); } - if (state != null) { + if (state != null && state != EntityState.UNKNOWN) { builder.append(CommonName.STATE, state); } return builder.toString(); @@ -330,7 +330,7 @@ public void merge(Mergeable other) { if (otherProfile.modelProfile != null) { this.modelProfile = otherProfile.modelProfile; } - if (otherProfile.getState() != null) { + if (otherProfile.getState() != null && otherProfile.getState() != EntityState.UNKNOWN) { this.state = otherProfile.getState(); } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiResponsesDelegateActionListener.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiResponsesDelegateActionListener.java index 52c9380a..f580b264 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiResponsesDelegateActionListener.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiResponsesDelegateActionListener.java @@ -109,20 +109,4 @@ private void handleSavedResponses() { this.delegate.onResponse(response0); } } - - public void failImmediately(Exception e) { - this.delegate.onFailure(new RuntimeException(finalErrorMsg, e)); - } - - public void failImmediately(String errMsg) { - this.delegate.onFailure(new RuntimeException(errMsg)); - } - - public void failImmediately(String errMsg, Exception e) { - this.delegate.onFailure(new RuntimeException(errMsg, e)); - } - - public void respondImmediately(T o) { - this.delegate.onResponse(o); - } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AbstractProfileRunnerTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AbstractProfileRunnerTests.java new file mode 100644 index 00000000..5f3b4900 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AbstractProfileRunnerTests.java @@ -0,0 +1,158 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.elasticsearch.Version; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.transport.TransportAddress; +import org.junit.Before; +import org.junit.BeforeClass; + +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.DetectorProfileName; +import com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; + +public class AbstractProfileRunnerTests extends AbstractADTest { + protected enum DetectorStatus { + INDEX_NOT_EXIST, + NO_DOC, + EXIST + } + + protected enum JobStatus { + INDEX_NOT_EXIT, + DISABLED, + ENABLED + } + + protected enum ErrorResultStatus { + INDEX_NOT_EXIT, + NO_ERROR, + SHINGLE_ERROR, + STOPPED_ERROR, + NULL_POINTER_EXCEPTION + } + + protected AnomalyDetectorProfileRunner runner; + protected Client client; + protected DiscoveryNodeFilterer nodeFilter; + protected AnomalyDetector detector; + protected ClusterService clusterService; + + protected static Set stateOnly; + protected static Set stateNError; + protected static Set modelProfile; + protected static Set stateInitProgress; + protected static Set totalInitProgress; + protected static Set initProgressErrorProfile; + + protected static String noFullShingleError = "No full shingle in current detection window"; + protected static String stoppedError = + "Stopped detector as job failed consecutively for more than 3 times: Having trouble querying data." 
+ + " Maybe all of your features have been disabled."; + + protected static String clusterName; + protected static DiscoveryNode discoveryNode1; + + protected int requiredSamples; + protected int neededSamples; + + // profile model related + protected String node1; + protected String nodeName1; + + protected String node2; + protected String nodeName2; + protected DiscoveryNode discoveryNode2; + + protected long modelSize; + protected String model1Id; + protected String model0Id; + + protected int shingleSize; + + protected int detectorIntervalMin; + protected GetResponse detectorGetReponse; + protected String messaingExceptionError = "blah"; + + @BeforeClass + public static void setUpOnce() { + stateOnly = new HashSet(); + stateOnly.add(DetectorProfileName.STATE); + stateNError = new HashSet(); + stateNError.add(DetectorProfileName.ERROR); + stateNError.add(DetectorProfileName.STATE); + stateInitProgress = new HashSet(); + stateInitProgress.add(DetectorProfileName.INIT_PROGRESS); + stateInitProgress.add(DetectorProfileName.STATE); + modelProfile = new HashSet( + Arrays + .asList( + DetectorProfileName.SHINGLE_SIZE, + DetectorProfileName.MODELS, + DetectorProfileName.COORDINATING_NODE, + DetectorProfileName.TOTAL_SIZE_IN_BYTES + ) + ); + totalInitProgress = new HashSet( + Arrays.asList(DetectorProfileName.TOTAL_ENTITIES, DetectorProfileName.INIT_PROGRESS) + ); + initProgressErrorProfile = new HashSet( + Arrays.asList(DetectorProfileName.INIT_PROGRESS, DetectorProfileName.ERROR) + ); + clusterName = "test-cluster-name"; + discoveryNode1 = new DiscoveryNode( + "nodeName1", + "node1", + new TransportAddress(TransportAddress.META_ADDRESS, 9300), + emptyMap(), + emptySet(), + Version.CURRENT + ); + } + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + client = mock(Client.class); + nodeFilter = mock(DiscoveryNodeFilterer.class); + clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("test cluster")).build()); + + requiredSamples = 128; + neededSamples = 5; + + runner = new AnomalyDetectorProfileRunner(client, xContentRegistry(), nodeFilter, requiredSamples); + + detectorIntervalMin = 3; + detectorGetReponse = mock(GetResponse.class); + } +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunnerTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunnerTests.java index fd92d61e..bc40ab6d 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunnerTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunnerTests.java @@ -21,7 +21,6 @@ import static java.util.Collections.emptySet; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; @@ -30,7 +29,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -42,17 +40,12 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.transport.RemoteTransportException; -import org.junit.Before; -import org.junit.BeforeClass; import com.amazon.opendistroforelasticsearch.ad.common.exception.AnomalyDetectionException; import com.amazon.opendistroforelasticsearch.ad.common.exception.ResourceNotFoundException; @@ -72,96 +65,8 @@ import com.amazon.opendistroforelasticsearch.ad.transport.ProfileResponse; import com.amazon.opendistroforelasticsearch.ad.transport.RCFPollingAction; import com.amazon.opendistroforelasticsearch.ad.transport.RCFPollingResponse; -import com.amazon.opendistroforelasticsearch.ad.util.DiscoveryNodeFilterer; - -public class AnomalyDetectorProfileRunnerTests extends AbstractADTest { - private AnomalyDetectorProfileRunner runner; - private Client client; - private DiscoveryNodeFilterer nodeFilter; - private AnomalyDetector detector; - private ClusterService clusterService; - - private static Set stateOnly; - private static Set stateNError; - private static Set modelProfile; - private static Set stateInitProgress; - private static String noFullShingleError = "No full shingle in current detection window"; - private static String stoppedError = "Stopped detector as job failed consecutively for more than 3 times: Having trouble querying data." - + " Maybe all of your features have been disabled."; - - private int requiredSamples; - private int neededSamples; - - // profile model related - private String node1; - private String nodeName1; - private DiscoveryNode discoveryNode1; - - private String node2; - private String nodeName2; - private DiscoveryNode discoveryNode2; - - private long modelSize; - private String model1Id; - private String model0Id; - - private int shingleSize; - - private int detectorIntervalMin; - private GetResponse detectorGetReponse; - private String messaingExceptionError = "blah"; - - @BeforeClass - public static void setUpOnce() { - stateOnly = new HashSet(); - stateOnly.add(DetectorProfileName.STATE); - stateNError = new HashSet(); - stateNError.add(DetectorProfileName.ERROR); - stateNError.add(DetectorProfileName.STATE); - stateInitProgress = new HashSet(); - stateInitProgress.add(DetectorProfileName.INIT_PROGRESS); - stateInitProgress.add(DetectorProfileName.STATE); - modelProfile = new HashSet( - Arrays - .asList( - DetectorProfileName.SHINGLE_SIZE, - DetectorProfileName.MODELS, - DetectorProfileName.COORDINATING_NODE, - DetectorProfileName.TOTAL_SIZE_IN_BYTES - ) - ); - } - - @Override - @Before - public void setUp() throws Exception { - super.setUp(); - client = mock(Client.class); - nodeFilter = mock(DiscoveryNodeFilterer.class); - clusterService = mock(ClusterService.class); - when(clusterService.state()).thenReturn(ClusterState.builder(new ClusterName("test cluster")).build()); - - requiredSamples = 128; - neededSamples = 5; - - runner = new AnomalyDetectorProfileRunner(client, xContentRegistry(), nodeFilter, requiredSamples); - - detectorIntervalMin = 3; - detectorGetReponse = mock(GetResponse.class); - } - - enum DetectorStatus { - INDEX_NOT_EXIST, - NO_DOC, - EXIST - } - - enum JobStatus { - INDEX_NOT_EXIT, - DISABLED, - ENABLED - } +public class AnomalyDetectorProfileRunnerTests extends AbstractProfileRunnerTests { enum RCFPollingStatus { INIT_NOT_EXIT, REMOTE_INIT_NOT_EXIT, @@ -173,13 +78,14 @@ enum 
RCFPollingStatus { INITTING } - enum ErrorResultStatus { - INDEX_NOT_EXIT, - NO_ERROR, - SHINGLE_ERROR, - STOPPED_ERROR - } - + /** + * Convenience methods for single-stream detector profile tests set up + * @param detectorStatus Detector config status + * @param jobStatus Detector job status + * @param rcfPollingStatus RCF polling result status + * @param errorResultStatus Error result status + * @throws IOException when failing the getting request + */ @SuppressWarnings("unchecked") private void setUpClientGet( DetectorStatus detectorStatus, @@ -188,6 +94,7 @@ private void setUpClientGet( ErrorResultStatus errorResultStatus ) throws IOException { detector = TestHelpers.randomAnomalyDetectorWithInterval(new IntervalTimeConfiguration(detectorIntervalMin, ChronoUnit.MINUTES)); + doAnswer(invocation -> { Object[] args = invocation.getArguments(); GetRequest request = (GetRequest) args[0]; @@ -639,9 +546,9 @@ public void testInitNoUpdateNoIndex() throws IOException, InterruptedException { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { - logger.error(exception); + LOG.error(exception); for (StackTraceElement ste : exception.getStackTrace()) { - logger.info(ste); + LOG.info(ste); } assertTrue("Should not reach here ", false); inProgressLatch.countDown(); @@ -661,9 +568,9 @@ public void testInitNoIndex() throws IOException, InterruptedException { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { - logger.error(exception); + LOG.error(exception); for (StackTraceElement ste : exception.getStackTrace()) { - logger.info(ste); + LOG.info(ste); } assertTrue("Should not reach here ", false); inProgressLatch.countDown(); @@ -674,4 +581,21 @@ public void testInitNoIndex() throws IOException, InterruptedException { public void testInvalidRequiredSamples() { expectThrows(IllegalArgumentException.class, () -> new AnomalyDetectorProfileRunner(client, xContentRegistry(), nodeFilter, 0)); } + + public void testFailRCFPolling() throws IOException, InterruptedException { + setUpClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, RCFPollingStatus.EXCEPTION, ErrorResultStatus.NO_ERROR); + final CountDownLatch inProgressLatch = new CountDownLatch(1); + + runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + assertTrue("Should not reach here ", false); + inProgressLatch.countDown(); + }, exception -> { + assertTrue(exception instanceof RuntimeException); + // this means we don't exit with failImmediately. 
failImmediately can make we return early when there are other concurrent + // requests + assertTrue(exception.getMessage(), exception.getMessage().contains("Exceptions:")); + inProgressLatch.countDown(); + }), stateNError); + assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index f844a61b..a05b8329 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -29,8 +29,10 @@ import static org.powermock.api.mockito.PowerMockito.when; import java.io.IOException; +import java.nio.ByteBuffer; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -39,6 +41,7 @@ import java.util.Random; import java.util.concurrent.Callable; import java.util.function.Consumer; +import java.util.stream.IntStream; import org.apache.http.Header; import org.apache.http.HttpEntity; @@ -358,7 +361,7 @@ public static AnomalyDetector randomAnomalyDetectorWithInterval(TimeConfiguratio null, randomInt(), Instant.now().truncatedTo(ChronoUnit.SECONDS), - null, + categoryField, randomUser() ); } @@ -660,6 +663,24 @@ public static GetResponse createGetResponse(ToXContentObject o, String id, Strin ); } + public static GetResponse createBrokenGetResponse(String id, String indexName) throws IOException { + ByteBuffer[] buffers = new ByteBuffer[0]; + return new GetResponse( + new GetResult( + indexName, + MapperService.SINGLE_MAPPING_NAME, + id, + UNASSIGNED_SEQ_NO, + 0, + -1, + true, + BytesReference.fromByteBuffers(buffers), + Collections.emptyMap(), + Collections.emptyMap() + ) + ); + } + public static SearchResponse createSearchResponse(ToXContentObject o) throws IOException { XContentBuilder content = o.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); @@ -814,4 +835,11 @@ public static String toJsonString(ToXContentObject object) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); return TestHelpers.xContentBuilderToString(object.toXContent(builder, ToXContent.EMPTY_PARAMS)); } + + public static SearchHits createSearchHits(int totalHits) { + List hitList = new ArrayList<>(); + IntStream.range(0, totalHits).forEach(i -> hitList.add(new SearchHit(i))); + SearchHit[] hitArray = new SearchHit[hitList.size()]; + return new SearchHits(hitList.toArray(hitArray), new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1.0F); + } } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfileTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfileTests.java new file mode 100644 index 00000000..5e195f7b --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/EntityProfileTests.java @@ -0,0 +1,59 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +import java.io.IOException; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import test.com.amazon.opendistroforelasticsearch.ad.util.JsonDeserializer; + +import com.amazon.opendistroforelasticsearch.ad.AbstractADTest; +import com.amazon.opendistroforelasticsearch.ad.common.exception.JsonPathNotFoundException; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonName; + +public class EntityProfileTests extends AbstractADTest { + public void testMerge() { + EntityProfile profile1 = new EntityProfile(null, null, null, -1, -1, null, null, EntityState.INIT); + + EntityProfile profile2 = new EntityProfile(null, null, null, -1, -1, null, null, EntityState.UNKNOWN); + + profile1.merge(profile2); + assertEquals(profile1.getState(), EntityState.INIT); + } + + public void testToXContent() throws IOException, JsonPathNotFoundException { + EntityProfile profile1 = new EntityProfile(null, null, null, -1, -1, null, null, EntityState.INIT); + + XContentBuilder builder = jsonBuilder(); + profile1.toXContent(builder, ToXContent.EMPTY_PARAMS); + String json = Strings.toString(builder); + + assertEquals("INIT", JsonDeserializer.getTextValue(json, CommonName.STATE)); + + EntityProfile profile2 = new EntityProfile(null, null, null, -1, -1, null, null, EntityState.UNKNOWN); + + builder = jsonBuilder(); + profile2.toXContent(builder, ToXContent.EMPTY_PARAMS); + json = Strings.toString(builder); + + assertTrue(false == JsonDeserializer.hasChildNode(json, CommonName.STATE)); + } +} diff --git a/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java b/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java index 30949a86..f65f218b 100644 --- a/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java +++ b/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java @@ -27,13 +27,9 @@ import static org.mockito.Mockito.when; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.concurrent.TimeUnit; -import java.util.stream.IntStream; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -53,8 +49,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.AfterClass; @@ -182,13 +176,6 @@ public void setUp() throws Exception { ); } - private SearchHits createSearchHits(int totalHits) { - List hitList = new ArrayList<>(); - IntStream.range(0, totalHits).forEach(i -> hitList.add(new SearchHit(i))); - SearchHit[] hitArray = new SearchHit[hitList.size()]; - return new SearchHits(hitList.toArray(hitArray), new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 
1.0F); - } - public void testTwoCategoricalFields() throws IOException { expectThrows( IllegalArgumentException.class, @@ -200,7 +187,7 @@ public void testTwoCategoricalFields() throws IOException { public void testNoCategoricalField() throws IOException { SearchResponse mockResponse = mock(SearchResponse.class); int totalHits = 1001; - when(mockResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(mockResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); doAnswer(invocation -> { Object[] args = invocation.getArguments(); assertTrue(String.format("The size of args is %d. Its content is %s", args.length, Arrays.toString(args)), args.length == 2); @@ -250,7 +237,7 @@ public void testTextField() throws IOException { SearchResponse detectorResponse = mock(SearchResponse.class); int totalHits = 9; - when(detectorResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(detectorResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); // extend NodeClient since its execute method is final and mockito does not allow to mock final methods // we can also use spy to overstep the final methods @@ -313,11 +300,11 @@ private void testValidTypeTemplate(String filedTypeName) throws IOException { SearchResponse detectorResponse = mock(SearchResponse.class); int totalHits = 9; - when(detectorResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(detectorResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); SearchResponse userIndexResponse = mock(SearchResponse.class); int userIndexHits = 0; - when(userIndexResponse.getHits()).thenReturn(createSearchHits(userIndexHits)); + when(userIndexResponse.getHits()).thenReturn(TestHelpers.createSearchHits(userIndexHits)); // extend NodeClient since its execute method is final and mockito does not allow to mock final methods // we can also use spy to overstep the final methods @@ -397,14 +384,14 @@ private void testUpdateTemplate(String fieldTypeName) throws IOException { SearchResponse detectorResponse = mock(SearchResponse.class); int totalHits = 9; - when(detectorResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(detectorResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); GetResponse getDetectorResponse = TestHelpers .createGetResponse(detector, detector.getDetectorId(), AnomalyDetector.ANOMALY_DETECTORS_INDEX); SearchResponse userIndexResponse = mock(SearchResponse.class); int userIndexHits = 0; - when(userIndexResponse.getHits()).thenReturn(createSearchHits(userIndexHits)); + when(userIndexResponse.getHits()).thenReturn(TestHelpers.createSearchHits(userIndexHits)); // extend NodeClient since its execute method is final and mockito does not allow to mock final methods // we can also use spy to overstep the final methods @@ -496,7 +483,7 @@ public void testMoreThanTenMultiEntityDetectors() throws IOException { int totalHits = 11; - when(mockResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(mockResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); doAnswer(invocation -> { Object[] args = invocation.getArguments(); @@ -530,7 +517,7 @@ public void testTenMultiEntityDetectorsUpdateSingleEntityAdToMulti() throws IOEx .createGetResponse(existingDetector, existingDetector.getDetectorId(), AnomalyDetector.ANOMALY_DETECTORS_INDEX); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(createSearchHits(totalHits)); + 
when(searchResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); doAnswer(invocation -> { Object[] args = invocation.getArguments(); @@ -602,7 +589,7 @@ public void testTenMultiEntityDetectorsUpdateExistingMultiEntityAd() throws IOEx .createGetResponse(detector, detector.getDetectorId(), AnomalyDetector.ANOMALY_DETECTORS_INDEX); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(createSearchHits(totalHits)); + when(searchResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); doAnswer(invocation -> { Object[] args = invocation.getArguments(); diff --git a/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java b/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java new file mode 100644 index 00000000..0be67df3 --- /dev/null +++ b/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java @@ -0,0 +1,261 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package org.elasticsearch.search.aggregations.metrics; + +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; + +import com.amazon.opendistroforelasticsearch.ad.AbstractProfileRunnerTests; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonName; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; +import com.amazon.opendistroforelasticsearch.ad.model.DetectorInternalState; +import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.transport.ProfileAction; +import 
com.amazon.opendistroforelasticsearch.ad.transport.ProfileNodeResponse; +import com.amazon.opendistroforelasticsearch.ad.transport.ProfileResponse; +import com.carrotsearch.hppc.BitMixer; + +/** + * Run tests in ES package since InternalCardinality has only package private constructors + * and we cannot mock it since it is a final class. + * + */ +public class CardinalityProfileTests extends AbstractProfileRunnerTests { + enum ADResultStatus { + NO_RESULT, + EXCEPTION + } + + enum CardinalityStatus { + EXCEPTION, + NORMAL + } + + @SuppressWarnings("unchecked") + private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus jobStatus, ErrorResultStatus errorResultStatus) + throws IOException { + detector = TestHelpers + .randomAnomalyDetectorWithInterval(new IntervalTimeConfiguration(detectorIntervalMin, ChronoUnit.MINUTES), true); + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + GetRequest request = (GetRequest) args[0]; + ActionListener listener = (ActionListener) args[1]; + + if (request.index().equals(ANOMALY_DETECTORS_INDEX)) { + switch (detectorStatus) { + case EXIST: + listener + .onResponse( + TestHelpers.createGetResponse(detector, detector.getDetectorId(), AnomalyDetector.ANOMALY_DETECTORS_INDEX) + ); + break; + default: + assertTrue("should not reach here", false); + break; + } + } else if (request.index().equals(ANOMALY_DETECTOR_JOB_INDEX)) { + AnomalyDetectorJob job = null; + switch (jobStatus) { + case ENABLED: + job = TestHelpers.randomAnomalyDetectorJob(true); + listener + .onResponse( + TestHelpers.createGetResponse(job, detector.getDetectorId(), AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX) + ); + break; + default: + assertTrue("should not reach here", false); + break; + } + } else if (request.index().equals(DetectorInternalState.DETECTOR_STATE_INDEX)) { + switch (errorResultStatus) { + case NO_ERROR: + break; + case NULL_POINTER_EXCEPTION: + GetResponse response = mock(GetResponse.class); + when(response.isExists()).thenReturn(true); + doThrow(NullPointerException.class).when(response).getSourceAsString(); + listener.onResponse(response); + break; + default: + assertTrue("should not reach here", false); + break; + } + } + return null; + }).when(client).get(any(), any()); + } + + @SuppressWarnings("unchecked") + private void setUpMultiEntityClientSearch(ADResultStatus resultStatus, CardinalityStatus cardinalityStatus) { + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + ActionListener listener = (ActionListener) args[1]; + SearchRequest request = (SearchRequest) args[0]; + if (request.indices()[0].equals(CommonName.ANOMALY_RESULT_INDEX_ALIAS)) { + switch (resultStatus) { + case NO_RESULT: + SearchResponse mockResponse = mock(SearchResponse.class); + when(mockResponse.getHits()).thenReturn(TestHelpers.createSearchHits(0)); + listener.onResponse(mockResponse); + break; + case EXCEPTION: + listener.onFailure(new RuntimeException()); + break; + default: + assertTrue("should not reach here", false); + break; + } + } else { + switch (cardinalityStatus) { + case EXCEPTION: + listener.onFailure(new RuntimeException()); + break; + case NORMAL: + SearchResponse response = mock(SearchResponse.class); + List aggs = new ArrayList<>(1); + HyperLogLogPlusPlus hyperLogLog = new HyperLogLogPlusPlus( + AbstractHyperLogLog.MIN_PRECISION, + BigArrays.NON_RECYCLING_INSTANCE, + 0 + ); + for (int i = 0; i < 100; i++) { + hyperLogLog.collect(0, BitMixer.mix64(randomIntBetween(1, 100))); + } + aggs.add(new 
InternalCardinality(CommonName.TOTAL_ENTITIES, hyperLogLog, new HashMap<>())); + when(response.getAggregations()).thenReturn(InternalAggregations.from(aggs)); + listener.onResponse(response); + break; + default: + assertTrue("should not reach here", false); + break; + } + + } + + return null; + }).when(client).search(any(), any()); + } + + @SuppressWarnings("unchecked") + private void setUpProfileAction() { + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + + ActionListener listener = (ActionListener) args[2]; + + ProfileNodeResponse profileNodeResponse1 = new ProfileNodeResponse(discoveryNode1, new HashMap<>(), shingleSize, 0, 0); + List profileNodeResponses = Arrays.asList(profileNodeResponse1); + listener.onResponse(new ProfileResponse(new ClusterName(clusterName), profileNodeResponses, Collections.emptyList())); + + return null; + }).when(client).execute(eq(ProfileAction.INSTANCE), any(), any()); + } + + public void testFailGetEntityStats() throws IOException, InterruptedException { + setUpMultiEntityClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, ErrorResultStatus.NO_ERROR); + setUpMultiEntityClientSearch(ADResultStatus.NO_RESULT, CardinalityStatus.EXCEPTION); + setUpProfileAction(); + + final CountDownLatch inProgressLatch = new CountDownLatch(1); + + runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + assertTrue("Should not reach here ", false); + inProgressLatch.countDown(); + }, exception -> { + assertTrue(exception instanceof RuntimeException); + // this means we don't exit with failImmediately. failImmediately can make we return early when there are other concurrent + // requests + assertTrue(exception.getMessage(), exception.getMessage().contains("Exceptions:")); + inProgressLatch.countDown(); + + }), totalInitProgress); + + assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); + } + + public void testFailGetState() throws IOException, InterruptedException { + setUpMultiEntityClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, ErrorResultStatus.NULL_POINTER_EXCEPTION); + setUpMultiEntityClientSearch(ADResultStatus.NO_RESULT, CardinalityStatus.NORMAL); + setUpProfileAction(); + + final CountDownLatch inProgressLatch = new CountDownLatch(1); + + runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + assertTrue("Should not reach here ", false); + inProgressLatch.countDown(); + }, exception -> { + assertTrue(exception instanceof RuntimeException); + // this means we don't exit with failImmediately. failImmediately can make we return early when there are other concurrent + // requests + assertTrue(exception.getMessage(), exception.getMessage().contains("Exceptions:")); + inProgressLatch.countDown(); + + }), initProgressErrorProfile); + + assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); + } + + public void testFaiConfirmInitted() throws IOException, InterruptedException { + setUpMultiEntityClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, ErrorResultStatus.NO_ERROR); + setUpMultiEntityClientSearch(ADResultStatus.EXCEPTION, CardinalityStatus.NORMAL); + setUpProfileAction(); + + final CountDownLatch inProgressLatch = new CountDownLatch(1); + + runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + assertTrue("Should not reach here ", false); + inProgressLatch.countDown(); + }, exception -> { + assertTrue(exception instanceof RuntimeException); + // this means we don't exit with failImmediately. 
failImmediately can make we return early when there are other concurrent + // requests + assertTrue(exception.getMessage(), exception.getMessage().contains("Exceptions:")); + inProgressLatch.countDown(); + + }), totalInitProgress); + + assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); + } +} From 7df38a144dd9554d2d9fa519397e941daeedd780 Mon Sep 17 00:00:00 2001 From: Yaliang <49084640+ylwu-amzn@users.noreply.github.com> Date: Mon, 28 Dec 2020 13:11:04 -0800 Subject: [PATCH 11/13] filter out exceptions which should not be counted in failure stats (#341) * filter out exceptions which should not be counted in failure stats * add invalid query tag so we can show error message on frontend --- .../ad/NodeStateManager.java | 5 +- .../exception/AnomalyDetectionException.java | 33 +++- .../ad/common/exception/ClientException.java | 7 +- .../ad/common/exception/EndRunException.java | 5 + .../exception/LimitExceededException.java | 2 + .../exception/ResourceNotFoundException.java | 1 + .../ad/feature/SearchFeatureDao.java | 5 +- .../IndexAnomalyDetectorJobActionHandler.java | 10 ++ .../AnomalyResultTransportAction.java | 46 ++++- .../ad/ADIntegTestCase.java | 103 ++++++++++++ .../ad/AnomalyDetectorRestTestCase.java | 7 +- .../ad/TestHelpers.java | 38 ++++- .../ad/feature/SearchFeatureDaoTests.java | 2 +- .../ad/rest/AnomalyDetectorRestApiIT.java | 7 + .../AnomalyResultTransportActionTests.java | 158 ++++++++++++++++++ ...tsAnomalyDetectorTransportActionTests.java | 78 ++------- 16 files changed, 434 insertions(+), 73 deletions(-) create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/ADIntegTestCase.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/NodeStateManager.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/NodeStateManager.java index 3111db83..6f870d5e 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/NodeStateManager.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/NodeStateManager.java @@ -159,7 +159,10 @@ private ActionListener onGetDetectorResponse(String adID, ActionLis AnomalyDetector detector = AnomalyDetector.parse(parser, response.getId()); // end execution if all features are disabled if (detector.getEnabledFeatureIds().isEmpty()) { - listener.onFailure(new EndRunException(adID, CommonErrorMessages.ALL_FEATURES_DISABLED_ERR_MSG, true)); + listener + .onFailure( + new EndRunException(adID, CommonErrorMessages.ALL_FEATURES_DISABLED_ERR_MSG, true).countedInStats(false) + ); return; } NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/AnomalyDetectionException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/AnomalyDetectionException.java index 2f06ff34..a7f1b15e 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/AnomalyDetectionException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/AnomalyDetectionException.java @@ -20,7 +20,14 @@ */ public class AnomalyDetectionException extends RuntimeException { - private final String anomalyDetectorId; + private String anomalyDetectorId; + // countedInStats will be used to tell whether the exception should be + // counted in failure stats. 
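/*
 * Hypothetical usage sketch (not from this patch): a stats-recording path can consult the
 * isCountedInStats() accessor added below before bumping a failure counter, so exceptions
 * that opt out via countedInStats(false), such as LimitExceededException and
 * ResourceNotFoundException in this change, no longer inflate failure stats. The class and
 * field names here are assumptions, not the plugin's real stats API.
 */
final class FailureStatsRecorderSketch {
    private long executeFailureCount = 0;

    void record(AnomalyDetectionException e) {
        if (e.isCountedInStats()) {
            executeFailureCount++; // only exceptions that opt in are counted
        }
    }

    long executeFailureCount() {
        return executeFailureCount;
    }
}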
+ private boolean countedInStats = true; + + public AnomalyDetectionException(String message) { + super(message); + } /** * Constructor with an anomaly detector ID and a message. @@ -38,6 +45,10 @@ public AnomalyDetectionException(String adID, String message, Throwable cause) { this.anomalyDetectorId = adID; } + public AnomalyDetectionException(Throwable cause) { + super(cause); + } + public AnomalyDetectionException(String adID, Throwable cause) { super(cause); this.anomalyDetectorId = adID; @@ -52,6 +63,26 @@ public String getAnomalyDetectorId() { return this.anomalyDetectorId; } + /** + * Returns if the exception should be counted in stats. + * + * @return true if should count the exception in stats; otherwise return false + */ + public boolean isCountedInStats() { + return countedInStats; + } + + /** + * Set if the exception should be counted in stats. + * + * @param countInStats count the exception in stats + * @return the exception itself + */ + public AnomalyDetectionException countedInStats(boolean countInStats) { + this.countedInStats = countInStats; + return this; + } + @Override public String toString() { StringBuilder sb = new StringBuilder(); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ClientException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ClientException.java index 4faa2d66..e50c0dbc 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ClientException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ClientException.java @@ -16,11 +16,14 @@ package com.amazon.opendistroforelasticsearch.ad.common.exception; /** - * All exception visible to AD transport layer's client is under ClientVisible. - * + * All exception visible to AD transport layer's client is under ClientException. 
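 * In this package EndRunException extends ClientException, and LimitExceededException in turn
 * extends EndRunException, so both inherit the countedInStats flag added above.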
*/ public class ClientException extends AnomalyDetectionException { + public ClientException(String message) { + super(message); + } + public ClientException(String anomalyDetectorId, String message) { super(anomalyDetectorId, message); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/EndRunException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/EndRunException.java index 118a6869..452f6746 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/EndRunException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/EndRunException.java @@ -22,6 +22,11 @@ public class EndRunException extends ClientException { private boolean endNow; + public EndRunException(String message, boolean endNow) { + super(message); + this.endNow = endNow; + } + public EndRunException(String anomalyDetectorId, String message, boolean endNow) { super(anomalyDetectorId, message); this.endNow = endNow; diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java index 038133df..7a3f6bcf 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/LimitExceededException.java @@ -28,6 +28,7 @@ public class LimitExceededException extends EndRunException { */ public LimitExceededException(String anomalyDetectorId, String message) { super(anomalyDetectorId, message, true); + this.countedInStats(false); } /** @@ -48,5 +49,6 @@ public LimitExceededException(String message) { */ public LimitExceededException(String anomalyDetectorId, String message, boolean stopNow) { super(anomalyDetectorId, message, stopNow); + this.countedInStats(false); } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ResourceNotFoundException.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ResourceNotFoundException.java index e69f81fd..2f23d452 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ResourceNotFoundException.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/common/exception/ResourceNotFoundException.java @@ -28,5 +28,6 @@ public class ResourceNotFoundException extends AnomalyDetectionException { */ public ResourceNotFoundException(String detectorId, String message) { super(detectorId, message); + countedInStats(false); } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java index bb9ba881..48868319 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java @@ -313,7 +313,9 @@ private double parseAggregation(Aggregation aggregation) { result = percentile.next().getValue(); } } - return Optional.ofNullable(result).orElseThrow(() -> new IllegalStateException("Failed to parse aggregation " + aggregation)); + return Optional + .ofNullable(result) + .orElseThrow(() -> new EndRunException("Failed to parse aggregation " + aggregation, true).countedInStats(false)); } /** @@ -812,6 +814,7 @@ public void getFeaturesByEntities( ); } catch (Exception e) { + // TODO: catch 
concrete exception and check if they should be counted in stats or not throw new EndRunException(detector.getDetectorId(), CommonErrorMessages.INVALID_SEARCH_QUERY_MSG, e, false); } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java index 27953cc7..f547c61a 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java @@ -162,6 +162,16 @@ private void onGetAnomalyDetectorResponse(GetResponse response) throws IOExcepti ); return; } + if (detector.getEnabledFeatureIds().size() == 0) { + listener + .onFailure( + new ElasticsearchStatusException( + "Can't start detector job as no enabled features configured", + RestStatus.BAD_REQUEST + ) + ); + return; + } IntervalTimeConfiguration interval = (IntervalTimeConfiguration) detector.getDetectionInterval(); Schedule schedule = new IntervalSchedule(Instant.now(), (int) interval.getInterval(), interval.getUnit()); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java index 409dcdcd..debdec2e 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java @@ -15,6 +15,8 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import static com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages.INVALID_SEARCH_QUERY_MSG; + import java.net.ConnectException; import java.util.ArrayList; import java.util.HashSet; @@ -37,6 +39,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; @@ -55,6 +59,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; @@ -108,6 +113,9 @@ public class AnomalyResultTransportAction extends HandledTransportAction { - adStats.getStat(StatNames.AD_EXECUTE_FAIL_COUNT.getName()).increment(); - if (hcDetectors.contains(adID)) { - adStats.getStat(StatNames.AD_HC_EXECUTE_FAIL_COUNT.getName()).increment(); + // If exception is AnomalyDetectionException and it should not be counted in stats, + // we will not count it in failure stats. 
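// (LimitExceededException and ResourceNotFoundException call countedInStats(false) in their
// constructors, so failures of those types skip the AD_EXECUTE_FAIL_COUNT and
// AD_HC_EXECUTE_FAIL_COUNT increments below.)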
+ if (!(e instanceof AnomalyDetectionException) || ((AnomalyDetectionException) e).isCountedInStats()) { + adStats.getStat(StatNames.AD_EXECUTE_FAIL_COUNT.getName()).increment(); + if (hcDetectors.contains(adID)) { + adStats.getStat(StatNames.AD_HC_EXECUTE_FAIL_COUNT.getName()).increment(); + } } hcDetectors.remove(adID); original.onFailure(e); }); if (!EnabledSetting.isADPluginEnabled()) { - throw new EndRunException(adID, CommonErrorMessages.DISABLED_ERR_MSG, true); + throw new EndRunException(adID, CommonErrorMessages.DISABLED_ERR_MSG, true).countedInStats(false); } adStats.getStat(StatNames.AD_EXECUTE_REQUEST_COUNT.getName()).increment(); @@ -501,7 +513,7 @@ private ActionListener onFeatureResponse( private void handleFailure(Exception exception, ActionListener listener, String adID) { if (exception instanceof IndexNotFoundException) { - listener.onFailure(new EndRunException(adID, TROUBLE_QUERYING_ERR_MSG + exception.getMessage(), true)); + listener.onFailure(new EndRunException(adID, TROUBLE_QUERYING_ERR_MSG + exception.getMessage(), true).countedInStats(false)); } else if (exception instanceof EndRunException) { // invalid feature query listener.onFailure(exception); @@ -598,12 +610,36 @@ void handleExecuteException(Exception ex, ActionListener listener.onFailure(ex); } else if (ex instanceof AnomalyDetectionException) { listener.onFailure(new InternalFailure((AnomalyDetectionException) ex)); + } else if (ex instanceof SearchPhaseExecutionException && invalidQuery((SearchPhaseExecutionException) ex)) { + // This is to catch invalid aggregation on wrong field type. For example, + // sum aggregation on text field. We should end detector run for such case. + listener + .onFailure( + new EndRunException( + adID, + INVALID_SEARCH_QUERY_MSG + ((SearchPhaseExecutionException) ex).getDetailedMessage(), + ex, + true + ).countedInStats(false) + ); } else { Throwable cause = ExceptionsHelper.unwrapCause(ex); listener.onFailure(new InternalFailure(adID, cause)); } } + private boolean invalidQuery(SearchPhaseExecutionException ex) { + boolean invalidQuery = true; + // If all shards return bad request and failure cause is IllegalArgumentException, we + // consider the feature query is invalid and will not count the error in failure stats. + for (ShardSearchFailure failure : ex.shardFailures()) { + if (RestStatus.BAD_REQUEST != failure.status() || !(failure.getCause() instanceof IllegalArgumentException)) { + invalidQuery = false; + } + } + return invalidQuery; + } + class RCFActionListener implements ActionListener { private List rcfResults; private String modelID; diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/ADIntegTestCase.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/ADIntegTestCase.java new file mode 100644 index 00000000..e54c05c1 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/ADIntegTestCase.java @@ -0,0 +1,103 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad; + +import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.XCONTENT_WITH_TYPE; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; + +public abstract class ADIntegTestCase extends ESIntegTestCase { + + private long timeout = 5_000; + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(AnomalyDetectorPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return Collections.singletonList(AnomalyDetectorPlugin.class); + } + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + } + + public void createDetectors(List detectors, boolean createIndexFirst) throws IOException { + if (createIndexFirst) { + createIndex(AnomalyDetector.ANOMALY_DETECTORS_INDEX, AnomalyDetectionIndices.getAnomalyDetectorMappings()); + } + + for (AnomalyDetector detector : detectors) { + indexDoc(AnomalyDetector.ANOMALY_DETECTORS_INDEX, detector.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE)); + } + } + + public void createDetectorIndex() throws IOException { + createIndex(AnomalyDetector.ANOMALY_DETECTORS_INDEX, AnomalyDetectionIndices.getAnomalyDetectorMappings()); + } + + public String createDetectors(AnomalyDetector detector) throws IOException { + return indexDoc(AnomalyDetector.ANOMALY_DETECTORS_INDEX, detector.toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE)); + } + + public void createIndex(String indexName, String mappings) { + CreateIndexResponse createIndexResponse = TestHelpers.createIndex(admin(), indexName, mappings); + assertEquals(true, createIndexResponse.isAcknowledged()); + } + + public String indexDoc(String indexName, XContentBuilder source) { + IndexRequest indexRequest = new IndexRequest(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).source(source); + IndexResponse indexResponse = client().index(indexRequest).actionGet(timeout); + assertEquals(RestStatus.CREATED, indexResponse.status()); + return indexResponse.getId(); + } + + public String indexDoc(String indexName, Map source) { + IndexRequest indexRequest = new IndexRequest(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).source(source); + IndexResponse indexResponse = client().index(indexRequest).actionGet(timeout); + assertEquals(RestStatus.CREATED, indexResponse.status()); + return indexResponse.getId(); + } + + public GetResponse getDoc(String indexName, String id) { + GetRequest getRequest = new GetRequest(indexName).id(id); + return client().get(getRequest).actionGet(timeout); + } +} diff --git 
a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java index c76a20b1..5fb32391 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorRestTestCase.java @@ -64,11 +64,16 @@ protected Settings restClientSettings() { } protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean withMetadata, RestClient client) throws IOException { + return createRandomAnomalyDetector(refresh, withMetadata, client, true); + } + + protected AnomalyDetector createRandomAnomalyDetector(Boolean refresh, Boolean withMetadata, RestClient client, boolean featureEnabled) + throws IOException { Map uiMetadata = null; if (withMetadata) { uiMetadata = TestHelpers.randomUiMetadata(); } - AnomalyDetector detector = TestHelpers.randomAnomalyDetector(uiMetadata, null); + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(uiMetadata, null, featureEnabled); String indexName = detector.getIndices().get(0); TestHelpers .makeRequest( diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index a05b8329..18a5c3f0 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -245,6 +245,26 @@ public static AnomalyDetector randomAnomalyDetector( String detectorType, DetectionDateRange dateRange, boolean withUser + ) throws IOException { + return randomAnomalyDetector( + ImmutableList.of(randomAlphaOfLength(10).toLowerCase()), + features, + uiMetadata, + lastUpdateTime, + detectorType, + dateRange, + withUser + ); + } + + public static AnomalyDetector randomAnomalyDetector( + List indices, + List features, + Map uiMetadata, + Instant lastUpdateTime, + String detectorType, + DetectionDateRange dateRange, + boolean withUser ) throws IOException { User user = withUser ? 
randomUser() : null; return new AnomalyDetector( @@ -253,7 +273,7 @@ public static AnomalyDetector randomAnomalyDetector( randomAlphaOfLength(20), randomAlphaOfLength(30), randomAlphaOfLength(5), - ImmutableList.of(randomAlphaOfLength(10).toLowerCase()), + indices, features, randomQuery(), randomIntervalTimeConfiguration(), @@ -398,6 +418,22 @@ public static AggregationBuilder randomAggregation(String aggregationName) throw return parsed.getAggregatorFactories().iterator().next(); } + /** + * Parse string aggregation query into {@link AggregationBuilder} + * Sample input: + * "{\"test\":{\"value_count\":{\"field\":\"ok\"}}}" + * + * @param aggregationQuery aggregation builder + * @return aggregation builder + * @throws IOException IO exception + */ + public static AggregationBuilder parseAggregation(String aggregationQuery) throws IOException { + XContentParser parser = parser(aggregationQuery); + + AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser); + return parsed.getAggregatorFactories().iterator().next(); + } + public static Map randomUiMetadata() { return ImmutableMap.of(randomAlphaOfLength(5), randomFeature()); } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java index d7219510..2374e8ee 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDaoTests.java @@ -418,7 +418,7 @@ private Object[] getFeaturesForPeriodThrowIllegalStateData() { new Object[] { asList(multiBucket), asList(aggName), null }, }; } - @Test(expected = IllegalStateException.class) + @Test(expected = EndRunException.class) @Parameters(method = "getFeaturesForPeriodThrowIllegalStateData") public void getFeaturesForPeriod_throwIllegalState_forUnknownAggregation( List aggs, diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java index 736e1975..1bebb3e1 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -1082,6 +1082,13 @@ public void testSearchAnomalyDetectorMatch() throws Exception { assertEquals(nameExists, false); } + public void testRunDetectorWithNoEnabledFeature() throws Exception { + AnomalyDetector detector = createRandomAnomalyDetector(true, true, client(), false); + Assert.assertNotNull(detector.getDetectorId()); + ResponseException e = expectThrows(ResponseException.class, () -> startAnomalyDetector(detector.getDetectorId(), client())); + assertTrue(e.getMessage().contains("Can't start detector job as no enabled features configured")); + } + public void testDeleteAnomalyDetectorWhileRunning() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); Assert.assertNotNull(detector.getDetectorId()); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java new file mode 100644 index 00000000..c77a7cd6 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java @@ 
-0,0 +1,158 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.transport; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Map; + +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.junit.Before; + +import com.amazon.opendistroforelasticsearch.ad.ADIntegTestCase; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.amazon.opendistroforelasticsearch.ad.common.exception.AnomalyDetectionException; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorType; +import com.amazon.opendistroforelasticsearch.ad.model.Feature; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +public class AnomalyResultTransportActionTests extends ADIntegTestCase { + private String testIndex; + private Instant testDataTimeStamp; + private long start; + private long end; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + testIndex = "test_data"; + testDataTimeStamp = Instant.now(); + start = testDataTimeStamp.minus(10, ChronoUnit.MINUTES).toEpochMilli(); + end = testDataTimeStamp.plus(10, ChronoUnit.MINUTES).toEpochMilli(); + ingestTestData(); + } + + private void ingestTestData() throws IOException { + String mappings = "{\"properties\":{\"timestamp\":{\"type\":\"date\",\"format\":\"strict_date_time||epoch_millis\"}," + + "\"value\":{\"type\":\"double\"}, \"type\":{\"type\":\"keyword\"}," + + "\"is_error\":{\"type\":\"boolean\"}, \"message\":{\"type\":\"text\"}}}"; + createIndex(testIndex, mappings); + double value = randomDouble(); + String type = randomAlphaOfLength(5); + boolean isError = randomBoolean(); + String message = randomAlphaOfLength(10); + String id = indexDoc( + testIndex, + ImmutableMap + .of("timestamp", testDataTimeStamp.toEpochMilli(), "value", value, "type", type, "is_error", isError, "message", message) + ); + GetResponse doc = getDoc(testIndex, id); + Map sourceAsMap = doc.getSourceAsMap(); + assertEquals(testDataTimeStamp.toEpochMilli(), sourceAsMap.get("timestamp")); + assertEquals(value, sourceAsMap.get("value")); + assertEquals(type, sourceAsMap.get("type")); + assertEquals(isError, sourceAsMap.get("is_error")); + assertEquals(message, sourceAsMap.get("message")); + createDetectorIndex(); + } + + public void testFeatureQueryWithTermsAggregation() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"terms\":{\"field\":\"type\"}}}"); + assertErrorMessage(adId, "Failed to parse aggregation"); + } + + public void testFeatureWithSumOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"sum\":{\"field\":\"message\"}}}"); + 
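// "message" is mapped as a text field in ingestTestData, so the shards reject the sum aggregation
// with an IllegalArgumentException and the transport action surfaces it as an EndRunException
// (possibly wrapped for transport) carrying the message asserted below.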
assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithSumOfTypeField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"sum\":{\"field\":\"type\"}}}"); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [sum]"); + } + + public void testFeatureWithMaxOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"max\":{\"field\":\"message\"}}}"); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithMaxOfTypeField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"max\":{\"field\":\"type\"}}}"); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [max]"); + } + + public void testFeatureWithMinOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"min\":{\"field\":\"message\"}}}"); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithMinOfTypeField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"min\":{\"field\":\"type\"}}}"); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [min]"); + } + + public void testFeatureWithAvgOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"avg\":{\"field\":\"message\"}}}"); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithAvgOfTypeField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"avg\":{\"field\":\"type\"}}}"); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [avg]"); + } + + public void testFeatureWithCountOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"value_count\":{\"field\":\"message\"}}}"); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithCardinalityOfTextField() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"cardinality\":{\"field\":\"message\"}}}"); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + private String createDetectorWithFeatureAgg(String aggQuery) throws IOException { + AggregationBuilder aggregationBuilder = TestHelpers.parseAggregation(aggQuery); + Feature feature = new Feature(randomAlphaOfLength(5), randomAlphaOfLength(10), true, aggregationBuilder); + AnomalyDetector detector = TestHelpers + .randomAnomalyDetector( + ImmutableList.of(testIndex), + ImmutableList.of(feature), + ImmutableMap.of(), + Instant.now(), + AnomalyDetectorType.REALTIME_SINGLE_ENTITY.name(), + null, + false + ); + String adId = createDetectors(detector); + return adId; + } + + private void assertErrorMessage(String adId, String errorMessage) { + AnomalyResultRequest resultRequest = new AnomalyResultRequest(adId, start, end); + RuntimeException e = expectThrowsAnyOf( + ImmutableList.of(NotSerializableExceptionWrapper.class, AnomalyDetectionException.class), + () -> client().execute(AnomalyResultAction.INSTANCE, resultRequest).actionGet(30_000) + ); + assertTrue(e.getMessage().contains(errorMessage)); + } +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java 
b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java index 55ffa9e6..8bdc354f 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java @@ -15,84 +15,42 @@ package com.amazon.opendistroforelasticsearch.ad.transport; -import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.XCONTENT_WITH_TYPE; - import java.io.IOException; import java.time.Instant; -import java.util.Collection; -import java.util.Collections; import java.util.Map; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ESIntegTestCase; import org.junit.Before; -import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.ADIntegTestCase; import com.amazon.opendistroforelasticsearch.ad.TestHelpers; -import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; -import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorType; import com.amazon.opendistroforelasticsearch.ad.stats.InternalStatNames; import com.amazon.opendistroforelasticsearch.ad.stats.StatNames; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -public class StatsAnomalyDetectorTransportActionTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singletonList(AnomalyDetectorPlugin.class); - } - - @Override - protected Collection> transportClientPlugins() { - return Collections.singletonList(AnomalyDetectorPlugin.class); - } +public class StatsAnomalyDetectorTransportActionTests extends ADIntegTestCase { @Override @Before public void setUp() throws Exception { super.setUp(); - createTestDetector(); - } - - private void createTestDetector() throws IOException { - CreateIndexResponse createIndexResponse = TestHelpers - .createIndex(admin(), AnomalyDetector.ANOMALY_DETECTORS_INDEX, AnomalyDetectionIndices.getAnomalyDetectorMappings()); - assertEquals(true, createIndexResponse.isAcknowledged()); - - IndexRequest indexRequest = new IndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source( - TestHelpers - .randomAnomalyDetector(ImmutableMap.of(), Instant.now()) - .toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE) - ); - IndexResponse indexResponse = client().index(indexRequest).actionGet(5_000); - assertEquals(RestStatus.CREATED, indexResponse.status()); - - indexRequest = new IndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source( - TestHelpers - .randomAnomalyDetector( - ImmutableList.of(TestHelpers.randomFeature()), - ImmutableMap.of(), - Instant.now(), - AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), - TestHelpers.randomDetectionDateRange(), - true - ) - .toXContent(XContentFactory.jsonBuilder(), XCONTENT_WITH_TYPE) - ); - indexResponse = 
client().index(indexRequest).actionGet(5_000); - assertEquals(RestStatus.CREATED, indexResponse.status()); + createDetectors( + ImmutableList + .of( + TestHelpers.randomAnomalyDetector(ImmutableMap.of(), Instant.now()), + TestHelpers + .randomAnomalyDetector( + ImmutableList.of(TestHelpers.randomFeature()), + ImmutableMap.of(), + Instant.now(), + AnomalyDetectorType.HISTORICAL_SINGLE_ENTITY.name(), + TestHelpers.randomDetectionDateRange(), + true + ) + ), + true + ); } public void testStatsAnomalyDetectorWithNodeLevelStats() { From bb79a0f9e005d5b6d102a43da36d77aec4f7fe32 Mon Sep 17 00:00:00 2001 From: Yaliang <49084640+ylwu-amzn@users.noreply.github.com> Date: Tue, 29 Dec 2020 15:39:50 -0800 Subject: [PATCH 12/13] add IT cases for filtering out non-server exceptions for HC detector (#348) --- .../AnomalyResultTransportAction.java | 5 +- .../ad/TestHelpers.java | 4 + .../AnomalyResultTransportActionTests.java | 134 ++++++++++++++++-- 3 files changed, 125 insertions(+), 18 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java index debdec2e..cc821110 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java @@ -629,15 +629,14 @@ void handleExecuteException(Exception ex, ActionListener } private boolean invalidQuery(SearchPhaseExecutionException ex) { - boolean invalidQuery = true; // If all shards return bad request and failure cause is IllegalArgumentException, we // consider the feature query is invalid and will not count the error in failure stats. 
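// Returning false on the first shard failure that does not match lets us stop early;
// the previous version kept a local boolean and always walked the entire failure array.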
for (ShardSearchFailure failure : ex.shardFailures()) { if (RestStatus.BAD_REQUEST != failure.status() || !(failure.getCause() instanceof IllegalArgumentException)) { - invalidQuery = false; + return false; } } - return invalidQuery; + return true; } class RCFActionListener implements ActionListener { diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java index 18a5c3f0..8b958768 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/TestHelpers.java @@ -403,6 +403,10 @@ public static QueryBuilder randomQuery() throws IOException { String query = "{\"bool\":{\"must\":{\"term\":{\"user\":\"kimchy\"}},\"filter\":{\"term\":{\"tag\":" + "\"tech\"}},\"must_not\":{\"range\":{\"age\":{\"gte\":10,\"lte\":20}}},\"should\":[{\"term\":" + "{\"tag\":\"wow\"}},{\"term\":{\"tag\":\"elasticsearch\"}}],\"minimum_should_match\":1,\"boost\":1}}"; + return randomQuery(query); + } + + public static QueryBuilder randomQuery(String query) throws IOException { XContentParser parser = TestHelpers.parser(query); return parseInnerQueryBuilder(parser); } diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java index c77a7cd6..63d2355a 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportActionTests.java @@ -15,22 +15,26 @@ package com.amazon.opendistroforelasticsearch.ad.transport; +import static com.amazon.opendistroforelasticsearch.ad.TestHelpers.randomQuery; + import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.List; import java.util.Map; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.Before; import com.amazon.opendistroforelasticsearch.ad.ADIntegTestCase; import com.amazon.opendistroforelasticsearch.ad.TestHelpers; import com.amazon.opendistroforelasticsearch.ad.common.exception.AnomalyDetectionException; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; -import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorType; import com.amazon.opendistroforelasticsearch.ad.model.Feature; +import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -39,6 +43,8 @@ public class AnomalyResultTransportActionTests extends ADIntegTestCase { private Instant testDataTimeStamp; private long start; private long end; + private String timeField = "timestamp"; + private String categoryField = "type"; @Override @Before @@ -52,8 +58,12 @@ public void setUp() throws Exception { } private void ingestTestData() throws IOException { - String mappings = "{\"properties\":{\"timestamp\":{\"type\":\"date\",\"format\":\"strict_date_time||epoch_millis\"}," - + "\"value\":{\"type\":\"double\"}, \"type\":{\"type\":\"keyword\"}," + String mappings = "{\"properties\":{\"" + + timeField + + 
"\":{\"type\":\"date\",\"format\":\"strict_date_time||epoch_millis\"}," + + "\"value\":{\"type\":\"double\"}, \"" + + categoryField + + "\":{\"type\":\"keyword\"}," + "\"is_error\":{\"type\":\"boolean\"}, \"message\":{\"type\":\"text\"}}}"; createIndex(testIndex, mappings); double value = randomDouble(); @@ -63,11 +73,11 @@ private void ingestTestData() throws IOException { String id = indexDoc( testIndex, ImmutableMap - .of("timestamp", testDataTimeStamp.toEpochMilli(), "value", value, "type", type, "is_error", isError, "message", message) + .of(timeField, testDataTimeStamp.toEpochMilli(), "value", value, "type", type, "is_error", isError, "message", message) ); GetResponse doc = getDoc(testIndex, id); Map sourceAsMap = doc.getSourceAsMap(); - assertEquals(testDataTimeStamp.toEpochMilli(), sourceAsMap.get("timestamp")); + assertEquals(testDataTimeStamp.toEpochMilli(), sourceAsMap.get(timeField)); assertEquals(value, sourceAsMap.get("value")); assertEquals(type, sourceAsMap.get("type")); assertEquals(isError, sourceAsMap.get("is_error")); @@ -130,23 +140,117 @@ public void testFeatureWithCardinalityOfTextField() throws IOException { assertErrorMessage(adId, "Text fields are not optimised for operations"); } + public void testFeatureQueryWithTermsAggregationForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"terms\":{\"field\":\"type\"}}}", true); + assertErrorMessage(adId, "Failed to parse aggregation"); + } + + public void testFeatureWithSumOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"sum\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithSumOfTypeFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"sum\":{\"field\":\"type\"}}}", true); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [sum]"); + } + + public void testFeatureWithMaxOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"max\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithMaxOfTypeFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"max\":{\"field\":\"type\"}}}", true); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [max]"); + } + + public void testFeatureWithMinOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"min\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithMinOfTypeFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"min\":{\"field\":\"type\"}}}", true); + assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [min]"); + } + + public void testFeatureWithAvgOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"avg\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithAvgOfTypeFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"avg\":{\"field\":\"type\"}}}", true); 
+ assertErrorMessage(adId, "Field [type] of type [keyword] is not supported for aggregation [avg]"); + } + + public void testFeatureWithCountOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"value_count\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + + public void testFeatureWithCardinalityOfTextFieldForHCDetector() throws IOException { + String adId = createDetectorWithFeatureAgg("{\"test\":{\"cardinality\":{\"field\":\"message\"}}}", true); + assertErrorMessage(adId, "Text fields are not optimised for operations"); + } + private String createDetectorWithFeatureAgg(String aggQuery) throws IOException { + return createDetectorWithFeatureAgg(aggQuery, false); + } + + private String createDetectorWithFeatureAgg(String aggQuery, boolean hcDetector) throws IOException { AggregationBuilder aggregationBuilder = TestHelpers.parseAggregation(aggQuery); Feature feature = new Feature(randomAlphaOfLength(5), randomAlphaOfLength(10), true, aggregationBuilder); - AnomalyDetector detector = TestHelpers - .randomAnomalyDetector( - ImmutableList.of(testIndex), - ImmutableList.of(feature), - ImmutableMap.of(), - Instant.now(), - AnomalyDetectorType.REALTIME_SINGLE_ENTITY.name(), - null, - false - ); + AnomalyDetector detector = hcDetector + ? randomHCDetector(ImmutableList.of(testIndex), ImmutableList.of(feature)) + : randomDetector(ImmutableList.of(testIndex), ImmutableList.of(feature)); String adId = createDetectors(detector); return adId; } + private AnomalyDetector randomDetector(List indices, List features) throws IOException { + return new AnomalyDetector( + randomAlphaOfLength(10), + randomLong(), + randomAlphaOfLength(20), + randomAlphaOfLength(30), + timeField, + indices, + features, + randomQuery("{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"value\"}}]}}"), + new IntervalTimeConfiguration(ESRestTestCase.randomLongBetween(1, 5), ChronoUnit.MINUTES), + new IntervalTimeConfiguration(ESRestTestCase.randomLongBetween(1, 5), ChronoUnit.MINUTES), + 8, + null, + randomInt(), + Instant.now(), + null, + null + ); + } + + private AnomalyDetector randomHCDetector(List indices, List features) throws IOException { + return new AnomalyDetector( + randomAlphaOfLength(10), + randomLong(), + randomAlphaOfLength(20), + randomAlphaOfLength(30), + timeField, + indices, + features, + randomQuery("{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"value\"}}]}}"), + new IntervalTimeConfiguration(ESRestTestCase.randomLongBetween(1, 5), ChronoUnit.MINUTES), + new IntervalTimeConfiguration(ESRestTestCase.randomLongBetween(1, 5), ChronoUnit.MINUTES), + 8, + null, + randomInt(), + Instant.now(), + ImmutableList.of(categoryField), + null + ); + } + private void assertErrorMessage(String adId, String errorMessage) { AnomalyResultRequest resultRequest = new AnomalyResultRequest(adId, start, end); RuntimeException e = expectThrowsAnyOf( From 21418a4a7f1b5553508c4031da89f288d41283a3 Mon Sep 17 00:00:00 2001 From: Kaituo Li Date: Wed, 30 Dec 2020 17:15:54 -0800 Subject: [PATCH 13/13] Fix another case of the profile API returns prematurely (#353) Testing done: 1. Manually verified the issue has been fixed. 2. Reproduced the issue using an unit test and verified fix using the test. 
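The fix below moves the listener.onResponse call in onInittedEver into the not-yet-initted branch,
since processInitResponse, which is handed the same listener, is responsible for responding on the
initted path; responding unconditionally after the branch could notify the caller early and more
than once. A rough structural sketch of the corrected flow (the enclosing method and the branch
condition are elided because they are outside this hunk, so this is illustrative rather than
compilable):

    if (/* detector has received enough anomaly results, i.e. initted */) {
        // processInitResponse takes the listener and completes it on this path
        processInitResponse(detector, profilesToCollect, totalUpdates, false, profileBuilder, listener);
    } else {
        createRunningStateAndInitProgress(profilesToCollect, profileBuilder);
        listener.onResponse(profileBuilder.build());
    }
    // no onResponse call after the branch: the listener must be completed exactly once,
    // which the new testNoResultsNoError verifies with its AtomicInteger call counter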
--- .../ad/AnomalyDetectorProfileRunner.java | 4 +-- .../metrics/CardinalityProfileTests.java | 26 ++++++++++++++++++- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java index 535deb68..264dce21 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorProfileRunner.java @@ -188,7 +188,6 @@ private void prepareProfile( CommonErrorMessages.FAIL_FETCH_ERR_MSG + detectorId, false ); - if (profilesToCollect.contains(DetectorProfileName.ERROR)) { GetRequest getStateRequest = new GetRequest(DetectorInternalState.DETECTOR_STATE_INDEX, detectorId); client.get(getStateRequest, onGetDetectorState(delegateListener, detectorId, enabledTimeMs)); @@ -459,8 +458,8 @@ private ActionListener onInittedEver( processInitResponse(detector, profilesToCollect, totalUpdates, false, profileBuilder, listener); } else { createRunningStateAndInitProgress(profilesToCollect, profileBuilder); + listener.onResponse(profileBuilder.build()); } - listener.onResponse(profileBuilder.build()); }, exception -> { if (exception instanceof IndexNotFoundException) { // anomaly result index is not created yet @@ -554,7 +553,6 @@ private void processInitResponse( } else { long intervalMins = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().toMinutes(); InitProgressProfile initProgress = computeInitProgressProfile(totalUpdates, intervalMins); - builder.initProgress(initProgress); } } diff --git a/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java b/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java index 0be67df3..10d7e463 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityProfileTests.java @@ -33,6 +33,7 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; @@ -111,6 +112,7 @@ private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus } else if (request.index().equals(DetectorInternalState.DETECTOR_STATE_INDEX)) { switch (errorResultStatus) { case NO_ERROR: + listener.onResponse(null); break; case NULL_POINTER_EXCEPTION: GetResponse response = mock(GetResponse.class); @@ -237,7 +239,29 @@ public void testFailGetState() throws IOException, InterruptedException { assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); } - public void testFaiConfirmInitted() throws IOException, InterruptedException { + public void testNoResultsNoError() throws IOException, InterruptedException { + setUpMultiEntityClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, ErrorResultStatus.NO_ERROR); + setUpMultiEntityClientSearch(ADResultStatus.NO_RESULT, CardinalityStatus.NORMAL); + setUpProfileAction(); + + final AtomicInteger called = new AtomicInteger(0); + + runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + assertTrue(response.getInitProgress() != null); + called.getAndIncrement(); + }, exception -> { + assertTrue("Should not reach here ", false); + called.getAndIncrement(); + 
}), totalInitProgress); + + while (called.get() == 0) { + Thread.sleep(100); + } + // should only call onResponse once + assertEquals(1, called.get()); + } + + public void testFailConfirmInitted() throws IOException, InterruptedException { setUpMultiEntityClientGet(DetectorStatus.EXIST, JobStatus.ENABLED, ErrorResultStatus.NO_ERROR); setUpMultiEntityClientSearch(ADResultStatus.EXCEPTION, CardinalityStatus.NORMAL); setUpProfileAction();