From 57e619be071cda6c086544bcb8ebabb1c3a30ebb Mon Sep 17 00:00:00 2001
From: Byron Hulcher
Date: Thu, 11 Feb 2021 08:38:30 -0500
Subject: [PATCH 01/72] [App Search] Migrate Create Engine view (#89816)
* New CreateEngine view component
* Add CreateEngine to index router
* Add Layout-level components for CreateEngine
* Static create engine view
* Add new POST route for engines API endpoint
* Logic for Create Engine view WIP tests failing
* Fix enterpriseSearchRequestHandler path
* Use setQueuedSuccessMessage after engine has been created
* Use exact path for CREATE_ENGINES_PATH (but EngineRouter logic is still firing??)
* Add TODO note
* Put CreateEngine inside the common App Search Layout
* Fix CreateEngineLogic jest tests
* Move create engine view to /create_engine from /engines/new
* Add Create an Engine button to Engines Overview
* Missing FlashMessages on EngineOverview
* Fix test for CreateEngine route
* Fix strong'd text in sanitized name note
* Use local constant for Supported Languages
* Disable submit button when name is empty
* Bad conflict fix
* Lint nits
* Improve CreateEngineLogic tests
* Improve EngineOverview tests
* Disable EnginesOverview header responsiveness
* Moving CreateEngine route
* create_engine/CreateEngine -> engine_creation/EngineCreation
* Use static values for tests
* Fixing constants, better casing, better ID names, i18ning dropdown labels
* Removing unused imports
* Fix EngineCreation tests
* Fix Engines EmptyState tests
* Fix EnginesOverview tests
* Lint fixes
* Reset mocks after tests
* Update MockRouter properties
* Revert newline change
* Lint fix
---
.../components/engine_creation/constants.ts | 215 ++++++++++++++++++
.../engine_creation/engine_creation.test.tsx | 119 ++++++++++
.../engine_creation/engine_creation.tsx | 130 +++++++++++
.../engine_creation_logic.test.ts | 122 ++++++++++
.../engine_creation/engine_creation_logic.ts | 81 +++++++
.../components/engine_creation/index.ts | 8 +
.../engine_overview/engine_overview_empty.tsx | 2 +
.../engine_overview_metrics.tsx | 3 +
.../engines/components/empty_state.test.tsx | 30 ++-
.../engines/components/empty_state.tsx | 31 +--
.../components/engines/constants.ts | 7 +
.../engines/engines_overview.test.tsx | 8 +
.../components/engines/engines_overview.tsx | 29 ++-
.../applications/app_search/index.test.tsx | 19 +-
.../public/applications/app_search/index.tsx | 12 +-
.../public/applications/app_search/routes.ts | 2 +-
.../server/routes/app_search/engines.test.ts | 41 ++++
.../server/routes/app_search/engines.ts | 15 ++
18 files changed, 839 insertions(+), 35 deletions(-)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/constants.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/index.ts
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/constants.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/constants.ts
new file mode 100644
index 0000000000000..0647eeba78786
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/constants.ts
@@ -0,0 +1,215 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const DEFAULT_LANGUAGE = 'Universal';
+
+export const ENGINE_CREATION_TITLE = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.title',
+ {
+ defaultMessage: 'Create an engine',
+ }
+);
+
+export const ENGINE_CREATION_FORM_TITLE = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.title',
+ {
+ defaultMessage: 'Name your engine',
+ }
+);
+
+export const ENGINE_CREATION_FORM_ENGINE_NAME_LABEL = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.engineName.label',
+ {
+ defaultMessage: 'Engine name',
+ }
+);
+
+export const ALLOWED_CHARS_NOTE = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.engineName.allowedCharactersHelpText',
+ {
+ defaultMessage: 'Engine names can only contain lowercase letters, numbers, and hyphens',
+ }
+);
+
+export const SANITIZED_NAME_NOTE = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.engineName.sanitizedNameHelpText',
+ {
+ defaultMessage: 'Your engine will be named',
+ }
+);
+
+export const ENGINE_CREATION_FORM_ENGINE_NAME_PLACEHOLDER = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.engineName.placeholder',
+ {
+ defaultMessage: 'i.e., my-search-engine',
+ }
+);
+
+export const ENGINE_CREATION_FORM_ENGINE_LANGUAGE_LABEL = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.engineLanguage.label',
+ {
+ defaultMessage: 'Engine language',
+ }
+);
+
+export const ENGINE_CREATION_FORM_SUBMIT_BUTTON_LABEL = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.form.submitButton.buttonLabel',
+ {
+ defaultMessage: 'Create engine',
+ }
+);
+
+export const ENGINE_CREATION_SUCCESS_MESSAGE = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.successMessage',
+ {
+ defaultMessage: 'Successfully created engine.',
+ }
+);
+
+export const SUPPORTED_LANGUAGES = [
+ {
+ value: 'Universal',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.universalDropDownOptionLabel',
+ {
+ defaultMessage: 'Universal',
+ }
+ ),
+ },
+ {
+ text: '—',
+ disabled: true,
+ },
+ {
+ value: 'zh',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.chineseDropDownOptionLabel',
+ {
+ defaultMessage: 'Chinese',
+ }
+ ),
+ },
+ {
+ value: 'da',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.danishDropDownOptionLabel',
+ {
+ defaultMessage: 'Danish',
+ }
+ ),
+ },
+ {
+ value: 'nl',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.dutchDropDownOptionLabel',
+ {
+ defaultMessage: 'Dutch',
+ }
+ ),
+ },
+ {
+ value: 'en',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.englishDropDownOptionLabel',
+ {
+ defaultMessage: 'English',
+ }
+ ),
+ },
+ {
+ value: 'fr',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.frenchDropDownOptionLabel',
+ {
+ defaultMessage: 'French',
+ }
+ ),
+ },
+ {
+ value: 'de',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.germanDropDownOptionLabel',
+ {
+ defaultMessage: 'German',
+ }
+ ),
+ },
+ {
+ value: 'it',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.italianDropDownOptionLabel',
+ {
+ defaultMessage: 'Italian',
+ }
+ ),
+ },
+ {
+ value: 'ja',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.japaneseDropDownOptionLabel',
+ {
+ defaultMessage: 'Japanese',
+ }
+ ),
+ },
+ {
+ value: 'ko',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.koreanDropDownOptionLabel',
+ {
+ defaultMessage: 'Korean',
+ }
+ ),
+ },
+ {
+ value: 'pt',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.portugueseDropDownOptionLabel',
+ {
+ defaultMessage: 'Portuguese',
+ }
+ ),
+ },
+ {
+ value: 'pt-br',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.portugueseBrazilDropDownOptionLabel',
+ {
+ defaultMessage: 'Portuguese (Brazil)',
+ }
+ ),
+ },
+ {
+ value: 'ru',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.russianDropDownOptionLabel',
+ {
+ defaultMessage: 'Russian',
+ }
+ ),
+ },
+ {
+ value: 'es',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.spanishDropDownOptionLabel',
+ {
+ defaultMessage: 'Spanish',
+ }
+ ),
+ },
+ {
+ value: 'th',
+ text: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.thaiDropDownOptionLabel',
+ {
+ defaultMessage: 'Thai',
+ }
+ ),
+ },
+];
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.test.tsx
new file mode 100644
index 0000000000000..cf30fac3c5f49
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.test.tsx
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../__mocks__';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EngineCreation } from './';
+
+describe('EngineCreation', () => {
+ const DEFAULT_VALUES = {
+ name: '',
+ rawName: '',
+ language: 'Universal',
+ };
+
+ const MOCK_ACTIONS = {
+ setRawName: jest.fn(),
+ setLanguage: jest.fn(),
+ submitEngine: jest.fn(),
+ };
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockValues(DEFAULT_VALUES);
+ setMockActions(MOCK_ACTIONS);
+ });
+
+ it('renders', () => {
+ const wrapper = shallow();
+ expect(wrapper.find('[data-test-subj="EngineCreation"]')).toHaveLength(1);
+ });
+
+ it('EngineCreationForm calls submitEngine on form submit', () => {
+ const wrapper = shallow();
+ const simulatedEvent = {
+ preventDefault: jest.fn(),
+ };
+ wrapper.find('[data-test-subj="EngineCreationForm"]').simulate('submit', simulatedEvent);
+
+ expect(MOCK_ACTIONS.submitEngine).toHaveBeenCalledTimes(1);
+ });
+
+ it('EngineCreationNameInput calls setRawName on change', () => {
+ const wrapper = shallow();
+ const simulatedEvent = {
+ currentTarget: { value: 'new-raw-name' },
+ };
+ wrapper.find('[data-test-subj="EngineCreationNameInput"]').simulate('change', simulatedEvent);
+
+ expect(MOCK_ACTIONS.setRawName).toHaveBeenCalledWith('new-raw-name');
+ });
+
+ it('EngineCreationLanguageInput calls setLanguage on change', () => {
+ const wrapper = shallow();
+ const simulatedEvent = {
+ currentTarget: { value: 'English' },
+ };
+ wrapper
+ .find('[data-test-subj="EngineCreationLanguageInput"]')
+ .simulate('change', simulatedEvent);
+
+ expect(MOCK_ACTIONS.setLanguage).toHaveBeenCalledWith('English');
+ });
+
+ describe('NewEngineSubmitButton', () => {
+ it('is disabled when name is empty', () => {
+ setMockValues({ ...DEFAULT_VALUES, name: '', rawName: '' });
+ const wrapper = shallow();
+
+ expect(wrapper.find('[data-test-subj="NewEngineSubmitButton"]').prop('disabled')).toEqual(
+ true
+ );
+ });
+
+ it('is enabled when name has a value', () => {
+ setMockValues({ ...DEFAULT_VALUES, name: 'test', rawName: 'test' });
+ const wrapper = shallow();
+
+ expect(wrapper.find('[data-test-subj="NewEngineSubmitButton"]').prop('disabled')).toEqual(
+ false
+ );
+ });
+ });
+
+ describe('EngineCreationNameFormRow', () => {
+ it('renders sanitized name helptext when the raw name is being sanitized', () => {
+ setMockValues({
+ ...DEFAULT_VALUES,
+ name: 'un-sanitized-name',
+ rawName: 'un-----sanitized-------name',
+ });
+ const wrapper = shallow();
+ const formRow = wrapper.find('[data-test-subj="EngineCreationNameFormRow"]').dive();
+
+ expect(formRow.contains('Your engine will be named')).toBeTruthy();
+ });
+
+ it('renders allowed character helptext when rawName and sanitizedName match', () => {
+ setMockValues({
+ ...DEFAULT_VALUES,
+ name: 'pre-sanitized-name',
+ rawName: 'pre-sanitized-name',
+ });
+ const wrapper = shallow();
+ const formRow = wrapper.find('[data-test-subj="EngineCreationNameFormRow"]').dive();
+
+ expect(
+ formRow.contains('Engine names can only contain lowercase letters, numbers, and hyphens')
+ ).toBeTruthy();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.tsx
new file mode 100644
index 0000000000000..497c00d1f9144
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation.tsx
@@ -0,0 +1,130 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiForm,
+ EuiFlexGroup,
+ EuiFormRow,
+ EuiFlexItem,
+ EuiFieldText,
+ EuiSelect,
+ EuiPageBody,
+ EuiPageHeader,
+ EuiPageHeaderSection,
+ EuiSpacer,
+ EuiText,
+ EuiTitle,
+ EuiButton,
+ EuiPanel,
+} from '@elastic/eui';
+
+import { FlashMessages } from '../../../shared/flash_messages';
+import { SetAppSearchChrome as SetPageChrome } from '../../../shared/kibana_chrome';
+
+import {
+ ALLOWED_CHARS_NOTE,
+ ENGINE_CREATION_FORM_ENGINE_LANGUAGE_LABEL,
+ ENGINE_CREATION_FORM_ENGINE_NAME_LABEL,
+ ENGINE_CREATION_FORM_ENGINE_NAME_PLACEHOLDER,
+ ENGINE_CREATION_FORM_SUBMIT_BUTTON_LABEL,
+ ENGINE_CREATION_FORM_TITLE,
+ ENGINE_CREATION_TITLE,
+ SANITIZED_NAME_NOTE,
+ SUPPORTED_LANGUAGES,
+} from './constants';
+import { EngineCreationLogic } from './engine_creation_logic';
+
+export const EngineCreation: React.FC = () => {
+ const { name, rawName, language } = useValues(EngineCreationLogic);
+ const { setLanguage, setRawName, submitEngine } = useActions(EngineCreationLogic);
+
+ return (
+
+
+
+
+
+ {ENGINE_CREATION_TITLE}
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.test.ts
new file mode 100644
index 0000000000000..272e4fb3a25c0
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.test.ts
@@ -0,0 +1,122 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ LogicMounter,
+ mockHttpValues,
+ mockKibanaValues,
+ mockFlashMessageHelpers,
+} from '../../../__mocks__';
+
+import { nextTick } from '@kbn/test/jest';
+
+import { EngineCreationLogic } from './engine_creation_logic';
+
+describe('EngineCreationLogic', () => {
+ const { mount } = new LogicMounter(EngineCreationLogic);
+ const { http } = mockHttpValues;
+ const { navigateToUrl } = mockKibanaValues;
+ const { setQueuedSuccessMessage, flashAPIErrors } = mockFlashMessageHelpers;
+
+ const DEFAULT_VALUES = {
+ name: '',
+ rawName: '',
+ language: 'Universal',
+ };
+
+ it('has expected default values', () => {
+ mount();
+ expect(EngineCreationLogic.values).toEqual(DEFAULT_VALUES);
+ });
+
+ describe('actions', () => {
+ describe('setLanguage', () => {
+ it('sets language to the provided value', () => {
+ mount();
+ EngineCreationLogic.actions.setLanguage('English');
+ expect(EngineCreationLogic.values).toEqual({
+ ...DEFAULT_VALUES,
+ language: 'English',
+ });
+ });
+ });
+
+ describe('setRawName', () => {
+ beforeAll(() => {
+ mount();
+ EngineCreationLogic.actions.setRawName('Name__With#$&*%Special--Characters');
+ });
+
+ afterAll(() => {
+ jest.clearAllMocks();
+ });
+
+ it('should set rawName to provided value', () => {
+ expect(EngineCreationLogic.values.rawName).toEqual('Name__With#$&*%Special--Characters');
+ });
+
+ it('should set name to a sanitized value', () => {
+ expect(EngineCreationLogic.values.name).toEqual('name-with-special-characters');
+ });
+ });
+ });
+
+ describe('listeners', () => {
+ describe('onEngineCreationSuccess', () => {
+ beforeAll(() => {
+ mount({ language: 'English', rawName: 'test' });
+ EngineCreationLogic.actions.onEngineCreationSuccess();
+ });
+
+ afterAll(() => {
+ jest.clearAllMocks();
+ });
+
+ it('should set a success message', () => {
+ expect(setQueuedSuccessMessage).toHaveBeenCalledWith('Successfully created engine.');
+ });
+
+ it('should navigate the user to the engine page', () => {
+ expect(navigateToUrl).toHaveBeenCalledWith('/engines/test');
+ });
+ });
+
+ describe('submitEngine', () => {
+ beforeAll(() => {
+ mount({ language: 'English', rawName: 'test' });
+ });
+
+ afterAll(() => {
+ jest.clearAllMocks();
+ });
+
+ it('POSTS to /api/app_search/engines', () => {
+ const body = JSON.stringify({
+ name: EngineCreationLogic.values.name,
+ language: EngineCreationLogic.values.language,
+ });
+ EngineCreationLogic.actions.submitEngine();
+ expect(http.post).toHaveBeenCalledWith('/api/app_search/engines', { body });
+ });
+
+ it('calls onEngineCreationSuccess on valid submission', async () => {
+ jest.spyOn(EngineCreationLogic.actions, 'onEngineCreationSuccess');
+ http.post.mockReturnValueOnce(Promise.resolve({}));
+ EngineCreationLogic.actions.submitEngine();
+ await nextTick();
+ expect(EngineCreationLogic.actions.onEngineCreationSuccess).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls flashAPIErrors on API Error', async () => {
+ http.post.mockReturnValueOnce(Promise.reject());
+ EngineCreationLogic.actions.submitEngine();
+ await nextTick();
+ expect(flashAPIErrors).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.ts
new file mode 100644
index 0000000000000..6cea32f826e7a
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/engine_creation_logic.ts
@@ -0,0 +1,81 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { generatePath } from 'react-router-dom';
+
+import { kea, MakeLogicType } from 'kea';
+
+import { flashAPIErrors, setQueuedSuccessMessage } from '../../../shared/flash_messages';
+import { HttpLogic } from '../../../shared/http';
+import { KibanaLogic } from '../../../shared/kibana';
+import { ENGINE_PATH } from '../../routes';
+import { formatApiName } from '../../utils/format_api_name';
+
+import { DEFAULT_LANGUAGE, ENGINE_CREATION_SUCCESS_MESSAGE } from './constants';
+
+interface EngineCreationActions {
+ onEngineCreationSuccess(): void;
+ setLanguage(language: string): { language: string };
+ setRawName(rawName: string): { rawName: string };
+ submitEngine(): void;
+}
+
+interface EngineCreationValues {
+ language: string;
+ name: string;
+ rawName: string;
+}
+
+export const EngineCreationLogic = kea>({
+ path: ['enterprise_search', 'app_search', 'engine_creation_logic'],
+ actions: {
+ onEngineCreationSuccess: true,
+ setLanguage: (language) => ({ language }),
+ setRawName: (rawName) => ({ rawName }),
+ submitEngine: true,
+ },
+ reducers: {
+ language: [
+ DEFAULT_LANGUAGE,
+ {
+ setLanguage: (_, { language }) => language,
+ },
+ ],
+ rawName: [
+ '',
+ {
+ setRawName: (_, { rawName }) => rawName,
+ },
+ ],
+ },
+ selectors: ({ selectors }) => ({
+ name: [() => [selectors.rawName], (rawName) => formatApiName(rawName)],
+ }),
+ listeners: ({ values, actions }) => ({
+ submitEngine: async () => {
+ const { http } = HttpLogic.values;
+ const { name, language } = values;
+
+ const body = JSON.stringify({ name, language });
+
+ try {
+ await http.post('/api/app_search/engines', { body });
+ actions.onEngineCreationSuccess();
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ onEngineCreationSuccess: () => {
+ const { name } = values;
+ const { navigateToUrl } = KibanaLogic.values;
+ const enginePath = generatePath(ENGINE_PATH, { engineName: name });
+
+ setQueuedSuccessMessage(ENGINE_CREATION_SUCCESS_MESSAGE);
+ navigateToUrl(enginePath);
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/index.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/index.ts
new file mode 100644
index 0000000000000..a1770cc50ea93
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_creation/index.ts
@@ -0,0 +1,8 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export { EngineCreation } from './engine_creation';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_empty.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_empty.tsx
index 81bf3716edfb8..f505f08a3531a 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_empty.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_empty.tsx
@@ -16,6 +16,7 @@ import {
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
+import { FlashMessages } from '../../../shared/flash_messages';
import { DOCS_PREFIX } from '../../routes';
import { DocumentCreationButtons, DocumentCreationFlyout } from '../document_creation';
@@ -41,6 +42,7 @@ export const EmptyEngineOverview: React.FC = () => {
+
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_metrics.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_metrics.tsx
index 34a154ca83741..c33431354eafc 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_metrics.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview_metrics.tsx
@@ -12,6 +12,8 @@ import { useValues } from 'kea';
import { EuiPageHeader, EuiTitle, EuiSpacer } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
+import { FlashMessages } from '../../../shared/flash_messages';
+
import { UnavailablePrompt, TotalStats, TotalCharts, RecentApiLogs } from './components';
import { EngineOverviewLogic } from './';
@@ -30,6 +32,7 @@ export const EngineOverviewMetrics: React.FC = () => {
+
{apiLogsUnavailable ? (
) : (
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.test.tsx
index ac540eec3ff91..14772375c9bd4 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.test.tsx
@@ -10,9 +10,9 @@ import { mockTelemetryActions } from '../../../../__mocks__';
import React from 'react';
-import { shallow } from 'enzyme';
+import { shallow, ShallowWrapper } from 'enzyme';
-import { EuiEmptyPrompt, EuiButton } from '@elastic/eui';
+import { EuiEmptyPrompt } from '@elastic/eui';
import { EmptyState } from './';
@@ -23,12 +23,24 @@ describe('EmptyState', () => {
expect(wrapper.find(EuiEmptyPrompt)).toHaveLength(1);
});
- it('sends telemetry on create first engine click', () => {
- const wrapper = shallow();
- const prompt = wrapper.find(EuiEmptyPrompt).dive();
- const button = prompt.find(EuiButton);
-
- button.simulate('click');
- expect(mockTelemetryActions.sendAppSearchTelemetry).toHaveBeenCalled();
+ describe('CTA Button', () => {
+ let wrapper: ShallowWrapper;
+ let prompt: ShallowWrapper;
+ let button: ShallowWrapper;
+
+ beforeEach(() => {
+ wrapper = shallow();
+ prompt = wrapper.find(EuiEmptyPrompt).dive();
+ button = prompt.find('[data-test-subj="EmptyStateCreateFirstEngineCta"]');
+ });
+
+ it('sends telemetry on create first engine click', () => {
+ button.simulate('click');
+ expect(mockTelemetryActions.sendAppSearchTelemetry).toHaveBeenCalled();
+ });
+
+ it('sends a user to engine creation', () => {
+ expect(button.prop('to')).toEqual('/engine_creation');
+ });
});
});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.tsx
index 5419a175c9eff..d742d68b0c9d6 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/components/empty_state.tsx
@@ -9,13 +9,13 @@ import React from 'react';
import { useActions } from 'kea';
-import { EuiPageContent, EuiEmptyPrompt, EuiButton } from '@elastic/eui';
+import { EuiPageContent, EuiEmptyPrompt } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
-import { getAppSearchUrl } from '../../../../shared/enterprise_search_url';
import { SetAppSearchChrome as SetPageChrome } from '../../../../shared/kibana_chrome';
+import { EuiButtonTo } from '../../../../shared/react_router_helpers';
import { TelemetryLogic } from '../../../../shared/telemetry';
-import { CREATE_ENGINES_PATH } from '../../../routes';
+import { ENGINE_CREATION_PATH } from '../../../routes';
import { EnginesOverviewHeader } from './header';
@@ -24,16 +24,6 @@ import './empty_state.scss';
export const EmptyState: React.FC = () => {
const { sendAppSearchTelemetry } = useActions(TelemetryLogic);
- const buttonProps = {
- href: getAppSearchUrl(CREATE_ENGINES_PATH),
- target: '_blank',
- onClick: () =>
- sendAppSearchTelemetry({
- action: 'clicked',
- metric: 'create_first_engine_button',
- }),
- };
-
return (
<>
@@ -60,12 +50,23 @@ export const EmptyState: React.FC = () => {
}
actions={
-
+
+ sendAppSearchTelemetry({
+ action: 'clicked',
+ metric: 'create_first_engine_button',
+ })
+ }
+ >
-
+
}
/>
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/constants.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/constants.ts
index 8b387668b89f9..401d4ccd6d117 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/constants.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/constants.ts
@@ -15,3 +15,10 @@ export const META_ENGINES_TITLE = i18n.translate(
'xpack.enterpriseSearch.appSearch.metaEngines.title',
{ defaultMessage: 'Meta Engines' }
);
+
+export const CREATE_AN_ENGINE_BUTTON_LABEL = i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.engines.createAnEngineButton.ButtonLabel',
+ {
+ defaultMessage: 'Create an engine',
+ }
+);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.test.tsx
index cdc06dbbe3921..978538d26e5d6 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.test.tsx
@@ -77,6 +77,14 @@ describe('EnginesOverview', () => {
expect(actions.loadEngines).toHaveBeenCalled();
});
+ it('renders a create engine button which takes users to the create engine page', () => {
+ const wrapper = shallow();
+
+ expect(
+ wrapper.find('[data-test-subj="appSearchEnginesEngineCreationButton"]').prop('to')
+ ).toEqual('/engine_creation');
+ });
+
describe('when on a platinum license', () => {
it('renders a 2nd meta engines table & makes a 2nd meta engines API call', async () => {
setMockValues({
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.tsx
index 2835c8b7cb3c4..1a81c1918ad4d 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engines/engines_overview.tsx
@@ -12,6 +12,7 @@ import { useValues, useActions } from 'kea';
import {
EuiPageContent,
EuiPageContentHeader,
+ EuiPageContentHeaderSection,
EuiPageContentBody,
EuiTitle,
EuiSpacer,
@@ -20,12 +21,14 @@ import {
import { FlashMessages } from '../../../shared/flash_messages';
import { SetAppSearchChrome as SetPageChrome } from '../../../shared/kibana_chrome';
import { LicensingLogic } from '../../../shared/licensing';
+import { EuiButtonTo } from '../../../shared/react_router_helpers';
import { SendAppSearchTelemetry as SendTelemetry } from '../../../shared/telemetry';
+import { ENGINE_CREATION_PATH } from '../../routes';
import { EngineIcon } from './assets/engine_icon';
import { MetaEngineIcon } from './assets/meta_engine_icon';
import { EnginesOverviewHeader, LoadingState, EmptyState } from './components';
-import { ENGINES_TITLE, META_ENGINES_TITLE } from './constants';
+import { CREATE_AN_ENGINE_BUTTON_LABEL, ENGINES_TITLE, META_ENGINES_TITLE } from './constants';
import { EnginesLogic } from './engines_logic';
import { EnginesTable } from './engines_table';
@@ -65,12 +68,24 @@ export const EnginesOverview: React.FC = () => {
-
-
-
- {ENGINES_TITLE}
-
-
+
+
+
+
+ {ENGINES_TITLE}
+
+
+
+
+
+ {CREATE_AN_ENGINE_BUTTON_LABEL}
+
+
{
});
describe('ability checks', () => {
- // TODO: Use this section for routes wrapped in canViewX conditionals
- // e.g., it('renders settings if a user can view settings')
+ describe('canManageEngines', () => {
+ it('renders EngineCreation when user canManageEngines is true', () => {
+ setMockValues({ myRole: { canManageEngines: true } });
+ const wrapper = shallow();
+
+ expect(wrapper.find(EngineCreation)).toHaveLength(1);
+ });
+
+ it('does not render EngineCreation when user canManageEngines is false', () => {
+ setMockValues({ myRole: { canManageEngines: false } });
+ const wrapper = shallow();
+
+ expect(wrapper.find(EngineCreation)).toHaveLength(0);
+ });
+ });
});
});
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/index.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/index.tsx
index 36ac3fb4dbc5b..40dfc1426e402 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/index.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/index.tsx
@@ -21,6 +21,7 @@ import { NotFound } from '../shared/not_found';
import { AppLogic } from './app_logic';
import { Credentials, CREDENTIALS_TITLE } from './components/credentials';
import { EngineNav, EngineRouter } from './components/engine';
+import { EngineCreation } from './components/engine_creation';
import { EnginesOverview, ENGINES_TITLE } from './components/engines';
import { ErrorConnecting } from './components/error_connecting';
import { Library } from './components/library';
@@ -28,6 +29,7 @@ import { ROLE_MAPPINGS_TITLE } from './components/role_mappings';
import { Settings, SETTINGS_TITLE } from './components/settings';
import { SetupGuide } from './components/setup_guide';
import {
+ ENGINE_CREATION_PATH,
ROOT_PATH,
SETUP_GUIDE_PATH,
SETTINGS_PATH,
@@ -56,7 +58,10 @@ export const AppSearchUnconfigured: React.FC = () => (
export const AppSearchConfigured: React.FC = (props) => {
const { initializeAppData } = useActions(AppLogic);
- const { hasInitialized } = useValues(AppLogic);
+ const {
+ hasInitialized,
+ myRole: { canManageEngines },
+ } = useValues(AppLogic);
const { errorConnecting, readOnlyMode } = useValues(HttpLogic);
useEffect(() => {
@@ -96,6 +101,11 @@ export const AppSearchConfigured: React.FC = (props) => {
+ {canManageEngines && (
+
+
+
+ )}
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/routes.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/routes.ts
index 962efbb7ece3a..dee8858fada8b 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/routes.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/routes.ts
@@ -17,7 +17,7 @@ export const CREDENTIALS_PATH = '/credentials';
export const ROLE_MAPPINGS_PATH = '#/role-mappings'; // This page seems to 404 if the # isn't included
export const ENGINES_PATH = '/engines';
-export const CREATE_ENGINES_PATH = `${ENGINES_PATH}/new`;
+export const ENGINE_CREATION_PATH = '/engine_creation';
export const ENGINE_PATH = `${ENGINES_PATH}/:engineName`;
export const SAMPLE_ENGINE_PATH = `${ENGINES_PATH}/national-parks-demo`;
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/engines.test.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/engines.test.ts
index abd26e18c7b9d..6fbc9f5bd2fc4 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/engines.test.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/engines.test.ts
@@ -110,6 +110,47 @@ describe('engine routes', () => {
});
});
+ describe('POST /api/app_search/engines', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/api/app_search/engines',
+ });
+
+ registerEnginesRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request handler', () => {
+ mockRouter.callRoute({ body: { name: 'some-engine', language: 'en' } });
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/as/engines/collection',
+ });
+ });
+
+ describe('validates', () => {
+ it('correctly', () => {
+ const request = { body: { name: 'some-engine', language: 'en' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('missing name', () => {
+ const request = { body: { language: 'en' } };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('optional language', () => {
+ const request = { body: { name: 'some-engine' } };
+ mockRouter.shouldValidate(request);
+ });
+ });
+ });
+
describe('GET /api/app_search/engines/{name}', () => {
let mockRouter: MockRouter;
diff --git a/x-pack/plugins/enterprise_search/server/routes/app_search/engines.ts b/x-pack/plugins/enterprise_search/server/routes/app_search/engines.ts
index 0070680985a34..7d537e5dc0df3 100644
--- a/x-pack/plugins/enterprise_search/server/routes/app_search/engines.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/app_search/engines.ts
@@ -45,6 +45,21 @@ export function registerEnginesRoutes({
}
);
+ router.post(
+ {
+ path: '/api/app_search/engines',
+ validate: {
+ body: schema.object({
+ name: schema.string(),
+ language: schema.maybe(schema.string()),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/as/engines/collection',
+ })
+ );
+
// Single engine endpoints
router.get(
{
From e3f672926efa2129d12295581d11956af9f337e1 Mon Sep 17 00:00:00 2001
From: Stratoula Kalafateli
Date: Thu, 11 Feb 2021 15:52:45 +0200
Subject: [PATCH 02/72] [XY Plugin] Add unit tests (#89582)
* [XY Plugin] Add unit tests
* More unit tests
* Address PR comments
* Update license
* Resolve PR comments
* A nice improvement
* Apply new type everywhere
* Cleaning up
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
---
.../components/detailed_tooltip.mock.ts | 189 ++++
.../components/detailed_tooltip.test.tsx | 62 ++
.../public/components/detailed_tooltip.tsx | 2 +-
.../common/truncate_labels.test.tsx | 51 ++
.../components/common/truncate_labels.tsx | 3 +-
.../point_series/point_series.mocks.ts | 867 ++++++++++++++++++
.../point_series/point_series.test.tsx | 165 ++++
.../options/point_series/threshold_panel.tsx | 1 +
.../public/utils/get_all_series.test.ts | 2 +-
.../public/utils/get_series_name_fn.test.ts | 145 +++
10 files changed, 1484 insertions(+), 3 deletions(-)
create mode 100644 src/plugins/vis_type_xy/public/components/detailed_tooltip.mock.ts
create mode 100644 src/plugins/vis_type_xy/public/components/detailed_tooltip.test.tsx
create mode 100644 src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.test.tsx
create mode 100644 src/plugins/vis_type_xy/public/editor/components/options/point_series/point_series.mocks.ts
create mode 100644 src/plugins/vis_type_xy/public/editor/components/options/point_series/point_series.test.tsx
create mode 100644 src/plugins/vis_type_xy/public/utils/get_series_name_fn.test.ts
diff --git a/src/plugins/vis_type_xy/public/components/detailed_tooltip.mock.ts b/src/plugins/vis_type_xy/public/components/detailed_tooltip.mock.ts
new file mode 100644
index 0000000000000..25310ea1ee7ff
--- /dev/null
+++ b/src/plugins/vis_type_xy/public/components/detailed_tooltip.mock.ts
@@ -0,0 +1,189 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export const aspects = {
+ x: {
+ accessor: 'col-0-3',
+ column: 0,
+ title: 'timestamp per 3 hours',
+ format: {
+ id: 'date',
+ params: {
+ pattern: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ aggType: 'date_histogram',
+ aggId: '3',
+ params: {
+ date: true,
+ intervalESUnit: 'h',
+ intervalESValue: 3,
+ interval: 10800000,
+ format: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ y: [
+ {
+ accessor: 'col-1-1',
+ column: 1,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+};
+
+export const aspectsWithSplitColumn = {
+ x: {
+ accessor: 'col-0-3',
+ column: 0,
+ title: 'timestamp per 3 hours',
+ format: {
+ id: 'date',
+ params: {
+ pattern: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ aggType: 'date_histogram',
+ aggId: '3',
+ params: {
+ date: true,
+ intervalESUnit: 'h',
+ intervalESValue: 3,
+ interval: 10800000,
+ format: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ y: [
+ {
+ accessor: 'col-2-1',
+ column: 2,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+ splitColumn: {
+ accessor: 'col-1-4',
+ column: 1,
+ title: 'Cancelled: Descending',
+ format: {
+ id: 'terms',
+ params: {
+ id: 'boolean',
+ otherBucketLabel: 'Other',
+ missingBucketLabel: 'Missing',
+ },
+ },
+ aggType: 'terms',
+ aggId: '4',
+ params: {},
+ },
+};
+
+export const aspectsWithSplitRow = {
+ x: {
+ accessor: 'col-0-3',
+ column: 0,
+ title: 'timestamp per 3 hours',
+ format: {
+ id: 'date',
+ params: {
+ pattern: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ aggType: 'date_histogram',
+ aggId: '3',
+ params: {
+ date: true,
+ intervalESUnit: 'h',
+ intervalESValue: 3,
+ interval: 10800000,
+ format: 'YYYY-MM-DD HH:mm',
+ },
+ },
+ y: [
+ {
+ accessor: 'col-3-1',
+ column: 2,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+ splitRow: {
+ accessor: 'col-1-5',
+ column: 1,
+ title: 'Carrier: Descending',
+ format: {
+ id: 'terms',
+ params: {
+ id: 'string',
+ otherBucketLabel: 'Other',
+ missingBucketLabel: 'Missing',
+ },
+ },
+ aggType: 'terms',
+ aggId: '4',
+ params: {},
+ },
+};
+
+export const header = {
+ seriesIdentifier: {
+ key:
+ 'groupId{__pseudo_stacked_group-ValueAxis-1__}spec{area-col-1-1}yAccessor{col-1-1}splitAccessors{}smV{__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__}smH{__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__}',
+ specId: 'area-col-1-1',
+ yAccessor: 'col-1-1',
+ splitAccessors: {},
+ seriesKeys: ['col-1-1'],
+ smVerticalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ smHorizontalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ },
+ valueAccessor: 'y1',
+ label: 'Count',
+ value: 1611817200000,
+ formattedValue: '1611817200000',
+ markValue: null,
+ color: '#54b399',
+ isHighlighted: false,
+ isVisible: true,
+};
+
+export const value = {
+ seriesIdentifier: {
+ key:
+ 'groupId{__pseudo_stacked_group-ValueAxis-1__}spec{area-col-1-1}yAccessor{col-1-1}splitAccessors{}smV{__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__}smH{__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__}',
+ specId: 'area-col-1-1',
+ yAccessor: 'col-1-1',
+ splitAccessors: [],
+ seriesKeys: ['col-1-1'],
+ smVerticalAccessorValue: 'kibana',
+ smHorizontalAccessorValue: 'false',
+ },
+ valueAccessor: 'y1',
+ label: 'Count',
+ value: 52,
+ formattedValue: '52',
+ markValue: null,
+ color: '#54b399',
+ isHighlighted: true,
+ isVisible: true,
+};
diff --git a/src/plugins/vis_type_xy/public/components/detailed_tooltip.test.tsx b/src/plugins/vis_type_xy/public/components/detailed_tooltip.test.tsx
new file mode 100644
index 0000000000000..aa76b680f6cc0
--- /dev/null
+++ b/src/plugins/vis_type_xy/public/components/detailed_tooltip.test.tsx
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { getTooltipData } from './detailed_tooltip';
+import {
+ aspects,
+ aspectsWithSplitColumn,
+ aspectsWithSplitRow,
+ header,
+ value,
+} from './detailed_tooltip.mock';
+
+describe('getTooltipData', () => {
+ it('returns an array with the header and data information', () => {
+ const tooltipData = getTooltipData(aspects, header, value);
+ expect(tooltipData).toStrictEqual([
+ {
+ label: 'timestamp per 3 hours',
+ value: '1611817200000',
+ },
+ {
+ label: 'Count',
+ value: '52',
+ },
+ ]);
+ });
+
+ it('returns an array with the data information if the header is not applied', () => {
+ const tooltipData = getTooltipData(aspects, null, value);
+ expect(tooltipData).toStrictEqual([
+ {
+ label: 'Count',
+ value: '52',
+ },
+ ]);
+ });
+
+ it('returns an array with the split column information if it is provided', () => {
+ const tooltipData = getTooltipData(aspectsWithSplitColumn, null, value);
+ expect(tooltipData).toStrictEqual([
+ {
+ label: 'Cancelled: Descending',
+ value: 'false',
+ },
+ ]);
+ });
+
+ it('returns an array with the split row information if it is provided', () => {
+ const tooltipData = getTooltipData(aspectsWithSplitRow, null, value);
+ expect(tooltipData).toStrictEqual([
+ {
+ label: 'Carrier: Descending',
+ value: 'kibana',
+ },
+ ]);
+ });
+});
diff --git a/src/plugins/vis_type_xy/public/components/detailed_tooltip.tsx b/src/plugins/vis_type_xy/public/components/detailed_tooltip.tsx
index 75b5041dae49f..0c1ab262755a7 100644
--- a/src/plugins/vis_type_xy/public/components/detailed_tooltip.tsx
+++ b/src/plugins/vis_type_xy/public/components/detailed_tooltip.tsx
@@ -30,7 +30,7 @@ interface TooltipData {
// TODO: replace when exported from elastic/charts
const DEFAULT_SINGLE_PANEL_SM_VALUE = '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__';
-const getTooltipData = (
+export const getTooltipData = (
aspects: Aspects,
header: TooltipValue | null,
value: TooltipValue
diff --git a/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.test.tsx b/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.test.tsx
new file mode 100644
index 0000000000000..902167cb24642
--- /dev/null
+++ b/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.test.tsx
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import { mountWithIntl } from '@kbn/test/jest';
+import { ReactWrapper } from 'enzyme';
+import { TruncateLabelsOption, TruncateLabelsOptionProps } from './truncate_labels';
+import { findTestSubject } from '@elastic/eui/lib/test';
+
+describe('TruncateLabelsOption', function () {
+ let props: TruncateLabelsOptionProps;
+ let component: ReactWrapper;
+
+ beforeAll(() => {
+ props = {
+ disabled: false,
+ value: 20,
+ setValue: jest.fn(),
+ };
+ });
+
+ it('renders an input type number', () => {
+ component = mountWithIntl();
+ expect(findTestSubject(component, 'xyLabelTruncateInput').length).toBe(1);
+ });
+
+ it('renders the value on the input number', () => {
+ component = mountWithIntl();
+ const input = findTestSubject(component, 'xyLabelTruncateInput');
+ expect(input.props().value).toBe(20);
+ });
+
+ it('disables the input if disabled prop is given', () => {
+ const newProps = { ...props, disabled: true };
+ component = mountWithIntl();
+ const input = findTestSubject(component, 'xyLabelTruncateInput');
+ expect(input.props().disabled).toBeTruthy();
+ });
+
+ it('should set the new value', () => {
+ component = mountWithIntl();
+ const input = findTestSubject(component, 'xyLabelTruncateInput');
+ input.simulate('change', { target: { value: 100 } });
+ expect(props.setValue).toHaveBeenCalled();
+ });
+});
diff --git a/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.tsx b/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.tsx
index ee192257fa545..4ce95b4c617be 100644
--- a/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.tsx
+++ b/src/plugins/vis_type_xy/public/editor/components/common/truncate_labels.tsx
@@ -10,7 +10,7 @@ import React, { ChangeEvent } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiFormRow, EuiFieldNumber } from '@elastic/eui';
-interface TruncateLabelsOptionProps {
+export interface TruncateLabelsOptionProps {
disabled?: boolean;
value?: number | null;
setValue: (paramName: 'truncate', value: null | number) => void;
@@ -29,6 +29,7 @@ function TruncateLabelsOption({ disabled, value = null, setValue }: TruncateLabe
display="rowCompressed"
>
{
+ return {
+ indexPattern: {
+ id: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
+ title: 'kibana_sample_data_flights',
+ fieldFormatMap: {
+ hour_of_day: {
+ id: 'number',
+ params: {
+ pattern: '00',
+ },
+ },
+ AvgTicketPrice: {
+ id: 'number',
+ params: {
+ pattern: '$0,0.[00]',
+ },
+ },
+ },
+ fields: [
+ {
+ count: 4,
+ name: 'AvgTicketPrice',
+ type: 'number',
+ esTypes: ['float'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 0,
+ name: 'Cancelled',
+ type: 'boolean',
+ esTypes: ['boolean'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 3,
+ name: 'Carrier',
+ type: 'string',
+ esTypes: ['keyword'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 2,
+ name: 'timestamp',
+ type: 'date',
+ esTypes: ['date'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ ],
+ timeFieldName: 'timestamp',
+ metaFields: ['_source', '_id', '_type', '_index', '_score'],
+ version: 'WzQ3NjYsMl0=',
+ originalSavedObjectBody: {
+ fieldAttrs:
+ '{"AvgTicketPrice":{"count":4},"Carrier":{"count":3},"DestAirportID":{"count":1},"DestCityName":{"count":3},"DestCountry":{"count":3},"DestLocation":{"count":1},"_score":{"count":1},"dayOfWeek":{"count":4},"timestamp":{"count":2}}',
+ title: 'kibana_sample_data_flights',
+ timeFieldName: 'timestamp',
+ fields:
+ '[{"count":0,"script":"doc[\'timestamp\'].value.hourOfDay","lang":"painless","name":"hour_of_day","type":"number","scripted":true,"searchable":true,"aggregatable":true,"readFromDocValues":false}]',
+ fieldFormatMap:
+ '{"hour_of_day":{"id":"number","params":{"pattern":"00"}},"AvgTicketPrice":{"id":"number","params":{"pattern":"$0,0.[00]"}}}',
+ runtimeFieldMap: '{}',
+ },
+ shortDotsEnable: false,
+ fieldFormats: {
+ fieldFormats: {},
+ defaultMap: {
+ ip: {
+ id: 'ip',
+ params: {},
+ },
+ date: {
+ id: 'date',
+ params: {},
+ },
+ date_nanos: {
+ id: 'date_nanos',
+ params: {},
+ es: true,
+ },
+ number: {
+ id: 'number',
+ params: {},
+ },
+ boolean: {
+ id: 'boolean',
+ params: {},
+ },
+ _source: {
+ id: '_source',
+ params: {},
+ },
+ _default_: {
+ id: 'string',
+ params: {},
+ },
+ },
+ metaParamsOptions: {
+ parsedUrl: {
+ origin: 'http://localhost:5601',
+ pathname: '/thz/app/visualize',
+ basePath: '/thz',
+ },
+ },
+ },
+ fieldAttrs: {
+ AvgTicketPrice: {
+ count: 4,
+ },
+ Carrier: {
+ count: 3,
+ },
+ timestamp: {
+ count: 2,
+ },
+ },
+ runtimeFieldMap: {},
+ allowNoIndex: false,
+ },
+ typesRegistry: {},
+ aggs: [
+ {
+ id: '1',
+ enabled: true,
+ type: 'count',
+ params: {},
+ schema: 'metric',
+ },
+ {
+ id: '2',
+ enabled: true,
+ type: 'date_histogram',
+ params: {
+ field: 'timestamp',
+ timeRange: {
+ from: 'now-15m',
+ to: 'now',
+ },
+ useNormalizedEsInterval: true,
+ scaleMetricValues: false,
+ interval: 'auto',
+ drop_partials: false,
+ min_doc_count: 1,
+ extended_bounds: {},
+ },
+ schema: 'segment',
+ },
+ ],
+ };
+};
+
+export const getVis = (bucketType: string) => {
+ return {
+ data: {
+ aggs: {
+ indexPattern: {
+ id: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
+ title: 'kibana_sample_data_flights',
+ fieldFormatMap: {
+ hour_of_day: {
+ id: 'number',
+ params: {
+ pattern: '00',
+ },
+ },
+ AvgTicketPrice: {
+ id: 'number',
+ params: {
+ pattern: '$0,0.[00]',
+ },
+ },
+ },
+ fields: [
+ {
+ count: 4,
+ name: 'AvgTicketPrice',
+ type: 'number',
+ esTypes: ['float'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 0,
+ name: 'Cancelled',
+ type: 'boolean',
+ esTypes: ['boolean'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 3,
+ name: 'Carrier',
+ type: 'string',
+ esTypes: ['keyword'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ {
+ count: 2,
+ name: 'timestamp',
+ type: 'date',
+ esTypes: ['date'],
+ scripted: false,
+ searchable: true,
+ aggregatable: true,
+ readFromDocValues: true,
+ },
+ ],
+ timeFieldName: 'timestamp',
+ metaFields: ['_source', '_id', '_type', '_index', '_score'],
+ version: 'WzQ3NjYsMl0=',
+ originalSavedObjectBody: {
+ fieldAttrs:
+ '{"AvgTicketPrice":{"count":4},"Carrier":{"count":3},"DestAirportID":{"count":1},"DestCityName":{"count":3},"DestCountry":{"count":3},"DestLocation":{"count":1},"_score":{"count":1},"dayOfWeek":{"count":4},"timestamp":{"count":2}}',
+ title: 'kibana_sample_data_flights',
+ timeFieldName: 'timestamp',
+ fields:
+ '[{"count":0,"script":"doc[\'timestamp\'].value.hourOfDay","lang":"painless","name":"hour_of_day","type":"number","scripted":true,"searchable":true,"aggregatable":true,"readFromDocValues":false}]',
+ fieldFormatMap:
+ '{"hour_of_day":{"id":"number","params":{"pattern":"00"}},"AvgTicketPrice":{"id":"number","params":{"pattern":"$0,0.[00]"}}}',
+ runtimeFieldMap: '{}',
+ },
+ shortDotsEnable: false,
+ fieldFormats: {
+ fieldFormats: {},
+ defaultMap: {
+ ip: {
+ id: 'ip',
+ params: {},
+ },
+ date: {
+ id: 'date',
+ params: {},
+ },
+ date_nanos: {
+ id: 'date_nanos',
+ params: {},
+ es: true,
+ },
+ number: {
+ id: 'number',
+ params: {},
+ },
+ boolean: {
+ id: 'boolean',
+ params: {},
+ },
+ _source: {
+ id: '_source',
+ params: {},
+ },
+ _default_: {
+ id: 'string',
+ params: {},
+ },
+ },
+ metaParamsOptions: {
+ parsedUrl: {
+ origin: 'http://localhost:5601',
+ pathname: '/thz/app/visualize',
+ basePath: '/thz',
+ },
+ },
+ },
+ fieldAttrs: {
+ AvgTicketPrice: {
+ count: 4,
+ },
+ Carrier: {
+ count: 3,
+ },
+ timestamp: {
+ count: 2,
+ },
+ },
+ runtimeFieldMap: {},
+ allowNoIndex: false,
+ },
+ typesRegistry: {},
+ aggs: [
+ {
+ id: '1',
+ enabled: true,
+ type: 'count',
+ params: {},
+ schema: 'metric',
+ },
+ {
+ id: '2',
+ enabled: true,
+ type: {
+ name: bucketType,
+ },
+ params: {
+ field: 'timestamp',
+ timeRange: {
+ from: 'now-15m',
+ to: 'now',
+ },
+ useNormalizedEsInterval: true,
+ scaleMetricValues: false,
+ interval: 'auto',
+ drop_partials: false,
+ min_doc_count: 1,
+ extended_bounds: {},
+ },
+ schema: 'segment',
+ },
+ ],
+ },
+ },
+ type: {
+ name: 'area',
+ title: 'Area',
+ description: 'Emphasize the data between an axis and a line.',
+ note: '',
+ icon: 'visArea',
+ stage: 'production',
+ group: 'aggbased',
+ titleInWizard: '',
+ options: {
+ showTimePicker: true,
+ showQueryBar: true,
+ showFilterBar: true,
+ showIndexSelection: true,
+ hierarchicalData: false,
+ },
+ visConfig: {
+ defaults: {
+ type: 'area',
+ grid: {
+ categoryLines: false,
+ },
+ categoryAxes: [
+ {
+ id: 'CategoryAxis-1',
+ type: 'category',
+ position: 'bottom',
+ show: true,
+ scale: {
+ type: 'linear',
+ },
+ labels: {
+ show: true,
+ filter: true,
+ truncate: 100,
+ },
+ title: {},
+ style: {},
+ },
+ ],
+ valueAxes: [
+ {
+ id: 'ValueAxis-1',
+ name: 'LeftAxis-1',
+ type: 'value',
+ position: 'left',
+ show: true,
+ scale: {
+ type: 'linear',
+ mode: 'normal',
+ },
+ labels: {
+ show: true,
+ rotate: 0,
+ filter: false,
+ truncate: 100,
+ },
+ title: {
+ text: 'Count',
+ },
+ style: {},
+ },
+ ],
+ seriesParams: [
+ {
+ show: true,
+ type: 'area',
+ mode: 'stacked',
+ data: {
+ label: 'Count',
+ id: '1',
+ },
+ drawLinesBetweenPoints: true,
+ lineWidth: 2,
+ showCircles: true,
+ interpolate: 'linear',
+ valueAxis: 'ValueAxis-1',
+ },
+ ],
+ addTooltip: true,
+ detailedTooltip: true,
+ palette: {
+ type: 'palette',
+ name: 'default',
+ },
+ addLegend: true,
+ legendPosition: 'right',
+ fittingFunction: 'linear',
+ times: [],
+ addTimeMarker: false,
+ radiusRatio: 9,
+ thresholdLine: {
+ show: false,
+ value: 10,
+ width: 1,
+ style: 'full',
+ color: '#E7664C',
+ },
+ labels: {},
+ },
+ },
+ editorConfig: {
+ collections: {
+ legendPositions: [
+ {
+ text: 'Top',
+ value: 'top',
+ },
+ {
+ text: 'Left',
+ value: 'left',
+ },
+ {
+ text: 'Right',
+ value: 'right',
+ },
+ {
+ text: 'Bottom',
+ value: 'bottom',
+ },
+ ],
+ positions: [
+ {
+ text: 'Top',
+ value: 'top',
+ },
+ {
+ text: 'Left',
+ value: 'left',
+ },
+ {
+ text: 'Right',
+ value: 'right',
+ },
+ {
+ text: 'Bottom',
+ value: 'bottom',
+ },
+ ],
+ chartTypes: [
+ {
+ text: 'Line',
+ value: 'line',
+ },
+ {
+ text: 'Area',
+ value: 'area',
+ },
+ {
+ text: 'Bar',
+ value: 'histogram',
+ },
+ ],
+ axisModes: [
+ {
+ text: 'Normal',
+ value: 'normal',
+ },
+ {
+ text: 'Percentage',
+ value: 'percentage',
+ },
+ {
+ text: 'Wiggle',
+ value: 'wiggle',
+ },
+ {
+ text: 'Silhouette',
+ value: 'silhouette',
+ },
+ ],
+ scaleTypes: [
+ {
+ text: 'Linear',
+ value: 'linear',
+ },
+ {
+ text: 'Log',
+ value: 'log',
+ },
+ {
+ text: 'Square root',
+ value: 'square root',
+ },
+ ],
+ chartModes: [
+ {
+ text: 'Normal',
+ value: 'normal',
+ },
+ {
+ text: 'Stacked',
+ value: 'stacked',
+ },
+ ],
+ interpolationModes: [
+ {
+ text: 'Straight',
+ value: 'linear',
+ },
+ {
+ text: 'Smoothed',
+ value: 'cardinal',
+ },
+ {
+ text: 'Stepped',
+ value: 'step-after',
+ },
+ ],
+ thresholdLineStyles: [
+ {
+ value: 'full',
+ text: 'Full',
+ },
+ {
+ value: 'dashed',
+ text: 'Dashed',
+ },
+ {
+ value: 'dot-dashed',
+ text: 'Dot-dashed',
+ },
+ ],
+ fittingFunctions: [
+ {
+ value: 'none',
+ text: 'Hide (Do not fill gaps)',
+ },
+ {
+ value: 'zero',
+ text: 'Zero (Fill gaps with zeros)',
+ },
+ {
+ value: 'linear',
+ text: 'Linear (Fill gaps with a line)',
+ },
+ {
+ value: 'carry',
+ text: 'Last (Fill gaps with the last value)',
+ },
+ {
+ value: 'lookahead',
+ text: 'Next (Fill gaps with the next value)',
+ },
+ ],
+ },
+ optionTabs: [
+ {
+ name: 'advanced',
+ title: 'Metrics & axes',
+ },
+ {
+ name: 'options',
+ title: 'Panel settings',
+ },
+ ],
+ schemas: [
+ {
+ group: 'metrics',
+ name: 'metric',
+ title: 'Y-axis',
+ aggFilter: ['!geo_centroid', '!geo_bounds'],
+ min: 1,
+ defaults: [
+ {
+ schema: 'metric',
+ type: 'count',
+ },
+ ],
+ max: null,
+ params: [],
+ },
+ {
+ group: 'metrics',
+ name: 'radius',
+ title: 'Dot size',
+ min: 0,
+ max: 1,
+ aggFilter: ['count', 'avg', 'sum', 'min', 'max', 'cardinality'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'segment',
+ title: 'X-axis',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'group',
+ title: 'Split series',
+ min: 0,
+ max: 3,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'split',
+ title: 'Split chart',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [
+ {
+ name: 'row',
+ default: true,
+ },
+ ],
+ },
+ ],
+ },
+ hidden: false,
+ requiresSearch: true,
+ hierarchicalData: false,
+ schemas: {
+ all: [
+ {
+ group: 'metrics',
+ name: 'metric',
+ title: 'Y-axis',
+ aggFilter: ['!geo_centroid', '!geo_bounds'],
+ min: 1,
+ defaults: [
+ {
+ schema: 'metric',
+ type: 'count',
+ },
+ ],
+ max: null,
+ params: [],
+ },
+ {
+ group: 'metrics',
+ name: 'radius',
+ title: 'Dot size',
+ min: 0,
+ max: 1,
+ aggFilter: ['count', 'avg', 'sum', 'min', 'max', 'cardinality'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'segment',
+ title: 'X-axis',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'group',
+ title: 'Split series',
+ min: 0,
+ max: 3,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'split',
+ title: 'Split chart',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [
+ {
+ name: 'row',
+ default: true,
+ },
+ ],
+ },
+ ],
+ buckets: [
+ {
+ group: 'buckets',
+ name: 'segment',
+ title: 'X-axis',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'group',
+ title: 'Split series',
+ min: 0,
+ max: 3,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [],
+ },
+ {
+ group: 'buckets',
+ name: 'split',
+ title: 'Split chart',
+ min: 0,
+ max: 1,
+ aggFilter: ['!geohash_grid', '!geotile_grid', '!filter'],
+ params: [
+ {
+ name: 'row',
+ default: true,
+ },
+ ],
+ },
+ ],
+ metrics: [
+ {
+ group: 'metrics',
+ name: 'metric',
+ title: 'Y-axis',
+ aggFilter: ['!geo_centroid', '!geo_bounds'],
+ min: 1,
+ defaults: [
+ {
+ schema: 'metric',
+ type: 'count',
+ },
+ ],
+ max: null,
+ params: [],
+ },
+ {
+ group: 'metrics',
+ name: 'radius',
+ title: 'Dot size',
+ min: 0,
+ max: 1,
+ aggFilter: ['count', 'avg', 'sum', 'min', 'max', 'cardinality'],
+ params: [],
+ },
+ ],
+ },
+ },
+ };
+};
+
+export const getStateParams = (type: string, thresholdPanelOn: boolean) => {
+ return {
+ type: 'area',
+ grid: {
+ categoryLines: false,
+ style: {
+ color: '#eee',
+ },
+ },
+ categoryAxes: [
+ {
+ id: 'CategoryAxis-1',
+ type: 'category',
+ position: 'bottom',
+ show: true,
+ style: {},
+ scale: {
+ type: 'linear',
+ },
+ labels: {
+ show: true,
+ truncate: 100,
+ filter: true,
+ },
+ title: {},
+ },
+ ],
+ valueAxes: [
+ {
+ id: 'ValueAxis-1',
+ name: 'LeftAxis-1',
+ type: 'value',
+ position: 'left',
+ show: true,
+ style: {},
+ scale: {
+ type: 'linear',
+ mode: 'normal',
+ },
+ labels: {
+ show: true,
+ rotate: 0,
+ filter: false,
+ truncate: 100,
+ },
+ title: {
+ text: 'Count',
+ },
+ },
+ ],
+ seriesParams: [
+ {
+ show: 'true',
+ type,
+ mode: 'stacked',
+ data: {
+ label: 'Count',
+ id: '1',
+ },
+ drawLinesBetweenPoints: true,
+ showCircles: true,
+ interpolate: 'cardinal',
+ valueAxis: 'ValueAxis-1',
+ },
+ ],
+ addTooltip: true,
+ addLegend: true,
+ legendPosition: 'right',
+ times: [],
+ addTimeMarker: false,
+ detailedTooltip: true,
+ palette: {
+ type: 'palette',
+ name: 'kibana_palette',
+ },
+ isVislibVis: true,
+ fittingFunction: 'zero',
+ radiusRatio: 9,
+ thresholdLine: {
+ show: thresholdPanelOn,
+ value: 10,
+ width: 1,
+ style: 'full',
+ color: '#E7664C',
+ },
+ labels: {},
+ };
+};
diff --git a/src/plugins/vis_type_xy/public/editor/components/options/point_series/point_series.test.tsx b/src/plugins/vis_type_xy/public/editor/components/options/point_series/point_series.test.tsx
new file mode 100644
index 0000000000000..59c03e02ac9f4
--- /dev/null
+++ b/src/plugins/vis_type_xy/public/editor/components/options/point_series/point_series.test.tsx
@@ -0,0 +1,165 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import { mountWithIntl } from '@kbn/test/jest';
+import { ReactWrapper } from 'enzyme';
+import { PointSeriesOptions } from './point_series';
+import { findTestSubject } from '@elastic/eui/lib/test';
+import { act } from 'react-dom/test-utils';
+import { ChartType } from '../../../../../common';
+import { getAggs, getVis, getStateParams } from './point_series.mocks';
+
+jest.mock('../../../../services', () => ({
+ getTrackUiMetric: jest.fn(() => null),
+ getPalettesService: jest.fn(() => {
+ return {
+ getPalettes: jest.fn(),
+ };
+ }),
+}));
+
+type PointSeriesOptionsProps = Parameters[0];
+
+describe('PointSeries Editor', function () {
+ let props: PointSeriesOptionsProps;
+ let component: ReactWrapper;
+
+ beforeEach(() => {
+ props = ({
+ aggs: getAggs(),
+ hasHistogramAgg: false,
+ extraProps: {
+ showElasticChartsOptions: false,
+ },
+ isTabSelected: false,
+ setMultipleValidity: jest.fn(),
+ setTouched: jest.fn(),
+ setValue: jest.fn(),
+ setValidity: jest.fn(),
+ stateParams: getStateParams(ChartType.Histogram, false),
+ vis: getVis('date_histogram'),
+ } as unknown) as PointSeriesOptionsProps;
+ });
+
+ it('renders the showValuesOnChart switch for a bar chart', async () => {
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'showValuesOnChart')).toHaveLength(1);
+ });
+ });
+
+ it('not renders the showValuesOnChart switch for an area chart', async () => {
+ const areaVisProps = ({
+ ...props,
+ stateParams: getStateParams(ChartType.Area, false),
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'showValuesOnChart').length).toBe(0);
+ });
+ });
+
+ it('renders the addTimeMarker switch for a date histogram bucket', async () => {
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'addTimeMarker').length).toBe(1);
+ expect(findTestSubject(component, 'orderBucketsBySum').length).toBe(0);
+ });
+ });
+
+ it('renders the orderBucketsBySum switch for a non date histogram bucket', async () => {
+ const newVisProps = ({
+ ...props,
+ vis: getVis('terms'),
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'addTimeMarker').length).toBe(0);
+ expect(findTestSubject(component, 'orderBucketsBySum').length).toBe(1);
+ });
+ });
+
+ it('not renders the editor options that are specific for the es charts implementation if showElasticChartsOptions is false', async () => {
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'detailedTooltip').length).toBe(0);
+ });
+ });
+
+ it('renders the editor options that are specific for the es charts implementation if showElasticChartsOptions is true', async () => {
+ const newVisProps = ({
+ ...props,
+ extraProps: {
+ showElasticChartsOptions: true,
+ },
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'detailedTooltip').length).toBe(1);
+ });
+ });
+
+ it('not renders the fitting function for a bar chart', async () => {
+ const newVisProps = ({
+ ...props,
+ extraProps: {
+ showElasticChartsOptions: true,
+ },
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'fittingFunction').length).toBe(0);
+ });
+ });
+
+ it('renders the fitting function for a line chart', async () => {
+ const newVisProps = ({
+ ...props,
+ stateParams: getStateParams(ChartType.Line, false),
+ extraProps: {
+ showElasticChartsOptions: true,
+ },
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'fittingFunction').length).toBe(1);
+ });
+ });
+
+ it('renders the showCategoryLines switch', async () => {
+ const newVisProps = ({
+ ...props,
+ extraProps: {
+ showElasticChartsOptions: true,
+ },
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'showValuesOnChart').length).toBe(1);
+ });
+ });
+
+ it('not renders the threshold panel if the Show threshold line switch is off', async () => {
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'thresholdValueInputOption').length).toBe(0);
+ });
+ });
+
+ it('renders the threshold panel if the Show threshold line switch is on', async () => {
+ const newVisProps = ({
+ ...props,
+ stateParams: getStateParams(ChartType.Histogram, true),
+ } as unknown) as PointSeriesOptionsProps;
+ component = mountWithIntl();
+ await act(async () => {
+ expect(findTestSubject(component, 'thresholdValueInputOption').length).toBe(1);
+ });
+ });
+});
diff --git a/src/plugins/vis_type_xy/public/editor/components/options/point_series/threshold_panel.tsx b/src/plugins/vis_type_xy/public/editor/components/options/point_series/threshold_panel.tsx
index dadbe4dd1fc76..00429c6702eeb 100644
--- a/src/plugins/vis_type_xy/public/editor/components/options/point_series/threshold_panel.tsx
+++ b/src/plugins/vis_type_xy/public/editor/components/options/point_series/threshold_panel.tsx
@@ -78,6 +78,7 @@ function ThresholdPanel({
value={stateParams.thresholdLine.value}
setValue={setThresholdLine}
setValidity={setThresholdLineValidity}
+ data-test-subj="thresholdValueInputOption"
/>
{
+describe('getAllSeries', () => {
it('returns empty array if splitAccessors is undefined', () => {
const splitAccessors = undefined;
const series = getAllSeries(rowsNoSplitSeries, splitAccessors, yAspects);
diff --git a/src/plugins/vis_type_xy/public/utils/get_series_name_fn.test.ts b/src/plugins/vis_type_xy/public/utils/get_series_name_fn.test.ts
new file mode 100644
index 0000000000000..8853e6075e269
--- /dev/null
+++ b/src/plugins/vis_type_xy/public/utils/get_series_name_fn.test.ts
@@ -0,0 +1,145 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { XYChartSeriesIdentifier } from '@elastic/charts';
+import { getSeriesNameFn } from './get_series_name_fn';
+
+const aspects = {
+ series: [
+ {
+ accessor: 'col-1-3',
+ column: 1,
+ title: 'FlightDelayType: Descending',
+ format: {
+ id: 'terms',
+ params: {
+ id: 'string',
+ otherBucketLabel: 'Other',
+ missingBucketLabel: 'Missing',
+ },
+ },
+ aggType: 'terms',
+ aggId: '3',
+ params: {},
+ },
+ ],
+ x: {
+ accessor: 'col-0-2',
+ column: 0,
+ title: 'timestamp per day',
+ format: {
+ id: 'date',
+ params: {
+ pattern: 'YYYY-MM-DD',
+ },
+ },
+ aggType: 'date_histogram',
+ aggId: '2',
+ params: {
+ date: true,
+ intervalESUnit: 'd',
+ intervalESValue: 1,
+ interval: 86400000,
+ format: 'YYYY-MM-DD',
+ },
+ },
+ y: [
+ {
+ accessor: 'col-1-1',
+ column: 1,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+};
+
+const series = ({
+ specId: 'histogram-col-1-1',
+ seriesKeys: ['col-1-1'],
+ yAccessor: 'col-1-1',
+ splitAccessors: [],
+ smVerticalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ smHorizontalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ groupId: '__pseudo_stacked_group-ValueAxis-1__',
+ seriesType: 'bar',
+ isStacked: true,
+} as unknown) as XYChartSeriesIdentifier;
+
+const splitAccessors = new Map();
+splitAccessors.set('col-1-3', 'Weather Delay');
+
+const seriesSplitAccessors = ({
+ specId: 'histogram-col-2-1',
+ seriesKeys: ['Weather Delay', 'col-2-1'],
+ yAccessor: 'col-2-1',
+ splitAccessors,
+ smVerticalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ smHorizontalAccessorValue: '__ECH_DEFAULT_SINGLE_PANEL_SM_VALUE__',
+ groupId: '__pseudo_stacked_group-ValueAxis-1__',
+ seriesType: 'bar',
+ isStacked: true,
+} as unknown) as XYChartSeriesIdentifier;
+
+describe('getSeriesNameFn', () => {
+ it('returns the y aspects title if splitAccessors are empty array', () => {
+ const getSeriesName = getSeriesNameFn(aspects, false);
+ expect(getSeriesName(series)).toStrictEqual('Count');
+ });
+
+ it('returns the y aspects title if splitAccessors are empty array but mupliple flag is set to true', () => {
+ const getSeriesName = getSeriesNameFn(aspects, true);
+ expect(getSeriesName(series)).toStrictEqual('Count');
+ });
+
+ it('returns the correct string for multiple set to false and given split accessors', () => {
+ const aspectsSplitSeries = {
+ ...aspects,
+ y: [
+ {
+ accessor: 'col-2-1',
+ column: 2,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+ };
+ const getSeriesName = getSeriesNameFn(aspectsSplitSeries, false);
+ expect(getSeriesName(seriesSplitAccessors)).toStrictEqual('Weather Delay');
+ });
+
+ it('returns the correct string for multiple set to true and given split accessors', () => {
+ const aspectsSplitSeries = {
+ ...aspects,
+ y: [
+ {
+ accessor: 'col-2-1',
+ column: 2,
+ title: 'Count',
+ format: {
+ id: 'number',
+ },
+ aggType: 'count',
+ aggId: '1',
+ params: {},
+ },
+ ],
+ };
+ const getSeriesName = getSeriesNameFn(aspectsSplitSeries, true);
+ expect(getSeriesName(seriesSplitAccessors)).toStrictEqual('Weather Delay: Count');
+ });
+});
From 847d57b3e1057fb82578fde3785117c1847726f9 Mon Sep 17 00:00:00 2001
From: Stratoula Kalafateli
Date: Thu, 11 Feb 2021 16:12:01 +0200
Subject: [PATCH 03/72] [XY axis] Fixes bug on bar charts preventing unstacked
mode (#90602)
---
src/plugins/vis_type_xy/public/config/get_config.ts | 8 +-------
1 file changed, 1 insertion(+), 7 deletions(-)
diff --git a/src/plugins/vis_type_xy/public/config/get_config.ts b/src/plugins/vis_type_xy/public/config/get_config.ts
index b19366fc22dbb..8ebac1b71940a 100644
--- a/src/plugins/vis_type_xy/public/config/get_config.ts
+++ b/src/plugins/vis_type_xy/public/config/get_config.ts
@@ -98,10 +98,6 @@ const shouldEnableHistogramMode = (
);
});
- if (bars.length === 1) {
- return true;
- }
-
const groupIds = [
...bars.reduce>((acc, { valueAxis: groupId, mode }) => {
acc.add(groupId);
@@ -113,11 +109,9 @@ const shouldEnableHistogramMode = (
return false;
}
- const test = bars.every(({ valueAxis: groupId, mode }) => {
+ return bars.every(({ valueAxis: groupId, mode }) => {
const yAxisScale = yAxes.find(({ groupId: axisGroupId }) => axisGroupId === groupId)?.scale;
return mode === 'stacked' || yAxisScale?.mode === 'percentage';
});
-
- return test;
};
From 6bd0a7fcc5f477b75ca173ee0de11ebcd2898f4f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alejandro=20Fern=C3=A1ndez=20Haro?=
Date: Thu, 11 Feb 2021 14:36:17 +0000
Subject: [PATCH 04/72] [Plugins Discovery] Enforce camelCase plugin IDs
(#90752)
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
---
.../discovery/plugin_manifest_parser.test.ts | 77 ++++++++-----------
.../discovery/plugin_manifest_parser.ts | 11 +--
.../plugins/discovery/plugins_discovery.ts | 2 +-
.../fixtures/plugins/newsfeed/kibana.json | 2 +-
.../plugins/kbn_tp_run_pipeline/kibana.json | 2 +-
.../plugins/app_link_test/kibana.json | 2 +-
.../plugins/core_app_status/kibana.json | 2 +-
.../plugins/core_plugin_a/kibana.json | 2 +-
.../plugins/core_plugin_appleave/kibana.json | 2 +-
.../plugins/core_plugin_b/kibana.json | 6 +-
.../plugins/core_plugin_b/public/plugin.tsx | 4 +-
.../core_plugin_chromeless/kibana.json | 2 +-
.../plugins/core_plugin_helpmenu/kibana.json | 2 +-
.../core_plugin_route_timeouts/kibana.json | 2 +-
.../plugins/core_provider_plugin/kibana.json | 4 +-
.../plugins/data_search/kibana.json | 2 +-
.../elasticsearch_client_plugin/kibana.json | 2 +-
.../plugins/index_patterns/kibana.json | 2 +-
.../kbn_sample_panel_action/kibana.json | 2 +-
.../plugins/kbn_top_nav/kibana.json | 4 +-
.../kbn_tp_custom_visualizations/kibana.json | 2 +-
.../management_test_plugin/kibana.json | 2 +-
.../plugins/rendering_plugin/kibana.json | 2 +-
.../plugins/session_notifications/kibana.json | 4 +-
.../plugins/ui_settings_plugin/kibana.json | 2 +-
.../test_suites/core_plugins/ui_plugins.ts | 6 +-
.../common/fixtures/plugins/aad/kibana.json | 2 +-
.../plugins/actions_simulators/kibana.json | 2 +-
.../plugins/task_manager_fixture/kibana.json | 2 +-
.../plugins/kibana_cors_test/kibana.json | 2 +-
.../plugins/iframe_embedded/kibana.json | 2 +-
.../fixtures/plugins/alerts/kibana.json | 2 +-
.../plugins/elasticsearch_client/kibana.json | 2 +-
.../plugins/event_log/kibana.json | 2 +-
.../plugins/feature_usage_test/kibana.json | 2 +-
.../plugins/sample_task_plugin/kibana.json | 2 +-
.../task_manager_performance/kibana.json | 2 +-
.../plugins/resolver_test/kibana.json | 2 +-
.../fixtures/oidc/oidc_provider/kibana.json | 2 +-
.../fixtures/saml/saml_provider/kibana.json | 2 +-
.../fixtures/plugins/foo_plugin/kibana.json | 2 +-
.../stack_management_usage_test/kibana.json | 4 +-
42 files changed, 86 insertions(+), 100 deletions(-)
diff --git a/src/core/server/plugins/discovery/plugin_manifest_parser.test.ts b/src/core/server/plugins/discovery/plugin_manifest_parser.test.ts
index 4dc912680ec63..f3a92c896b014 100644
--- a/src/core/server/plugins/discovery/plugin_manifest_parser.test.ts
+++ b/src/core/server/plugins/discovery/plugin_manifest_parser.test.ts
@@ -9,12 +9,10 @@
import { mockReadFile } from './plugin_manifest_parser.test.mocks';
import { PluginDiscoveryErrorType } from './plugin_discovery_error';
-import { loggingSystemMock } from '../../logging/logging_system.mock';
import { resolve } from 'path';
import { parseManifest } from './plugin_manifest_parser';
-const logger = loggingSystemMock.createLogger();
const pluginPath = resolve('path', 'existent-dir');
const pluginManifestPath = resolve(pluginPath, 'kibana.json');
const packageInfo = {
@@ -34,7 +32,7 @@ test('return error when manifest is empty', async () => {
cb(null, Buffer.from(''));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Unexpected end of JSON input (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -46,7 +44,7 @@ test('return error when manifest content is null', async () => {
cb(null, Buffer.from('null'));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin manifest must contain a JSON encoded object. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -58,7 +56,7 @@ test('return error when manifest content is not a valid JSON', async () => {
cb(null, Buffer.from('not-json'));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Unexpected token o in JSON at position 1 (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -70,7 +68,7 @@ test('return error when plugin id is missing', async () => {
cb(null, Buffer.from(JSON.stringify({ version: 'some-version' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin manifest must contain an "id" property. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -82,37 +80,24 @@ test('return error when plugin id includes `.` characters', async () => {
cb(null, Buffer.from(JSON.stringify({ id: 'some.name', version: 'some-version' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin "id" must not include \`.\` characters. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
});
});
-test('logs warning if pluginId is not in camelCase format', async () => {
+test('return error when pluginId is not in camelCase format', async () => {
+ expect.assertions(1);
mockReadFile.mockImplementation((path, cb) => {
cb(null, Buffer.from(JSON.stringify({ id: 'some_name', version: 'kibana', server: true })));
});
- expect(loggingSystemMock.collect(logger).warn).toHaveLength(0);
- await parseManifest(pluginPath, packageInfo, logger);
- expect(loggingSystemMock.collect(logger).warn).toMatchInlineSnapshot(`
- Array [
- Array [
- "Expect plugin \\"id\\" in camelCase, but found: some_name",
- ],
- ]
- `);
-});
-
-test('does not log pluginId format warning in dist mode', async () => {
- mockReadFile.mockImplementation((path, cb) => {
- cb(null, Buffer.from(JSON.stringify({ id: 'some_name', version: 'kibana', server: true })));
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
+ message: `Plugin "id" must be camelCase, but found: some_name. (invalid-manifest, ${pluginManifestPath})`,
+ type: PluginDiscoveryErrorType.InvalidManifest,
+ path: pluginManifestPath,
});
-
- expect(loggingSystemMock.collect(logger).warn).toHaveLength(0);
- await parseManifest(pluginPath, { ...packageInfo, dist: true }, logger);
- expect(loggingSystemMock.collect(logger).warn.length).toBe(0);
});
test('return error when plugin version is missing', async () => {
@@ -120,7 +105,7 @@ test('return error when plugin version is missing', async () => {
cb(null, Buffer.from(JSON.stringify({ id: 'someId' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin manifest for "someId" must contain a "version" property. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -132,7 +117,7 @@ test('return error when plugin expected Kibana version is lower than actual vers
cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '6.4.2' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin "someId" is only compatible with Kibana version "6.4.2", but used Kibana version is "7.0.0-alpha1". (incompatible-version, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.IncompatibleVersion,
path: pluginManifestPath,
@@ -147,7 +132,7 @@ test('return error when plugin expected Kibana version cannot be interpreted as
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin "someId" is only compatible with Kibana version "non-sem-ver", but used Kibana version is "7.0.0-alpha1". (incompatible-version, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.IncompatibleVersion,
path: pluginManifestPath,
@@ -159,7 +144,7 @@ test('return error when plugin config path is not a string', async () => {
cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '7.0.0', configPath: 2 })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `The "configPath" in plugin manifest for "someId" should either be a string or an array of strings. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -174,7 +159,7 @@ test('return error when plugin config path is an array that contains non-string
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `The "configPath" in plugin manifest for "someId" should either be a string or an array of strings. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -186,7 +171,7 @@ test('return error when plugin expected Kibana version is higher than actual ver
cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '7.0.1' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Plugin "someId" is only compatible with Kibana version "7.0.1", but used Kibana version is "7.0.0-alpha1". (incompatible-version, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.IncompatibleVersion,
path: pluginManifestPath,
@@ -198,7 +183,7 @@ test('return error when both `server` and `ui` are set to `false` or missing', a
cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '7.0.0' })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Both "server" and "ui" are missing or set to "false" in plugin manifest for "someId", but at least one of these must be set to "true". (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -211,7 +196,7 @@ test('return error when both `server` and `ui` are set to `false` or missing', a
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Both "server" and "ui" are missing or set to "false" in plugin manifest for "someId", but at least one of these must be set to "true". (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -234,7 +219,7 @@ test('return error when manifest contains unrecognized properties', async () =>
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).rejects.toMatchObject({
+ await expect(parseManifest(pluginPath, packageInfo)).rejects.toMatchObject({
message: `Manifest for plugin "someId" contains the following unrecognized properties: unknownOne,unknownTwo. (invalid-manifest, ${pluginManifestPath})`,
type: PluginDiscoveryErrorType.InvalidManifest,
path: pluginManifestPath,
@@ -247,20 +232,20 @@ describe('configPath', () => {
cb(null, Buffer.from(JSON.stringify({ id: 'plugin', version: '7.0.0', server: true })));
});
- const manifest = await parseManifest(pluginPath, packageInfo, logger);
+ const manifest = await parseManifest(pluginPath, packageInfo);
expect(manifest.configPath).toBe(manifest.id);
});
test('falls back to plugin id in snakeCase format', async () => {
mockReadFile.mockImplementation((path, cb) => {
- cb(null, Buffer.from(JSON.stringify({ id: 'SomeId', version: '7.0.0', server: true })));
+ cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '7.0.0', server: true })));
});
- const manifest = await parseManifest(pluginPath, packageInfo, logger);
+ const manifest = await parseManifest(pluginPath, packageInfo);
expect(manifest.configPath).toBe('some_id');
});
- test('not formated to snakeCase if defined explicitly as string', async () => {
+ test('not formatted to snakeCase if defined explicitly as string', async () => {
mockReadFile.mockImplementation((path, cb) => {
cb(
null,
@@ -270,11 +255,11 @@ describe('configPath', () => {
);
});
- const manifest = await parseManifest(pluginPath, packageInfo, logger);
+ const manifest = await parseManifest(pluginPath, packageInfo);
expect(manifest.configPath).toBe('somePath');
});
- test('not formated to snakeCase if defined explicitly as an array of strings', async () => {
+ test('not formatted to snakeCase if defined explicitly as an array of strings', async () => {
mockReadFile.mockImplementation((path, cb) => {
cb(
null,
@@ -284,7 +269,7 @@ describe('configPath', () => {
);
});
- const manifest = await parseManifest(pluginPath, packageInfo, logger);
+ const manifest = await parseManifest(pluginPath, packageInfo);
expect(manifest.configPath).toEqual(['somePath']);
});
});
@@ -294,7 +279,7 @@ test('set defaults for all missing optional fields', async () => {
cb(null, Buffer.from(JSON.stringify({ id: 'someId', version: '7.0.0', server: true })));
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).resolves.toEqual({
+ await expect(parseManifest(pluginPath, packageInfo)).resolves.toEqual({
id: 'someId',
configPath: 'some_id',
version: '7.0.0',
@@ -325,7 +310,7 @@ test('return all set optional fields as they are in manifest', async () => {
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).resolves.toEqual({
+ await expect(parseManifest(pluginPath, packageInfo)).resolves.toEqual({
id: 'someId',
configPath: ['some', 'path'],
version: 'some-version',
@@ -355,7 +340,7 @@ test('return manifest when plugin expected Kibana version matches actual version
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).resolves.toEqual({
+ await expect(parseManifest(pluginPath, packageInfo)).resolves.toEqual({
id: 'someId',
configPath: 'some-path',
version: 'some-version',
@@ -385,7 +370,7 @@ test('return manifest when plugin expected Kibana version is `kibana`', async ()
);
});
- await expect(parseManifest(pluginPath, packageInfo, logger)).resolves.toEqual({
+ await expect(parseManifest(pluginPath, packageInfo)).resolves.toEqual({
id: 'someId',
configPath: 'some_id',
version: 'some-version',
diff --git a/src/core/server/plugins/discovery/plugin_manifest_parser.ts b/src/core/server/plugins/discovery/plugin_manifest_parser.ts
index 9db68bcaa4cce..eae0e73e86c46 100644
--- a/src/core/server/plugins/discovery/plugin_manifest_parser.ts
+++ b/src/core/server/plugins/discovery/plugin_manifest_parser.ts
@@ -12,7 +12,6 @@ import { coerce } from 'semver';
import { promisify } from 'util';
import { snakeCase } from 'lodash';
import { isConfigPath, PackageInfo } from '../../config';
-import { Logger } from '../../logging';
import { PluginManifest } from '../types';
import { PluginDiscoveryError } from './plugin_discovery_error';
import { isCamelCase } from './is_camel_case';
@@ -63,8 +62,7 @@ const KNOWN_MANIFEST_FIELDS = (() => {
*/
export async function parseManifest(
pluginPath: string,
- packageInfo: PackageInfo,
- log: Logger
+ packageInfo: PackageInfo
): Promise {
const manifestPath = resolve(pluginPath, MANIFEST_FILE_NAME);
@@ -105,8 +103,11 @@ export async function parseManifest(
);
}
- if (!packageInfo.dist && !isCamelCase(manifest.id)) {
- log.warn(`Expect plugin "id" in camelCase, but found: ${manifest.id}`);
+ if (!isCamelCase(manifest.id)) {
+ throw PluginDiscoveryError.invalidManifest(
+ manifestPath,
+ new Error(`Plugin "id" must be camelCase, but found: ${manifest.id}.`)
+ );
}
if (!manifest.version || typeof manifest.version !== 'string') {
diff --git a/src/core/server/plugins/discovery/plugins_discovery.ts b/src/core/server/plugins/discovery/plugins_discovery.ts
index 61eccff982593..368795968a7cb 100644
--- a/src/core/server/plugins/discovery/plugins_discovery.ts
+++ b/src/core/server/plugins/discovery/plugins_discovery.ts
@@ -179,7 +179,7 @@ function createPlugin$(
coreContext: CoreContext,
instanceInfo: InstanceInfo
) {
- return from(parseManifest(path, coreContext.env.packageInfo, log)).pipe(
+ return from(parseManifest(path, coreContext.env.packageInfo)).pipe(
map((manifest) => {
log.debug(`Successfully discovered plugin "${manifest.id}" at "${path}"`);
const opaqueId = Symbol(manifest.id);
diff --git a/test/common/fixtures/plugins/newsfeed/kibana.json b/test/common/fixtures/plugins/newsfeed/kibana.json
index 110b53fc6b2e9..0fbd24f45b684 100644
--- a/test/common/fixtures/plugins/newsfeed/kibana.json
+++ b/test/common/fixtures/plugins/newsfeed/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "newsfeed-fixtures",
+ "id": "newsfeedFixtures",
"version": "kibana",
"server": true,
"ui": false
diff --git a/test/interpreter_functional/plugins/kbn_tp_run_pipeline/kibana.json b/test/interpreter_functional/plugins/kbn_tp_run_pipeline/kibana.json
index 084cee2fddf08..2fd2a9e5144d4 100644
--- a/test/interpreter_functional/plugins/kbn_tp_run_pipeline/kibana.json
+++ b/test/interpreter_functional/plugins/kbn_tp_run_pipeline/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "kbn_tp_run_pipeline",
+ "id": "kbnTpRunPipeline",
"version": "0.0.1",
"kibanaVersion": "kibana",
"requiredPlugins": [
diff --git a/test/plugin_functional/plugins/app_link_test/kibana.json b/test/plugin_functional/plugins/app_link_test/kibana.json
index 5384d4fee1508..c37eae274460c 100644
--- a/test/plugin_functional/plugins/app_link_test/kibana.json
+++ b/test/plugin_functional/plugins/app_link_test/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "app_link_test",
+ "id": "appLinkTest",
"version": "0.0.1",
"kibanaVersion": "kibana",
"server": false,
diff --git a/test/plugin_functional/plugins/core_app_status/kibana.json b/test/plugin_functional/plugins/core_app_status/kibana.json
index 91d8e6fd8f9e1..eb825cf9990c9 100644
--- a/test/plugin_functional/plugins/core_app_status/kibana.json
+++ b/test/plugin_functional/plugins/core_app_status/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_app_status",
+ "id": "coreAppStatus",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_app_status"],
diff --git a/test/plugin_functional/plugins/core_plugin_a/kibana.json b/test/plugin_functional/plugins/core_plugin_a/kibana.json
index 0989595c49a58..9a153011bdc70 100644
--- a/test/plugin_functional/plugins/core_plugin_a/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_a/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_plugin_a",
+ "id": "corePluginA",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_a"],
diff --git a/test/plugin_functional/plugins/core_plugin_appleave/kibana.json b/test/plugin_functional/plugins/core_plugin_appleave/kibana.json
index 95343cbcf2804..f9337fcc226f2 100644
--- a/test/plugin_functional/plugins/core_plugin_appleave/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_appleave/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_plugin_appleave",
+ "id": "corePluginAppleave",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_appleave"],
diff --git a/test/plugin_functional/plugins/core_plugin_b/kibana.json b/test/plugin_functional/plugins/core_plugin_b/kibana.json
index 7c6aa597c82fa..d132e714ea31d 100644
--- a/test/plugin_functional/plugins/core_plugin_b/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_b/kibana.json
@@ -1,10 +1,10 @@
{
- "id": "core_plugin_b",
+ "id": "corePluginB",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_b"],
"server": true,
"ui": true,
- "requiredPlugins": ["core_plugin_a"],
- "optionalPlugins": ["core_plugin_c"]
+ "requiredPlugins": ["corePluginA"],
+ "optionalPlugins": ["corePluginC"]
}
diff --git a/test/plugin_functional/plugins/core_plugin_b/public/plugin.tsx b/test/plugin_functional/plugins/core_plugin_b/public/plugin.tsx
index 48c8d85b21dac..5bab0275439df 100644
--- a/test/plugin_functional/plugins/core_plugin_b/public/plugin.tsx
+++ b/test/plugin_functional/plugins/core_plugin_b/public/plugin.tsx
@@ -16,7 +16,7 @@ declare global {
}
export interface CorePluginBDeps {
- core_plugin_a: CorePluginAPluginSetup;
+ corePluginA: CorePluginAPluginSetup;
}
export class CorePluginBPlugin
@@ -37,7 +37,7 @@ export class CorePluginBPlugin
return {
sayHi() {
- return `Plugin A said: ${deps.core_plugin_a.getGreeting()}`;
+ return `Plugin A said: ${deps.corePluginA.getGreeting()}`;
},
};
}
diff --git a/test/plugin_functional/plugins/core_plugin_chromeless/kibana.json b/test/plugin_functional/plugins/core_plugin_chromeless/kibana.json
index a8a5616627726..61863781b8f32 100644
--- a/test/plugin_functional/plugins/core_plugin_chromeless/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_chromeless/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_plugin_chromeless",
+ "id": "corePluginChromeless",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_chromeless"],
diff --git a/test/plugin_functional/plugins/core_plugin_helpmenu/kibana.json b/test/plugin_functional/plugins/core_plugin_helpmenu/kibana.json
index 984b96a8bcba1..1b0f477ef34ae 100644
--- a/test/plugin_functional/plugins/core_plugin_helpmenu/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_helpmenu/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_plugin_helpmenu",
+ "id": "corePluginHelpmenu",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_helpmenu"],
diff --git a/test/plugin_functional/plugins/core_plugin_route_timeouts/kibana.json b/test/plugin_functional/plugins/core_plugin_route_timeouts/kibana.json
index 6fbddad22b764..000f8e38a1035 100644
--- a/test/plugin_functional/plugins/core_plugin_route_timeouts/kibana.json
+++ b/test/plugin_functional/plugins/core_plugin_route_timeouts/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "core_plugin_route_timeouts",
+ "id": "corePluginRouteTimeouts",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core_plugin_route_timeouts"],
diff --git a/test/plugin_functional/plugins/core_provider_plugin/kibana.json b/test/plugin_functional/plugins/core_provider_plugin/kibana.json
index 8d9b30acab893..c55f62762e233 100644
--- a/test/plugin_functional/plugins/core_provider_plugin/kibana.json
+++ b/test/plugin_functional/plugins/core_provider_plugin/kibana.json
@@ -1,8 +1,8 @@
{
- "id": "core_provider_plugin",
+ "id": "coreProviderPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
- "optionalPlugins": ["core_plugin_a", "core_plugin_b", "licensing", "globalSearchTest"],
+ "optionalPlugins": ["corePluginA", "corePluginB", "licensing", "globalSearchTest"],
"server": false,
"ui": true
}
diff --git a/test/plugin_functional/plugins/data_search/kibana.json b/test/plugin_functional/plugins/data_search/kibana.json
index 3acbe9f97d8f0..28f7eb9996fc5 100644
--- a/test/plugin_functional/plugins/data_search/kibana.json
+++ b/test/plugin_functional/plugins/data_search/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "data_search_plugin",
+ "id": "dataSearchPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["data_search_test_plugin"],
diff --git a/test/plugin_functional/plugins/elasticsearch_client_plugin/kibana.json b/test/plugin_functional/plugins/elasticsearch_client_plugin/kibana.json
index a7674881e8ba0..3d934414adc2f 100644
--- a/test/plugin_functional/plugins/elasticsearch_client_plugin/kibana.json
+++ b/test/plugin_functional/plugins/elasticsearch_client_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "elasticsearch_client_plugin",
+ "id": "elasticsearchClientPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"server": true,
diff --git a/test/plugin_functional/plugins/index_patterns/kibana.json b/test/plugin_functional/plugins/index_patterns/kibana.json
index e098950dc9677..3b41fa5124a45 100644
--- a/test/plugin_functional/plugins/index_patterns/kibana.json
+++ b/test/plugin_functional/plugins/index_patterns/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "index_patterns_test_plugin",
+ "id": "indexPatternsTestPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["index_patterns_test_plugin"],
diff --git a/test/plugin_functional/plugins/kbn_sample_panel_action/kibana.json b/test/plugin_functional/plugins/kbn_sample_panel_action/kibana.json
index 08ce182aa0293..51a254016b650 100644
--- a/test/plugin_functional/plugins/kbn_sample_panel_action/kibana.json
+++ b/test/plugin_functional/plugins/kbn_sample_panel_action/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "kbn_sample_panel_action",
+ "id": "kbnSamplePanelAction",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["kbn_sample_panel_action"],
diff --git a/test/plugin_functional/plugins/kbn_top_nav/kibana.json b/test/plugin_functional/plugins/kbn_top_nav/kibana.json
index b274e80b9ef65..a656eae476b87 100644
--- a/test/plugin_functional/plugins/kbn_top_nav/kibana.json
+++ b/test/plugin_functional/plugins/kbn_top_nav/kibana.json
@@ -1,9 +1,9 @@
{
- "id": "kbn_top_nav",
+ "id": "kbnTopNav",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["kbn_top_nav"],
"server": false,
"ui": true,
"requiredPlugins": ["navigation"]
-}
\ No newline at end of file
+}
diff --git a/test/plugin_functional/plugins/kbn_tp_custom_visualizations/kibana.json b/test/plugin_functional/plugins/kbn_tp_custom_visualizations/kibana.json
index 33c8f3238dc47..3e2d1c9e98fee 100644
--- a/test/plugin_functional/plugins/kbn_tp_custom_visualizations/kibana.json
+++ b/test/plugin_functional/plugins/kbn_tp_custom_visualizations/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "kbn_tp_custom_visualizations",
+ "id": "kbnTpCustomVisualizations",
"version": "0.0.1",
"kibanaVersion": "kibana",
"requiredPlugins": [
diff --git a/test/plugin_functional/plugins/management_test_plugin/kibana.json b/test/plugin_functional/plugins/management_test_plugin/kibana.json
index e52b60b3a4e31..f07c2ae997221 100644
--- a/test/plugin_functional/plugins/management_test_plugin/kibana.json
+++ b/test/plugin_functional/plugins/management_test_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "management_test_plugin",
+ "id": "managementTestPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["management_test_plugin"],
diff --git a/test/plugin_functional/plugins/rendering_plugin/kibana.json b/test/plugin_functional/plugins/rendering_plugin/kibana.json
index 886eca2bdde1d..f5f218db3c184 100644
--- a/test/plugin_functional/plugins/rendering_plugin/kibana.json
+++ b/test/plugin_functional/plugins/rendering_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "rendering_plugin",
+ "id": "renderingPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["rendering_plugin"],
diff --git a/test/plugin_functional/plugins/session_notifications/kibana.json b/test/plugin_functional/plugins/session_notifications/kibana.json
index 0b80b531d2f84..939a96e3f21d6 100644
--- a/test/plugin_functional/plugins/session_notifications/kibana.json
+++ b/test/plugin_functional/plugins/session_notifications/kibana.json
@@ -1,9 +1,9 @@
{
- "id": "session_notifications",
+ "id": "sessionNotifications",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["session_notifications"],
"server": false,
"ui": true,
"requiredPlugins": ["data", "navigation"]
-}
\ No newline at end of file
+}
diff --git a/test/plugin_functional/plugins/ui_settings_plugin/kibana.json b/test/plugin_functional/plugins/ui_settings_plugin/kibana.json
index 35e4c35490e2f..459d995333eca 100644
--- a/test/plugin_functional/plugins/ui_settings_plugin/kibana.json
+++ b/test/plugin_functional/plugins/ui_settings_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "ui_settings_plugin",
+ "id": "uiSettingsPlugin",
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["ui_settings_plugin"],
diff --git a/test/plugin_functional/test_suites/core_plugins/ui_plugins.ts b/test/plugin_functional/test_suites/core_plugins/ui_plugins.ts
index 4015b8959ece6..1d6b33e41b772 100644
--- a/test/plugin_functional/test_suites/core_plugins/ui_plugins.ts
+++ b/test/plugin_functional/test_suites/core_plugins/ui_plugins.ts
@@ -24,7 +24,7 @@ export default function ({ getService, getPageObjects }: PluginFunctionalProvide
it('should run the new platform plugins', async () => {
expect(
await browser.execute(() => {
- return window._coreProvider.setup.plugins.core_plugin_b.sayHi();
+ return window._coreProvider.setup.plugins.corePluginB.sayHi();
})
).to.be('Plugin A said: Hello from Plugin A!');
});
@@ -65,7 +65,7 @@ export default function ({ getService, getPageObjects }: PluginFunctionalProvide
it('should send kbn-system-request header when asSystemRequest: true', async () => {
expect(
await browser.executeAsync(async (cb) => {
- window._coreProvider.start.plugins.core_plugin_b.sendSystemRequest(true).then(cb);
+ window._coreProvider.start.plugins.corePluginB.sendSystemRequest(true).then(cb);
})
).to.be('/core_plugin_b/system_request says: "System request? true"');
});
@@ -73,7 +73,7 @@ export default function ({ getService, getPageObjects }: PluginFunctionalProvide
it('should not send kbn-system-request header when asSystemRequest: false', async () => {
expect(
await browser.executeAsync(async (cb) => {
- window._coreProvider.start.plugins.core_plugin_b.sendSystemRequest(false).then(cb);
+ window._coreProvider.start.plugins.corePluginB.sendSystemRequest(false).then(cb);
})
).to.be('/core_plugin_b/system_request says: "System request? false"');
});
diff --git a/x-pack/test/alerting_api_integration/common/fixtures/plugins/aad/kibana.json b/x-pack/test/alerting_api_integration/common/fixtures/plugins/aad/kibana.json
index 9a7bedbb5c6d5..6a43c7c74ad8c 100644
--- a/x-pack/test/alerting_api_integration/common/fixtures/plugins/aad/kibana.json
+++ b/x-pack/test/alerting_api_integration/common/fixtures/plugins/aad/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "aad-fixtures",
+ "id": "aadFixtures",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/alerting_api_integration/common/fixtures/plugins/actions_simulators/kibana.json b/x-pack/test/alerting_api_integration/common/fixtures/plugins/actions_simulators/kibana.json
index 5f92b9e5479e8..f63d6ef0d45ac 100644
--- a/x-pack/test/alerting_api_integration/common/fixtures/plugins/actions_simulators/kibana.json
+++ b/x-pack/test/alerting_api_integration/common/fixtures/plugins/actions_simulators/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "actions_simulators",
+ "id": "actionsSimulators",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/alerting_api_integration/common/fixtures/plugins/task_manager_fixture/kibana.json b/x-pack/test/alerting_api_integration/common/fixtures/plugins/task_manager_fixture/kibana.json
index 8f606276998f5..2f8117163471d 100644
--- a/x-pack/test/alerting_api_integration/common/fixtures/plugins/task_manager_fixture/kibana.json
+++ b/x-pack/test/alerting_api_integration/common/fixtures/plugins/task_manager_fixture/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "task_manager_fixture",
+ "id": "taskManagerFixture",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/functional_cors/plugins/kibana_cors_test/kibana.json b/x-pack/test/functional_cors/plugins/kibana_cors_test/kibana.json
index 9c94f2006b7f8..a0ebde9bff4b7 100644
--- a/x-pack/test/functional_cors/plugins/kibana_cors_test/kibana.json
+++ b/x-pack/test/functional_cors/plugins/kibana_cors_test/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "kibana_cors_test",
+ "id": "kibanaCorsTest",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["test", "cors"],
diff --git a/x-pack/test/functional_embedded/plugins/iframe_embedded/kibana.json b/x-pack/test/functional_embedded/plugins/iframe_embedded/kibana.json
index ea9f55bd21c6e..919b7f69d28b9 100644
--- a/x-pack/test/functional_embedded/plugins/iframe_embedded/kibana.json
+++ b/x-pack/test/functional_embedded/plugins/iframe_embedded/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "iframe_embedded",
+ "id": "iframeEmbedded",
"version": "1.0.0",
"kibanaVersion": "kibana",
"server": true,
diff --git a/x-pack/test/functional_with_es_ssl/fixtures/plugins/alerts/kibana.json b/x-pack/test/functional_with_es_ssl/fixtures/plugins/alerts/kibana.json
index 784a766e608bc..11a8fb977cd78 100644
--- a/x-pack/test/functional_with_es_ssl/fixtures/plugins/alerts/kibana.json
+++ b/x-pack/test/functional_with_es_ssl/fixtures/plugins/alerts/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "alerting_fixture",
+ "id": "alertingFixture",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/plugin_api_integration/plugins/elasticsearch_client/kibana.json b/x-pack/test/plugin_api_integration/plugins/elasticsearch_client/kibana.json
index 37ec33c168e76..5f4cb3f7f7eb2 100644
--- a/x-pack/test/plugin_api_integration/plugins/elasticsearch_client/kibana.json
+++ b/x-pack/test/plugin_api_integration/plugins/elasticsearch_client/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "elasticsearch_client_xpack",
+ "id": "elasticsearchClientXpack",
"version": "1.0.0",
"kibanaVersion": "kibana",
"server": true,
diff --git a/x-pack/test/plugin_api_integration/plugins/event_log/kibana.json b/x-pack/test/plugin_api_integration/plugins/event_log/kibana.json
index 4b467ce975012..4c940ffec1463 100644
--- a/x-pack/test/plugin_api_integration/plugins/event_log/kibana.json
+++ b/x-pack/test/plugin_api_integration/plugins/event_log/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "event_log_fixture",
+ "id": "eventLogFixture",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/plugin_api_integration/plugins/feature_usage_test/kibana.json b/x-pack/test/plugin_api_integration/plugins/feature_usage_test/kibana.json
index b11b7ada24a57..b81f96362e9f5 100644
--- a/x-pack/test/plugin_api_integration/plugins/feature_usage_test/kibana.json
+++ b/x-pack/test/plugin_api_integration/plugins/feature_usage_test/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "feature_usage_test",
+ "id": "featureUsageTest",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack", "feature_usage_test"],
diff --git a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/kibana.json b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/kibana.json
index 416ef7fa34591..6a8a2221b48d3 100644
--- a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/kibana.json
+++ b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "sample_task_plugin",
+ "id": "sampleTaskPlugin",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/plugin_api_perf/plugins/task_manager_performance/kibana.json b/x-pack/test/plugin_api_perf/plugins/task_manager_performance/kibana.json
index 1fa480cd53c48..387f392c8db98 100644
--- a/x-pack/test/plugin_api_perf/plugins/task_manager_performance/kibana.json
+++ b/x-pack/test/plugin_api_perf/plugins/task_manager_performance/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "task_manager_performance",
+ "id": "taskManagerPerformance",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack"],
diff --git a/x-pack/test/plugin_functional/plugins/resolver_test/kibana.json b/x-pack/test/plugin_functional/plugins/resolver_test/kibana.json
index 499983561e89d..a203705e13ed6 100644
--- a/x-pack/test/plugin_functional/plugins/resolver_test/kibana.json
+++ b/x-pack/test/plugin_functional/plugins/resolver_test/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "resolver_test",
+ "id": "resolverTest",
"version": "1.0.0",
"kibanaVersion": "kibana",
"configPath": ["xpack", "resolverTest"],
diff --git a/x-pack/test/security_api_integration/fixtures/oidc/oidc_provider/kibana.json b/x-pack/test/security_api_integration/fixtures/oidc/oidc_provider/kibana.json
index faaa0b9165828..aa7cd499a173a 100644
--- a/x-pack/test/security_api_integration/fixtures/oidc/oidc_provider/kibana.json
+++ b/x-pack/test/security_api_integration/fixtures/oidc/oidc_provider/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "oidc_provider_plugin",
+ "id": "oidcProviderPlugin",
"version": "8.0.0",
"kibanaVersion": "kibana",
"server": true,
diff --git a/x-pack/test/security_api_integration/fixtures/saml/saml_provider/kibana.json b/x-pack/test/security_api_integration/fixtures/saml/saml_provider/kibana.json
index 3cbd37e38bb2d..81ec23fc3d2f3 100644
--- a/x-pack/test/security_api_integration/fixtures/saml/saml_provider/kibana.json
+++ b/x-pack/test/security_api_integration/fixtures/saml/saml_provider/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "saml_provider_plugin",
+ "id": "samlProviderPlugin",
"version": "8.0.0",
"kibanaVersion": "kibana",
"server": true,
diff --git a/x-pack/test/ui_capabilities/common/fixtures/plugins/foo_plugin/kibana.json b/x-pack/test/ui_capabilities/common/fixtures/plugins/foo_plugin/kibana.json
index cec1640fbb047..912cf5d70e16b 100644
--- a/x-pack/test/ui_capabilities/common/fixtures/plugins/foo_plugin/kibana.json
+++ b/x-pack/test/ui_capabilities/common/fixtures/plugins/foo_plugin/kibana.json
@@ -1,5 +1,5 @@
{
- "id": "foo_plugin",
+ "id": "fooPlugin",
"version": "1.0.0",
"kibanaVersion": "kibana",
"requiredPlugins": ["features"],
diff --git a/x-pack/test/usage_collection/plugins/stack_management_usage_test/kibana.json b/x-pack/test/usage_collection/plugins/stack_management_usage_test/kibana.json
index b586de3fa4d79..c41fe744ca946 100644
--- a/x-pack/test/usage_collection/plugins/stack_management_usage_test/kibana.json
+++ b/x-pack/test/usage_collection/plugins/stack_management_usage_test/kibana.json
@@ -1,8 +1,8 @@
{
- "id": "StackManagementUsageTest",
+ "id": "stackManagementUsageTest",
"version": "1.0.0",
"kibanaVersion": "kibana",
- "configPath": ["xpack", "StackManagementUsageTest"],
+ "configPath": ["xpack", "stackManagementUsageTest"],
"requiredPlugins": [],
"server": false,
"ui": true
From 619db365912b752552edc2f97995529e2cc26328 Mon Sep 17 00:00:00 2001
From: Gidi Meir Morris
Date: Thu, 11 Feb 2021 14:46:14 +0000
Subject: [PATCH 05/72] [Task manager] Adds support for limited concurrency
tasks (#90365)
Adds support for limited concurrency on a Task Type.
---
x-pack/plugins/task_manager/README.md | 8 +-
.../server/buffered_task_store.test.ts | 10 +-
.../server/buffered_task_store.ts | 4 -
.../task_manager/server/lib/fill_pool.test.ts | 56 +-
.../task_manager/server/lib/fill_pool.ts | 132 +-
.../monitoring/task_run_statistics.test.ts | 1 +
.../server/monitoring/task_run_statistics.ts | 56 +-
.../task_manager/server/plugin.test.ts | 9 +
x-pack/plugins/task_manager/server/plugin.ts | 10 +-
.../polling/delay_on_claim_conflicts.test.ts | 61 +
.../polling/delay_on_claim_conflicts.ts | 12 +-
.../server/polling_lifecycle.test.ts | 151 +-
.../task_manager/server/polling_lifecycle.ts | 126 +-
.../mark_available_tasks_as_claimed.test.ts | 97 +-
.../mark_available_tasks_as_claimed.ts | 70 +-
.../server/queries/task_claiming.mock.ts | 33 +
.../server/queries/task_claiming.test.ts | 1516 +++++++++++++
.../server/queries/task_claiming.ts | 488 +++++
x-pack/plugins/task_manager/server/task.ts | 10 +
.../task_manager/server/task_events.ts | 16 +-
.../task_manager/server/task_pool.test.ts | 2 +
.../plugins/task_manager/server/task_pool.ts | 54 +-
.../server/task_running/task_runner.test.ts | 1915 +++++++++--------
.../server/task_running/task_runner.ts | 191 +-
.../server/task_scheduling.test.ts | 105 +-
.../task_manager/server/task_scheduling.ts | 29 +-
.../task_manager/server/task_store.mock.ts | 17 +-
.../task_manager/server/task_store.test.ts | 1098 +---------
.../plugins/task_manager/server/task_store.ts | 240 +--
.../server/task_type_dictionary.ts | 4 +
.../sample_task_plugin/server/init_routes.ts | 10 +-
.../sample_task_plugin/server/plugin.ts | 14 +
.../test_suites/task_manager/health_route.ts | 15 +-
.../task_manager/task_management.ts | 207 +-
34 files changed, 4163 insertions(+), 2604 deletions(-)
create mode 100644 x-pack/plugins/task_manager/server/queries/task_claiming.mock.ts
create mode 100644 x-pack/plugins/task_manager/server/queries/task_claiming.test.ts
create mode 100644 x-pack/plugins/task_manager/server/queries/task_claiming.ts
diff --git a/x-pack/plugins/task_manager/README.md b/x-pack/plugins/task_manager/README.md
index 9be3be14ea3fc..c20bc4b29bcc8 100644
--- a/x-pack/plugins/task_manager/README.md
+++ b/x-pack/plugins/task_manager/README.md
@@ -85,10 +85,10 @@ export class Plugin {
// This defaults to what is configured at the task manager level.
maxAttempts: 5,
- // The clusterMonitoring task occupies 2 workers, so if the system has 10 worker slots,
- // 5 clusterMonitoring tasks could run concurrently per Kibana instance. This value is
- // overridden by the `override_num_workers` config value, if specified.
- numWorkers: 2,
+ // The maximum number of tasks of this type that can be run concurrently per Kibana instance.
+ // Setting this value will force Task Manager to poll for this task type separately from other task types which
+ // can add significant load to the ES cluster, so please use this configuration only when absolutely necessary.
+ maxConcurrency: 1,
// The createTaskRunner function / method returns an object that is responsible for
// performing the work of the task. context: { taskInstance }, is documented below.
diff --git a/x-pack/plugins/task_manager/server/buffered_task_store.test.ts b/x-pack/plugins/task_manager/server/buffered_task_store.test.ts
index 70d24b235d880..45607713a3128 100644
--- a/x-pack/plugins/task_manager/server/buffered_task_store.test.ts
+++ b/x-pack/plugins/task_manager/server/buffered_task_store.test.ts
@@ -13,19 +13,17 @@ import { TaskStatus } from './task';
describe('Buffered Task Store', () => {
test('proxies the TaskStore for `maxAttempts` and `remove`', async () => {
- const taskStore = taskStoreMock.create({ maxAttempts: 10 });
+ const taskStore = taskStoreMock.create();
taskStore.bulkUpdate.mockResolvedValue([]);
const bufferedStore = new BufferedTaskStore(taskStore, {});
- expect(bufferedStore.maxAttempts).toEqual(10);
-
bufferedStore.remove('1');
expect(taskStore.remove).toHaveBeenCalledWith('1');
});
describe('update', () => {
test("proxies the TaskStore's `bulkUpdate`", async () => {
- const taskStore = taskStoreMock.create({ maxAttempts: 10 });
+ const taskStore = taskStoreMock.create();
const bufferedStore = new BufferedTaskStore(taskStore, {});
const task = mockTask();
@@ -37,7 +35,7 @@ describe('Buffered Task Store', () => {
});
test('handles partially successfull bulkUpdates resolving each call appropriately', async () => {
- const taskStore = taskStoreMock.create({ maxAttempts: 10 });
+ const taskStore = taskStoreMock.create();
const bufferedStore = new BufferedTaskStore(taskStore, {});
const tasks = [mockTask(), mockTask(), mockTask()];
@@ -61,7 +59,7 @@ describe('Buffered Task Store', () => {
});
test('handles multiple items with the same id', async () => {
- const taskStore = taskStoreMock.create({ maxAttempts: 10 });
+ const taskStore = taskStoreMock.create();
const bufferedStore = new BufferedTaskStore(taskStore, {});
const duplicateIdTask = mockTask();
diff --git a/x-pack/plugins/task_manager/server/buffered_task_store.ts b/x-pack/plugins/task_manager/server/buffered_task_store.ts
index 4e4a533303867..ca735dd6f3638 100644
--- a/x-pack/plugins/task_manager/server/buffered_task_store.ts
+++ b/x-pack/plugins/task_manager/server/buffered_task_store.ts
@@ -26,10 +26,6 @@ export class BufferedTaskStore implements Updatable {
);
}
- public get maxAttempts(): number {
- return this.taskStore.maxAttempts;
- }
-
public async update(doc: ConcreteTaskInstance): Promise {
return unwrapPromise(this.bufferedUpdate(doc));
}
diff --git a/x-pack/plugins/task_manager/server/lib/fill_pool.test.ts b/x-pack/plugins/task_manager/server/lib/fill_pool.test.ts
index 79a0d2f690042..8e0396a453b3d 100644
--- a/x-pack/plugins/task_manager/server/lib/fill_pool.test.ts
+++ b/x-pack/plugins/task_manager/server/lib/fill_pool.test.ts
@@ -10,27 +10,32 @@ import sinon from 'sinon';
import { fillPool, FillPoolResult } from './fill_pool';
import { TaskPoolRunResult } from '../task_pool';
import { asOk, Result } from './result_type';
-import { ClaimOwnershipResult } from '../task_store';
import { ConcreteTaskInstance, TaskStatus } from '../task';
import { TaskManagerRunner } from '../task_running/task_runner';
+import { from, Observable } from 'rxjs';
+import { ClaimOwnershipResult } from '../queries/task_claiming';
jest.mock('../task_running/task_runner');
describe('fillPool', () => {
function mockFetchAvailableTasks(
tasksToMock: number[][]
- ): () => Promise> {
- const tasks: ConcreteTaskInstance[][] = tasksToMock.map((ids) => mockTaskInstances(ids));
- let index = 0;
- return async () =>
- asOk({
- stats: {
- tasksUpdated: tasks[index + 1]?.length ?? 0,
- tasksConflicted: 0,
- tasksClaimed: 0,
- },
- docs: tasks[index++] || [],
- });
+ ): () => Observable> {
+ const claimCycles: ConcreteTaskInstance[][] = tasksToMock.map((ids) => mockTaskInstances(ids));
+ return () =>
+ from(
+ claimCycles.map((tasks) =>
+ asOk({
+ stats: {
+ tasksUpdated: tasks?.length ?? 0,
+ tasksConflicted: 0,
+ tasksClaimed: 0,
+ tasksRejected: 0,
+ },
+ docs: tasks,
+ })
+ )
+ );
}
const mockTaskInstances = (ids: number[]): ConcreteTaskInstance[] =>
@@ -51,7 +56,7 @@ describe('fillPool', () => {
ownerId: null,
}));
- test('stops filling when pool runs all claimed tasks, even if there is more capacity', async () => {
+ test('fills task pool with all claimed tasks until fetchAvailableTasks stream closes', async () => {
const tasks = [
[1, 2, 3],
[4, 5],
@@ -62,21 +67,7 @@ describe('fillPool', () => {
await fillPool(fetchAvailableTasks, converter, run);
- expect(_.flattenDeep(run.args)).toEqual(mockTaskInstances([1, 2, 3]));
- });
-
- test('stops filling when the pool has no more capacity', async () => {
- const tasks = [
- [1, 2, 3],
- [4, 5],
- ];
- const fetchAvailableTasks = mockFetchAvailableTasks(tasks);
- const run = sinon.spy(async () => TaskPoolRunResult.RanOutOfCapacity);
- const converter = _.identity;
-
- await fillPool(fetchAvailableTasks, converter, run);
-
- expect(_.flattenDeep(run.args)).toEqual(mockTaskInstances([1, 2, 3]));
+ expect(_.flattenDeep(run.args)).toEqual(mockTaskInstances([1, 2, 3, 4, 5]));
});
test('calls the converter on the records prior to running', async () => {
@@ -91,7 +82,7 @@ describe('fillPool', () => {
await fillPool(fetchAvailableTasks, converter, run);
- expect(_.flattenDeep(run.args)).toEqual(['1', '2', '3']);
+ expect(_.flattenDeep(run.args)).toEqual(['1', '2', '3', '4', '5']);
});
describe('error handling', () => {
@@ -101,7 +92,10 @@ describe('fillPool', () => {
(instance.id as unknown) as TaskManagerRunner;
try {
- const fetchAvailableTasks = async () => Promise.reject('fetch is not working');
+ const fetchAvailableTasks = () =>
+ new Observable>((obs) =>
+ obs.error('fetch is not working')
+ );
await fillPool(fetchAvailableTasks, converter, run);
} catch (err) {
diff --git a/x-pack/plugins/task_manager/server/lib/fill_pool.ts b/x-pack/plugins/task_manager/server/lib/fill_pool.ts
index 45a33081bde51..c9050ebb75d69 100644
--- a/x-pack/plugins/task_manager/server/lib/fill_pool.ts
+++ b/x-pack/plugins/task_manager/server/lib/fill_pool.ts
@@ -6,12 +6,14 @@
*/
import { performance } from 'perf_hooks';
+import { Observable } from 'rxjs';
+import { concatMap, last } from 'rxjs/operators';
+import { ClaimOwnershipResult } from '../queries/task_claiming';
import { ConcreteTaskInstance } from '../task';
import { WithTaskTiming, startTaskTimer } from '../task_events';
import { TaskPoolRunResult } from '../task_pool';
import { TaskManagerRunner } from '../task_running';
-import { ClaimOwnershipResult } from '../task_store';
-import { Result, map } from './result_type';
+import { Result, map as mapResult, asErr, asOk } from './result_type';
export enum FillPoolResult {
Failed = 'Failed',
@@ -22,6 +24,17 @@ export enum FillPoolResult {
PoolFilled = 'PoolFilled',
}
+type FillPoolAndRunResult = Result<
+ {
+ result: TaskPoolRunResult;
+ stats?: ClaimOwnershipResult['stats'];
+ },
+ {
+ result: FillPoolResult;
+ stats?: ClaimOwnershipResult['stats'];
+ }
+>;
+
export type ClaimAndFillPoolResult = Partial> & {
result: FillPoolResult;
};
@@ -40,52 +53,81 @@ export type TimedFillPoolResult = WithTaskTiming;
* @param converter - a function that converts task records to the appropriate task runner
*/
export async function fillPool(
- fetchAvailableTasks: () => Promise>,
+ fetchAvailableTasks: () => Observable>,
converter: (taskInstance: ConcreteTaskInstance) => TaskManagerRunner,
run: (tasks: TaskManagerRunner[]) => Promise
): Promise {
performance.mark('fillPool.start');
- const stopTaskTimer = startTaskTimer();
- const augmentTimingTo = (
- result: FillPoolResult,
- stats?: ClaimOwnershipResult['stats']
- ): TimedFillPoolResult => ({
- result,
- stats,
- timing: stopTaskTimer(),
- });
- return map>(
- await fetchAvailableTasks(),
- async ({ docs, stats }) => {
- if (!docs.length) {
- performance.mark('fillPool.bailNoTasks');
- performance.measure(
- 'fillPool.activityDurationUntilNoTasks',
- 'fillPool.start',
- 'fillPool.bailNoTasks'
- );
- return augmentTimingTo(FillPoolResult.NoTasksClaimed, stats);
- }
-
- const tasks = docs.map(converter);
-
- switch (await run(tasks)) {
- case TaskPoolRunResult.RanOutOfCapacity:
- performance.mark('fillPool.bailExhaustedCapacity');
- performance.measure(
- 'fillPool.activityDurationUntilExhaustedCapacity',
- 'fillPool.start',
- 'fillPool.bailExhaustedCapacity'
+ return new Promise((resolve, reject) => {
+ const stopTaskTimer = startTaskTimer();
+ const augmentTimingTo = (
+ result: FillPoolResult,
+ stats?: ClaimOwnershipResult['stats']
+ ): TimedFillPoolResult => ({
+ result,
+ stats,
+ timing: stopTaskTimer(),
+ });
+ fetchAvailableTasks()
+ .pipe(
+      // each ClaimOwnershipResult will be sequentially consumed and run using the `run` handler
+ concatMap(async (res) =>
+ mapResult>(
+ res,
+ async ({ docs, stats }) => {
+ if (!docs.length) {
+ performance.mark('fillPool.bailNoTasks');
+ performance.measure(
+ 'fillPool.activityDurationUntilNoTasks',
+ 'fillPool.start',
+ 'fillPool.bailNoTasks'
+ );
+ return asOk({ result: TaskPoolRunResult.NoTaskWereRan, stats });
+ }
+ return asOk(
+ await run(docs.map(converter)).then((runResult) => ({
+ result: runResult,
+ stats,
+ }))
+ );
+ },
+ async (fillPoolResult) => asErr({ result: fillPoolResult })
+ )
+ ),
+ // when the final call to `run` completes, we'll complete the stream and emit the
+ // final accumulated result
+ last()
+ )
+ .subscribe(
+ (claimResults) => {
+ resolve(
+ mapResult(
+ claimResults,
+ ({ result, stats }) => {
+ switch (result) {
+ case TaskPoolRunResult.RanOutOfCapacity:
+ performance.mark('fillPool.bailExhaustedCapacity');
+ performance.measure(
+ 'fillPool.activityDurationUntilExhaustedCapacity',
+ 'fillPool.start',
+ 'fillPool.bailExhaustedCapacity'
+ );
+ return augmentTimingTo(FillPoolResult.RanOutOfCapacity, stats);
+ case TaskPoolRunResult.RunningAtCapacity:
+ performance.mark('fillPool.cycle');
+ return augmentTimingTo(FillPoolResult.RunningAtCapacity, stats);
+ case TaskPoolRunResult.NoTaskWereRan:
+ return augmentTimingTo(FillPoolResult.NoTasksClaimed, stats);
+ default:
+ performance.mark('fillPool.cycle');
+ return augmentTimingTo(FillPoolResult.PoolFilled, stats);
+ }
+ },
+ ({ result, stats }) => augmentTimingTo(result, stats)
+ )
);
- return augmentTimingTo(FillPoolResult.RanOutOfCapacity, stats);
- case TaskPoolRunResult.RunningAtCapacity:
- performance.mark('fillPool.cycle');
- return augmentTimingTo(FillPoolResult.RunningAtCapacity, stats);
- default:
- performance.mark('fillPool.cycle');
- return augmentTimingTo(FillPoolResult.PoolFilled, stats);
- }
- },
- async (result) => augmentTimingTo(result)
- );
+ },
+ (err) => reject(err)
+ );
+ });
}
diff --git a/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.test.ts b/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.test.ts
index 5c32c3e7225c4..7040d5acd4eaf 100644
--- a/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.test.ts
+++ b/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.test.ts
@@ -537,6 +537,7 @@ describe('Task Run Statistics', () => {
asTaskPollingCycleEvent(asOk({ result: FillPoolResult.NoTasksClaimed, timing }))
);
events$.next(asTaskManagerStatEvent('pollingDelay', asOk(0)));
+ events$.next(asTaskManagerStatEvent('claimDuration', asOk(10)));
events$.next(
asTaskPollingCycleEvent(asOk({ result: FillPoolResult.NoTasksClaimed, timing }))
);
diff --git a/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.ts b/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.ts
index 4b7bdf595f1f5..3185d3c449c32 100644
--- a/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.ts
+++ b/x-pack/plugins/task_manager/server/monitoring/task_run_statistics.ts
@@ -19,6 +19,7 @@ import {
RanTask,
TaskTiming,
isTaskManagerStatEvent,
+ TaskManagerStat,
} from '../task_events';
import { isOk, Ok, unwrap } from '../lib/result_type';
import { ConcreteTaskInstance } from '../task';
@@ -39,6 +40,7 @@ interface FillPoolStat extends JsonObject {
last_successful_poll: string;
last_polling_delay: string;
duration: number[];
+ claim_duration: number[];
claim_conflicts: number[];
claim_mismatches: number[];
result_frequency_percent_as_number: FillPoolResult[];
@@ -51,6 +53,7 @@ interface ExecutionStat extends JsonObject {
export interface TaskRunStat extends JsonObject {
drift: number[];
+ drift_by_type: Record;
load: number[];
execution: ExecutionStat;
polling: Omit &
@@ -125,6 +128,7 @@ export function createTaskRunAggregator(
const resultFrequencyQueue = createRunningAveragedStat(runningAverageWindowSize);
const pollingDurationQueue = createRunningAveragedStat(runningAverageWindowSize);
+ const claimDurationQueue = createRunningAveragedStat(runningAverageWindowSize);
const claimConflictsQueue = createRunningAveragedStat(runningAverageWindowSize);
const claimMismatchesQueue = createRunningAveragedStat(runningAverageWindowSize);
const taskPollingEvents$: Observable> = combineLatest([
@@ -168,10 +172,26 @@ export function createTaskRunAggregator(
),
map(() => new Date().toISOString())
),
+ // get duration of task claim stage in polling
+ taskPollingLifecycle.events.pipe(
+ filter(
+ (taskEvent: TaskLifecycleEvent) =>
+ isTaskManagerStatEvent(taskEvent) &&
+ taskEvent.id === 'claimDuration' &&
+ isOk(taskEvent.event)
+ ),
+ map((claimDurationEvent) => {
+ const duration = ((claimDurationEvent as TaskManagerStat).event as Ok).value;
+ return {
+ claimDuration: duration ? claimDurationQueue(duration) : claimDurationQueue(),
+ };
+ })
+ ),
]).pipe(
- map(([{ polling }, pollingDelay]) => ({
+ map(([{ polling }, pollingDelay, { claimDuration }]) => ({
polling: {
last_polling_delay: pollingDelay,
+ claim_duration: claimDuration,
...polling,
},
}))
@@ -179,13 +199,18 @@ export function createTaskRunAggregator(
return combineLatest([
taskRunEvents$.pipe(
- startWith({ drift: [], execution: { duration: {}, result_frequency_percent_as_number: {} } })
+ startWith({
+ drift: [],
+ drift_by_type: {},
+ execution: { duration: {}, result_frequency_percent_as_number: {} },
+ })
),
taskManagerLoadStatEvents$.pipe(startWith({ load: [] })),
taskPollingEvents$.pipe(
startWith({
polling: {
duration: [],
+ claim_duration: [],
claim_conflicts: [],
claim_mismatches: [],
result_frequency_percent_as_number: [],
@@ -218,6 +243,7 @@ function hasTiming(taskEvent: TaskLifecycleEvent) {
function createTaskRunEventToStat(runningAverageWindowSize: number) {
const driftQueue = createRunningAveragedStat(runningAverageWindowSize);
+ const driftByTaskQueue = createMapOfRunningAveragedStats(runningAverageWindowSize);
const taskRunDurationQueue = createMapOfRunningAveragedStats(runningAverageWindowSize);
const resultFrequencyQueue = createMapOfRunningAveragedStats(
runningAverageWindowSize
@@ -226,13 +252,17 @@ function createTaskRunEventToStat(runningAverageWindowSize: number) {
task: ConcreteTaskInstance,
timing: TaskTiming,
result: TaskRunResult
- ): Omit => ({
- drift: driftQueue(timing!.start - task.runAt.getTime()),
- execution: {
- duration: taskRunDurationQueue(task.taskType, timing!.stop - timing!.start),
- result_frequency_percent_as_number: resultFrequencyQueue(task.taskType, result),
- },
- });
+ ): Omit => {
+ const drift = timing!.start - task.runAt.getTime();
+ return {
+ drift: driftQueue(drift),
+ drift_by_type: driftByTaskQueue(task.taskType, drift),
+ execution: {
+ duration: taskRunDurationQueue(task.taskType, timing!.stop - timing!.start),
+ result_frequency_percent_as_number: resultFrequencyQueue(task.taskType, result),
+ },
+ };
+ };
}
const DEFAULT_TASK_RUN_FREQUENCIES = {
@@ -258,11 +288,15 @@ export function summarizeTaskRunStat(
// eslint-disable-next-line @typescript-eslint/naming-convention
last_polling_delay,
duration: pollingDuration,
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ claim_duration,
result_frequency_percent_as_number: pollingResultFrequency,
claim_conflicts: claimConflicts,
claim_mismatches: claimMismatches,
},
drift,
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ drift_by_type,
load,
execution: { duration, result_frequency_percent_as_number: executionResultFrequency },
}: TaskRunStat,
@@ -273,6 +307,9 @@ export function summarizeTaskRunStat(
polling: {
...(last_successful_poll ? { last_successful_poll } : {}),
...(last_polling_delay ? { last_polling_delay } : {}),
+ ...(claim_duration
+ ? { claim_duration: calculateRunningAverage(claim_duration as number[]) }
+ : {}),
duration: calculateRunningAverage(pollingDuration as number[]),
claim_conflicts: calculateRunningAverage(claimConflicts as number[]),
claim_mismatches: calculateRunningAverage(claimMismatches as number[]),
@@ -282,6 +319,7 @@ export function summarizeTaskRunStat(
},
},
drift: calculateRunningAverage(drift),
+ drift_by_type: mapValues(drift_by_type, (typedDrift) => calculateRunningAverage(typedDrift)),
load: calculateRunningAverage(load),
execution: {
duration: mapValues(duration, (typedDurations) => calculateRunningAverage(typedDurations)),
diff --git a/x-pack/plugins/task_manager/server/plugin.test.ts b/x-pack/plugins/task_manager/server/plugin.test.ts
index 0a879ce92cba6..45db18a3e8385 100644
--- a/x-pack/plugins/task_manager/server/plugin.test.ts
+++ b/x-pack/plugins/task_manager/server/plugin.test.ts
@@ -70,6 +70,15 @@ describe('TaskManagerPlugin', () => {
const setupApi = await taskManagerPlugin.setup(coreMock.createSetup());
+ // we only start a poller if we have task types that we support and we track
+ // phases (moving from Setup to Start) based on whether the poller is working
+ setupApi.registerTaskDefinitions({
+ setupTimeType: {
+ title: 'setupTimeType',
+ createTaskRunner: () => ({ async run() {} }),
+ },
+ });
+
await taskManagerPlugin.start(coreMock.createStart());
expect(() =>
diff --git a/x-pack/plugins/task_manager/server/plugin.ts b/x-pack/plugins/task_manager/server/plugin.ts
index 149d111b08f02..507a021214a90 100644
--- a/x-pack/plugins/task_manager/server/plugin.ts
+++ b/x-pack/plugins/task_manager/server/plugin.ts
@@ -16,13 +16,12 @@ import {
ServiceStatusLevels,
CoreStatus,
} from '../../../../src/core/server';
-import { TaskDefinition } from './task';
import { TaskPollingLifecycle } from './polling_lifecycle';
import { TaskManagerConfig } from './config';
import { createInitialMiddleware, addMiddlewareToChain, Middleware } from './lib/middleware';
import { removeIfExists } from './lib/remove_if_exists';
import { setupSavedObjects } from './saved_objects';
-import { TaskTypeDictionary } from './task_type_dictionary';
+import { TaskDefinitionRegistry, TaskTypeDictionary } from './task_type_dictionary';
import { FetchResult, SearchOpts, TaskStore } from './task_store';
import { createManagedConfiguration } from './lib/create_managed_configuration';
import { TaskScheduling } from './task_scheduling';
@@ -100,7 +99,7 @@ export class TaskManagerPlugin
this.assertStillInSetup('add Middleware');
this.middleware = addMiddlewareToChain(this.middleware, middleware);
},
- registerTaskDefinitions: (taskDefinition: Record) => {
+ registerTaskDefinitions: (taskDefinition: TaskDefinitionRegistry) => {
this.assertStillInSetup('register task definitions');
this.definitions.registerTaskDefinitions(taskDefinition);
},
@@ -110,12 +109,12 @@ export class TaskManagerPlugin
public start({ savedObjects, elasticsearch }: CoreStart): TaskManagerStartContract {
const savedObjectsRepository = savedObjects.createInternalRepository(['task']);
+ const serializer = savedObjects.createSerializer();
const taskStore = new TaskStore({
- serializer: savedObjects.createSerializer(),
+ serializer,
savedObjectsRepository,
esClient: elasticsearch.createClient('taskManager').asInternalUser,
index: this.config!.index,
- maxAttempts: this.config!.max_attempts,
definitions: this.definitions,
taskManagerId: `kibana:${this.taskManagerId!}`,
});
@@ -151,6 +150,7 @@ export class TaskManagerPlugin
taskStore,
middleware: this.middleware,
taskPollingLifecycle: this.taskPollingLifecycle,
+ definitions: this.definitions,
});
return {
diff --git a/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.test.ts b/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.test.ts
index d4617d6549d60..f3af6f50336ea 100644
--- a/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.test.ts
+++ b/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.test.ts
@@ -64,6 +64,7 @@ describe('delayOnClaimConflicts', () => {
tasksUpdated: 0,
tasksConflicted: 8,
tasksClaimed: 0,
+ tasksRejected: 0,
},
docs: [],
})
@@ -79,6 +80,63 @@ describe('delayOnClaimConflicts', () => {
})
);
+ test(
+    'emits delay only once, no matter how many subscribers there are',
+ fakeSchedulers(async () => {
+ const taskLifecycleEvents$ = new Subject();
+
+ const delays$ = delayOnClaimConflicts(of(10), of(100), taskLifecycleEvents$, 80, 2);
+
+ const firstSubscriber$ = delays$.pipe(take(2), bufferCount(2)).toPromise();
+ const secondSubscriber$ = delays$.pipe(take(2), bufferCount(2)).toPromise();
+
+ taskLifecycleEvents$.next(
+ asTaskPollingCycleEvent(
+ asOk({
+ result: FillPoolResult.PoolFilled,
+ stats: {
+ tasksUpdated: 0,
+ tasksConflicted: 8,
+ tasksClaimed: 0,
+ tasksRejected: 0,
+ },
+ docs: [],
+ })
+ )
+ );
+
+ const thirdSubscriber$ = delays$.pipe(take(2), bufferCount(2)).toPromise();
+
+ taskLifecycleEvents$.next(
+ asTaskPollingCycleEvent(
+ asOk({
+ result: FillPoolResult.PoolFilled,
+ stats: {
+ tasksUpdated: 0,
+ tasksConflicted: 10,
+ tasksClaimed: 0,
+ tasksRejected: 0,
+ },
+ docs: [],
+ })
+ )
+ );
+
+ // should get the initial value of 0 delay
+ const [initialDelay, firstRandom] = await firstSubscriber$;
+ // should get the 0 delay (as a replay), which was the last value plus the first random value
+ const [initialDelayInSecondSub, firstRandomInSecondSub] = await secondSubscriber$;
+ // should get the first random value (as a replay) and the next random value
+ const [firstRandomInThirdSub, secondRandomInThirdSub] = await thirdSubscriber$;
+
+ expect(initialDelay).toEqual(0);
+ expect(initialDelayInSecondSub).toEqual(0);
+ expect(firstRandom).toEqual(firstRandomInSecondSub);
+ expect(firstRandomInSecondSub).toEqual(firstRandomInThirdSub);
+ expect(secondRandomInThirdSub).toBeGreaterThanOrEqual(0);
+ })
+ );
+
test(
'doesnt emit a new delay when conflicts have reduced',
fakeSchedulers(async () => {
@@ -107,6 +165,7 @@ describe('delayOnClaimConflicts', () => {
tasksUpdated: 0,
tasksConflicted: 8,
tasksClaimed: 0,
+ tasksRejected: 0,
},
docs: [],
})
@@ -127,6 +186,7 @@ describe('delayOnClaimConflicts', () => {
tasksUpdated: 0,
tasksConflicted: 7,
tasksClaimed: 0,
+ tasksRejected: 0,
},
docs: [],
})
@@ -145,6 +205,7 @@ describe('delayOnClaimConflicts', () => {
tasksUpdated: 0,
tasksConflicted: 9,
tasksClaimed: 0,
+ tasksRejected: 0,
},
docs: [],
})
diff --git a/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.ts b/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.ts
index 73e7052b65a69..6d7cb77625b58 100644
--- a/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.ts
+++ b/x-pack/plugins/task_manager/server/polling/delay_on_claim_conflicts.ts
@@ -11,7 +11,7 @@
import stats from 'stats-lite';
import { isNumber, random } from 'lodash';
-import { merge, of, Observable, combineLatest } from 'rxjs';
+import { merge, of, Observable, combineLatest, ReplaySubject } from 'rxjs';
import { filter, map } from 'rxjs/operators';
import { Option, none, some, isSome, Some } from 'fp-ts/lib/Option';
import { isOk } from '../lib/result_type';
@@ -32,7 +32,9 @@ export function delayOnClaimConflicts(
runningAverageWindowSize: number
): Observable {
const claimConflictQueue = createRunningAveragedStat(runningAverageWindowSize);
- return merge(
+ // return a subject to allow multicast and replay the last value to new subscribers
+ const multiCastDelays$ = new ReplaySubject(1);
+ merge(
of(0),
combineLatest([
maxWorkersConfiguration$,
@@ -70,5 +72,9 @@ export function delayOnClaimConflicts(
return random(pollInterval * 0.25, pollInterval * 0.75, false);
})
)
- );
+ ).subscribe((delay) => {
+ multiCastDelays$.next(delay);
+ });
+
+ return multiCastDelays$;
}
diff --git a/x-pack/plugins/task_manager/server/polling_lifecycle.test.ts b/x-pack/plugins/task_manager/server/polling_lifecycle.test.ts
index 9f79445070237..63d7f6de81801 100644
--- a/x-pack/plugins/task_manager/server/polling_lifecycle.test.ts
+++ b/x-pack/plugins/task_manager/server/polling_lifecycle.test.ts
@@ -7,17 +7,30 @@
import _ from 'lodash';
import sinon from 'sinon';
-import { of, Subject } from 'rxjs';
+import { Observable, of, Subject } from 'rxjs';
import { TaskPollingLifecycle, claimAvailableTasks } from './polling_lifecycle';
import { createInitialMiddleware } from './lib/middleware';
import { TaskTypeDictionary } from './task_type_dictionary';
import { taskStoreMock } from './task_store.mock';
import { mockLogger } from './test_utils';
+import { taskClaimingMock } from './queries/task_claiming.mock';
+import { TaskClaiming, ClaimOwnershipResult } from './queries/task_claiming';
+import type { TaskClaiming as TaskClaimingClass } from './queries/task_claiming';
+import { asOk, Err, isErr, isOk, Result } from './lib/result_type';
+import { FillPoolResult } from './lib/fill_pool';
+
+let mockTaskClaiming = taskClaimingMock.create({});
+jest.mock('./queries/task_claiming', () => {
+ return {
+ TaskClaiming: jest.fn().mockImplementation(() => {
+ return mockTaskClaiming;
+ }),
+ };
+});
describe('TaskPollingLifecycle', () => {
let clock: sinon.SinonFakeTimers;
-
const taskManagerLogger = mockLogger();
const mockTaskStore = taskStoreMock.create({});
const taskManagerOpts = {
@@ -50,8 +63,9 @@ describe('TaskPollingLifecycle', () => {
};
beforeEach(() => {
+ mockTaskClaiming = taskClaimingMock.create({});
+ (TaskClaiming as jest.Mock).mockClear();
clock = sinon.useFakeTimers();
- taskManagerOpts.definitions = new TaskTypeDictionary(taskManagerLogger);
});
afterEach(() => clock.restore());
@@ -60,17 +74,58 @@ describe('TaskPollingLifecycle', () => {
test('begins polling once the ES and SavedObjects services are available', () => {
const elasticsearchAndSOAvailability$ = new Subject();
new TaskPollingLifecycle({
- elasticsearchAndSOAvailability$,
...taskManagerOpts,
+ elasticsearchAndSOAvailability$,
});
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).not.toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).not.toHaveBeenCalled();
elasticsearchAndSOAvailability$.next(true);
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).toHaveBeenCalled();
+ });
+
+ test('provides TaskClaiming with the capacity available', () => {
+ const elasticsearchAndSOAvailability$ = new Subject();
+ const maxWorkers$ = new Subject();
+ taskManagerOpts.definitions.registerTaskDefinitions({
+ report: {
+ title: 'report',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ quickReport: {
+ title: 'quickReport',
+ maxConcurrency: 5,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
+ new TaskPollingLifecycle({
+ ...taskManagerOpts,
+ elasticsearchAndSOAvailability$,
+ maxWorkersConfiguration$: maxWorkers$,
+ });
+
+ const taskClaimingGetCapacity = (TaskClaiming as jest.Mock).mock
+ .calls[0][0].getCapacity;
+
+ maxWorkers$.next(20);
+ expect(taskClaimingGetCapacity()).toEqual(20);
+ expect(taskClaimingGetCapacity('report')).toEqual(1);
+ expect(taskClaimingGetCapacity('quickReport')).toEqual(5);
+
+ maxWorkers$.next(30);
+ expect(taskClaimingGetCapacity()).toEqual(30);
+ expect(taskClaimingGetCapacity('report')).toEqual(1);
+ expect(taskClaimingGetCapacity('quickReport')).toEqual(5);
+
+ maxWorkers$.next(2);
+ expect(taskClaimingGetCapacity()).toEqual(2);
+ expect(taskClaimingGetCapacity('report')).toEqual(1);
+ expect(taskClaimingGetCapacity('quickReport')).toEqual(2);
});
});
@@ -85,13 +140,13 @@ describe('TaskPollingLifecycle', () => {
elasticsearchAndSOAvailability$.next(true);
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).toHaveBeenCalled();
elasticsearchAndSOAvailability$.next(false);
- mockTaskStore.claimAvailableTasks.mockClear();
+ mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable.mockClear();
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).not.toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).not.toHaveBeenCalled();
});
test('restarts polling once the ES and SavedObjects services become available again', () => {
@@ -104,68 +159,64 @@ describe('TaskPollingLifecycle', () => {
elasticsearchAndSOAvailability$.next(true);
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).toHaveBeenCalled();
elasticsearchAndSOAvailability$.next(false);
- mockTaskStore.claimAvailableTasks.mockClear();
+ mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable.mockClear();
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).not.toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).not.toHaveBeenCalled();
elasticsearchAndSOAvailability$.next(true);
clock.tick(150);
- expect(mockTaskStore.claimAvailableTasks).toHaveBeenCalled();
+ expect(mockTaskClaiming.claimAvailableTasksIfCapacityIsAvailable).toHaveBeenCalled();
});
});
describe('claimAvailableTasks', () => {
- test('should claim Available Tasks when there are available workers', () => {
- const logger = mockLogger();
- const claim = jest.fn(() =>
- Promise.resolve({
- docs: [],
- stats: { tasksUpdated: 0, tasksConflicted: 0, tasksClaimed: 0 },
- })
- );
-
- const availableWorkers = 1;
-
- claimAvailableTasks([], claim, availableWorkers, logger);
-
- expect(claim).toHaveBeenCalledTimes(1);
- });
-
- test('should not claim Available Tasks when there are no available workers', () => {
+ test('should claim Available Tasks when there are available workers', async () => {
const logger = mockLogger();
- const claim = jest.fn(() =>
- Promise.resolve({
- docs: [],
- stats: { tasksUpdated: 0, tasksConflicted: 0, tasksClaimed: 0 },
- })
+ const taskClaiming = taskClaimingMock.create({});
+ taskClaiming.claimAvailableTasksIfCapacityIsAvailable.mockImplementation(() =>
+ of(
+ asOk({
+ docs: [],
+ stats: { tasksUpdated: 0, tasksConflicted: 0, tasksClaimed: 0, tasksRejected: 0 },
+ })
+ )
);
- const availableWorkers = 0;
+ expect(
+ isOk(await getFirstAsPromise(claimAvailableTasks([], taskClaiming, logger)))
+ ).toBeTruthy();
- claimAvailableTasks([], claim, availableWorkers, logger);
-
- expect(claim).not.toHaveBeenCalled();
+ expect(taskClaiming.claimAvailableTasksIfCapacityIsAvailable).toHaveBeenCalledTimes(1);
});
/**
* This handles the case in which Elasticsearch has had inline script disabled.
* This is achieved by setting the `script.allowed_types` flag on Elasticsearch to `none`
*/
- test('handles failure due to inline scripts being disabled', () => {
+ test('handles failure due to inline scripts being disabled', async () => {
const logger = mockLogger();
- const claim = jest.fn(() => {
- throw Object.assign(new Error(), {
- response:
- '{"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}],"type":"search_phase_execution_exception","reason":"all shards failed","phase":"query","grouped":true,"failed_shards":[{"shard":0,"index":".kibana_task_manager_1","node":"24A4QbjHSK6prvtopAKLKw","reason":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}}],"caused_by":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts","caused_by":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}}},"status":400}',
- });
- });
+ const taskClaiming = taskClaimingMock.create({});
+ taskClaiming.claimAvailableTasksIfCapacityIsAvailable.mockImplementation(
+ () =>
+ new Observable>((observer) => {
+ observer.error(
+ Object.assign(new Error(), {
+ response:
+ '{"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}],"type":"search_phase_execution_exception","reason":"all shards failed","phase":"query","grouped":true,"failed_shards":[{"shard":0,"index":".kibana_task_manager_1","node":"24A4QbjHSK6prvtopAKLKw","reason":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}}],"caused_by":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts","caused_by":{"type":"illegal_argument_exception","reason":"cannot execute [inline] scripts"}}},"status":400}',
+ })
+ );
+ })
+ );
+
+ const err = await getFirstAsPromise(claimAvailableTasks([], taskClaiming, logger));
- claimAvailableTasks([], claim, 10, logger);
+ expect(isErr(err)).toBeTruthy();
+ expect((err as Err).error).toEqual(FillPoolResult.Failed);
expect(logger.warn).toHaveBeenCalledTimes(1);
expect(logger.warn).toHaveBeenCalledWith(
@@ -174,3 +225,9 @@ describe('TaskPollingLifecycle', () => {
});
});
});
+
+function getFirstAsPromise(obs$: Observable): Promise {
+ return new Promise((resolve, reject) => {
+ obs$.subscribe(resolve, reject);
+ });
+}
diff --git a/x-pack/plugins/task_manager/server/polling_lifecycle.ts b/x-pack/plugins/task_manager/server/polling_lifecycle.ts
index db8eeaaf78dee..260f5ccc70f53 100644
--- a/x-pack/plugins/task_manager/server/polling_lifecycle.ts
+++ b/x-pack/plugins/task_manager/server/polling_lifecycle.ts
@@ -6,15 +6,12 @@
*/
import { Subject, Observable, Subscription } from 'rxjs';
-
-import { performance } from 'perf_hooks';
-
import { pipe } from 'fp-ts/lib/pipeable';
import { Option, some, map as mapOptional } from 'fp-ts/lib/Option';
import { tap } from 'rxjs/operators';
import { Logger } from '../../../../src/core/server';
-import { Result, asErr, mapErr, asOk, map } from './lib/result_type';
+import { Result, asErr, mapErr, asOk, map, mapOk } from './lib/result_type';
import { ManagedConfiguration } from './lib/create_managed_configuration';
import { TaskManagerConfig } from './config';
@@ -41,11 +38,12 @@ import {
} from './polling';
import { TaskPool } from './task_pool';
import { TaskManagerRunner, TaskRunner } from './task_running';
-import { TaskStore, OwnershipClaimingOpts, ClaimOwnershipResult } from './task_store';
+import { TaskStore } from './task_store';
import { identifyEsError } from './lib/identify_es_error';
import { BufferedTaskStore } from './buffered_task_store';
import { TaskTypeDictionary } from './task_type_dictionary';
import { delayOnClaimConflicts } from './polling';
+import { TaskClaiming, ClaimOwnershipResult } from './queries/task_claiming';
export type TaskPollingLifecycleOpts = {
logger: Logger;
@@ -71,6 +69,7 @@ export class TaskPollingLifecycle {
private definitions: TaskTypeDictionary;
private store: TaskStore;
+ private taskClaiming: TaskClaiming;
private bufferedStore: BufferedTaskStore;
private logger: Logger;
@@ -106,8 +105,6 @@ export class TaskPollingLifecycle {
this.store = taskStore;
const emitEvent = (event: TaskLifecycleEvent) => this.events$.next(event);
- // pipe store events into the lifecycle event stream
- this.store.events.subscribe(emitEvent);
this.bufferedStore = new BufferedTaskStore(this.store, {
bufferMaxOperations: config.max_workers,
@@ -120,6 +117,26 @@ export class TaskPollingLifecycle {
});
this.pool.load.subscribe(emitEvent);
+ this.taskClaiming = new TaskClaiming({
+ taskStore,
+ maxAttempts: config.max_attempts,
+ definitions,
+ logger: this.logger,
+ getCapacity: (taskType?: string) =>
+ taskType && this.definitions.get(taskType)?.maxConcurrency
+ ? Math.max(
+ Math.min(
+ this.pool.availableWorkers,
+ this.definitions.get(taskType)!.maxConcurrency! -
+ this.pool.getOccupiedWorkersByType(taskType)
+ ),
+ 0
+ )
+ : this.pool.availableWorkers,
+ });
+ // pipe taskClaiming events into the lifecycle event stream
+ this.taskClaiming.events.subscribe(emitEvent);
+
const {
max_poll_inactivity_cycles: maxPollInactivityCycles,
poll_interval: pollInterval,
@@ -199,6 +216,7 @@ export class TaskPollingLifecycle {
beforeRun: this.middleware.beforeRun,
beforeMarkRunning: this.middleware.beforeMarkRunning,
onTaskEvent: this.emitEvent,
+ defaultMaxAttempts: this.taskClaiming.maxAttempts,
});
};
@@ -212,9 +230,18 @@ export class TaskPollingLifecycle {
() =>
claimAvailableTasks(
tasksToClaim.splice(0, this.pool.availableWorkers),
- this.store.claimAvailableTasks,
- this.pool.availableWorkers,
+ this.taskClaiming,
this.logger
+ ).pipe(
+ tap(
+ mapOk(({ timing }: ClaimOwnershipResult) => {
+ if (timing) {
+ this.emitEvent(
+ asTaskManagerStatEvent('claimDuration', asOk(timing.stop - timing.start))
+ );
+ }
+ })
+ )
),
// wrap each task in a Task Runner
this.createTaskRunnerForTask,
@@ -252,59 +279,40 @@ export class TaskPollingLifecycle {
}
}
-export async function claimAvailableTasks(
+export function claimAvailableTasks(
claimTasksById: string[],
- claim: (opts: OwnershipClaimingOpts) => Promise,
- availableWorkers: number,
+ taskClaiming: TaskClaiming,
logger: Logger
-): Promise> {
- if (availableWorkers > 0) {
- performance.mark('claimAvailableTasks_start');
-
- try {
- const claimResult = await claim({
- size: availableWorkers,
+): Observable> {
+ return new Observable((observer) => {
+ taskClaiming
+ .claimAvailableTasksIfCapacityIsAvailable({
claimOwnershipUntil: intervalFromNow('30s')!,
claimTasksById,
- });
- const {
- docs,
- stats: { tasksClaimed },
- } = claimResult;
-
- if (tasksClaimed === 0) {
- performance.mark('claimAvailableTasks.noTasks');
- }
- performance.mark('claimAvailableTasks_stop');
- performance.measure(
- 'claimAvailableTasks',
- 'claimAvailableTasks_start',
- 'claimAvailableTasks_stop'
+ })
+ .subscribe(
+ (claimResult) => {
+ observer.next(claimResult);
+ },
+ (ex) => {
+ // if the `taskClaiming` stream errors out we want to catch it and see if
+ // we can identify the reason
+            // if we can - we emit a FillPoolResult error rather than erroring out the wrapping Observable
+ // returned by `claimAvailableTasks`
+ if (identifyEsError(ex).includes('cannot execute [inline] scripts')) {
+ logger.warn(
+ `Task Manager cannot operate when inline scripts are disabled in Elasticsearch`
+ );
+ observer.next(asErr(FillPoolResult.Failed));
+ observer.complete();
+ } else {
+            // as we couldn't identify the reason - we'll error out the wrapping Observable too
+ observer.error(ex);
+ }
+ },
+ () => {
+ observer.complete();
+ }
);
-
- if (docs.length !== tasksClaimed) {
- logger.warn(
- `[Task Ownership error]: ${tasksClaimed} tasks were claimed by Kibana, but ${
- docs.length
- } task(s) were fetched (${docs.map((doc) => doc.id).join(', ')})`
- );
- }
- return asOk(claimResult);
- } catch (ex) {
- if (identifyEsError(ex).includes('cannot execute [inline] scripts')) {
- logger.warn(
- `Task Manager cannot operate when inline scripts are disabled in Elasticsearch`
- );
- return asErr(FillPoolResult.Failed);
- } else {
- throw ex;
- }
- }
- } else {
- performance.mark('claimAvailableTasks.noAvailableWorkers');
- logger.debug(
- `[Task Ownership]: Task Manager has skipped Claiming Ownership of available tasks at it has ran out Available Workers.`
- );
- return asErr(FillPoolResult.NoAvailableWorkers);
- }
+ });
}
diff --git a/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.test.ts b/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.test.ts
index 75b9b2cdfa977..57a4ab320367d 100644
--- a/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.test.ts
+++ b/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.test.ts
@@ -52,6 +52,7 @@ describe('mark_available_tasks_as_claimed', () => {
fieldUpdates,
claimTasksById || [],
definitions.getAllTypes(),
+ [],
Array.from(definitions).reduce((accumulator, [type, { maxAttempts }]) => {
return { ...accumulator, [type]: maxAttempts || defaultMaxAttempts };
}, {})
@@ -116,18 +117,23 @@ if (doc['task.runAt'].size()!=0) {
seq_no_primary_term: true,
script: {
source: `
- if (params.registeredTaskTypes.contains(ctx._source.task.taskType)) {
- if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
+ if (params.claimableTaskTypes.contains(ctx._source.task.taskType)) {
+ if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
+ ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
+ .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
+ .join(' ')}
+ } else {
+ ctx._source.task.status = "failed";
+ }
+ } else if (params.skippedTaskTypes.contains(ctx._source.task.taskType) && params.claimTasksById.contains(ctx._id)) {
ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
.map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
.join(' ')}
+ } else if (!params.skippedTaskTypes.contains(ctx._source.task.taskType)) {
+ ctx._source.task.status = "unrecognized";
} else {
- ctx._source.task.status = "failed";
- }
- } else {
- ctx._source.task.status = "unrecognized";
- }
- `,
+ ctx.op = "noop";
+ }`,
lang: 'painless',
params: {
fieldUpdates: {
@@ -135,7 +141,8 @@ if (doc['task.runAt'].size()!=0) {
retryAt: claimOwnershipUntil,
},
claimTasksById: [],
- registeredTaskTypes: ['sampleTask', 'otherTask'],
+ claimableTaskTypes: ['sampleTask', 'otherTask'],
+ skippedTaskTypes: [],
taskMaxAttempts: {
sampleTask: 5,
otherTask: 1,
@@ -144,4 +151,76 @@ if (doc['task.runAt'].size()!=0) {
},
});
});
+
+ describe(`script`, () => {
+ test('it supports claiming specific tasks by id', async () => {
+ const taskManagerId = '3478fg6-82374f6-83467gf5-384g6f';
+ const claimOwnershipUntil = '2019-02-12T21:01:22.479Z';
+ const fieldUpdates = {
+ ownerId: taskManagerId,
+ retryAt: claimOwnershipUntil,
+ };
+
+ const claimTasksById = [
+ '33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ];
+
+ expect(
+ updateFieldsAndMarkAsFailed(fieldUpdates, claimTasksById, ['foo', 'bar'], [], {
+ foo: 5,
+ bar: 2,
+ })
+ ).toMatchObject({
+ source: `
+ if (params.claimableTaskTypes.contains(ctx._source.task.taskType)) {
+ if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
+ ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
+ .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
+ .join(' ')}
+ } else {
+ ctx._source.task.status = "failed";
+ }
+ } else if (params.skippedTaskTypes.contains(ctx._source.task.taskType) && params.claimTasksById.contains(ctx._id)) {
+ ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
+ .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
+ .join(' ')}
+ } else if (!params.skippedTaskTypes.contains(ctx._source.task.taskType)) {
+ ctx._source.task.status = "unrecognized";
+ } else {
+ ctx.op = "noop";
+ }`,
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [
+ '33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ claimableTaskTypes: ['foo', 'bar'],
+ skippedTaskTypes: [],
+ taskMaxAttempts: {
+ foo: 5,
+ bar: 2,
+ },
+ },
+ });
+ });
+
+ test('it marks the update as a noop if the type is skipped', async () => {
+ const taskManagerId = '3478fg6-82374f6-83467gf5-384g6f';
+ const claimOwnershipUntil = '2019-02-12T21:01:22.479Z';
+ const fieldUpdates = {
+ ownerId: taskManagerId,
+ retryAt: claimOwnershipUntil,
+ };
+
+ expect(
+ updateFieldsAndMarkAsFailed(fieldUpdates, [], ['foo', 'bar'], [], {
+ foo: 5,
+ bar: 2,
+ }).source
+ ).toMatch(/ctx.op = "noop"/);
+ });
+ });
});
diff --git a/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.ts b/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.ts
index 067de5a92adb7..8598980a4e236 100644
--- a/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.ts
+++ b/x-pack/plugins/task_manager/server/queries/mark_available_tasks_as_claimed.ts
@@ -14,6 +14,8 @@ import {
mustBeAllOf,
MustCondition,
BoolClauseWithAnyCondition,
+ ShouldCondition,
+ FilterCondition,
} from './query_clauses';
export const TaskWithSchedule: ExistsFilter = {
@@ -39,14 +41,26 @@ export function taskWithLessThanMaxAttempts(
};
}
-export function tasksClaimedByOwner(taskManagerId: string) {
+export function tasksOfType(taskTypes: string[]): ShouldCondition {
+ return {
+ bool: {
+ should: [...taskTypes].map((type) => ({ term: { 'task.taskType': type } })),
+ },
+ };
+}
+
+export function tasksClaimedByOwner(
+ taskManagerId: string,
+ ...taskFilters: Array | ShouldCondition>
+) {
return mustBeAllOf(
{
term: {
'task.ownerId': taskManagerId,
},
},
- { term: { 'task.status': 'claiming' } }
+ { term: { 'task.status': 'claiming' } },
+ ...taskFilters
);
}
@@ -107,27 +121,35 @@ export const updateFieldsAndMarkAsFailed = (
[field: string]: string | number | Date;
},
claimTasksById: string[],
- registeredTaskTypes: string[],
+ claimableTaskTypes: string[],
+ skippedTaskTypes: string[],
taskMaxAttempts: { [field: string]: number }
-): ScriptClause => ({
- source: `
- if (params.registeredTaskTypes.contains(ctx._source.task.taskType)) {
- if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
- ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
- .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
- .join(' ')}
+): ScriptClause => {
+ const markAsClaimingScript = `ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
+ .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
+ .join(' ')}`;
+ return {
+ source: `
+ if (params.claimableTaskTypes.contains(ctx._source.task.taskType)) {
+ if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
+ ${markAsClaimingScript}
+ } else {
+ ctx._source.task.status = "failed";
+ }
+ } else if (params.skippedTaskTypes.contains(ctx._source.task.taskType) && params.claimTasksById.contains(ctx._id)) {
+ ${markAsClaimingScript}
+ } else if (!params.skippedTaskTypes.contains(ctx._source.task.taskType)) {
+ ctx._source.task.status = "unrecognized";
} else {
- ctx._source.task.status = "failed";
- }
- } else {
- ctx._source.task.status = "unrecognized";
- }
- `,
- lang: 'painless',
- params: {
- fieldUpdates,
- claimTasksById,
- registeredTaskTypes,
- taskMaxAttempts,
- },
-});
+ ctx.op = "noop";
+ }`,
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById,
+ claimableTaskTypes,
+ skippedTaskTypes,
+ taskMaxAttempts,
+ },
+ };
+};
diff --git a/x-pack/plugins/task_manager/server/queries/task_claiming.mock.ts b/x-pack/plugins/task_manager/server/queries/task_claiming.mock.ts
new file mode 100644
index 0000000000000..38f02780c485e
--- /dev/null
+++ b/x-pack/plugins/task_manager/server/queries/task_claiming.mock.ts
@@ -0,0 +1,33 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Observable, Subject } from 'rxjs';
+import { TaskClaim } from '../task_events';
+
+import { TaskClaiming } from './task_claiming';
+
+interface TaskClaimingOptions {
+ maxAttempts?: number;
+ taskManagerId?: string;
+ events?: Observable;
+}
+export const taskClaimingMock = {
+ create({
+ maxAttempts = 0,
+ taskManagerId = '',
+ events = new Subject(),
+ }: TaskClaimingOptions) {
+ const mocked = ({
+ claimAvailableTasks: jest.fn(),
+ claimAvailableTasksIfCapacityIsAvailable: jest.fn(),
+ maxAttempts,
+ taskManagerId,
+ events,
+ } as unknown) as jest.Mocked;
+ return mocked;
+ },
+};
diff --git a/x-pack/plugins/task_manager/server/queries/task_claiming.test.ts b/x-pack/plugins/task_manager/server/queries/task_claiming.test.ts
new file mode 100644
index 0000000000000..bd1171d7fd2f8
--- /dev/null
+++ b/x-pack/plugins/task_manager/server/queries/task_claiming.test.ts
@@ -0,0 +1,1516 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import _ from 'lodash';
+import uuid from 'uuid';
+import { filter, take, toArray } from 'rxjs/operators';
+import { some, none } from 'fp-ts/lib/Option';
+
+import { TaskStatus, ConcreteTaskInstance } from '../task';
+import { SearchOpts, StoreOpts, UpdateByQueryOpts, UpdateByQuerySearchOpts } from '../task_store';
+import { asTaskClaimEvent, ClaimTaskErr, TaskClaimErrorType, TaskEvent } from '../task_events';
+import { asOk, asErr } from '../lib/result_type';
+import { TaskTypeDictionary } from '../task_type_dictionary';
+import { BoolClauseWithAnyCondition, TermFilter } from '../queries/query_clauses';
+import { mockLogger } from '../test_utils';
+import { TaskClaiming, OwnershipClaimingOpts, TaskClaimingOpts } from './task_claiming';
+import { Observable } from 'rxjs';
+import { taskStoreMock } from '../task_store.mock';
+
+const taskManagerLogger = mockLogger();
+
+beforeEach(() => jest.resetAllMocks());
+
+const mockedDate = new Date('2019-02-12T21:01:22.479Z');
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+(global as any).Date = class Date {
+ constructor() {
+ return mockedDate;
+ }
+ static now() {
+ return mockedDate.getTime();
+ }
+};
+
+const taskDefinitions = new TaskTypeDictionary(taskManagerLogger);
+taskDefinitions.registerTaskDefinitions({
+ report: {
+ title: 'report',
+ createTaskRunner: jest.fn(),
+ },
+ dernstraight: {
+ title: 'dernstraight',
+ createTaskRunner: jest.fn(),
+ },
+ yawn: {
+ title: 'yawn',
+ createTaskRunner: jest.fn(),
+ },
+});
+
+describe('TaskClaiming', () => {
+  test(`should log when a certain task type is skipped due to having a zero concurrency configuration`, () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ definitions.registerTaskDefinitions({
+ unlimited: {
+ title: 'unlimited',
+ createTaskRunner: jest.fn(),
+ },
+ anotherUnlimited: {
+ title: 'anotherUnlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToZero: {
+ title: 'limitedToZero',
+ maxConcurrency: 0,
+ createTaskRunner: jest.fn(),
+ },
+ limitedToOne: {
+ title: 'limitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ anotherLimitedToZero: {
+ title: 'anotherLimitedToZero',
+ maxConcurrency: 0,
+ createTaskRunner: jest.fn(),
+ },
+ limitedToTwo: {
+ title: 'limitedToTwo',
+ maxConcurrency: 2,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
+ new TaskClaiming({
+ logger: taskManagerLogger,
+ definitions,
+ taskStore: taskStoreMock.create({ taskManagerId: '' }),
+ maxAttempts: 2,
+ getCapacity: () => 10,
+ });
+
+ expect(taskManagerLogger.info).toHaveBeenCalledTimes(1);
+ expect(taskManagerLogger.info.mock.calls[0][0]).toMatchInlineSnapshot(
+ `"Task Manager will never claim tasks of the following types as their \\"maxConcurrency\\" is set to 0: limitedToZero, anotherLimitedToZero"`
+ );
+ });
+
+ describe('claimAvailableTasks', () => {
+ function initialiseTestClaiming({
+ storeOpts = {},
+ taskClaimingOpts = {},
+ hits = [generateFakeTasks(1)],
+ versionConflicts = 2,
+ }: {
+ storeOpts: Partial;
+ taskClaimingOpts: Partial;
+ hits?: ConcreteTaskInstance[][];
+ versionConflicts?: number;
+ }) {
+ const definitions = storeOpts.definitions ?? taskDefinitions;
+ const store = taskStoreMock.create({ taskManagerId: storeOpts.taskManagerId });
+ store.convertToSavedObjectIds.mockImplementation((ids) => ids.map((id) => `task:${id}`));
+
+ if (hits.length === 1) {
+ store.fetch.mockResolvedValue({ docs: hits[0] });
+ store.updateByQuery.mockResolvedValue({
+ updated: hits[0].length,
+ version_conflicts: versionConflicts,
+ total: hits[0].length,
+ });
+ } else {
+ for (const docs of hits) {
+ store.fetch.mockResolvedValueOnce({ docs });
+ store.updateByQuery.mockResolvedValueOnce({
+ updated: docs.length,
+ version_conflicts: versionConflicts,
+ total: docs.length,
+ });
+ }
+ }
+
+ const taskClaiming = new TaskClaiming({
+ logger: taskManagerLogger,
+ definitions,
+ taskStore: store,
+ maxAttempts: taskClaimingOpts.maxAttempts ?? 2,
+ getCapacity: taskClaimingOpts.getCapacity ?? (() => 10),
+ ...taskClaimingOpts,
+ });
+
+ return { taskClaiming, store };
+ }
+
+ async function testClaimAvailableTasks({
+ storeOpts = {},
+ taskClaimingOpts = {},
+ claimingOpts,
+ hits = [generateFakeTasks(1)],
+ versionConflicts = 2,
+ }: {
+ storeOpts: Partial;
+ taskClaimingOpts: Partial;
+ claimingOpts: Omit;
+ hits?: ConcreteTaskInstance[][];
+ versionConflicts?: number;
+ }) {
+ const getCapacity = taskClaimingOpts.getCapacity ?? (() => 10);
+ const { taskClaiming, store } = initialiseTestClaiming({
+ storeOpts,
+ taskClaimingOpts,
+ hits,
+ versionConflicts,
+ });
+
+ const results = await getAllAsPromise(taskClaiming.claimAvailableTasks(claimingOpts));
+
+ expect(store.updateByQuery.mock.calls[0][1]).toMatchObject({
+ max_docs: getCapacity(),
+ });
+ expect(store.fetch.mock.calls[0][0]).toMatchObject({ size: getCapacity() });
+ return results.map((result, index) => ({
+ result,
+ args: {
+ search: store.fetch.mock.calls[index][0] as SearchOpts & {
+ query: BoolClauseWithAnyCondition;
+ },
+ updateByQuery: store.updateByQuery.mock.calls[index] as [
+ UpdateByQuerySearchOpts,
+ UpdateByQueryOpts
+ ],
+ },
+ }));
+ }
+
+ test('it filters claimed tasks down by supported types, maxAttempts, status, and runAt', async () => {
+ const maxAttempts = _.random(2, 43);
+ const customMaxAttempts = _.random(44, 100);
+
+ const definitions = new TaskTypeDictionary(mockLogger());
+ definitions.registerTaskDefinitions({
+ foo: {
+ title: 'foo',
+ createTaskRunner: jest.fn(),
+ },
+ bar: {
+ title: 'bar',
+ maxAttempts: customMaxAttempts,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
+ const [
+ {
+ args: {
+ updateByQuery: [{ query, sort }],
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ definitions,
+ },
+ taskClaimingOpts: {
+ maxAttempts,
+ },
+ claimingOpts: {
+ claimOwnershipUntil: new Date(),
+ },
+ });
+ expect(query).toMatchObject({
+ bool: {
+ must: [
+ {
+ bool: {
+ should: [
+ {
+ bool: {
+ must: [
+ { term: { 'task.status': 'idle' } },
+ { range: { 'task.runAt': { lte: 'now' } } },
+ ],
+ },
+ },
+ {
+ bool: {
+ must: [
+ {
+ bool: {
+ should: [
+ { term: { 'task.status': 'running' } },
+ { term: { 'task.status': 'claiming' } },
+ ],
+ },
+ },
+ { range: { 'task.retryAt': { lte: 'now' } } },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ filter: [
+ {
+ bool: {
+ must_not: [
+ {
+ bool: {
+ should: [
+ { term: { 'task.status': 'running' } },
+ { term: { 'task.status': 'claiming' } },
+ ],
+ must: { range: { 'task.retryAt': { gt: 'now' } } },
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ });
+ expect(sort).toMatchObject([
+ {
+ _script: {
+ type: 'number',
+ order: 'asc',
+ script: {
+ lang: 'painless',
+ source: `
+if (doc['task.retryAt'].size()!=0) {
+ return doc['task.retryAt'].value.toInstant().toEpochMilli();
+}
+if (doc['task.runAt'].size()!=0) {
+ return doc['task.runAt'].value.toInstant().toEpochMilli();
+}
+ `,
+ },
+ },
+ },
+ ]);
+ });
+
+ test('it supports claiming specific tasks by id', async () => {
+ const maxAttempts = _.random(2, 43);
+ const customMaxAttempts = _.random(44, 100);
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskManagerId = uuid.v1();
+ const fieldUpdates = {
+ ownerId: taskManagerId,
+ retryAt: new Date(Date.now()),
+ };
+ definitions.registerTaskDefinitions({
+ foo: {
+ title: 'foo',
+ createTaskRunner: jest.fn(),
+ },
+ bar: {
+ title: 'bar',
+ maxAttempts: customMaxAttempts,
+ createTaskRunner: jest.fn(),
+ },
+ });
+ const [
+ {
+ args: {
+ updateByQuery: [{ query, script, sort }],
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ definitions,
+ },
+ taskClaimingOpts: {
+ maxAttempts,
+ },
+ claimingOpts: {
+ claimOwnershipUntil: new Date(),
+ claimTasksById: [
+ '33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ },
+ });
+
+ expect(query).toMatchObject({
+ bool: {
+ must: [
+ {
+ pinned: {
+ ids: [
+ 'task:33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'task:a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ organic: {
+ bool: {
+ must: [
+ {
+ bool: {
+ should: [
+ {
+ bool: {
+ must: [
+ { term: { 'task.status': 'idle' } },
+ { range: { 'task.runAt': { lte: 'now' } } },
+ ],
+ },
+ },
+ {
+ bool: {
+ must: [
+ {
+ bool: {
+ should: [
+ { term: { 'task.status': 'running' } },
+ { term: { 'task.status': 'claiming' } },
+ ],
+ },
+ },
+ { range: { 'task.retryAt': { lte: 'now' } } },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ ],
+ filter: [
+ {
+ bool: {
+ must_not: [
+ {
+ bool: {
+ should: [
+ { term: { 'task.status': 'running' } },
+ { term: { 'task.status': 'claiming' } },
+ ],
+ must: { range: { 'task.retryAt': { gt: 'now' } } },
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ });
+
+ expect(script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [
+ 'task:33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'task:a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ claimableTaskTypes: ['foo', 'bar'],
+ skippedTaskTypes: [],
+ taskMaxAttempts: {
+ bar: customMaxAttempts,
+ foo: maxAttempts,
+ },
+ },
+ });
+
+ expect(sort).toMatchObject([
+ '_score',
+ {
+ _script: {
+ type: 'number',
+ order: 'asc',
+ script: {
+ lang: 'painless',
+ source: `
+if (doc['task.retryAt'].size()!=0) {
+ return doc['task.retryAt'].value.toInstant().toEpochMilli();
+}
+if (doc['task.runAt'].size()!=0) {
+ return doc['task.runAt'].value.toInstant().toEpochMilli();
+}
+ `,
+ },
+ },
+ },
+ ]);
+ });
+
+ test('it should claim in batches partitioned by maxConcurrency', async () => {
+ const maxAttempts = _.random(2, 43);
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskManagerId = uuid.v1();
+ const fieldUpdates = {
+ ownerId: taskManagerId,
+ retryAt: new Date(Date.now()),
+ };
+ definitions.registerTaskDefinitions({
+ unlimited: {
+ title: 'unlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToZero: {
+ title: 'limitedToZero',
+ maxConcurrency: 0,
+ createTaskRunner: jest.fn(),
+ },
+ anotherUnlimited: {
+ title: 'anotherUnlimited',
+ createTaskRunner: jest.fn(),
+ },
+ finalUnlimited: {
+ title: 'finalUnlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToOne: {
+ title: 'limitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ anotherLimitedToOne: {
+ title: 'anotherLimitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ limitedToTwo: {
+ title: 'limitedToTwo',
+ maxConcurrency: 2,
+ createTaskRunner: jest.fn(),
+ },
+ });
+ const results = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ definitions,
+ },
+ taskClaimingOpts: {
+ maxAttempts,
+ getCapacity: (type) => {
+ switch (type) {
+ case 'limitedToOne':
+ case 'anotherLimitedToOne':
+ return 1;
+ case 'limitedToTwo':
+ return 2;
+ default:
+ return 10;
+ }
+ },
+ },
+ claimingOpts: {
+ claimOwnershipUntil: new Date(),
+ claimTasksById: [
+ '33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ },
+ });
+
+ expect(results.length).toEqual(4);
+
+ expect(results[0].args.updateByQuery[1].max_docs).toEqual(10);
+ expect(results[0].args.updateByQuery[0].script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [
+ 'task:33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
+ 'task:a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
+ ],
+ claimableTaskTypes: ['unlimited', 'anotherUnlimited', 'finalUnlimited'],
+ skippedTaskTypes: [
+ 'limitedToZero',
+ 'limitedToOne',
+ 'anotherLimitedToOne',
+ 'limitedToTwo',
+ ],
+ taskMaxAttempts: {
+ unlimited: maxAttempts,
+ },
+ },
+ });
+
+ expect(results[1].args.updateByQuery[1].max_docs).toEqual(1);
+ expect(results[1].args.updateByQuery[0].script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [],
+ claimableTaskTypes: ['limitedToOne'],
+ skippedTaskTypes: [
+ 'unlimited',
+ 'limitedToZero',
+ 'anotherUnlimited',
+ 'finalUnlimited',
+ 'anotherLimitedToOne',
+ 'limitedToTwo',
+ ],
+ taskMaxAttempts: {
+ limitedToOne: maxAttempts,
+ },
+ },
+ });
+
+ expect(results[2].args.updateByQuery[1].max_docs).toEqual(1);
+ expect(results[2].args.updateByQuery[0].script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [],
+ claimableTaskTypes: ['anotherLimitedToOne'],
+ skippedTaskTypes: [
+ 'unlimited',
+ 'limitedToZero',
+ 'anotherUnlimited',
+ 'finalUnlimited',
+ 'limitedToOne',
+ 'limitedToTwo',
+ ],
+ taskMaxAttempts: {
+ anotherLimitedToOne: maxAttempts,
+ },
+ },
+ });
+
+ expect(results[3].args.updateByQuery[1].max_docs).toEqual(2);
+ expect(results[3].args.updateByQuery[0].script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimTasksById: [],
+ claimableTaskTypes: ['limitedToTwo'],
+ skippedTaskTypes: [
+ 'unlimited',
+ 'limitedToZero',
+ 'anotherUnlimited',
+ 'finalUnlimited',
+ 'limitedToOne',
+ 'anotherLimitedToOne',
+ ],
+ taskMaxAttempts: {
+ limitedToTwo: maxAttempts,
+ },
+ },
+ });
+ });
+
+ test('it should reduce the available capacity from batch to batch', async () => {
+ const maxAttempts = _.random(2, 43);
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskManagerId = uuid.v1();
+ definitions.registerTaskDefinitions({
+ unlimited: {
+ title: 'unlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToFive: {
+ title: 'limitedToFive',
+ maxConcurrency: 5,
+ createTaskRunner: jest.fn(),
+ },
+ limitedToTwo: {
+ title: 'limitedToTwo',
+ maxConcurrency: 2,
+ createTaskRunner: jest.fn(),
+ },
+ });
+ const results = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ definitions,
+ },
+ taskClaimingOpts: {
+ maxAttempts,
+ getCapacity: (type) => {
+ switch (type) {
+ case 'limitedToTwo':
+ return 2;
+ case 'limitedToFive':
+ return 5;
+ default:
+ return 10;
+ }
+ },
+ },
+ hits: [
+ [
+ // 7 returned by unlimited query
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ mockInstance({
+ taskType: 'unlimited',
+ }),
+ ],
+ // 2 returned by limitedToFive query
+ [
+ mockInstance({
+ taskType: 'limitedToFive',
+ }),
+ mockInstance({
+ taskType: 'limitedToFive',
+ }),
+ ],
+          // 1 returned by limitedToTwo query
+ [
+ mockInstance({
+ taskType: 'limitedToTwo',
+ }),
+ ],
+ ],
+ claimingOpts: {
+ claimOwnershipUntil: new Date(),
+ claimTasksById: [],
+ },
+ });
+
+ expect(results.length).toEqual(3);
+
+ expect(results[0].args.updateByQuery[1].max_docs).toEqual(10);
+
+ // only capacity for 3, even though 5 are allowed
+ expect(results[1].args.updateByQuery[1].max_docs).toEqual(3);
+
+ // only capacity for 1, even though 2 are allowed
+ expect(results[2].args.updateByQuery[1].max_docs).toEqual(1);
+ });
+
+ test('it shuffles the types claimed in batches to ensure no type starves another', async () => {
+ const maxAttempts = _.random(2, 43);
+ const definitions = new TaskTypeDictionary(mockLogger());
+ const taskManagerId = uuid.v1();
+ definitions.registerTaskDefinitions({
+ unlimited: {
+ title: 'unlimited',
+ createTaskRunner: jest.fn(),
+ },
+ anotherUnlimited: {
+ title: 'anotherUnlimited',
+ createTaskRunner: jest.fn(),
+ },
+ finalUnlimited: {
+ title: 'finalUnlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToOne: {
+ title: 'limitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ anotherLimitedToOne: {
+ title: 'anotherLimitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ limitedToTwo: {
+ title: 'limitedToTwo',
+ maxConcurrency: 2,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
+ const { taskClaiming, store } = initialiseTestClaiming({
+ storeOpts: {
+ taskManagerId,
+ definitions,
+ },
+ taskClaimingOpts: {
+ maxAttempts,
+ getCapacity: (type) => {
+ switch (type) {
+ case 'limitedToOne':
+ case 'anotherLimitedToOne':
+ return 1;
+ case 'limitedToTwo':
+ return 2;
+ default:
+ return 10;
+ }
+ },
+ },
+ });
+
+ async function getUpdateByQueryScriptParams() {
+ return (
+ await getAllAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimOwnershipUntil: new Date(),
+ })
+ )
+ ).map(
+ (result, index) =>
+ (store.updateByQuery.mock.calls[index][0] as {
+ query: BoolClauseWithAnyCondition;
+ size: number;
+ sort: string | string[];
+ script: {
+ params: {
+ claimableTaskTypes: string[];
+ };
+ };
+ }).script.params.claimableTaskTypes
+ );
+ }
+
+ const firstCycle = await getUpdateByQueryScriptParams();
+ store.updateByQuery.mockClear();
+ const secondCycle = await getUpdateByQueryScriptParams();
+
+ expect(firstCycle.length).toEqual(4);
+ expect(secondCycle.length).toEqual(4);
+ expect(firstCycle).not.toMatchObject(secondCycle);
+ });
+
+ test('it claims tasks by setting their ownerId, status and retryAt', async () => {
+ const taskManagerId = uuid.v1();
+ const claimOwnershipUntil = new Date(Date.now());
+ const fieldUpdates = {
+ ownerId: taskManagerId,
+ retryAt: claimOwnershipUntil,
+ };
+ const [
+ {
+ args: {
+ updateByQuery: [{ script }],
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ },
+ taskClaimingOpts: {},
+ claimingOpts: {
+ claimOwnershipUntil,
+ },
+ });
+ expect(script).toMatchObject({
+ source: expect.any(String),
+ lang: 'painless',
+ params: {
+ fieldUpdates,
+ claimableTaskTypes: ['report', 'dernstraight', 'yawn'],
+ skippedTaskTypes: [],
+ taskMaxAttempts: {
+ dernstraight: 2,
+ report: 2,
+ yawn: 2,
+ },
+ },
+ });
+ });
+
+ test('it filters out running tasks', async () => {
+ const taskManagerId = uuid.v1();
+ const claimOwnershipUntil = new Date(Date.now());
+ const runAt = new Date();
+ const tasks = [
+ mockInstance({
+ id: 'aaa',
+ runAt,
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ }),
+ ];
+ const [
+ {
+ result: { docs },
+ args: {
+ search: { query },
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ },
+ taskClaimingOpts: {},
+ claimingOpts: {
+ claimOwnershipUntil,
+ },
+ hits: [tasks],
+ });
+
+ expect(query).toMatchObject({
+ bool: {
+ must: [
+ {
+ term: {
+ 'task.ownerId': taskManagerId,
+ },
+ },
+ { term: { 'task.status': 'claiming' } },
+ {
+ bool: {
+ should: [
+ {
+ term: {
+ 'task.taskType': 'report',
+ },
+ },
+ {
+ term: {
+ 'task.taskType': 'dernstraight',
+ },
+ },
+ {
+ term: {
+ 'task.taskType': 'yawn',
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ });
+
+ expect(docs).toMatchObject([
+ {
+ attempts: 0,
+ id: 'aaa',
+ schedule: undefined,
+ params: { hello: 'world' },
+ runAt,
+ scope: ['reporting'],
+ state: { baby: 'Henhen' },
+ status: 'claiming',
+ taskType: 'foo',
+ user: 'jimbo',
+ ownerId: taskManagerId,
+ },
+ ]);
+ });
+
+ test('it returns task objects', async () => {
+ const taskManagerId = uuid.v1();
+ const claimOwnershipUntil = new Date(Date.now());
+ const runAt = new Date();
+ const tasks = [
+ mockInstance({
+ id: 'aaa',
+ runAt,
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ }),
+ mockInstance({
+ id: 'bbb',
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: TaskStatus.Claiming,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ }),
+ ];
+ const [
+ {
+ result: { docs },
+ args: {
+ search: { query },
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ },
+ taskClaimingOpts: {},
+ claimingOpts: {
+ claimOwnershipUntil,
+ },
+ hits: [tasks],
+ });
+
+ expect(query).toMatchObject({
+ bool: {
+ must: [
+ {
+ term: {
+ 'task.ownerId': taskManagerId,
+ },
+ },
+ { term: { 'task.status': 'claiming' } },
+ {
+ bool: {
+ should: [
+ {
+ term: {
+ 'task.taskType': 'report',
+ },
+ },
+ {
+ term: {
+ 'task.taskType': 'dernstraight',
+ },
+ },
+ {
+ term: {
+ 'task.taskType': 'yawn',
+ },
+ },
+ ],
+ },
+ },
+ ],
+ },
+ });
+
+ expect(docs).toMatchObject([
+ {
+ attempts: 0,
+ id: 'aaa',
+ schedule: undefined,
+ params: { hello: 'world' },
+ runAt,
+ scope: ['reporting'],
+ state: { baby: 'Henhen' },
+ status: 'claiming',
+ taskType: 'foo',
+ user: 'jimbo',
+ ownerId: taskManagerId,
+ },
+ {
+ attempts: 2,
+ id: 'bbb',
+ schedule: { interval: '5m' },
+ params: { shazm: 1 },
+ runAt,
+ scope: ['reporting', 'ceo'],
+ state: { henry: 'The 8th' },
+ status: 'claiming',
+ taskType: 'bar',
+ user: 'dabo',
+ ownerId: taskManagerId,
+ },
+ ]);
+ });
+
+ test('it returns version_conflicts that do not include conflicts that were proceeded against', async () => {
+ const taskManagerId = uuid.v1();
+ const claimOwnershipUntil = new Date(Date.now());
+ const runAt = new Date();
+ const tasks = [
+ mockInstance({
+ runAt,
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ }),
+ mockInstance({
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: TaskStatus.Claiming,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ }),
+ ];
+ const maxDocs = 10;
+ const [
+ {
+ result: {
+ stats: { tasksUpdated, tasksConflicted, tasksClaimed },
+ },
+ },
+ ] = await testClaimAvailableTasks({
+ storeOpts: {
+ taskManagerId,
+ },
+ taskClaimingOpts: { getCapacity: () => maxDocs },
+ claimingOpts: {
+ claimOwnershipUntil,
+ },
+ hits: [tasks],
+      // assume there were 20 version conflicts, but thanks to `conflicts="proceed"`
+ // we proceeded to claim tasks
+ versionConflicts: 20,
+ });
+
+ expect(tasksUpdated).toEqual(2);
+ // ensure we only count conflicts that *may* have counted against max_docs, no more than that
+ expect(tasksConflicted).toEqual(10 - tasksUpdated!);
+ expect(tasksClaimed).toEqual(2);
+ });
+ });
+
+ describe('task events', () => {
+ function generateTasks(taskManagerId: string) {
+ const runAt = new Date();
+ const tasks = [
+ {
+ id: 'claimed-by-id',
+ runAt,
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ },
+ {
+ id: 'claimed-by-schedule',
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: TaskStatus.Claiming,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ },
+ {
+ id: 'already-running',
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: TaskStatus.Running,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ },
+ ];
+
+ return { taskManagerId, runAt, tasks };
+ }
+
+ function instantiateStoreWithMockedApiResponses({
+ taskManagerId = uuid.v4(),
+ definitions = taskDefinitions,
+ getCapacity = () => 10,
+ tasksClaimed,
+ }: Partial> & {
+ taskManagerId?: string;
+ tasksClaimed?: ConcreteTaskInstance[][];
+ } = {}) {
+ const { runAt, tasks: generatedTasks } = generateTasks(taskManagerId);
+ const taskCycles = tasksClaimed ?? [generatedTasks];
+
+ const taskStore = taskStoreMock.create({ taskManagerId });
+ taskStore.convertToSavedObjectIds.mockImplementation((ids) => ids.map((id) => `task:${id}`));
+ for (const docs of taskCycles) {
+ taskStore.fetch.mockResolvedValueOnce({ docs });
+ taskStore.updateByQuery.mockResolvedValueOnce({
+ updated: docs.length,
+ version_conflicts: 0,
+ total: docs.length,
+ });
+ }
+
+ taskStore.fetch.mockResolvedValue({ docs: [] });
+ taskStore.updateByQuery.mockResolvedValue({
+ updated: 0,
+ version_conflicts: 0,
+ total: 0,
+ });
+
+ const taskClaiming = new TaskClaiming({
+ logger: taskManagerLogger,
+ definitions,
+ taskStore,
+ maxAttempts: 2,
+ getCapacity,
+ });
+
+ return { taskManagerId, runAt, taskClaiming };
+ }
+
+    test('emits an event when a task is successfully claimed by id', async () => {
+ const { taskManagerId, runAt, taskClaiming } = instantiateStoreWithMockedApiResponses();
+
+ const promise = taskClaiming.events
+ .pipe(
+ filter(
+ (event: TaskEvent) => event.id === 'claimed-by-id'
+ ),
+ take(1)
+ )
+ .toPromise();
+
+ await getFirstAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimTasksById: ['claimed-by-id'],
+ claimOwnershipUntil: new Date(),
+ })
+ );
+
+ const event = await promise;
+ expect(event).toMatchObject(
+ asTaskClaimEvent(
+ 'claimed-by-id',
+ asOk({
+ id: 'claimed-by-id',
+ runAt,
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: 'claiming' as TaskStatus,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ })
+ )
+ );
+ });
+
+ test('emits an event when a task is succesfully claimed by id by is rejected as it would exceed maxCapacity of its taskType', async () => {
+ const definitions = new TaskTypeDictionary(mockLogger());
+ definitions.registerTaskDefinitions({
+ unlimited: {
+ title: 'unlimited',
+ createTaskRunner: jest.fn(),
+ },
+ limitedToOne: {
+ title: 'limitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ anotherLimitedToOne: {
+ title: 'anotherLimitedToOne',
+ maxConcurrency: 1,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
+ const taskManagerId = uuid.v4();
+ const { runAt, taskClaiming } = instantiateStoreWithMockedApiResponses({
+ taskManagerId,
+ definitions,
+ getCapacity: (type) => {
+ switch (type) {
+ case 'limitedToOne':
+ // return 0 as there's already a `limitedToOne` task running
+ return 0;
+ default:
+ return 10;
+ }
+ },
+ tasksClaimed: [
+ // find on first claim cycle
+ [
+ {
+ id: 'claimed-by-id-limited-concurrency',
+ runAt: new Date(),
+ taskType: 'limitedToOne',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ },
+ ],
+ // second cycle
+ [
+ {
+ id: 'claimed-by-schedule-unlimited',
+ runAt: new Date(),
+ taskType: 'unlimited',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ },
+ ],
+ ],
+ });
+
+ const promise = taskClaiming.events
+ .pipe(
+ filter(
+ (event: TaskEvent) =>
+ event.id === 'claimed-by-id-limited-concurrency'
+ ),
+ take(1)
+ )
+ .toPromise();
+
+ const [firstCycleResult, secondCycleResult] = await getAllAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimTasksById: ['claimed-by-id-limited-concurrency'],
+ claimOwnershipUntil: new Date(),
+ })
+ );
+
+ expect(firstCycleResult.stats.tasksClaimed).toEqual(0);
+ expect(firstCycleResult.stats.tasksRejected).toEqual(1);
+ expect(firstCycleResult.stats.tasksUpdated).toEqual(1);
+
+ // values accumulate from cycle to cycle
+ expect(secondCycleResult.stats.tasksClaimed).toEqual(0);
+ expect(secondCycleResult.stats.tasksRejected).toEqual(1);
+ expect(secondCycleResult.stats.tasksUpdated).toEqual(1);
+
+ const event = await promise;
+ expect(event).toMatchObject(
+ asTaskClaimEvent(
+ 'claimed-by-id-limited-concurrency',
+ asErr({
+ task: some({
+ id: 'claimed-by-id-limited-concurrency',
+ runAt,
+ taskType: 'limitedToOne',
+ schedule: undefined,
+ attempts: 0,
+ status: 'claiming' as TaskStatus,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ }),
+ errorType: TaskClaimErrorType.CLAIMED_BY_ID_OUT_OF_CAPACITY,
+ })
+ )
+ );
+ });
+
+ test('emits an event when a task is succesfully by scheduling', async () => {
+ const { taskManagerId, runAt, taskClaiming } = instantiateStoreWithMockedApiResponses();
+
+ const promise = taskClaiming.events
+ .pipe(
+ filter(
+ (event: TaskEvent) =>
+ event.id === 'claimed-by-schedule'
+ ),
+ take(1)
+ )
+ .toPromise();
+
+ await getFirstAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimTasksById: ['claimed-by-id'],
+ claimOwnershipUntil: new Date(),
+ })
+ );
+
+ const event = await promise;
+ expect(event).toMatchObject(
+ asTaskClaimEvent(
+ 'claimed-by-schedule',
+ asOk({
+ id: 'claimed-by-schedule',
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: 'claiming' as TaskStatus,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ })
+ )
+ );
+ });
+
+ test('emits an event when the store fails to claim a required task by id', async () => {
+ const { taskManagerId, runAt, taskClaiming } = instantiateStoreWithMockedApiResponses();
+
+ const promise = taskClaiming.events
+ .pipe(
+ filter(
+ (event: TaskEvent) => event.id === 'already-running'
+ ),
+ take(1)
+ )
+ .toPromise();
+
+ await getFirstAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimTasksById: ['already-running'],
+ claimOwnershipUntil: new Date(),
+ })
+ );
+
+ const event = await promise;
+ expect(event).toMatchObject(
+ asTaskClaimEvent(
+ 'already-running',
+ asErr({
+ task: some({
+ id: 'already-running',
+ runAt,
+ taskType: 'bar',
+ schedule: { interval: '5m' },
+ attempts: 2,
+ status: 'running' as TaskStatus,
+ params: { shazm: 1 },
+ state: { henry: 'The 8th' },
+ user: 'dabo',
+ scope: ['reporting', 'ceo'],
+ ownerId: taskManagerId,
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ }),
+ errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_IN_CLAIMING_STATUS,
+ })
+ )
+ );
+ });
+
+ test('emits an event when the store fails to find a task which was required by id', async () => {
+ const { taskClaiming } = instantiateStoreWithMockedApiResponses();
+
+ const promise = taskClaiming.events
+ .pipe(
+ filter(
+ (event: TaskEvent) => event.id === 'unknown-task'
+ ),
+ take(1)
+ )
+ .toPromise();
+
+ await getFirstAsPromise(
+ taskClaiming.claimAvailableTasks({
+ claimTasksById: ['unknown-task'],
+ claimOwnershipUntil: new Date(),
+ })
+ );
+
+ const event = await promise;
+ expect(event).toMatchObject(
+ asTaskClaimEvent(
+ 'unknown-task',
+ asErr({
+ task: none,
+ errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED,
+ })
+ )
+ );
+ });
+ });
+});
+
+function generateFakeTasks(count: number = 1) {
+ return _.times(count, (index) => mockInstance({ id: `task:id-${index}` }));
+}
+
+function mockInstance(instance: Partial = {}) {
+ return Object.assign(
+ {
+ id: uuid.v4(),
+ taskType: 'bar',
+ sequenceNumber: 32,
+ primaryTerm: 32,
+ runAt: new Date(),
+ scheduledAt: new Date(),
+ startedAt: null,
+ retryAt: null,
+ attempts: 0,
+ params: {},
+ scope: ['reporting'],
+ state: {},
+ status: 'idle',
+ user: 'example',
+ ownerId: null,
+ },
+ instance
+ );
+}
+
+function getFirstAsPromise(obs$: Observable): Promise {
+ return new Promise((resolve, reject) => {
+ obs$.subscribe(resolve, reject);
+ });
+}
+function getAllAsPromise(obs$: Observable): Promise {
+ return new Promise((resolve, reject) => {
+ obs$.pipe(toArray()).subscribe(resolve, reject);
+ });
+}
diff --git a/x-pack/plugins/task_manager/server/queries/task_claiming.ts b/x-pack/plugins/task_manager/server/queries/task_claiming.ts
new file mode 100644
index 0000000000000..b4e11dbf81eb1
--- /dev/null
+++ b/x-pack/plugins/task_manager/server/queries/task_claiming.ts
@@ -0,0 +1,488 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+/*
+ * This module contains helpers for managing the task manager storage layer.
+ */
+import apm from 'elastic-apm-node';
+import { Subject, Observable, from, of } from 'rxjs';
+import { map, mergeScan } from 'rxjs/operators';
+import { difference, partition, groupBy, mapValues, countBy, pick } from 'lodash';
+import { some, none } from 'fp-ts/lib/Option';
+
+import { Logger } from '../../../../../src/core/server';
+
+import { asOk, asErr, Result } from '../lib/result_type';
+import { ConcreteTaskInstance, TaskStatus } from '../task';
+import {
+ TaskClaim,
+ asTaskClaimEvent,
+ TaskClaimErrorType,
+ startTaskTimer,
+ TaskTiming,
+} from '../task_events';
+
+import {
+ asUpdateByQuery,
+ shouldBeOneOf,
+ mustBeAllOf,
+ filterDownBy,
+ asPinnedQuery,
+ matchesClauses,
+ SortOptions,
+} from './query_clauses';
+
+import {
+ updateFieldsAndMarkAsFailed,
+ IdleTaskWithExpiredRunAt,
+ InactiveTasks,
+ RunningOrClaimingTaskWithExpiredRetryAt,
+ SortByRunAtAndRetryAt,
+ tasksClaimedByOwner,
+ tasksOfType,
+} from './mark_available_tasks_as_claimed';
+import { TaskTypeDictionary } from '../task_type_dictionary';
+import {
+ correctVersionConflictsForContinuation,
+ TaskStore,
+ UpdateByQueryResult,
+} from '../task_store';
+import { FillPoolResult } from '../lib/fill_pool';
+
+export interface TaskClaimingOpts {
+ logger: Logger;
+ definitions: TaskTypeDictionary;
+ taskStore: TaskStore;
+ maxAttempts: number;
+ getCapacity: (taskType?: string) => number;
+}
+
+export interface OwnershipClaimingOpts {
+ claimOwnershipUntil: Date;
+ claimTasksById?: string[];
+ size: number;
+ taskTypes: Set;
+}
+export type IncrementalOwnershipClaimingOpts = OwnershipClaimingOpts & {
+ precedingQueryResult: UpdateByQueryResult;
+};
+export type IncrementalOwnershipClaimingReduction = (
+ opts: IncrementalOwnershipClaimingOpts
+) => Promise;
+
+export interface FetchResult {
+ docs: ConcreteTaskInstance[];
+}
+
+export interface ClaimOwnershipResult {
+ stats: {
+ tasksUpdated: number;
+ tasksConflicted: number;
+ tasksClaimed: number;
+ tasksRejected: number;
+ };
+ docs: ConcreteTaskInstance[];
+ timing?: TaskTiming;
+}
+
+enum BatchConcurrency {
+ Unlimited,
+ Limited,
+}
+
+type TaskClaimingBatches = Array;
+interface TaskClaimingBatch {
+ concurrency: Concurrency;
+ tasksTypes: TaskType;
+}
+type UnlimitedBatch = TaskClaimingBatch>;
+type LimitedBatch = TaskClaimingBatch;
+
+export class TaskClaiming {
+ public readonly errors$ = new Subject();
+ public readonly maxAttempts: number;
+
+ private definitions: TaskTypeDictionary;
+ private events$: Subject;
+ private taskStore: TaskStore;
+ private getCapacity: (taskType?: string) => number;
+ private logger: Logger;
+ private readonly taskClaimingBatchesByType: TaskClaimingBatches;
+ private readonly taskMaxAttempts: Record;
+
+ /**
+ * Constructs a new TaskStore.
+ * @param {TaskClaimingOpts} opts
+ * @prop {number} maxAttempts - The maximum number of attempts before a task will be abandoned
+ * @prop {TaskDefinition} definition - The definition of the task being run
+ */
+ constructor(opts: TaskClaimingOpts) {
+ this.definitions = opts.definitions;
+ this.maxAttempts = opts.maxAttempts;
+ this.taskStore = opts.taskStore;
+ this.getCapacity = opts.getCapacity;
+ this.logger = opts.logger;
+ this.taskClaimingBatchesByType = this.partitionIntoClaimingBatches(this.definitions);
+ this.taskMaxAttempts = Object.fromEntries(this.normalizeMaxAttempts(this.definitions));
+
+ this.events$ = new Subject();
+ }
+
+ private partitionIntoClaimingBatches(definitions: TaskTypeDictionary): TaskClaimingBatches {
+ const {
+ limitedConcurrency,
+ unlimitedConcurrency,
+ skippedTypes,
+ } = groupBy(definitions.getAllDefinitions(), (definition) =>
+ definition.maxConcurrency
+ ? 'limitedConcurrency'
+ : definition.maxConcurrency === 0
+ ? 'skippedTypes'
+ : 'unlimitedConcurrency'
+ );
+
+ if (skippedTypes?.length) {
+ this.logger.info(
+ `Task Manager will never claim tasks of the following types as their "maxConcurrency" is set to 0: ${skippedTypes
+ .map(({ type }) => type)
+ .join(', ')}`
+ );
+ }
+ return [
+ ...(unlimitedConcurrency
+ ? [asUnlimited(new Set(unlimitedConcurrency.map(({ type }) => type)))]
+ : []),
+ ...(limitedConcurrency ? limitedConcurrency.map(({ type }) => asLimited(type)) : []),
+ ];
+ }
+
+ private normalizeMaxAttempts(definitions: TaskTypeDictionary) {
+ return new Map(
+ [...definitions].map(([type, { maxAttempts }]) => [type, maxAttempts || this.maxAttempts])
+ );
+ }
+
+ private claimingBatchIndex = 0;
+ private getClaimingBatches() {
+ // return all batches, starting at index and cycling back to where we began
+ const batch = [
+ ...this.taskClaimingBatchesByType.slice(this.claimingBatchIndex),
+ ...this.taskClaimingBatchesByType.slice(0, this.claimingBatchIndex),
+ ];
+ // shift claimingBatchIndex by one so that next cycle begins at the next index
+ this.claimingBatchIndex = (this.claimingBatchIndex + 1) % this.taskClaimingBatchesByType.length;
+ return batch;
+ }
+
+ public get events(): Observable {
+ return this.events$;
+ }
+
+ private emitEvents = (events: TaskClaim[]) => {
+ events.forEach((event) => this.events$.next(event));
+ };
+
+ public claimAvailableTasksIfCapacityIsAvailable(
+ claimingOptions: Omit
+ ): Observable> {
+ if (this.getCapacity()) {
+ return this.claimAvailableTasks(claimingOptions).pipe(
+ map((claimResult) => asOk(claimResult))
+ );
+ }
+ this.logger.debug(
+ `[Task Ownership]: Task Manager has skipped Claiming Ownership of available tasks at it has ran out Available Workers.`
+ );
+ return of(asErr(FillPoolResult.NoAvailableWorkers));
+ }
+
+ public claimAvailableTasks({
+ claimOwnershipUntil,
+ claimTasksById = [],
+ }: Omit): Observable {
+ const initialCapacity = this.getCapacity();
+ return from(this.getClaimingBatches()).pipe(
+ mergeScan(
+ (accumulatedResult, batch) => {
+ const stopTaskTimer = startTaskTimer();
+ const capacity = Math.min(
+ initialCapacity - accumulatedResult.stats.tasksClaimed,
+ isLimited(batch) ? this.getCapacity(batch.tasksTypes) : this.getCapacity()
+ );
+ // if we have no more capacity, short circuit here
+ if (capacity <= 0) {
+ return of(accumulatedResult);
+ }
+ return from(
+ this.executClaimAvailableTasks({
+ claimOwnershipUntil,
+ claimTasksById: claimTasksById.splice(0, capacity),
+ size: capacity,
+ taskTypes: isLimited(batch) ? new Set([batch.tasksTypes]) : batch.tasksTypes,
+ }).then((result) => {
+ const { stats, docs } = accumulateClaimOwnershipResults(accumulatedResult, result);
+ stats.tasksConflicted = correctVersionConflictsForContinuation(
+ stats.tasksClaimed,
+ stats.tasksConflicted,
+ initialCapacity
+ );
+ return { stats, docs, timing: stopTaskTimer() };
+ })
+ );
+ },
+ // initialise the accumulation with no results
+ accumulateClaimOwnershipResults(),
+ // only run one batch at a time
+ 1
+ )
+ );
+ }
+
+ private executClaimAvailableTasks = async ({
+ claimOwnershipUntil,
+ claimTasksById = [],
+ size,
+ taskTypes,
+ }: OwnershipClaimingOpts): Promise => {
+ const claimTasksByIdWithRawIds = this.taskStore.convertToSavedObjectIds(claimTasksById);
+ const {
+ updated: tasksUpdated,
+ version_conflicts: tasksConflicted,
+ } = await this.markAvailableTasksAsClaimed({
+ claimOwnershipUntil,
+ claimTasksById: claimTasksByIdWithRawIds,
+ size,
+ taskTypes,
+ });
+
+ const docs =
+ tasksUpdated > 0
+ ? await this.sweepForClaimedTasks(claimTasksByIdWithRawIds, taskTypes, size)
+ : [];
+
+ const [documentsReturnedById, documentsClaimedBySchedule] = partition(docs, (doc) =>
+ claimTasksById.includes(doc.id)
+ );
+
+ const [documentsClaimedById, documentsRequestedButNotClaimed] = partition(
+ documentsReturnedById,
+      // we filter the scheduled tasks down to those with status 'claiming' in the search,
+ // but we do not apply this limitation on tasks claimed by ID so that we can
+ // provide more detailed error messages when we fail to claim them
+ (doc) => doc.status === TaskStatus.Claiming
+ );
+
+ // count how many tasks we've claimed by ID and validate we have capacity for them to run
+ const remainingCapacityOfClaimByIdByType = mapValues(
+ // This means we take the tasks that were claimed by their ID and count them by their type
+ countBy(documentsClaimedById, (doc) => doc.taskType),
+ (count, type) => this.getCapacity(type) - count
+ );
+
+ const [documentsClaimedByIdWithinCapacity, documentsClaimedByIdOutOfCapacity] = partition(
+ documentsClaimedById,
+ (doc) => {
+ // if we've exceeded capacity, we reject this task
+ if (remainingCapacityOfClaimByIdByType[doc.taskType] < 0) {
+ // as we're rejecting this task we can inc the count so that we know
+ // to keep the next one returned by ID of the same type
+ remainingCapacityOfClaimByIdByType[doc.taskType]++;
+ return false;
+ }
+ return true;
+ }
+ );
+
+ const documentsRequestedButNotReturned = difference(
+ claimTasksById,
+ documentsReturnedById.map((doc) => doc.id)
+ );
+
+ this.emitEvents([
+ ...documentsClaimedByIdWithinCapacity.map((doc) => asTaskClaimEvent(doc.id, asOk(doc))),
+ ...documentsClaimedByIdOutOfCapacity.map((doc) =>
+ asTaskClaimEvent(
+ doc.id,
+ asErr({
+ task: some(doc),
+ errorType: TaskClaimErrorType.CLAIMED_BY_ID_OUT_OF_CAPACITY,
+ })
+ )
+ ),
+ ...documentsClaimedBySchedule.map((doc) => asTaskClaimEvent(doc.id, asOk(doc))),
+ ...documentsRequestedButNotClaimed.map((doc) =>
+ asTaskClaimEvent(
+ doc.id,
+ asErr({
+ task: some(doc),
+ errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_IN_CLAIMING_STATUS,
+ })
+ )
+ ),
+ ...documentsRequestedButNotReturned.map((id) =>
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ ),
+ ]);
+
+ const stats = {
+ tasksUpdated,
+ tasksConflicted,
+ tasksRejected: documentsClaimedByIdOutOfCapacity.length,
+ tasksClaimed: documentsClaimedByIdWithinCapacity.length + documentsClaimedBySchedule.length,
+ };
+
+ if (docs.length !== stats.tasksClaimed + stats.tasksRejected) {
+ this.logger.warn(
+ `[Task Ownership error]: ${stats.tasksClaimed} tasks were claimed by Kibana, but ${
+ docs.length
+ } task(s) were fetched (${docs.map((doc) => doc.id).join(', ')})`
+ );
+ }
+
+ return {
+ stats,
+ docs: [...documentsClaimedByIdWithinCapacity, ...documentsClaimedBySchedule],
+ };
+ };
+
+ private async markAvailableTasksAsClaimed({
+ claimOwnershipUntil,
+ claimTasksById,
+ size,
+ taskTypes,
+ }: OwnershipClaimingOpts): Promise {
+ const { taskTypesToSkip = [], taskTypesToClaim = [] } = groupBy(
+ this.definitions.getAllTypes(),
+ (type) => (taskTypes.has(type) ? 'taskTypesToClaim' : 'taskTypesToSkip')
+ );
+
+ const queryForScheduledTasks = mustBeAllOf(
+ // Either a task with idle status and runAt <= now or
+ // status running or claiming with a retryAt <= now.
+ shouldBeOneOf(IdleTaskWithExpiredRunAt, RunningOrClaimingTaskWithExpiredRetryAt)
+ );
+
+ // The documents should be sorted by runAt/retryAt, unless there are pinned
+ // tasks being queried, in which case we want to sort by score first, and then
+ // the runAt/retryAt. That way we'll get the pinned tasks first. Note that
+ // the score seems to favor newer documents rather than older documents, so
+ // if there are not pinned tasks being queried, we do NOT want to sort by score
+ // at all, just by runAt/retryAt.
+ const sort: SortOptions = [SortByRunAtAndRetryAt];
+ if (claimTasksById && claimTasksById.length) {
+ sort.unshift('_score');
+ }
+
+ const apmTrans = apm.startTransaction(`taskManager markAvailableTasksAsClaimed`, 'taskManager');
+ const result = await this.taskStore.updateByQuery(
+ asUpdateByQuery({
+ query: matchesClauses(
+ claimTasksById && claimTasksById.length
+ ? mustBeAllOf(asPinnedQuery(claimTasksById, queryForScheduledTasks))
+ : queryForScheduledTasks,
+ filterDownBy(InactiveTasks)
+ ),
+ update: updateFieldsAndMarkAsFailed(
+ {
+ ownerId: this.taskStore.taskManagerId,
+ retryAt: claimOwnershipUntil,
+ },
+ claimTasksById || [],
+ taskTypesToClaim,
+ taskTypesToSkip,
+ pick(this.taskMaxAttempts, taskTypesToClaim)
+ ),
+ sort,
+ }),
+ {
+ max_docs: size,
+ }
+ );
+
+ if (apmTrans) apmTrans.end();
+ return result;
+ }
+
+ /**
+ * Fetches tasks from the index, which are owned by the current Kibana instance
+ */
+ private async sweepForClaimedTasks(
+ claimTasksById: OwnershipClaimingOpts['claimTasksById'],
+ taskTypes: Set,
+ size: number
+ ): Promise {
+ const claimedTasksQuery = tasksClaimedByOwner(
+ this.taskStore.taskManagerId,
+ tasksOfType([...taskTypes])
+ );
+ const { docs } = await this.taskStore.fetch({
+ query:
+ claimTasksById && claimTasksById.length
+ ? asPinnedQuery(claimTasksById, claimedTasksQuery)
+ : claimedTasksQuery,
+ size,
+ sort: SortByRunAtAndRetryAt,
+ seq_no_primary_term: true,
+ });
+
+ return docs;
+ }
+}
+
+const emptyClaimOwnershipResult = () => {
+ return {
+ stats: {
+ tasksUpdated: 0,
+ tasksConflicted: 0,
+ tasksClaimed: 0,
+ tasksRejected: 0,
+ },
+ docs: [],
+ };
+};
+
+function accumulateClaimOwnershipResults(
+ prev: ClaimOwnershipResult = emptyClaimOwnershipResult(),
+ next?: ClaimOwnershipResult
+) {
+ if (next) {
+ const { stats, docs, timing } = next;
+ const res = {
+ stats: {
+ tasksUpdated: stats.tasksUpdated + prev.stats.tasksUpdated,
+ tasksConflicted: stats.tasksConflicted + prev.stats.tasksConflicted,
+ tasksClaimed: stats.tasksClaimed + prev.stats.tasksClaimed,
+ tasksRejected: stats.tasksRejected + prev.stats.tasksRejected,
+ },
+ docs,
+ timing,
+ };
+ return res;
+ }
+ return prev;
+}
+
+function isLimited(
+ batch: TaskClaimingBatch
+): batch is LimitedBatch {
+ return batch.concurrency === BatchConcurrency.Limited;
+}
+function asLimited(tasksType: string): LimitedBatch {
+ return {
+ concurrency: BatchConcurrency.Limited,
+ tasksTypes: tasksType,
+ };
+}
+function asUnlimited(tasksTypes: Set): UnlimitedBatch {
+ return {
+ concurrency: BatchConcurrency.Unlimited,
+ tasksTypes,
+ };
+}
diff --git a/x-pack/plugins/task_manager/server/task.ts b/x-pack/plugins/task_manager/server/task.ts
index 04589d696427a..4b86943ff8eca 100644
--- a/x-pack/plugins/task_manager/server/task.ts
+++ b/x-pack/plugins/task_manager/server/task.ts
@@ -127,6 +127,16 @@ export const taskDefinitionSchema = schema.object(
min: 1,
})
),
+      /**
+       * The maximum number of tasks of this type that can be run concurrently per Kibana instance.
+       * Setting this value will force Task Manager to poll for this task type separately from other task types,
+       * which can add significant load to the ES cluster, so please use this configuration only when absolutely necessary.
+       */
+ maxConcurrency: schema.maybe(
+ schema.number({
+ min: 0,
+ })
+ ),
},
{
validate({ timeout }) {
diff --git a/x-pack/plugins/task_manager/server/task_events.ts b/x-pack/plugins/task_manager/server/task_events.ts
index d3fb68aa367c1..aecf7c9a2b7e8 100644
--- a/x-pack/plugins/task_manager/server/task_events.ts
+++ b/x-pack/plugins/task_manager/server/task_events.ts
@@ -23,6 +23,12 @@ export enum TaskEventType {
TASK_MANAGER_STAT = 'TASK_MANAGER_STAT',
}
+export enum TaskClaimErrorType {
+ CLAIMED_BY_ID_OUT_OF_CAPACITY = 'CLAIMED_BY_ID_OUT_OF_CAPACITY',
+ CLAIMED_BY_ID_NOT_RETURNED = 'CLAIMED_BY_ID_NOT_RETURNED',
+ CLAIMED_BY_ID_NOT_IN_CLAIMING_STATUS = 'CLAIMED_BY_ID_NOT_IN_CLAIMING_STATUS',
+}
+
export interface TaskTiming {
start: number;
stop: number;
@@ -47,14 +53,18 @@ export interface RanTask {
export type ErroredTask = RanTask & {
error: Error;
};
+export interface ClaimTaskErr {
+ task: Option;
+ errorType: TaskClaimErrorType;
+}
export type TaskMarkRunning = TaskEvent;
export type TaskRun = TaskEvent;
-export type TaskClaim = TaskEvent>;
+export type TaskClaim = TaskEvent;
export type TaskRunRequest = TaskEvent;
export type TaskPollingCycle = TaskEvent>;
-export type TaskManagerStats = 'load' | 'pollingDelay';
+export type TaskManagerStats = 'load' | 'pollingDelay' | 'claimDuration';
export type TaskManagerStat = TaskEvent;
export type OkResultOf = EventType extends TaskEvent
@@ -92,7 +102,7 @@ export function asTaskRunEvent(
export function asTaskClaimEvent(
id: string,
- event: Result>,
+ event: Result,
timing?: TaskTiming
): TaskClaim {
return {
diff --git a/x-pack/plugins/task_manager/server/task_pool.test.ts b/x-pack/plugins/task_manager/server/task_pool.test.ts
index 6f82c477dca9e..05eb7bd1b43e1 100644
--- a/x-pack/plugins/task_manager/server/task_pool.test.ts
+++ b/x-pack/plugins/task_manager/server/task_pool.test.ts
@@ -15,6 +15,7 @@ import { asOk } from './lib/result_type';
import { SavedObjectsErrorHelpers } from '../../../../src/core/server';
import moment from 'moment';
import uuid from 'uuid';
+import { TaskRunningStage } from './task_running';
describe('TaskPool', () => {
test('occupiedWorkers are a sum of running tasks', async () => {
@@ -370,6 +371,7 @@ describe('TaskPool', () => {
cancel: async () => undefined,
markTaskAsRunning: jest.fn(async () => true),
run: mockRun(),
+ stage: TaskRunningStage.PENDING,
toString: () => `TaskType "shooooo"`,
get expiration() {
return new Date();
diff --git a/x-pack/plugins/task_manager/server/task_pool.ts b/x-pack/plugins/task_manager/server/task_pool.ts
index e30f9ef3154b2..14c0c4581a15b 100644
--- a/x-pack/plugins/task_manager/server/task_pool.ts
+++ b/x-pack/plugins/task_manager/server/task_pool.ts
@@ -25,6 +25,8 @@ interface Opts {
}
export enum TaskPoolRunResult {
+  // This means we have no Run Result because no tasks were run in this cycle
+ NoTaskWereRan = 'NoTaskWereRan',
// This means we're running all the tasks we claimed
RunningAllClaimedTasks = 'RunningAllClaimedTasks',
// This means we're running all the tasks we claimed and we're at capacity
@@ -40,7 +42,7 @@ const VERSION_CONFLICT_MESSAGE = 'Task has been claimed by another Kibana servic
*/
export class TaskPool {
private maxWorkers: number = 0;
- private running = new Set();
+ private tasksInPool = new Map();
private logger: Logger;
private load$ = new Subject();
@@ -68,7 +70,7 @@ export class TaskPool {
* Gets how many workers are currently in use.
*/
public get occupiedWorkers() {
- return this.running.size;
+ return this.tasksInPool.size;
}
/**
@@ -93,6 +95,16 @@ export class TaskPool {
return this.maxWorkers - this.occupiedWorkers;
}
+ /**
+ * Gets how many workers are currently in use by type.
+ */
+ public getOccupiedWorkersByType(type: string) {
+ return [...this.tasksInPool.values()].reduce(
+ (count, runningTask) => (runningTask.definition.type === type ? ++count : count),
+ 0
+ );
+ }
+
/**
* Attempts to run the specified list of tasks. Returns true if it was able
* to start every task in the list, false if there was not enough capacity
@@ -106,9 +118,11 @@ export class TaskPool {
if (tasksToRun.length) {
performance.mark('attemptToRun_start');
await Promise.all(
- tasksToRun.map(
- async (taskRunner) =>
- await taskRunner
+ tasksToRun
+ .filter((taskRunner) => !this.tasksInPool.has(taskRunner.id))
+ .map(async (taskRunner) => {
+ this.tasksInPool.set(taskRunner.id, taskRunner);
+ return taskRunner
.markTaskAsRunning()
.then((hasTaskBeenMarkAsRunning: boolean) =>
hasTaskBeenMarkAsRunning
@@ -118,8 +132,8 @@ export class TaskPool {
message: VERSION_CONFLICT_MESSAGE,
})
)
- .catch((err) => this.handleFailureOfMarkAsRunning(taskRunner, err))
- )
+ .catch((err) => this.handleFailureOfMarkAsRunning(taskRunner, err));
+ })
);
performance.mark('attemptToRun_stop');
@@ -139,13 +153,12 @@ export class TaskPool {
public cancelRunningTasks() {
this.logger.debug('Cancelling running tasks.');
- for (const task of this.running) {
+ for (const task of this.tasksInPool.values()) {
this.cancelTask(task);
}
}
private handleMarkAsRunning(taskRunner: TaskRunner) {
- this.running.add(taskRunner);
taskRunner
.run()
.catch((err) => {
@@ -161,26 +174,31 @@ export class TaskPool {
this.logger.warn(errorLogLine);
}
})
- .then(() => this.running.delete(taskRunner));
+ .then(() => this.tasksInPool.delete(taskRunner.id));
}
private handleFailureOfMarkAsRunning(task: TaskRunner, err: Error) {
+ this.tasksInPool.delete(task.id);
this.logger.error(`Failed to mark Task ${task.toString()} as running: ${err.message}`);
}
private cancelExpiredTasks() {
- for (const task of this.running) {
- if (task.isExpired) {
+ for (const taskRunner of this.tasksInPool.values()) {
+ if (taskRunner.isExpired) {
this.logger.warn(
- `Cancelling task ${task.toString()} as it expired at ${task.expiration.toISOString()}${
- task.startedAt
+ `Cancelling task ${taskRunner.toString()} as it expired at ${taskRunner.expiration.toISOString()}${
+ taskRunner.startedAt
? ` after running for ${durationAsString(
- moment.duration(moment(new Date()).utc().diff(task.startedAt))
+ moment.duration(moment(new Date()).utc().diff(taskRunner.startedAt))
)}`
: ``
- }${task.definition.timeout ? ` (with timeout set at ${task.definition.timeout})` : ``}.`
+ }${
+ taskRunner.definition.timeout
+ ? ` (with timeout set at ${taskRunner.definition.timeout})`
+ : ``
+ }.`
);
- this.cancelTask(task);
+ this.cancelTask(taskRunner);
}
}
}
@@ -188,7 +206,7 @@ export class TaskPool {
private async cancelTask(task: TaskRunner) {
try {
this.logger.debug(`Cancelling task ${task.toString()}.`);
- this.running.delete(task);
+ this.tasksInPool.delete(task.id);
await task.cancel();
} catch (err) {
this.logger.error(`Failed to cancel task ${task.toString()}: ${err}`);
diff --git a/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts b/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
index dff8c1f24de0a..5a36d6affe686 100644
--- a/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
+++ b/x-pack/plugins/task_manager/server/task_running/task_runner.test.ts
@@ -9,7 +9,7 @@ import _ from 'lodash';
import sinon from 'sinon';
import { secondsFromNow } from '../lib/intervals';
import { asOk, asErr } from '../lib/result_type';
-import { TaskManagerRunner, TaskRunResult } from '../task_running';
+import { TaskManagerRunner, TaskRunningStage, TaskRunResult } from '../task_running';
import { TaskEvent, asTaskRunEvent, asTaskMarkRunningEvent, TaskRun } from '../task_events';
import { ConcreteTaskInstance, TaskStatus } from '../task';
import { SavedObjectsErrorHelpers } from '../../../../../src/core/server';
@@ -17,6 +17,7 @@ import moment from 'moment';
import { TaskDefinitionRegistry, TaskTypeDictionary } from '../task_type_dictionary';
import { mockLogger } from '../test_utils';
import { throwUnrecoverableError } from './errors';
+import { taskStoreMock } from '../task_store.mock';
const minutesFromNow = (mins: number): Date => secondsFromNow(mins * 60);
@@ -29,980 +30,834 @@ beforeAll(() => {
afterAll(() => fakeTimer.restore());
describe('TaskManagerRunner', () => {
- test('provides details about the task that is running', () => {
- const { runner } = testOpts({
- instance: {
- id: 'foo',
- taskType: 'bar',
- },
- });
+ const pendingStageSetup = (opts: TestOpts) => testOpts(TaskRunningStage.PENDING, opts);
+ const readyToRunStageSetup = (opts: TestOpts) => testOpts(TaskRunningStage.READY_TO_RUN, opts);
- expect(runner.id).toEqual('foo');
- expect(runner.taskType).toEqual('bar');
- expect(runner.toString()).toEqual('bar "foo"');
- });
-
- test('queues a reattempt if the task fails', async () => {
- const initialAttempts = _.random(0, 2);
- const id = Date.now().toString();
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- params: { a: 'b' },
- state: { hey: 'there' },
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- throw new Error('Dangit!');
- },
- }),
+ describe('Pending Stage', () => {
+ test('provides details about the task that is running', async () => {
+ const { runner } = await pendingStageSetup({
+ instance: {
+ id: 'foo',
+ taskType: 'bar',
},
- },
+ });
+
+ expect(runner.id).toEqual('foo');
+ expect(runner.taskType).toEqual('bar');
+ expect(runner.toString()).toEqual('bar "foo"');
});
- await runner.run();
+ test('calculates retryAt by schedule when running a recurring task', async () => {
+ const intervalMinutes = 10;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: {
+ interval: `${intervalMinutes}m`,
+ },
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ await runner.markTaskAsRunning();
- expect(instance.id).toEqual(id);
- expect(instance.runAt.getTime()).toEqual(minutesFromNow(initialAttempts * 5).getTime());
- expect(instance.params).toEqual({ a: 'b' });
- expect(instance.state).toEqual({ hey: 'there' });
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test('reschedules tasks that have an schedule', async () => {
- const { runner, store } = testOpts({
- instance: {
- schedule: { interval: '10m' },
- status: TaskStatus.Running,
- startedAt: new Date(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- return { state: {} };
- },
- }),
- },
- },
+ expect(instance.retryAt!.getTime()).toEqual(
+ instance.startedAt!.getTime() + intervalMinutes * 60 * 1000
+ );
});
- await runner.run();
+ test('calculates retryAt by default timeout when it exceeds the schedule of a recurring task', async () => {
+ const intervalSeconds = 20;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: {
+ interval: `${intervalSeconds}s`,
+ },
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ await runner.markTaskAsRunning();
- expect(instance.runAt.getTime()).toBeGreaterThan(minutesFromNow(9).getTime());
- expect(instance.runAt.getTime()).toBeLessThanOrEqual(minutesFromNow(10).getTime());
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test('expiration returns time after which timeout will have elapsed from start', async () => {
- const now = moment();
- const { runner } = testOpts({
- instance: {
- schedule: { interval: '10m' },
- status: TaskStatus.Running,
- startedAt: now.toDate(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `1m`,
- createTaskRunner: () => ({
- async run() {
- return { state: {} };
- },
- }),
- },
- },
+ expect(instance.retryAt!.getTime()).toEqual(instance.startedAt!.getTime() + 5 * 60 * 1000);
});
- await runner.run();
-
- expect(runner.isExpired).toBe(false);
- expect(runner.expiration).toEqual(now.add(1, 'm').toDate());
- });
-
- test('runDuration returns duration which has elapsed since start', async () => {
- const now = moment().subtract(30, 's').toDate();
- const { runner } = testOpts({
- instance: {
- schedule: { interval: '10m' },
- status: TaskStatus.Running,
- startedAt: now,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `1m`,
- createTaskRunner: () => ({
- async run() {
- return { state: {} };
- },
- }),
+ test('calculates retryAt by timeout if it exceeds the schedule when running a recurring task', async () => {
+ const timeoutMinutes = 1;
+ const intervalSeconds = 20;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: {
+ interval: `${intervalSeconds}s`,
+ },
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.markTaskAsRunning();
- expect(runner.isExpired).toBe(false);
- expect(runner.startedAt).toEqual(now);
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test('reschedules tasks that return a runAt', async () => {
- const runAt = minutesFromNow(_.random(1, 10));
- const { runner, store } = testOpts({
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- return { runAt, state: {} };
- },
- }),
- },
- },
+ expect(instance.retryAt!.getTime()).toEqual(
+ instance.startedAt!.getTime() + timeoutMinutes * 60 * 1000
+ );
});
- await runner.run();
-
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWithMatch(store.update, { runAt });
- });
-
- test('reschedules tasks that return a schedule', async () => {
- const runAt = minutesFromNow(1);
- const schedule = {
- interval: '1m',
- };
- const { runner, store } = testOpts({
- instance: {
- status: TaskStatus.Running,
- startedAt: new Date(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- return { schedule, state: {} };
- },
- }),
+ test('sets startedAt, status, attempts and retryAt when claiming a task', async () => {
+ const timeoutMinutes = 1;
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(0, 2);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.markTaskAsRunning();
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWithMatch(store.update, { runAt });
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test(`doesn't reschedule recurring tasks that throw an unrecoverable error`, async () => {
- const id = _.random(1, 20).toString();
- const error = new Error('Dangit!');
- const onTaskEvent = jest.fn();
- const { runner, store, instance: originalInstance } = testOpts({
- onTaskEvent,
- instance: { id, status: TaskStatus.Running, startedAt: new Date() },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- throwUnrecoverableError(error);
- },
- }),
- },
- },
+ expect(instance.attempts).toEqual(initialAttempts + 1);
+ expect(instance.status).toBe('running');
+ expect(instance.startedAt!.getTime()).toEqual(Date.now());
+ expect(instance.retryAt!.getTime()).toEqual(
+ minutesFromNow((initialAttempts + 1) * 5).getTime() + timeoutMinutes * 60 * 1000
+ );
});
- await runner.run();
-
- const instance = store.update.args[0][0];
- expect(instance.status).toBe('failed');
-
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(
- asTaskRunEvent(
+ test('uses getRetry (returning date) to set retryAt when defined', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
id,
- asErr({
- error,
- task: originalInstance,
- result: TaskRunResult.Failed,
- })
- )
- )
- );
- expect(onTaskEvent).toHaveBeenCalledTimes(1);
- });
-
- test('tasks that return runAt override the schedule', async () => {
- const runAt = minutesFromNow(_.random(5));
- const { runner, store } = testOpts({
- instance: {
- schedule: { interval: '20m' },
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- return { runAt, state: {} };
- },
- }),
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.markTaskAsRunning();
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWithMatch(store.update, { runAt });
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
+ const instance = store.update.mock.calls[0][0];
- test('removes non-recurring tasks after they complete', async () => {
- const id = _.random(1, 20).toString();
- const { runner, store } = testOpts({
- instance: {
- id,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- return undefined;
- },
- }),
- },
- },
+ expect(instance.retryAt!.getTime()).toEqual(
+ new Date(nextRetry.getTime() + timeoutMinutes * 60 * 1000).getTime()
+ );
});
- await runner.run();
-
- sinon.assert.calledOnce(store.remove);
- sinon.assert.calledWith(store.remove, id);
- });
-
- test('cancel cancels the task runner, if it is cancellable', async () => {
- let wasCancelled = false;
- const { runner, logger } = testOpts({
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- async run() {
- const promise = new Promise((r) => setTimeout(r, 1000));
- fakeTimer.tick(1000);
- await promise;
- },
- async cancel() {
- wasCancelled = true;
- },
- }),
+ test('it returns false when markTaskAsRunning fails due to VERSION_CONFLICT_STATUS', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- const promise = runner.run();
- await Promise.resolve();
- await runner.cancel();
- await promise;
+ store.update.mockRejectedValue(
+ SavedObjectsErrorHelpers.decorateConflictError(new Error('repo error'))
+ );
- expect(wasCancelled).toBeTruthy();
- expect(logger.warn).not.toHaveBeenCalled();
- });
+ expect(await runner.markTaskAsRunning()).toEqual(false);
+ });
- test('debug logs if cancel is called on a non-cancellable task', async () => {
- const { runner, logger } = testOpts({
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ test('it throws when markTaskAsRunning fails for unexpected reasons', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- const promise = runner.run();
- await runner.cancel();
- await promise;
+ store.update.mockRejectedValue(
+ SavedObjectsErrorHelpers.createGenericNotFoundError('type', 'id')
+ );
- expect(logger.debug).toHaveBeenCalledWith(`The task bar "foo" is not cancellable.`);
- });
+ return expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
+ `[Error: Saved object [type/id] not found]`
+ );
+ });
- test('sets startedAt, status, attempts and retryAt when claiming a task', async () => {
- const timeoutMinutes = 1;
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(0, 2);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ test(`it tries to increment a task's attempts when markTaskAsRunning fails for unexpected reasons`, async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.markTaskAsRunning();
+ store.update.mockRejectedValueOnce(SavedObjectsErrorHelpers.createBadRequestError('type'));
+ store.update.mockResolvedValueOnce(
+ mockInstance({
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
+ })
+ );
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ await expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
+ `[Error: type: Bad Request]`
+ );
- expect(instance.attempts).toEqual(initialAttempts + 1);
- expect(instance.status).toBe('running');
- expect(instance.startedAt.getTime()).toEqual(Date.now());
- expect(instance.retryAt.getTime()).toEqual(
- minutesFromNow((initialAttempts + 1) * 5).getTime() + timeoutMinutes * 60 * 1000
- );
- });
+ expect(store.update).toHaveBeenCalledWith({
+ ...mockInstance({
+ id,
+ attempts: initialAttempts + 1,
+ schedule: undefined,
+ }),
+ status: TaskStatus.Idle,
+ startedAt: null,
+ retryAt: null,
+ ownerId: null,
+ });
+ });
- test('calculates retryAt by schedule when running a recurring task', async () => {
- const intervalMinutes = 10;
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(0, 2);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: {
- interval: `${intervalMinutes}m`,
+ test(`it doesn't try to increment a task's attempts when markTaskAsRunning fails for version conflict`, async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
},
- },
- });
+ });
- await runner.markTaskAsRunning();
+ store.update.mockRejectedValueOnce(
+ SavedObjectsErrorHelpers.createConflictError('type', 'id')
+ );
+ store.update.mockResolvedValueOnce(
+ mockInstance({
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
+ })
+ );
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ await expect(runner.markTaskAsRunning()).resolves.toMatchInlineSnapshot(`false`);
- expect(instance.retryAt.getTime()).toEqual(
- instance.startedAt.getTime() + intervalMinutes * 60 * 1000
- );
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ });
- test('calculates retryAt by default timout when it exceeds the schedule of a recurring task', async () => {
- const intervalSeconds = 20;
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(0, 2);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: {
- interval: `${intervalSeconds}s`,
+ test(`it doesn't try to increment a task's attempts when markTaskAsRunning fails due to Saved Object not being found`, async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- definitions: {
- bar: {
- title: 'Bar!',
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
},
- },
- });
+ });
- await runner.markTaskAsRunning();
+ store.update.mockRejectedValueOnce(
+ SavedObjectsErrorHelpers.createGenericNotFoundError('type', 'id')
+ );
+ store.update.mockResolvedValueOnce(
+ mockInstance({
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
+ })
+ );
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ await expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
+ `[Error: Saved object [type/id] not found]`
+ );
- expect(instance.retryAt.getTime()).toEqual(instance.startedAt.getTime() + 5 * 60 * 1000);
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ });
- test('calculates retryAt by timeout if it exceeds the schedule when running a recurring task', async () => {
- const timeoutMinutes = 1;
- const intervalSeconds = 20;
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(0, 2);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: {
- interval: `${intervalSeconds}s`,
+ test('uses getRetry (returning true) to set retryAt when defined', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(true);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
},
- },
- });
+ });
- await runner.markTaskAsRunning();
+ await runner.markTaskAsRunning();
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
+ const instance = store.update.mock.calls[0][0];
- expect(instance.retryAt.getTime()).toEqual(
- instance.startedAt.getTime() + timeoutMinutes * 60 * 1000
- );
- });
+ const attemptDelay = (initialAttempts + 1) * 5 * 60 * 1000;
+ const timeoutDelay = timeoutMinutes * 60 * 1000;
+ expect(instance.retryAt!.getTime()).toEqual(
+ new Date(Date.now() + attemptDelay + timeoutDelay).getTime()
+ );
+ });
- test('uses getRetry function (returning date) on error when defined', async () => {
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const id = Date.now().toString();
- const getRetryStub = sinon.stub().returns(nextRetry);
- const error = new Error('Dangit!');
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- async run() {
- throw error;
- },
- }),
+ test('uses getRetry (returning false) to set retryAt when defined', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(false);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.markTaskAsRunning();
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts, error);
- const instance = store.update.args[0][0];
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
+ const instance = store.update.mock.calls[0][0];
- expect(instance.runAt.getTime()).toEqual(nextRetry.getTime());
- });
+ expect(instance.retryAt!).toBeNull();
+ expect(instance.status).toBe('running');
+ });
- test('uses getRetry function (returning true) on error when defined', async () => {
- const initialAttempts = _.random(1, 3);
- const id = Date.now().toString();
- const getRetryStub = sinon.stub().returns(true);
- const error = new Error('Dangit!');
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- async run() {
- throw error;
- },
- }),
+ test('bypasses getRetry (returning false) of a recurring task to set retryAt when defined', async () => {
+ const id = _.random(1, 20).toString();
+ const initialAttempts = _.random(1, 3);
+ const timeoutMinutes = 1;
+ const getRetryStub = sinon.stub().returns(false);
+ const { runner, store } = await pendingStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ schedule: { interval: '1m' },
+ startedAt: new Date(),
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `${timeoutMinutes}m`,
+ getRetry: getRetryStub,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.markTaskAsRunning();
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts, error);
- const instance = store.update.args[0][0];
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.notCalled(getRetryStub);
+ const instance = store.update.mock.calls[0][0];
- const expectedRunAt = new Date(Date.now() + initialAttempts * 5 * 60 * 1000);
- expect(instance.runAt.getTime()).toEqual(expectedRunAt.getTime());
- });
+ const timeoutDelay = timeoutMinutes * 60 * 1000;
+ expect(instance.retryAt!.getTime()).toEqual(new Date(Date.now() + timeoutDelay).getTime());
+ });
- test('uses getRetry function (returning false) on error when defined', async () => {
- const initialAttempts = _.random(1, 3);
- const id = Date.now().toString();
- const getRetryStub = sinon.stub().returns(false);
- const error = new Error('Dangit!');
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- async run() {
- throw error;
+ describe('TaskEvents', () => {
+ test('emits TaskEvent when a task is marked as running', async () => {
+ const id = _.random(1, 20).toString();
+ const onTaskEvent = jest.fn();
+ const { runner, instance, store } = await pendingStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `1m`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
},
- }),
- },
- },
- });
+ },
+ });
- await runner.run();
+ store.update.mockResolvedValueOnce(instance);
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts, error);
- const instance = store.update.args[0][0];
+ await runner.markTaskAsRunning();
- expect(instance.status).toBe('failed');
- });
+ expect(onTaskEvent).toHaveBeenCalledWith(asTaskMarkRunningEvent(id, asOk(instance)));
+ });
- test('bypasses getRetry function (returning false) on error of a recurring task', async () => {
- const initialAttempts = _.random(1, 3);
- const id = Date.now().toString();
- const getRetryStub = sinon.stub().returns(false);
- const error = new Error('Dangit!');
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: { interval: '1m' },
- startedAt: new Date(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- async run() {
- throw error;
- },
- }),
- },
- },
- });
+ test('emits TaskEvent when a task fails to be marked as running', async () => {
+ expect.assertions(2);
- await runner.run();
+ const id = _.random(1, 20).toString();
+ const onTaskEvent = jest.fn();
+ const { runner, store } = await pendingStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `1m`,
+ createTaskRunner: () => ({
+ run: async () => undefined,
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- sinon.assert.notCalled(getRetryStub);
- const instance = store.update.args[0][0];
+ store.update.mockRejectedValueOnce(new Error('cant mark as running'));
- const nextIntervalDelay = 60000; // 1m
- const expectedRunAt = new Date(Date.now() + nextIntervalDelay);
- expect(instance.runAt.getTime()).toEqual(expectedRunAt.getTime());
+ try {
+ await runner.markTaskAsRunning();
+ } catch (err) {
+ expect(onTaskEvent).toHaveBeenCalledWith(asTaskMarkRunningEvent(id, asErr(err)));
+ }
+ expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ });
+ });
});
- test('uses getRetry (returning date) to set retryAt when defined', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ describe('Ready To Run Stage', () => {
+ test('queues a reattempt if the task fails', async () => {
+ const initialAttempts = _.random(0, 2);
+ const id = Date.now().toString();
+ const { runner, store } = await readyToRunStageSetup({
+ instance: {
+ id,
+ attempts: initialAttempts,
+ params: { a: 'b' },
+ state: { hey: 'there' },
},
- },
- });
-
- await runner.markTaskAsRunning();
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ throw new Error('Dangit!');
+ },
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
- const instance = store.update.args[0][0];
+ await runner.run();
- expect(instance.retryAt.getTime()).toEqual(
- new Date(nextRetry.getTime() + timeoutMinutes * 60 * 1000).getTime()
- );
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test('it returns false when markTaskAsRunning fails due to VERSION_CONFLICT_STATUS', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(instance.id).toEqual(id);
+ expect(instance.runAt.getTime()).toEqual(minutesFromNow(initialAttempts * 5).getTime());
+ expect(instance.params).toEqual({ a: 'b' });
+ expect(instance.state).toEqual({ hey: 'there' });
});
- store.update = sinon
- .stub()
- .throws(SavedObjectsErrorHelpers.decorateConflictError(new Error('repo error')));
-
- expect(await runner.markTaskAsRunning()).toEqual(false);
- });
-
- test('it throw when markTaskAsRunning fails for unexpected reasons', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ test('reschedules tasks that have an schedule', async () => {
+ const { runner, store } = await readyToRunStageSetup({
+ instance: {
+ schedule: { interval: '10m' },
+ status: TaskStatus.Running,
+ startedAt: new Date(),
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { state: {} };
+ },
+ }),
+ },
+ },
+ });
- store.update = sinon
- .stub()
- .throws(SavedObjectsErrorHelpers.createGenericNotFoundError('type', 'id'));
+ await runner.run();
- return expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
- `[Error: Saved object [type/id] not found]`
- );
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
- test(`it tries to increment a task's attempts when markTaskAsRunning fails for unexpected reasons`, async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(instance.runAt.getTime()).toBeGreaterThan(minutesFromNow(9).getTime());
+ expect(instance.runAt.getTime()).toBeLessThanOrEqual(minutesFromNow(10).getTime());
});
- store.update = sinon.stub();
- store.update.onFirstCall().throws(SavedObjectsErrorHelpers.createBadRequestError('type'));
- store.update.onSecondCall().resolves();
+ test('expiration returns time after which timeout will have elapsed from start', async () => {
+ const now = moment();
+ const { runner } = await readyToRunStageSetup({
+ instance: {
+ schedule: { interval: '10m' },
+ status: TaskStatus.Running,
+ startedAt: now.toDate(),
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `1m`,
+ createTaskRunner: () => ({
+ async run() {
+ return { state: {} };
+ },
+ }),
+ },
+ },
+ });
- await expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
- `[Error: type: Bad Request]`
- );
+ await runner.run();
- sinon.assert.calledWith(store.update, {
- ...mockInstance({
- id,
- attempts: initialAttempts + 1,
- schedule: undefined,
- }),
- status: TaskStatus.Idle,
- startedAt: null,
- retryAt: null,
- ownerId: null,
+ expect(runner.isExpired).toBe(false);
+ expect(runner.expiration).toEqual(now.add(1, 'm').toDate());
});
- });
- test(`it doesnt try to increment a task's attempts when markTaskAsRunning fails for version conflict`, async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
+ test('runDuration returns duration which has elapsed since start', async () => {
+ const now = moment().subtract(30, 's').toDate();
+ const { runner } = await readyToRunStageSetup({
+ instance: {
+ schedule: { interval: '10m' },
+ status: TaskStatus.Running,
+ startedAt: now,
},
- },
- });
-
- store.update = sinon.stub();
- store.update.onFirstCall().throws(SavedObjectsErrorHelpers.createConflictError('type', 'id'));
- store.update.onSecondCall().resolves();
-
- await expect(runner.markTaskAsRunning()).resolves.toMatchInlineSnapshot(`false`);
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ timeout: `1m`,
+ createTaskRunner: () => ({
+ async run() {
+ return { state: {} };
+ },
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- });
+ await runner.run();
- test(`it doesnt try to increment a task's attempts when markTaskAsRunning fails due to Saved Object not being found`, async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(nextRetry);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(runner.isExpired).toBe(false);
+ expect(runner.startedAt).toEqual(now);
});
- store.update = sinon.stub();
- store.update
- .onFirstCall()
- .throws(SavedObjectsErrorHelpers.createGenericNotFoundError('type', 'id'));
- store.update.onSecondCall().resolves();
-
- await expect(runner.markTaskAsRunning()).rejects.toMatchInlineSnapshot(
- `[Error: Saved object [type/id] not found]`
- );
+ test('reschedules tasks that return a runAt', async () => {
+ const runAt = minutesFromNow(_.random(1, 10));
+ const { runner, store } = await readyToRunStageSetup({
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { runAt, state: {} };
+ },
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- });
+ await runner.run();
- test('uses getRetry (returning true) to set retryAt when defined', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(true);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(store.update).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledWith(expect.objectContaining({ runAt }));
});
- await runner.markTaskAsRunning();
-
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
- const instance = store.update.args[0][0];
+ test('reschedules tasks that return a schedule', async () => {
+ const runAt = minutesFromNow(1);
+ const schedule = {
+ interval: '1m',
+ };
+ const { runner, store } = await readyToRunStageSetup({
+ instance: {
+ status: TaskStatus.Running,
+ startedAt: new Date(),
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { schedule, state: {} };
+ },
+ }),
+ },
+ },
+ });
- const attemptDelay = (initialAttempts + 1) * 5 * 60 * 1000;
- const timeoutDelay = timeoutMinutes * 60 * 1000;
- expect(instance.retryAt.getTime()).toEqual(
- new Date(Date.now() + attemptDelay + timeoutDelay).getTime()
- );
- });
+ await runner.run();
- test('uses getRetry (returning false) to set retryAt when defined', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(false);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(store.update).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledWith(expect.objectContaining({ runAt }));
});
- await runner.markTaskAsRunning();
+ test(`doesn't reschedule recurring tasks that throw an unrecoverable error`, async () => {
+ const id = _.random(1, 20).toString();
+ const error = new Error('Dangit!');
+ const onTaskEvent = jest.fn();
+ const { runner, store, instance: originalInstance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: { id, status: TaskStatus.Running, startedAt: new Date() },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ throwUnrecoverableError(error);
+ },
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- sinon.assert.calledWith(getRetryStub, initialAttempts + 1);
- const instance = store.update.args[0][0];
+ await runner.run();
- expect(instance.retryAt).toBeNull();
- expect(instance.status).toBe('running');
- });
+ const instance = store.update.mock.calls[0][0];
+ expect(instance.status).toBe('failed');
- test('bypasses getRetry (returning false) of a recurring task to set retryAt when defined', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = _.random(1, 3);
- const timeoutMinutes = 1;
- const getRetryStub = sinon.stub().returns(false);
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: { interval: '1m' },
- startedAt: new Date(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- timeout: `${timeoutMinutes}m`,
- getRetry: getRetryStub,
- createTaskRunner: () => ({
- run: async () => undefined,
- }),
- },
- },
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(
+ asTaskRunEvent(
+ id,
+ asErr({
+ error,
+ task: originalInstance,
+ result: TaskRunResult.Failed,
+ })
+ )
+ )
+ );
+ expect(onTaskEvent).toHaveBeenCalledTimes(1);
});
- await runner.markTaskAsRunning();
+ test('tasks that return runAt override the schedule', async () => {
+ const runAt = minutesFromNow(_.random(5));
+ const { runner, store } = await readyToRunStageSetup({
+ instance: {
+ schedule: { interval: '20m' },
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { runAt, state: {} };
+ },
+ }),
+ },
+ },
+ });
- sinon.assert.calledOnce(store.update);
- sinon.assert.notCalled(getRetryStub);
- const instance = store.update.args[0][0];
+ await runner.run();
- const timeoutDelay = timeoutMinutes * 60 * 1000;
- expect(instance.retryAt.getTime()).toEqual(new Date(Date.now() + timeoutDelay).getTime());
- });
+ expect(store.update).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledWith(expect.objectContaining({ runAt }));
+ });
- test('Fails non-recurring task when maxAttempts reached', async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = 3;
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: undefined,
- },
- definitions: {
- bar: {
- title: 'Bar!',
- maxAttempts: 3,
- createTaskRunner: () => ({
- run: async () => {
- throw new Error();
- },
- }),
+ test('removes non-recurring tasks after they complete', async () => {
+ const id = _.random(1, 20).toString();
+ const { runner, store } = await readyToRunStageSetup({
+ instance: {
+ id,
+ schedule: undefined,
},
- },
- });
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return undefined;
+ },
+ }),
+ },
+ },
+ });
- await runner.run();
+ await runner.run();
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
- expect(instance.attempts).toEqual(3);
- expect(instance.status).toEqual('failed');
- expect(instance.retryAt).toBeNull();
- expect(instance.runAt.getTime()).toBeLessThanOrEqual(Date.now());
- });
+ expect(store.remove).toHaveBeenCalledTimes(1);
+ expect(store.remove).toHaveBeenCalledWith(id);
+ });
- test(`Doesn't fail recurring tasks when maxAttempts reached`, async () => {
- const id = _.random(1, 20).toString();
- const initialAttempts = 3;
- const intervalSeconds = 10;
- const { runner, store } = testOpts({
- instance: {
- id,
- attempts: initialAttempts,
- schedule: { interval: `${intervalSeconds}s` },
- startedAt: new Date(),
- },
- definitions: {
- bar: {
- title: 'Bar!',
- maxAttempts: 3,
- createTaskRunner: () => ({
- run: async () => {
- throw new Error();
- },
- }),
+ test('cancel cancels the task runner, if it is cancellable', async () => {
+ let wasCancelled = false;
+ const { runner, logger } = await readyToRunStageSetup({
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ const promise = new Promise((r) => setTimeout(r, 1000));
+ fakeTimer.tick(1000);
+ await promise;
+ },
+ async cancel() {
+ wasCancelled = true;
+ },
+ }),
+ },
},
- },
- });
+ });
- await runner.run();
+ const promise = runner.run();
+ await Promise.resolve();
+ await runner.cancel();
+ await promise;
- sinon.assert.calledOnce(store.update);
- const instance = store.update.args[0][0];
- expect(instance.attempts).toEqual(3);
- expect(instance.status).toEqual('idle');
- expect(instance.runAt.getTime()).toEqual(
- new Date(Date.now() + intervalSeconds * 1000).getTime()
- );
- });
+ expect(wasCancelled).toBeTruthy();
+ expect(logger.warn).not.toHaveBeenCalled();
+ });
- describe('TaskEvents', () => {
- test('emits TaskEvent when a task is marked as running', async () => {
- const id = _.random(1, 20).toString();
- const onTaskEvent = jest.fn();
- const { runner, instance, store } = testOpts({
- onTaskEvent,
- instance: {
- id,
- },
+ test('debug logs if cancel is called on a non-cancellable task', async () => {
+ const { runner, logger } = await readyToRunStageSetup({
definitions: {
bar: {
title: 'Bar!',
- timeout: `1m`,
createTaskRunner: () => ({
run: async () => undefined,
}),
@@ -1010,58 +865,63 @@ describe('TaskManagerRunner', () => {
},
});
- store.update.returns(instance);
+ const promise = runner.run();
+ await runner.cancel();
+ await promise;
- await runner.markTaskAsRunning();
-
- expect(onTaskEvent).toHaveBeenCalledWith(asTaskMarkRunningEvent(id, asOk(instance)));
+ expect(logger.debug).toHaveBeenCalledWith(`The task bar "foo" is not cancellable.`);
});
- test('emits TaskEvent when a task fails to be marked as running', async () => {
- expect.assertions(2);
-
- const id = _.random(1, 20).toString();
- const onTaskEvent = jest.fn();
- const { runner, store } = testOpts({
- onTaskEvent,
+ test('uses getRetry function (returning date) on error when defined', async () => {
+ const initialAttempts = _.random(1, 3);
+ const nextRetry = new Date(Date.now() + _.random(15, 100) * 1000);
+ const id = Date.now().toString();
+ const getRetryStub = sinon.stub().returns(nextRetry);
+ const error = new Error('Dangit!');
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
+ attempts: initialAttempts,
},
definitions: {
bar: {
title: 'Bar!',
- timeout: `1m`,
+ getRetry: getRetryStub,
createTaskRunner: () => ({
- run: async () => undefined,
+ async run() {
+ throw error;
+ },
}),
},
},
});
- store.update.throws(new Error('cant mark as running'));
+ await runner.run();
- try {
- await runner.markTaskAsRunning();
- } catch (err) {
- expect(onTaskEvent).toHaveBeenCalledWith(asTaskMarkRunningEvent(id, asErr(err)));
- }
- expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts, error);
+ const instance = store.update.mock.calls[0][0];
+
+ expect(instance.runAt.getTime()).toEqual(nextRetry.getTime());
});
- test('emits TaskEvent when a task is run successfully', async () => {
- const id = _.random(1, 20).toString();
- const onTaskEvent = jest.fn();
- const { runner, instance } = testOpts({
- onTaskEvent,
+ test('uses getRetry function (returning true) on error when defined', async () => {
+ const initialAttempts = _.random(1, 3);
+ const id = Date.now().toString();
+ const getRetryStub = sinon.stub().returns(true);
+ const error = new Error('Dangit!');
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
+ attempts: initialAttempts,
},
definitions: {
bar: {
title: 'Bar!',
+ getRetry: getRetryStub,
createTaskRunner: () => ({
async run() {
- return { state: {} };
+ throw error;
},
}),
},
@@ -1070,27 +930,31 @@ describe('TaskManagerRunner', () => {
await runner.run();
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(asTaskRunEvent(id, asOk({ task: instance, result: TaskRunResult.Success })))
- );
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts, error);
+ const instance = store.update.mock.calls[0][0];
+
+ const expectedRunAt = new Date(Date.now() + initialAttempts * 5 * 60 * 1000);
+ expect(instance.runAt.getTime()).toEqual(expectedRunAt.getTime());
});
- test('emits TaskEvent when a recurring task is run successfully', async () => {
- const id = _.random(1, 20).toString();
- const runAt = minutesFromNow(_.random(5));
- const onTaskEvent = jest.fn();
- const { runner, instance } = testOpts({
- onTaskEvent,
+ test('uses getRetry function (returning false) on error when defined', async () => {
+ const initialAttempts = _.random(1, 3);
+ const id = Date.now().toString();
+ const getRetryStub = sinon.stub().returns(false);
+ const error = new Error('Dangit!');
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
- schedule: { interval: '1m' },
+ attempts: initialAttempts,
},
definitions: {
bar: {
title: 'Bar!',
+ getRetry: getRetryStub,
createTaskRunner: () => ({
async run() {
- return { runAt, state: {} };
+ throw error;
},
}),
},
@@ -1099,23 +963,29 @@ describe('TaskManagerRunner', () => {
await runner.run();
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(asTaskRunEvent(id, asOk({ task: instance, result: TaskRunResult.Success })))
- );
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.calledWith(getRetryStub, initialAttempts, error);
+ const instance = store.update.mock.calls[0][0];
+
+ expect(instance.status).toBe('failed');
});
- test('emits TaskEvent when a task run throws an error', async () => {
- const id = _.random(1, 20).toString();
+ test('bypasses getRetry function (returning false) on error of a recurring task', async () => {
+ const initialAttempts = _.random(1, 3);
+ const id = Date.now().toString();
+ const getRetryStub = sinon.stub().returns(false);
const error = new Error('Dangit!');
- const onTaskEvent = jest.fn();
- const { runner, instance } = testOpts({
- onTaskEvent,
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
+ attempts: initialAttempts,
+ schedule: { interval: '1m' },
+ startedAt: new Date(),
},
definitions: {
bar: {
title: 'Bar!',
+ getRetry: getRetryStub,
createTaskRunner: () => ({
async run() {
throw error;
@@ -1124,33 +994,34 @@ describe('TaskManagerRunner', () => {
},
},
});
+
await runner.run();
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(
- asTaskRunEvent(id, asErr({ error, task: instance, result: TaskRunResult.RetryScheduled }))
- )
- );
- expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledTimes(1);
+ sinon.assert.notCalled(getRetryStub);
+ const instance = store.update.mock.calls[0][0];
+
+ const nextIntervalDelay = 60000; // 1m
+ const expectedRunAt = new Date(Date.now() + nextIntervalDelay);
+ expect(instance.runAt.getTime()).toEqual(expectedRunAt.getTime());
});
- test('emits TaskEvent when a task run returns an error', async () => {
+ test('Fails non-recurring task when maxAttempts reached', async () => {
const id = _.random(1, 20).toString();
- const error = new Error('Dangit!');
- const onTaskEvent = jest.fn();
- const { runner, instance } = testOpts({
- onTaskEvent,
+ const initialAttempts = 3;
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
- schedule: { interval: '1m' },
- startedAt: new Date(),
+ attempts: initialAttempts,
+ schedule: undefined,
},
definitions: {
bar: {
title: 'Bar!',
+ maxAttempts: 3,
createTaskRunner: () => ({
- async run() {
- return { error, state: {} };
+ run: async () => {
+ throw new Error();
},
}),
},
@@ -1159,31 +1030,32 @@ describe('TaskManagerRunner', () => {
await runner.run();
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(
- asTaskRunEvent(id, asErr({ error, task: instance, result: TaskRunResult.RetryScheduled }))
- )
- );
- expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
+ expect(instance.attempts).toEqual(3);
+ expect(instance.status).toEqual('failed');
+ expect(instance.retryAt!).toBeNull();
+ expect(instance.runAt.getTime()).toBeLessThanOrEqual(Date.now());
});
- test('emits TaskEvent when a task returns an error and is marked as failed', async () => {
+ test(`Doesn't fail recurring tasks when maxAttempts reached`, async () => {
const id = _.random(1, 20).toString();
- const error = new Error('Dangit!');
- const onTaskEvent = jest.fn();
- const { runner, store, instance: originalInstance } = testOpts({
- onTaskEvent,
+ const initialAttempts = 3;
+ const intervalSeconds = 10;
+ const { runner, store } = await readyToRunStageSetup({
instance: {
id,
+ attempts: initialAttempts,
+ schedule: { interval: `${intervalSeconds}s` },
startedAt: new Date(),
},
definitions: {
bar: {
title: 'Bar!',
- getRetry: () => false,
+ maxAttempts: 3,
createTaskRunner: () => ({
- async run() {
- return { error, state: {} };
+ run: async () => {
+ throw new Error();
},
}),
},
@@ -1192,29 +1064,190 @@ describe('TaskManagerRunner', () => {
await runner.run();
- const instance = store.update.args[0][0];
- expect(instance.status).toBe('failed');
+ expect(store.update).toHaveBeenCalledTimes(1);
+ const instance = store.update.mock.calls[0][0];
+ expect(instance.attempts).toEqual(3);
+ expect(instance.status).toEqual('idle');
+ expect(instance.runAt.getTime()).toEqual(
+ new Date(Date.now() + intervalSeconds * 1000).getTime()
+ );
+ });
- expect(onTaskEvent).toHaveBeenCalledWith(
- withAnyTiming(
- asTaskRunEvent(
+ describe('TaskEvents', () => {
+ test('emits TaskEvent when a task is run successfully', async () => {
+ const id = _.random(1, 20).toString();
+ const onTaskEvent = jest.fn();
+ const { runner, instance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: {
id,
- asErr({
- error,
- task: originalInstance,
- result: TaskRunResult.Failed,
- })
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { state: {} };
+ },
+ }),
+ },
+ },
+ });
+
+ await runner.run();
+
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(asTaskRunEvent(id, asOk({ task: instance, result: TaskRunResult.Success })))
+ );
+ });
+
+ test('emits TaskEvent when a recurring task is run successfully', async () => {
+ const id = _.random(1, 20).toString();
+ const runAt = minutesFromNow(_.random(5));
+ const onTaskEvent = jest.fn();
+ const { runner, instance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ schedule: { interval: '1m' },
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { runAt, state: {} };
+ },
+ }),
+ },
+ },
+ });
+
+ await runner.run();
+
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(asTaskRunEvent(id, asOk({ task: instance, result: TaskRunResult.Success })))
+ );
+ });
+
+ test('emits TaskEvent when a task run throws an error', async () => {
+ const id = _.random(1, 20).toString();
+ const error = new Error('Dangit!');
+ const onTaskEvent = jest.fn();
+ const { runner, instance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ throw error;
+ },
+ }),
+ },
+ },
+ });
+ await runner.run();
+
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(
+ asTaskRunEvent(
+ id,
+ asErr({ error, task: instance, result: TaskRunResult.RetryScheduled })
+ )
)
- )
- );
- expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ );
+ expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ });
+
+ test('emits TaskEvent when a task run returns an error', async () => {
+ const id = _.random(1, 20).toString();
+ const error = new Error('Dangit!');
+ const onTaskEvent = jest.fn();
+ const { runner, instance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ schedule: { interval: '1m' },
+ startedAt: new Date(),
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ createTaskRunner: () => ({
+ async run() {
+ return { error, state: {} };
+ },
+ }),
+ },
+ },
+ });
+
+ await runner.run();
+
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(
+ asTaskRunEvent(
+ id,
+ asErr({ error, task: instance, result: TaskRunResult.RetryScheduled })
+ )
+ )
+ );
+ expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ });
+
+ test('emits TaskEvent when a task returns an error and is marked as failed', async () => {
+ const id = _.random(1, 20).toString();
+ const error = new Error('Dangit!');
+ const onTaskEvent = jest.fn();
+ const { runner, store, instance: originalInstance } = await readyToRunStageSetup({
+ onTaskEvent,
+ instance: {
+ id,
+ startedAt: new Date(),
+ },
+ definitions: {
+ bar: {
+ title: 'Bar!',
+ getRetry: () => false,
+ createTaskRunner: () => ({
+ async run() {
+ return { error, state: {} };
+ },
+ }),
+ },
+ },
+ });
+
+ await runner.run();
+
+ const instance = store.update.mock.calls[0][0];
+ expect(instance.status).toBe('failed');
+
+ expect(onTaskEvent).toHaveBeenCalledWith(
+ withAnyTiming(
+ asTaskRunEvent(
+ id,
+ asErr({
+ error,
+ task: originalInstance,
+ result: TaskRunResult.Failed,
+ })
+ )
+ )
+ );
+ expect(onTaskEvent).toHaveBeenCalledTimes(1);
+ });
});
});
interface TestOpts {
instance?: Partial;
definitions?: TaskDefinitionRegistry;
- onTaskEvent?: (event: TaskEvent) => void;
+ onTaskEvent?: jest.Mock<(event: TaskEvent) => void>;
}
function withAnyTiming(taskRun: TaskRun) {
@@ -1247,20 +1280,16 @@ describe('TaskManagerRunner', () => {
);
}
- function testOpts(opts: TestOpts) {
+ async function testOpts(stage: TaskRunningStage, opts: TestOpts) {
const callCluster = sinon.stub();
const createTaskRunner = sinon.stub();
const logger = mockLogger();
const instance = mockInstance(opts.instance);
- const store = {
- update: sinon.stub(),
- remove: sinon.stub(),
- maxAttempts: 5,
- };
+ const store = taskStoreMock.create();
- store.update.returns(instance);
+ store.update.mockResolvedValue(instance);
const definitions = new TaskTypeDictionary(logger);
definitions.registerTaskDefinitions({
@@ -1274,6 +1303,7 @@ describe('TaskManagerRunner', () => {
}
const runner = new TaskManagerRunner({
+ defaultMaxAttempts: 5,
beforeRun: (context) => Promise.resolve(context),
beforeMarkRunning: (context) => Promise.resolve(context),
logger,
@@ -1283,6 +1313,15 @@ describe('TaskManagerRunner', () => {
onTaskEvent: opts.onTaskEvent,
});
+ if (stage === TaskRunningStage.READY_TO_RUN) {
+ await runner.markTaskAsRunning();
+ // as we're testing the ReadyToRun stage specifically, clear mocks cakked by setup
+ store.update.mockClear();
+ if (opts.onTaskEvent) {
+ opts.onTaskEvent.mockClear();
+ }
+ }
+
return {
callCluster,
createTaskRunner,
diff --git a/x-pack/plugins/task_manager/server/task_running/task_runner.ts b/x-pack/plugins/task_manager/server/task_running/task_runner.ts
index ad5a2e11409ec..8e061eae46028 100644
--- a/x-pack/plugins/task_manager/server/task_running/task_runner.ts
+++ b/x-pack/plugins/task_manager/server/task_running/task_runner.ts
@@ -63,11 +63,22 @@ export interface TaskRunner {
markTaskAsRunning: () => Promise;
run: () => Promise>;
id: string;
+ stage: string;
toString: () => string;
}
+export enum TaskRunningStage {
+ PENDING = 'PENDING',
+ READY_TO_RUN = 'READY_TO_RUN',
+ RAN = 'RAN',
+}
+export interface TaskRunning {
+ timestamp: Date;
+ stage: Stage;
+ task: Instance;
+}
+
export interface Updatable {
- readonly maxAttempts: number;
update(doc: ConcreteTaskInstance): Promise;
remove(id: string): Promise;
}
@@ -78,6 +89,7 @@ type Opts = {
instance: ConcreteTaskInstance;
store: Updatable;
onTaskEvent?: (event: TaskRun | TaskMarkRunning) => void;
+ defaultMaxAttempts: number;
} & Pick;
export enum TaskRunResult {
@@ -91,6 +103,16 @@ export enum TaskRunResult {
Failed = 'Failed',
}
+// A ConcreteTaskInstance which we *know* has a `startedAt` Date on it
+type ConcreteTaskInstanceWithStartedAt = ConcreteTaskInstance & { startedAt: Date };
+
+// The three possible stages for a Task Runner - Pending -> ReadyToRun -> Ran
+type PendingTask = TaskRunning;
+type ReadyToRunTask = TaskRunning;
+type RanTask = TaskRunning;
+
+type TaskRunningInstance = PendingTask | ReadyToRunTask | RanTask;
+
/**
* Runs a background task, ensures that errors are properly handled,
* allows for cancellation.
@@ -101,13 +123,14 @@ export enum TaskRunResult {
*/
export class TaskManagerRunner implements TaskRunner {
private task?: CancellableTask;
- private instance: ConcreteTaskInstance;
+ private instance: TaskRunningInstance;
private definitions: TaskTypeDictionary;
private logger: Logger;
private bufferedTaskStore: Updatable;
private beforeRun: Middleware['beforeRun'];
private beforeMarkRunning: Middleware['beforeMarkRunning'];
private onTaskEvent: (event: TaskRun | TaskMarkRunning) => void;
+ private defaultMaxAttempts: number;
/**
* Creates an instance of TaskManagerRunner.
@@ -126,29 +149,38 @@ export class TaskManagerRunner implements TaskRunner {
store,
beforeRun,
beforeMarkRunning,
+ defaultMaxAttempts,
onTaskEvent = identity,
}: Opts) {
- this.instance = sanitizeInstance(instance);
+ this.instance = asPending(sanitizeInstance(instance));
this.definitions = definitions;
this.logger = logger;
this.bufferedTaskStore = store;
this.beforeRun = beforeRun;
this.beforeMarkRunning = beforeMarkRunning;
this.onTaskEvent = onTaskEvent;
+ this.defaultMaxAttempts = defaultMaxAttempts;
}
/**
* Gets the id of this task instance.
*/
public get id() {
- return this.instance.id;
+ return this.instance.task.id;
}
/**
* Gets the task type of this task instance.
*/
public get taskType() {
- return this.instance.taskType;
+ return this.instance.task.taskType;
+ }
+
+ /**
+ * Get the stage this TaskRunner is at
+ */
+ public get stage() {
+ return this.instance.stage;
}
/**
@@ -162,14 +194,21 @@ export class TaskManagerRunner implements TaskRunner {
* Gets the time at which this task will expire.
*/
public get expiration() {
- return intervalFromDate(this.instance.startedAt!, this.definition.timeout)!;
+ return intervalFromDate(
+ // if the task is running, use it's started at, otherwise use the timestamp at
+ // which it was last updated
+ // this allows us to catch tasks that remain in Pending/Finalizing without being
+ // cleaned up
+ isReadyToRun(this.instance) ? this.instance.task.startedAt : this.instance.timestamp,
+ this.definition.timeout
+ )!;
}
/**
* Gets the duration of the current task run
*/
public get startedAt() {
- return this.instance.startedAt;
+ return this.instance.task.startedAt;
}
/**
@@ -195,9 +234,16 @@ export class TaskManagerRunner implements TaskRunner {
* @returns {Promise>}
*/
public async run(): Promise> {
+ if (!isReadyToRun(this.instance)) {
+ throw new Error(
+ `Running task ${this} failed as it ${
+ isPending(this.instance) ? `isn't ready to be ran` : `has already been ran`
+ }`
+ );
+ }
this.logger.debug(`Running task ${this}`);
const modifiedContext = await this.beforeRun({
- taskInstance: this.instance,
+ taskInstance: this.instance.task,
});
const stopTaskTimer = startTaskTimer();
@@ -230,10 +276,16 @@ export class TaskManagerRunner implements TaskRunner {
* @returns {Promise}
*/
public async markTaskAsRunning(): Promise {
+ if (!isPending(this.instance)) {
+ throw new Error(
+ `Marking task ${this} as running has failed as it ${
+ isReadyToRun(this.instance) ? `is already running` : `has already been ran`
+ }`
+ );
+ }
performance.mark('markTaskAsRunning_start');
const apmTrans = apm.startTransaction(`taskManager markTaskAsRunning`, 'taskManager');
-
apmTrans?.addLabels({
taskType: this.taskType,
});
@@ -241,7 +293,7 @@ export class TaskManagerRunner implements TaskRunner {
const now = new Date();
try {
const { taskInstance } = await this.beforeMarkRunning({
- taskInstance: this.instance,
+ taskInstance: this.instance.task,
});
const attempts = taskInstance.attempts + 1;
@@ -258,22 +310,29 @@ export class TaskManagerRunner implements TaskRunner {
);
}
- this.instance = await this.bufferedTaskStore.update({
- ...taskInstance,
- status: TaskStatus.Running,
- startedAt: now,
- attempts,
- retryAt:
- (this.instance.schedule
- ? maxIntervalFromDate(now, this.instance.schedule!.interval, this.definition.timeout)
- : this.getRetryDelay({
- attempts,
- // Fake an error. This allows retry logic when tasks keep timing out
- // and lets us set a proper "retryAt" value each time.
- error: new Error('Task timeout'),
- addDuration: this.definition.timeout,
- })) ?? null,
- });
+ this.instance = asReadyToRun(
+ (await this.bufferedTaskStore.update({
+ ...taskInstance,
+ status: TaskStatus.Running,
+ startedAt: now,
+ attempts,
+ retryAt:
+ (this.instance.task.schedule
+ ? maxIntervalFromDate(
+ now,
+ this.instance.task.schedule.interval,
+ this.definition.timeout
+ )
+ : this.getRetryDelay({
+ attempts,
+ // Fake an error. This allows retry logic when tasks keep timing out
+ // and lets us set a proper "retryAt" value each time.
+ error: new Error('Task timeout'),
+ addDuration: this.definition.timeout,
+ })) ?? null,
+ // This is a safe convertion as we're setting the startAt above
+ })) as ConcreteTaskInstanceWithStartedAt
+ );
const timeUntilClaimExpiresAfterUpdate = howManyMsUntilOwnershipClaimExpires(
ownershipClaimedUntil
@@ -288,7 +347,7 @@ export class TaskManagerRunner implements TaskRunner {
if (apmTrans) apmTrans.end('success');
performanceStopMarkingTaskAsRunning();
- this.onTaskEvent(asTaskMarkRunningEvent(this.id, asOk(this.instance)));
+ this.onTaskEvent(asTaskMarkRunningEvent(this.id, asOk(this.instance.task)));
return true;
} catch (error) {
if (apmTrans) apmTrans.end('failure');
@@ -299,7 +358,7 @@ export class TaskManagerRunner implements TaskRunner {
// try to release claim as an unknown failure prevented us from marking as running
mapErr((errReleaseClaim: Error) => {
this.logger.error(
- `[Task Runner] Task ${this.instance.id} failed to release claim after failure: ${errReleaseClaim}`
+ `[Task Runner] Task ${this.id} failed to release claim after failure: ${errReleaseClaim}`
);
}, await this.releaseClaimAndIncrementAttempts());
}
@@ -336,9 +395,9 @@ export class TaskManagerRunner implements TaskRunner {
private async releaseClaimAndIncrementAttempts(): Promise> {
return promiseResult(
this.bufferedTaskStore.update({
- ...this.instance,
+ ...this.instance.task,
status: TaskStatus.Idle,
- attempts: this.instance.attempts + 1,
+ attempts: this.instance.task.attempts + 1,
startedAt: null,
retryAt: null,
ownerId: null,
@@ -347,12 +406,12 @@ export class TaskManagerRunner implements TaskRunner {
}
private shouldTryToScheduleRetry(): boolean {
- if (this.instance.schedule) {
+ if (this.instance.task.schedule) {
return true;
}
- const maxAttempts = this.definition.maxAttempts || this.bufferedTaskStore.maxAttempts;
- return this.instance.attempts < maxAttempts;
+ const maxAttempts = this.definition.maxAttempts || this.defaultMaxAttempts;
+ return this.instance.task.attempts < maxAttempts;
}
private rescheduleFailedRun = (
@@ -361,7 +420,7 @@ export class TaskManagerRunner implements TaskRunner {
const { state, error } = failureResult;
if (this.shouldTryToScheduleRetry() && !isUnrecoverableError(error)) {
// if we're retrying, keep the number of attempts
- const { schedule, attempts } = this.instance;
+ const { schedule, attempts } = this.instance.task;
const reschedule = failureResult.runAt
? { runAt: failureResult.runAt }
@@ -399,7 +458,7 @@ export class TaskManagerRunner implements TaskRunner {
// if retrying is possible (new runAt) or this is an recurring task - reschedule
mapOk(
({ runAt, schedule: reschedule, state, attempts = 0 }: Partial) => {
- const { startedAt, schedule } = this.instance;
+ const { startedAt, schedule } = this.instance.task;
return asOk({
runAt:
runAt || intervalFromDate(startedAt!, reschedule?.interval ?? schedule?.interval)!,
@@ -413,16 +472,18 @@ export class TaskManagerRunner implements TaskRunner {
unwrap
)(result);
- await this.bufferedTaskStore.update(
- defaults(
- {
- ...fieldUpdates,
- // reset fields that track the lifecycle of the concluded `task run`
- startedAt: null,
- retryAt: null,
- ownerId: null,
- },
- this.instance
+ this.instance = asRan(
+ await this.bufferedTaskStore.update(
+ defaults(
+ {
+ ...fieldUpdates,
+ // reset fields that track the lifecycle of the concluded `task run`
+ startedAt: null,
+ retryAt: null,
+ ownerId: null,
+ },
+ this.instance.task
+ )
)
);
@@ -436,7 +497,8 @@ export class TaskManagerRunner implements TaskRunner {
private async processResultWhenDone(): Promise {
// not a recurring task: clean up by removing the task instance from store
try {
- await this.bufferedTaskStore.remove(this.instance.id);
+ await this.bufferedTaskStore.remove(this.id);
+ this.instance = asRan(this.instance.task);
} catch (err) {
if (err.statusCode === 404) {
this.logger.warn(`Task cleanup of ${this} failed in processing. Was remove called twice?`);
@@ -451,7 +513,7 @@ export class TaskManagerRunner implements TaskRunner {
result: Result,
taskTiming: TaskTiming
): Promise> {
- const task = this.instance;
+ const { task } = this.instance;
await eitherAsync(
result,
async ({ runAt, schedule }: SuccessfulRunResult) => {
@@ -528,3 +590,38 @@ function performanceStopMarkingTaskAsRunning() {
'markTaskAsRunning_stop'
);
}
+
+// A type that extracts the Instance type out of TaskRunningStage
+// This helps us to better communicate to the developer what the expected "stage"
+// in a specific place in the code might be
+type InstanceOf = T extends TaskRunning ? I : never;
+
+function isPending(taskRunning: TaskRunningInstance): taskRunning is PendingTask {
+ return taskRunning.stage === TaskRunningStage.PENDING;
+}
+function asPending(task: InstanceOf): PendingTask {
+ return {
+ timestamp: new Date(),
+ stage: TaskRunningStage.PENDING,
+ task,
+ };
+}
+function isReadyToRun(taskRunning: TaskRunningInstance): taskRunning is ReadyToRunTask {
+ return taskRunning.stage === TaskRunningStage.READY_TO_RUN;
+}
+function asReadyToRun(
+ task: InstanceOf
+): ReadyToRunTask {
+ return {
+ timestamp: new Date(),
+ stage: TaskRunningStage.READY_TO_RUN,
+ task,
+ };
+}
+function asRan(task: InstanceOf): RanTask {
+ return {
+ timestamp: new Date(),
+ stage: TaskRunningStage.RAN,
+ task,
+ };
+}
diff --git a/x-pack/plugins/task_manager/server/task_scheduling.test.ts b/x-pack/plugins/task_manager/server/task_scheduling.test.ts
index e495d416d5ab8..b142f2091291e 100644
--- a/x-pack/plugins/task_manager/server/task_scheduling.test.ts
+++ b/x-pack/plugins/task_manager/server/task_scheduling.test.ts
@@ -7,13 +7,14 @@
import _ from 'lodash';
import { Subject } from 'rxjs';
-import { none } from 'fp-ts/lib/Option';
+import { none, some } from 'fp-ts/lib/Option';
import {
asTaskMarkRunningEvent,
asTaskRunEvent,
asTaskClaimEvent,
asTaskRunRequestEvent,
+ TaskClaimErrorType,
} from './task_events';
import { TaskLifecycleEvent } from './polling_lifecycle';
import { taskPollingLifecycleMock } from './polling_lifecycle.mock';
@@ -24,17 +25,28 @@ import { createInitialMiddleware } from './lib/middleware';
import { taskStoreMock } from './task_store.mock';
import { TaskRunResult } from './task_running';
import { mockLogger } from './test_utils';
+import { TaskTypeDictionary } from './task_type_dictionary';
describe('TaskScheduling', () => {
const mockTaskStore = taskStoreMock.create({});
const mockTaskManager = taskPollingLifecycleMock.create({});
+ const definitions = new TaskTypeDictionary(mockLogger());
const taskSchedulingOpts = {
taskStore: mockTaskStore,
taskPollingLifecycle: mockTaskManager,
logger: mockLogger(),
middleware: createInitialMiddleware(),
+ definitions,
};
+ definitions.registerTaskDefinitions({
+ foo: {
+ title: 'foo',
+ maxConcurrency: 2,
+ createTaskRunner: jest.fn(),
+ },
+ });
+
beforeEach(() => {
jest.resetAllMocks();
});
@@ -114,7 +126,7 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- const task = { id } as ConcreteTaskInstance;
+ const task = mockTask({ id });
events$.next(asTaskRunEvent(id, asOk({ task, result: TaskRunResult.Success })));
return expect(result).resolves.toEqual({ id });
@@ -131,7 +143,7 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- const task = { id } as ConcreteTaskInstance;
+ const task = mockTask({ id });
events$.next(asTaskClaimEvent(id, asOk(task)));
events$.next(asTaskMarkRunningEvent(id, asOk(task)));
events$.next(
@@ -161,7 +173,7 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- const task = { id } as ConcreteTaskInstance;
+ const task = mockTask({ id });
events$.next(asTaskClaimEvent(id, asOk(task)));
events$.next(asTaskMarkRunningEvent(id, asErr(new Error('some thing gone wrong'))));
@@ -183,7 +195,12 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- events$.next(asTaskClaimEvent(id, asErr(none)));
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ );
await expect(result).rejects.toEqual(
new Error(`Failed to run task "${id}" as it does not exist`)
@@ -192,6 +209,34 @@ describe('TaskScheduling', () => {
expect(mockTaskStore.getLifecycle).toHaveBeenCalledWith(id);
});
+ test('when a task claim due to insufficient capacity we return an explciit message', async () => {
+ const events$ = new Subject();
+ const id = '01ddff11-e88a-4d13-bc4e-256164e755e2';
+
+ mockTaskStore.getLifecycle.mockResolvedValue(TaskLifecycleResult.NotFound);
+
+ const taskScheduling = new TaskScheduling({
+ ...taskSchedulingOpts,
+ taskPollingLifecycle: taskPollingLifecycleMock.create({ events$ }),
+ });
+
+ const result = taskScheduling.runNow(id);
+
+ const task = mockTask({ id, taskType: 'foo' });
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: some(task), errorType: TaskClaimErrorType.CLAIMED_BY_ID_OUT_OF_CAPACITY })
+ )
+ );
+
+ await expect(result).rejects.toEqual(
+ new Error(
+ `Failed to run task "${id}" as we would exceed the max concurrency of "${task.taskType}" which is 2. Rescheduled the task to ensure it is picked up as soon as possible.`
+ )
+ );
+ });
+
test('when a task claim fails we ensure the task isnt already claimed', async () => {
const events$ = new Subject();
const id = '01ddff11-e88a-4d13-bc4e-256164e755e2';
@@ -205,7 +250,12 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- events$.next(asTaskClaimEvent(id, asErr(none)));
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ );
await expect(result).rejects.toEqual(
new Error(`Failed to run task "${id}" as it is currently running`)
@@ -227,7 +277,12 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- events$.next(asTaskClaimEvent(id, asErr(none)));
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ );
await expect(result).rejects.toEqual(
new Error(`Failed to run task "${id}" as it is currently running`)
@@ -270,7 +325,12 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- events$.next(asTaskClaimEvent(id, asErr(none)));
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ );
await expect(result).rejects.toMatchInlineSnapshot(
`[Error: Failed to run task "01ddff11-e88a-4d13-bc4e-256164e755e2" for unknown reason (Current Task Lifecycle is "idle")]`
@@ -292,7 +352,12 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- events$.next(asTaskClaimEvent(id, asErr(none)));
+ events$.next(
+ asTaskClaimEvent(
+ id,
+ asErr({ task: none, errorType: TaskClaimErrorType.CLAIMED_BY_ID_NOT_RETURNED })
+ )
+ );
await expect(result).rejects.toMatchInlineSnapshot(
`[Error: Failed to run task "01ddff11-e88a-4d13-bc4e-256164e755e2" for unknown reason (Current Task Lifecycle is "failed")]`
@@ -313,7 +378,7 @@ describe('TaskScheduling', () => {
const result = taskScheduling.runNow(id);
- const task = { id } as ConcreteTaskInstance;
+ const task = mockTask({ id });
const otherTask = { id: differentTask } as ConcreteTaskInstance;
events$.next(asTaskClaimEvent(id, asOk(task)));
events$.next(asTaskClaimEvent(differentTask, asOk(otherTask)));
@@ -338,3 +403,23 @@ describe('TaskScheduling', () => {
});
});
});
+
+function mockTask(overrides: Partial = {}): ConcreteTaskInstance {
+ return {
+ id: 'claimed-by-id',
+ runAt: new Date(),
+ taskType: 'foo',
+ schedule: undefined,
+ attempts: 0,
+ status: TaskStatus.Claiming,
+ params: { hello: 'world' },
+ state: { baby: 'Henhen' },
+ user: 'jimbo',
+ scope: ['reporting'],
+ ownerId: '',
+ startedAt: null,
+ retryAt: null,
+ scheduledAt: new Date(),
+ ...overrides,
+ };
+}
diff --git a/x-pack/plugins/task_manager/server/task_scheduling.ts b/x-pack/plugins/task_manager/server/task_scheduling.ts
index 8ccedb85c560d..29e83ec911b79 100644
--- a/x-pack/plugins/task_manager/server/task_scheduling.ts
+++ b/x-pack/plugins/task_manager/server/task_scheduling.ts
@@ -8,7 +8,7 @@
import { filter } from 'rxjs/operators';
import { pipe } from 'fp-ts/lib/pipeable';
-import { Option, map as mapOptional, getOrElse } from 'fp-ts/lib/Option';
+import { Option, map as mapOptional, getOrElse, isSome } from 'fp-ts/lib/Option';
import { Logger } from '../../../../src/core/server';
import { asOk, either, map, mapErr, promiseResult } from './lib/result_type';
@@ -20,6 +20,8 @@ import {
ErroredTask,
OkResultOf,
ErrResultOf,
+ ClaimTaskErr,
+ TaskClaimErrorType,
} from './task_events';
import { Middleware } from './lib/middleware';
import {
@@ -33,6 +35,7 @@ import {
import { TaskStore } from './task_store';
import { ensureDeprecatedFieldsAreCorrected } from './lib/correct_deprecated_fields';
import { TaskLifecycleEvent, TaskPollingLifecycle } from './polling_lifecycle';
+import { TaskTypeDictionary } from './task_type_dictionary';
const VERSION_CONFLICT_STATUS = 409;
@@ -41,6 +44,7 @@ export interface TaskSchedulingOpts {
taskStore: TaskStore;
taskPollingLifecycle: TaskPollingLifecycle;
middleware: Middleware;
+ definitions: TaskTypeDictionary;
}
interface RunNowResult {
@@ -52,6 +56,7 @@ export class TaskScheduling {
private taskPollingLifecycle: TaskPollingLifecycle;
private logger: Logger;
private middleware: Middleware;
+ private definitions: TaskTypeDictionary;
/**
* Initializes the task manager, preventing any further addition of middleware,
@@ -63,6 +68,7 @@ export class TaskScheduling {
this.middleware = opts.middleware;
this.taskPollingLifecycle = opts.taskPollingLifecycle;
this.store = opts.taskStore;
+ this.definitions = opts.definitions;
}
/**
@@ -122,10 +128,27 @@ export class TaskScheduling {
.pipe(filter(({ id }: TaskLifecycleEvent) => id === taskId))
.subscribe((taskEvent: TaskLifecycleEvent) => {
if (isTaskClaimEvent(taskEvent)) {
- mapErr(async (error: Option) => {
+ mapErr(async (error: ClaimTaskErr) => {
// reject if any error event takes place for the requested task
subscription.unsubscribe();
- return reject(await this.identifyTaskFailureReason(taskId, error));
+ if (
+ isSome(error.task) &&
+ error.errorType === TaskClaimErrorType.CLAIMED_BY_ID_OUT_OF_CAPACITY
+ ) {
+ const task = error.task.value;
+ const definition = this.definitions.get(task.taskType);
+ return reject(
+ new Error(
+ `Failed to run task "${taskId}" as we would exceed the max concurrency of "${
+ definition?.title ?? task.taskType
+ }" which is ${
+ definition?.maxConcurrency
+ }. Rescheduled the task to ensure it is picked up as soon as possible.`
+ )
+ );
+ } else {
+ return reject(await this.identifyTaskFailureReason(taskId, error.task));
+ }
}, taskEvent.event);
} else {
either, ErrResultOf>(
diff --git a/x-pack/plugins/task_manager/server/task_store.mock.ts b/x-pack/plugins/task_manager/server/task_store.mock.ts
index d4f863af6fe3b..38d570f96220b 100644
--- a/x-pack/plugins/task_manager/server/task_store.mock.ts
+++ b/x-pack/plugins/task_manager/server/task_store.mock.ts
@@ -5,38 +5,27 @@
* 2.0.
*/
-import { Observable, Subject } from 'rxjs';
-import { TaskClaim } from './task_events';
-
import { TaskStore } from './task_store';
interface TaskStoreOptions {
- maxAttempts?: number;
index?: string;
taskManagerId?: string;
- events?: Observable;
}
export const taskStoreMock = {
- create({
- maxAttempts = 0,
- index = '',
- taskManagerId = '',
- events = new Subject(),
- }: TaskStoreOptions) {
+ create({ index = '', taskManagerId = '' }: TaskStoreOptions = {}) {
const mocked = ({
+ convertToSavedObjectIds: jest.fn(),
update: jest.fn(),
remove: jest.fn(),
schedule: jest.fn(),
- claimAvailableTasks: jest.fn(),
bulkUpdate: jest.fn(),
get: jest.fn(),
getLifecycle: jest.fn(),
fetch: jest.fn(),
aggregate: jest.fn(),
- maxAttempts,
+ updateByQuery: jest.fn(),
index,
taskManagerId,
- events,
} as unknown) as jest.Mocked;
return mocked;
},
diff --git a/x-pack/plugins/task_manager/server/task_store.test.ts b/x-pack/plugins/task_manager/server/task_store.test.ts
index dbf13a5f27281..25ee8cb0e2374 100644
--- a/x-pack/plugins/task_manager/server/task_store.test.ts
+++ b/x-pack/plugins/task_manager/server/task_store.test.ts
@@ -6,19 +6,16 @@
*/
import _ from 'lodash';
-import uuid from 'uuid';
-import { filter, take, first } from 'rxjs/operators';
-import { Option, some, none } from 'fp-ts/lib/Option';
+import { first } from 'rxjs/operators';
import {
TaskInstance,
TaskStatus,
TaskLifecycleResult,
SerializedConcreteTaskInstance,
- ConcreteTaskInstance,
} from './task';
import { elasticsearchServiceMock } from '../../../../src/core/server/mocks';
-import { StoreOpts, OwnershipClaimingOpts, TaskStore, SearchOpts } from './task_store';
+import { TaskStore, SearchOpts } from './task_store';
import { savedObjectsRepositoryMock } from 'src/core/server/mocks';
import {
SavedObjectsSerializer,
@@ -26,12 +23,8 @@ import {
SavedObjectAttributes,
SavedObjectsErrorHelpers,
} from 'src/core/server';
-import { asTaskClaimEvent, TaskEvent } from './task_events';
-import { asOk, asErr } from './lib/result_type';
import { TaskTypeDictionary } from './task_type_dictionary';
import { RequestEvent } from '@elastic/elasticsearch/lib/Transport';
-import { Search, UpdateByQuery } from '@elastic/elasticsearch/api/requestParams';
-import { BoolClauseWithAnyCondition, TermFilter } from './queries/query_clauses';
import { mockLogger } from './test_utils';
const savedObjectsClient = savedObjectsRepositoryMock.create();
@@ -76,7 +69,6 @@ describe('TaskStore', () => {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -209,7 +201,6 @@ describe('TaskStore', () => {
taskManagerId: '',
serializer,
esClient,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -265,809 +256,6 @@ describe('TaskStore', () => {
});
});
- describe('claimAvailableTasks', () => {
- async function testClaimAvailableTasks({
- opts = {},
- hits = generateFakeTasks(1),
- claimingOpts,
- versionConflicts = 2,
- }: {
- opts: Partial;
- hits?: unknown[];
- claimingOpts: OwnershipClaimingOpts;
- versionConflicts?: number;
- }) {
- const esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
- esClient.search.mockResolvedValue(asApiResponse({ hits: { hits } }));
- esClient.updateByQuery.mockResolvedValue(
- asApiResponse({
- total: hits.length + versionConflicts,
- updated: hits.length,
- version_conflicts: versionConflicts,
- })
- );
-
- const store = new TaskStore({
- esClient,
- maxAttempts: 2,
- definitions: taskDefinitions,
- serializer,
- savedObjectsRepository: savedObjectsClient,
- taskManagerId: '',
- index: '',
- ...opts,
- });
-
- const result = await store.claimAvailableTasks(claimingOpts);
-
- expect(esClient.updateByQuery.mock.calls[0][0]).toMatchObject({
- max_docs: claimingOpts.size,
- });
- expect(esClient.search.mock.calls[0][0]).toMatchObject({ body: { size: claimingOpts.size } });
- return {
- result,
- args: {
- search: esClient.search.mock.calls[0][0]! as Search<{
- query: BoolClauseWithAnyCondition;
- size: number;
- sort: string | string[];
- }>,
- updateByQuery: esClient.updateByQuery.mock.calls[0][0]! as UpdateByQuery<{
- query: BoolClauseWithAnyCondition;
- size: number;
- sort: string | string[];
- script: object;
- }>,
- },
- };
- }
-
- test('it returns normally with no tasks when the index does not exist.', async () => {
- const esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
- esClient.updateByQuery.mockResolvedValue(
- asApiResponse({
- total: 0,
- updated: 0,
- })
- );
- const store = new TaskStore({
- index: 'tasky',
- taskManagerId: '',
- serializer,
- esClient,
- definitions: taskDefinitions,
- maxAttempts: 2,
- savedObjectsRepository: savedObjectsClient,
- });
- const { docs } = await store.claimAvailableTasks({
- claimOwnershipUntil: new Date(),
- size: 10,
- });
- expect(esClient.updateByQuery.mock.calls[0][0]).toMatchObject({
- ignore_unavailable: true,
- max_docs: 10,
- });
- expect(docs.length).toBe(0);
- });
-
- test('it filters claimed tasks down by supported types, maxAttempts, status, and runAt', async () => {
- const maxAttempts = _.random(2, 43);
- const customMaxAttempts = _.random(44, 100);
-
- const definitions = new TaskTypeDictionary(mockLogger());
- definitions.registerTaskDefinitions({
- foo: {
- title: 'foo',
- createTaskRunner: jest.fn(),
- },
- bar: {
- title: 'bar',
- maxAttempts: customMaxAttempts,
- createTaskRunner: jest.fn(),
- },
- });
-
- const {
- args: {
- updateByQuery: { body: { query, sort } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- maxAttempts,
- definitions,
- },
- claimingOpts: { claimOwnershipUntil: new Date(), size: 10 },
- });
- expect(query).toMatchObject({
- bool: {
- must: [
- { term: { type: 'task' } },
- {
- bool: {
- must: [
- {
- bool: {
- must: [
- {
- bool: {
- should: [
- {
- bool: {
- must: [
- { term: { 'task.status': 'idle' } },
- { range: { 'task.runAt': { lte: 'now' } } },
- ],
- },
- },
- {
- bool: {
- must: [
- {
- bool: {
- should: [
- { term: { 'task.status': 'running' } },
- { term: { 'task.status': 'claiming' } },
- ],
- },
- },
- { range: { 'task.retryAt': { lte: 'now' } } },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- },
- ],
- filter: [
- {
- bool: {
- must_not: [
- {
- bool: {
- should: [
- { term: { 'task.status': 'running' } },
- { term: { 'task.status': 'claiming' } },
- ],
- must: { range: { 'task.retryAt': { gt: 'now' } } },
- },
- },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- });
- expect(sort).toMatchObject([
- {
- _script: {
- type: 'number',
- order: 'asc',
- script: {
- lang: 'painless',
- source: `
-if (doc['task.retryAt'].size()!=0) {
- return doc['task.retryAt'].value.toInstant().toEpochMilli();
-}
-if (doc['task.runAt'].size()!=0) {
- return doc['task.runAt'].value.toInstant().toEpochMilli();
-}
- `,
- },
- },
- },
- ]);
- });
-
- test('it supports claiming specific tasks by id', async () => {
- const maxAttempts = _.random(2, 43);
- const customMaxAttempts = _.random(44, 100);
- const definitions = new TaskTypeDictionary(mockLogger());
- const taskManagerId = uuid.v1();
- const fieldUpdates = {
- ownerId: taskManagerId,
- retryAt: new Date(Date.now()),
- };
- definitions.registerTaskDefinitions({
- foo: {
- title: 'foo',
- createTaskRunner: jest.fn(),
- },
- bar: {
- title: 'bar',
- maxAttempts: customMaxAttempts,
- createTaskRunner: jest.fn(),
- },
- });
- const {
- args: {
- updateByQuery: { body: { query, script, sort } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- maxAttempts,
- definitions,
- },
- claimingOpts: {
- claimOwnershipUntil: new Date(),
- size: 10,
- claimTasksById: [
- '33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
- 'a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
- ],
- },
- });
-
- expect(query).toMatchObject({
- bool: {
- must: [
- { term: { type: 'task' } },
- {
- bool: {
- must: [
- {
- pinned: {
- ids: [
- 'task:33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
- 'task:a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
- ],
- organic: {
- bool: {
- must: [
- {
- bool: {
- should: [
- {
- bool: {
- must: [
- { term: { 'task.status': 'idle' } },
- { range: { 'task.runAt': { lte: 'now' } } },
- ],
- },
- },
- {
- bool: {
- must: [
- {
- bool: {
- should: [
- { term: { 'task.status': 'running' } },
- { term: { 'task.status': 'claiming' } },
- ],
- },
- },
- { range: { 'task.retryAt': { lte: 'now' } } },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- },
- },
- },
- ],
- filter: [
- {
- bool: {
- must_not: [
- {
- bool: {
- should: [
- { term: { 'task.status': 'running' } },
- { term: { 'task.status': 'claiming' } },
- ],
- must: { range: { 'task.retryAt': { gt: 'now' } } },
- },
- },
- ],
- },
- },
- ],
- },
- },
- ],
- },
- });
-
- expect(script).toMatchObject({
- source: `
- if (params.registeredTaskTypes.contains(ctx._source.task.taskType)) {
- if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
- ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
- .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
- .join(' ')}
- } else {
- ctx._source.task.status = "failed";
- }
- } else {
- ctx._source.task.status = "unrecognized";
- }
- `,
- lang: 'painless',
- params: {
- fieldUpdates,
- claimTasksById: [
- 'task:33c6977a-ed6d-43bd-98d9-3f827f7b7cd8',
- 'task:a208b22c-14ec-4fb4-995f-d2ff7a3b03b8',
- ],
- registeredTaskTypes: ['foo', 'bar'],
- taskMaxAttempts: {
- bar: customMaxAttempts,
- foo: maxAttempts,
- },
- },
- });
-
- expect(sort).toMatchObject([
- '_score',
- {
- _script: {
- type: 'number',
- order: 'asc',
- script: {
- lang: 'painless',
- source: `
-if (doc['task.retryAt'].size()!=0) {
- return doc['task.retryAt'].value.toInstant().toEpochMilli();
-}
-if (doc['task.runAt'].size()!=0) {
- return doc['task.runAt'].value.toInstant().toEpochMilli();
-}
- `,
- },
- },
- },
- ]);
- });
-
- test('it claims tasks by setting their ownerId, status and retryAt', async () => {
- const taskManagerId = uuid.v1();
- const claimOwnershipUntil = new Date(Date.now());
- const fieldUpdates = {
- ownerId: taskManagerId,
- retryAt: claimOwnershipUntil,
- };
- const {
- args: {
- updateByQuery: { body: { script } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- },
- claimingOpts: {
- claimOwnershipUntil,
- size: 10,
- },
- });
- expect(script).toMatchObject({
- source: `
- if (params.registeredTaskTypes.contains(ctx._source.task.taskType)) {
- if (ctx._source.task.schedule != null || ctx._source.task.attempts < params.taskMaxAttempts[ctx._source.task.taskType] || params.claimTasksById.contains(ctx._id)) {
- ctx._source.task.status = "claiming"; ${Object.keys(fieldUpdates)
- .map((field) => `ctx._source.task.${field}=params.fieldUpdates.${field};`)
- .join(' ')}
- } else {
- ctx._source.task.status = "failed";
- }
- } else {
- ctx._source.task.status = "unrecognized";
- }
- `,
- lang: 'painless',
- params: {
- fieldUpdates,
- claimTasksById: [],
- registeredTaskTypes: ['report', 'dernstraight', 'yawn'],
- taskMaxAttempts: {
- dernstraight: 2,
- report: 2,
- yawn: 2,
- },
- },
- });
- });
-
- test('it filters out running tasks', async () => {
- const taskManagerId = uuid.v1();
- const claimOwnershipUntil = new Date(Date.now());
- const runAt = new Date();
- const tasks = [
- {
- _id: 'task:aaa',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming',
- params: '{ "hello": "world" }',
- state: '{ "baby": "Henhen" }',
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 1,
- _primary_term: 2,
- sort: ['a', 1],
- },
- {
- // this is invalid as it doesn't have the `type` prefix
- _id: 'bbb',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'claiming',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- ];
- const {
- result: { docs },
- args: {
- search: { body: { query } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- },
- claimingOpts: {
- claimOwnershipUntil,
- size: 10,
- },
- hits: tasks,
- });
-
- expect(query?.bool?.must).toContainEqual({
- bool: {
- must: [
- {
- term: {
- 'task.ownerId': taskManagerId,
- },
- },
- { term: { 'task.status': 'claiming' } },
- ],
- },
- });
-
- expect(docs).toMatchObject([
- {
- attempts: 0,
- id: 'aaa',
- schedule: undefined,
- params: { hello: 'world' },
- runAt,
- scope: ['reporting'],
- state: { baby: 'Henhen' },
- status: 'claiming',
- taskType: 'foo',
- user: 'jimbo',
- ownerId: taskManagerId,
- },
- ]);
- });
-
- test('it filters out invalid tasks that arent SavedObjects', async () => {
- const taskManagerId = uuid.v1();
- const claimOwnershipUntil = new Date(Date.now());
- const runAt = new Date();
- const tasks = [
- {
- _id: 'task:aaa',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming',
- params: '{ "hello": "world" }',
- state: '{ "baby": "Henhen" }',
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 1,
- _primary_term: 2,
- sort: ['a', 1],
- },
- {
- _id: 'task:bbb',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'running',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- ];
- const {
- result: { docs } = {},
- args: {
- search: { body: { query } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- },
- claimingOpts: {
- claimOwnershipUntil,
- size: 10,
- },
- hits: tasks,
- });
-
- expect(query?.bool?.must).toContainEqual({
- bool: {
- must: [
- {
- term: {
- 'task.ownerId': taskManagerId,
- },
- },
- { term: { 'task.status': 'claiming' } },
- ],
- },
- });
-
- expect(docs).toMatchObject([
- {
- attempts: 0,
- id: 'aaa',
- schedule: undefined,
- params: { hello: 'world' },
- runAt,
- scope: ['reporting'],
- state: { baby: 'Henhen' },
- status: 'claiming',
- taskType: 'foo',
- user: 'jimbo',
- ownerId: taskManagerId,
- },
- ]);
- });
-
- test('it returns task objects', async () => {
- const taskManagerId = uuid.v1();
- const claimOwnershipUntil = new Date(Date.now());
- const runAt = new Date();
- const tasks = [
- {
- _id: 'task:aaa',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming',
- params: '{ "hello": "world" }',
- state: '{ "baby": "Henhen" }',
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 1,
- _primary_term: 2,
- sort: ['a', 1],
- },
- {
- _id: 'task:bbb',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'claiming',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- ];
- const {
- result: { docs } = {},
- args: {
- search: { body: { query } = {} },
- },
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- },
- claimingOpts: {
- claimOwnershipUntil,
- size: 10,
- },
- hits: tasks,
- });
-
- expect(query?.bool?.must).toContainEqual({
- bool: {
- must: [
- {
- term: {
- 'task.ownerId': taskManagerId,
- },
- },
- { term: { 'task.status': 'claiming' } },
- ],
- },
- });
-
- expect(docs).toMatchObject([
- {
- attempts: 0,
- id: 'aaa',
- schedule: undefined,
- params: { hello: 'world' },
- runAt,
- scope: ['reporting'],
- state: { baby: 'Henhen' },
- status: 'claiming',
- taskType: 'foo',
- user: 'jimbo',
- ownerId: taskManagerId,
- },
- {
- attempts: 2,
- id: 'bbb',
- schedule: { interval: '5m' },
- params: { shazm: 1 },
- runAt,
- scope: ['reporting', 'ceo'],
- state: { henry: 'The 8th' },
- status: 'claiming',
- taskType: 'bar',
- user: 'dabo',
- ownerId: taskManagerId,
- },
- ]);
- });
-
- test('it returns version_conflicts that do not include conflicts that were proceeded against', async () => {
- const taskManagerId = uuid.v1();
- const claimOwnershipUntil = new Date(Date.now());
- const runAt = new Date();
- const tasks = [
- {
- _id: 'task:aaa',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming',
- params: '{ "hello": "world" }',
- state: '{ "baby": "Henhen" }',
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 1,
- _primary_term: 2,
- sort: ['a', 1],
- },
- {
- _id: 'task:bbb',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'claiming',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- ];
- const maxDocs = 10;
- const {
- result: { stats: { tasksUpdated, tasksConflicted, tasksClaimed } = {} } = {},
- } = await testClaimAvailableTasks({
- opts: {
- taskManagerId,
- },
- claimingOpts: {
- claimOwnershipUntil,
- size: maxDocs,
- },
- hits: tasks,
- // assume there were 20 version conflists, but thanks to `conflicts="proceed"`
- // we proceeded to claim tasks
- versionConflicts: 20,
- });
-
- expect(tasksUpdated).toEqual(2);
- // ensure we only count conflicts that *may* have counted against max_docs, no more than that
- expect(tasksConflicted).toEqual(10 - tasksUpdated!);
- expect(tasksClaimed).toEqual(2);
- });
-
- test('pushes error from saved objects client to errors$', async () => {
- const esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
- const store = new TaskStore({
- index: 'tasky',
- taskManagerId: '',
- serializer,
- esClient,
- definitions: taskDefinitions,
- maxAttempts: 2,
- savedObjectsRepository: savedObjectsClient,
- });
-
- const firstErrorPromise = store.errors$.pipe(first()).toPromise();
- esClient.updateByQuery.mockRejectedValue(new Error('Failure'));
- await expect(
- store.claimAvailableTasks({
- claimOwnershipUntil: new Date(),
- size: 10,
- })
- ).rejects.toThrowErrorMatchingInlineSnapshot(`"Failure"`);
- expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
- });
- });
-
describe('update', () => {
let store: TaskStore;
let esClient: ReturnType['asInternalUser'];
@@ -1079,7 +267,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1179,7 +366,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1219,7 +405,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1251,7 +436,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1335,7 +519,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1355,7 +538,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1373,7 +555,6 @@ if (doc['task.runAt'].size()!=0) {
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
- maxAttempts: 2,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
});
@@ -1381,283 +562,8 @@ if (doc['task.runAt'].size()!=0) {
return expect(store.getLifecycle(randomId())).rejects.toThrow('Bad Request');
});
});
-
- describe('task events', () => {
- function generateTasks() {
- const taskManagerId = uuid.v1();
- const runAt = new Date();
- const tasks = [
- {
- _id: 'task:claimed-by-id',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming',
- params: '{ "hello": "world" }',
- state: '{ "baby": "Henhen" }',
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- },
- },
- _seq_no: 1,
- _primary_term: 2,
- sort: ['a', 1],
- },
- {
- _id: 'task:claimed-by-schedule',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'claiming',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- {
- _id: 'task:already-running',
- _source: {
- type: 'task',
- task: {
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'running',
- params: '{ "shazm": 1 }',
- state: '{ "henry": "The 8th" }',
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- },
- },
- _seq_no: 3,
- _primary_term: 4,
- sort: ['b', 2],
- },
- ];
-
- return { taskManagerId, runAt, tasks };
- }
-
- function instantiateStoreWithMockedApiResponses() {
- const { taskManagerId, runAt, tasks } = generateTasks();
-
- const esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
- esClient.search.mockResolvedValue(asApiResponse({ hits: { hits: tasks } }));
- esClient.updateByQuery.mockResolvedValue(
- asApiResponse({
- total: tasks.length,
- updated: tasks.length,
- })
- );
-
- const store = new TaskStore({
- esClient,
- maxAttempts: 2,
- definitions: taskDefinitions,
- serializer,
- savedObjectsRepository: savedObjectsClient,
- taskManagerId,
- index: '',
- });
-
- return { taskManagerId, runAt, store };
- }
-
- test('emits an event when a task is succesfully claimed by id', async () => {
- const { taskManagerId, runAt, store } = instantiateStoreWithMockedApiResponses();
-
- const promise = store.events
- .pipe(
- filter(
- (event: TaskEvent>) =>
- event.id === 'claimed-by-id'
- ),
- take(1)
- )
- .toPromise();
-
- await store.claimAvailableTasks({
- claimTasksById: ['claimed-by-id'],
- claimOwnershipUntil: new Date(),
- size: 10,
- });
-
- const event = await promise;
- expect(event).toMatchObject(
- asTaskClaimEvent(
- 'claimed-by-id',
- asOk({
- id: 'claimed-by-id',
- runAt,
- taskType: 'foo',
- schedule: undefined,
- attempts: 0,
- status: 'claiming' as TaskStatus,
- params: { hello: 'world' },
- state: { baby: 'Henhen' },
- user: 'jimbo',
- scope: ['reporting'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- })
- )
- );
- });
-
- test('emits an event when a task is succesfully by scheduling', async () => {
- const { taskManagerId, runAt, store } = instantiateStoreWithMockedApiResponses();
-
- const promise = store.events
- .pipe(
- filter(
- (event: TaskEvent>) =>
- event.id === 'claimed-by-schedule'
- ),
- take(1)
- )
- .toPromise();
-
- await store.claimAvailableTasks({
- claimTasksById: ['claimed-by-id'],
- claimOwnershipUntil: new Date(),
- size: 10,
- });
-
- const event = await promise;
- expect(event).toMatchObject(
- asTaskClaimEvent(
- 'claimed-by-schedule',
- asOk({
- id: 'claimed-by-schedule',
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'claiming' as TaskStatus,
- params: { shazm: 1 },
- state: { henry: 'The 8th' },
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- })
- )
- );
- });
-
- test('emits an event when the store fails to claim a required task by id', async () => {
- const { taskManagerId, runAt, store } = instantiateStoreWithMockedApiResponses();
-
- const promise = store.events
- .pipe(
- filter(
- (event: TaskEvent>) =>
- event.id === 'already-running'
- ),
- take(1)
- )
- .toPromise();
-
- await store.claimAvailableTasks({
- claimTasksById: ['already-running'],
- claimOwnershipUntil: new Date(),
- size: 10,
- });
-
- const event = await promise;
- expect(event).toMatchObject(
- asTaskClaimEvent(
- 'already-running',
- asErr(
- some({
- id: 'already-running',
- runAt,
- taskType: 'bar',
- schedule: { interval: '5m' },
- attempts: 2,
- status: 'running' as TaskStatus,
- params: { shazm: 1 },
- state: { henry: 'The 8th' },
- user: 'dabo',
- scope: ['reporting', 'ceo'],
- ownerId: taskManagerId,
- startedAt: null,
- retryAt: null,
- scheduledAt: new Date(),
- })
- )
- )
- );
- });
-
- test('emits an event when the store fails to find a task which was required by id', async () => {
- const { store } = instantiateStoreWithMockedApiResponses();
-
- const promise = store.events
- .pipe(
- filter(
- (event: TaskEvent>) =>
- event.id === 'unknown-task'
- ),
- take(1)
- )
- .toPromise();
-
- await store.claimAvailableTasks({
- claimTasksById: ['unknown-task'],
- claimOwnershipUntil: new Date(),
- size: 10,
- });
-
- const event = await promise;
- expect(event).toMatchObject(asTaskClaimEvent('unknown-task', asErr(none)));
- });
- });
});
-function generateFakeTasks(count: number = 1) {
- return _.times(count, (index) => ({
- _id: `task:id-${index}`,
- _source: {
- type: 'task',
- task: {},
- },
- _seq_no: _.random(1, 5),
- _primary_term: _.random(1, 5),
- sort: ['a', _.random(1, 5)],
- }));
-}
-
const asApiResponse = (body: T): RequestEvent =>
({
body,
diff --git a/x-pack/plugins/task_manager/server/task_store.ts b/x-pack/plugins/task_manager/server/task_store.ts
index b72f1826b813b..0b54f2779065f 100644
--- a/x-pack/plugins/task_manager/server/task_store.ts
+++ b/x-pack/plugins/task_manager/server/task_store.ts
@@ -8,13 +8,9 @@
/*
* This module contains helpers for managing the task manager storage layer.
*/
-import apm from 'elastic-apm-node';
-import { Subject, Observable } from 'rxjs';
-import { omit, difference, partition, map, defaults } from 'lodash';
-
-import { some, none } from 'fp-ts/lib/Option';
-
-import { SearchResponse, UpdateDocumentByQueryResponse } from 'elasticsearch';
+import { Subject } from 'rxjs';
+import { omit, defaults } from 'lodash';
+import { ReindexResponseBase, SearchResponse, UpdateDocumentByQueryResponse } from 'elasticsearch';
import {
SavedObject,
SavedObjectsSerializer,
@@ -32,38 +28,15 @@ import {
TaskLifecycle,
TaskLifecycleResult,
SerializedConcreteTaskInstance,
- TaskStatus,
} from './task';
-import { TaskClaim, asTaskClaimEvent } from './task_events';
-
-import {
- asUpdateByQuery,
- shouldBeOneOf,
- mustBeAllOf,
- filterDownBy,
- asPinnedQuery,
- matchesClauses,
- SortOptions,
-} from './queries/query_clauses';
-
-import {
- updateFieldsAndMarkAsFailed,
- IdleTaskWithExpiredRunAt,
- InactiveTasks,
- RunningOrClaimingTaskWithExpiredRetryAt,
- SortByRunAtAndRetryAt,
- tasksClaimedByOwner,
-} from './queries/mark_available_tasks_as_claimed';
import { TaskTypeDictionary } from './task_type_dictionary';
-
import { ESSearchResponse, ESSearchBody } from '../../../typings/elasticsearch';
export interface StoreOpts {
esClient: ElasticsearchClient;
index: string;
taskManagerId: string;
- maxAttempts: number;
definitions: TaskTypeDictionary;
savedObjectsRepository: ISavedObjectsRepository;
serializer: SavedObjectsSerializer;
@@ -88,25 +61,10 @@ export interface UpdateByQueryOpts extends SearchOpts {
max_docs?: number;
}
-export interface OwnershipClaimingOpts {
- claimOwnershipUntil: Date;
- claimTasksById?: string[];
- size: number;
-}
-
export interface FetchResult {
docs: ConcreteTaskInstance[];
}
-export interface ClaimOwnershipResult {
- stats: {
- tasksUpdated: number;
- tasksConflicted: number;
- tasksClaimed: number;
- };
- docs: ConcreteTaskInstance[];
-}
-
export type BulkUpdateResult = Result<
ConcreteTaskInstance,
{ entity: ConcreteTaskInstance; error: Error }
@@ -123,7 +81,6 @@ export interface UpdateByQueryResult {
* interface into the index.
*/
export class TaskStore {
- public readonly maxAttempts: number;
public readonly index: string;
public readonly taskManagerId: string;
public readonly errors$ = new Subject();
@@ -132,14 +89,12 @@ export class TaskStore {
private definitions: TaskTypeDictionary;
private savedObjectsRepository: ISavedObjectsRepository;
private serializer: SavedObjectsSerializer;
- private events$: Subject;
/**
* Constructs a new TaskStore.
* @param {StoreOpts} opts
* @prop {esClient} esClient - An elasticsearch client
* @prop {string} index - The name of the task manager index
- * @prop {number} maxAttempts - The maximum number of attempts before a task will be abandoned
* @prop {TaskDefinition} definition - The definition of the task being run
* @prop {serializer} - The saved object serializer
* @prop {savedObjectsRepository} - An instance to the saved objects repository
@@ -148,21 +103,22 @@ export class TaskStore {
this.esClient = opts.esClient;
this.index = opts.index;
this.taskManagerId = opts.taskManagerId;
- this.maxAttempts = opts.maxAttempts;
this.definitions = opts.definitions;
this.serializer = opts.serializer;
this.savedObjectsRepository = opts.savedObjectsRepository;
- this.events$ = new Subject();
}
- public get events(): Observable {
- return this.events$;
+ /**
+ * Convert ConcreteTaskInstance Ids to match their SavedObject format as serialized
+ * in Elasticsearch
+ * @param tasks - The task being scheduled.
+ */
+ public convertToSavedObjectIds(
+ taskIds: Array
+ ): Array {
+ return taskIds.map((id) => this.serializer.generateRawId(undefined, 'task', id));
}
- private emitEvents = (events: TaskClaim[]) => {
- events.forEach((event) => this.events$.next(event));
- };
-
/**
* Schedules a task.
*
@@ -201,144 +157,6 @@ export class TaskStore {
});
}
- /**
- * Claims available tasks from the index, which are ready to be run.
- * - runAt is now or past
- * - is not currently claimed by any instance of Kibana
- * - has a type that is in our task definitions
- *
- * @param {OwnershipClaimingOpts} options
- * @returns {Promise}
- */
- public claimAvailableTasks = async ({
- claimOwnershipUntil,
- claimTasksById = [],
- size,
- }: OwnershipClaimingOpts): Promise => {
- const claimTasksByIdWithRawIds = claimTasksById.map((id) =>
- this.serializer.generateRawId(undefined, 'task', id)
- );
-
- const {
- updated: tasksUpdated,
- version_conflicts: tasksConflicted,
- } = await this.markAvailableTasksAsClaimed(claimOwnershipUntil, claimTasksByIdWithRawIds, size);
-
- const docs =
- tasksUpdated > 0 ? await this.sweepForClaimedTasks(claimTasksByIdWithRawIds, size) : [];
-
- const [documentsReturnedById, documentsClaimedBySchedule] = partition(docs, (doc) =>
- claimTasksById.includes(doc.id)
- );
-
- const [documentsClaimedById, documentsRequestedButNotClaimed] = partition(
- documentsReturnedById,
- // we filter the schduled tasks down by status is 'claiming' in the esearch,
- // but we do not apply this limitation on tasks claimed by ID so that we can
- // provide more detailed error messages when we fail to claim them
- (doc) => doc.status === TaskStatus.Claiming
- );
-
- const documentsRequestedButNotReturned = difference(
- claimTasksById,
- map(documentsReturnedById, 'id')
- );
-
- this.emitEvents([
- ...documentsClaimedById.map((doc) => asTaskClaimEvent(doc.id, asOk(doc))),
- ...documentsClaimedBySchedule.map((doc) => asTaskClaimEvent(doc.id, asOk(doc))),
- ...documentsRequestedButNotClaimed.map((doc) => asTaskClaimEvent(doc.id, asErr(some(doc)))),
- ...documentsRequestedButNotReturned.map((id) => asTaskClaimEvent(id, asErr(none))),
- ]);
-
- return {
- stats: {
- tasksUpdated,
- tasksConflicted,
- tasksClaimed: documentsClaimedById.length + documentsClaimedBySchedule.length,
- },
- docs: docs.filter((doc) => doc.status === TaskStatus.Claiming),
- };
- };
-
- private async markAvailableTasksAsClaimed(
- claimOwnershipUntil: OwnershipClaimingOpts['claimOwnershipUntil'],
- claimTasksById: OwnershipClaimingOpts['claimTasksById'],
- size: OwnershipClaimingOpts['size']
- ): Promise {
- const registeredTaskTypes = this.definitions.getAllTypes();
- const taskMaxAttempts = [...this.definitions].reduce((accumulator, [type, { maxAttempts }]) => {
- return { ...accumulator, [type]: maxAttempts || this.maxAttempts };
- }, {});
- const queryForScheduledTasks = mustBeAllOf(
- // Either a task with idle status and runAt <= now or
- // status running or claiming with a retryAt <= now.
- shouldBeOneOf(IdleTaskWithExpiredRunAt, RunningOrClaimingTaskWithExpiredRetryAt)
- );
-
- // The documents should be sorted by runAt/retryAt, unless there are pinned
- // tasks being queried, in which case we want to sort by score first, and then
- // the runAt/retryAt. That way we'll get the pinned tasks first. Note that
- // the score seems to favor newer documents rather than older documents, so
- // if there are not pinned tasks being queried, we do NOT want to sort by score
- // at all, just by runAt/retryAt.
- const sort: SortOptions = [SortByRunAtAndRetryAt];
- if (claimTasksById && claimTasksById.length) {
- sort.unshift('_score');
- }
-
- const apmTrans = apm.startTransaction(`taskManager markAvailableTasksAsClaimed`, 'taskManager');
- const result = await this.updateByQuery(
- asUpdateByQuery({
- query: matchesClauses(
- mustBeAllOf(
- claimTasksById && claimTasksById.length
- ? asPinnedQuery(claimTasksById, queryForScheduledTasks)
- : queryForScheduledTasks
- ),
- filterDownBy(InactiveTasks)
- ),
- update: updateFieldsAndMarkAsFailed(
- {
- ownerId: this.taskManagerId,
- retryAt: claimOwnershipUntil,
- },
- claimTasksById || [],
- registeredTaskTypes,
- taskMaxAttempts
- ),
- sort,
- }),
- {
- max_docs: size,
- }
- );
-
- if (apmTrans) apmTrans.end();
- return result;
- }
-
- /**
- * Fetches tasks from the index, which are owned by the current Kibana instance
- */
- private async sweepForClaimedTasks(
- claimTasksById: OwnershipClaimingOpts['claimTasksById'],
- size: OwnershipClaimingOpts['size']
- ): Promise {
- const claimedTasksQuery = tasksClaimedByOwner(this.taskManagerId);
- const { docs } = await this.search({
- query:
- claimTasksById && claimTasksById.length
- ? asPinnedQuery(claimTasksById, claimedTasksQuery)
- : claimedTasksQuery,
- size,
- sort: SortByRunAtAndRetryAt,
- seq_no_primary_term: true,
- });
-
- return docs;
- }
-
/**
* Updates the specified doc in the index, returning the doc
* with its version up to date.
@@ -527,7 +345,7 @@ export class TaskStore {
return body;
}
- private async updateByQuery(
+ public async updateByQuery(
opts: UpdateByQuerySearchOpts = {},
// eslint-disable-next-line @typescript-eslint/naming-convention
{ max_docs: max_docs }: UpdateByQueryOpts = {}
@@ -549,17 +367,11 @@ export class TaskStore {
},
});
- /**
- * When we run updateByQuery with conflicts='proceed', it's possible for the `version_conflicts`
- * to count against the specified `max_docs`, as per https://github.com/elastic/elasticsearch/issues/63671
- * In order to correct for that happening, we only count `version_conflicts` if we haven't updated as
- * many docs as we could have.
- * This is still no more than an estimation, as there might have been less docuemnt to update that the
- * `max_docs`, but we bias in favour of over zealous `version_conflicts` as that's the best indicator we
- * have for an unhealthy cluster distribution of Task Manager polling intervals
- */
- const conflictsCorrectedForContinuation =
- max_docs && version_conflicts + updated > max_docs ? max_docs - updated : version_conflicts;
+ const conflictsCorrectedForContinuation = correctVersionConflictsForContinuation(
+ updated,
+ version_conflicts,
+ max_docs
+ );
return {
total,
@@ -572,6 +384,22 @@ export class TaskStore {
}
}
}
+/**
+ * When we run updateByQuery with conflicts='proceed', it's possible for the `version_conflicts`
+ * to count against the specified `max_docs`, as per https://github.com/elastic/elasticsearch/issues/63671
+ * In order to correct for that happening, we only count `version_conflicts` if we haven't updated as
+ * many docs as we could have.
+ * This is still no more than an estimation, as there might have been fewer documents to update than the
+ * `max_docs`, but we bias in favour of over zealous `version_conflicts` as that's the best indicator we
+ * have for an unhealthy cluster distribution of Task Manager polling intervals
+ */
+export function correctVersionConflictsForContinuation(
+ updated: ReindexResponseBase['updated'],
+ versionConflicts: ReindexResponseBase['version_conflicts'],
+ maxDocs?: number
+) {
+ return maxDocs && versionConflicts + updated > maxDocs ? maxDocs - updated : versionConflicts;
+}
function taskInstanceToAttributes(doc: TaskInstance): SerializedConcreteTaskInstance {
return {
diff --git a/x-pack/plugins/task_manager/server/task_type_dictionary.ts b/x-pack/plugins/task_manager/server/task_type_dictionary.ts
index 4230eb9ce4b73..63a0548d79d32 100644
--- a/x-pack/plugins/task_manager/server/task_type_dictionary.ts
+++ b/x-pack/plugins/task_manager/server/task_type_dictionary.ts
@@ -28,6 +28,10 @@ export class TaskTypeDictionary {
return [...this.definitions.keys()];
}
+ public getAllDefinitions() {
+ return [...this.definitions.values()];
+ }
+
public has(type: string) {
return this.definitions.has(type);
}
diff --git a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
index 2878d7d5f8220..57beb40b16459 100644
--- a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
+++ b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/init_routes.ts
@@ -218,10 +218,9 @@ export function initRoutes(
await ensureIndexIsRefreshed();
const taskManager = await taskManagerStart;
return res.ok({ body: await taskManager.get(req.params.taskId) });
- } catch (err) {
- return res.ok({ body: err });
+ } catch ({ isBoom, output, message }) {
+ return res.ok({ body: isBoom ? output.payload : { message } });
}
- return res.ok({ body: {} });
}
);
@@ -251,6 +250,7 @@ export function initRoutes(
res: KibanaResponseFactory
): Promise> {
try {
+ await ensureIndexIsRefreshed();
let tasksFound = 0;
const taskManager = await taskManagerStart;
do {
@@ -261,8 +261,8 @@ export function initRoutes(
await Promise.all(tasks.map((task) => taskManager.remove(task.id)));
} while (tasksFound > 0);
return res.ok({ body: 'OK' });
- } catch (err) {
- return res.ok({ body: err });
+ } catch ({ isBoom, output, message }) {
+ return res.ok({ body: isBoom ? output.payload : { message } });
}
}
);
diff --git a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
index 3aee35ed0bff3..2031551410894 100644
--- a/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
+++ b/x-pack/test/plugin_api_integration/plugins/sample_task_plugin/server/plugin.ts
@@ -105,6 +105,20 @@ export class SampleTaskManagerFixturePlugin
// fail after the first failed run
maxAttempts: 1,
},
+ sampleTaskWithSingleConcurrency: {
+ ...defaultSampleTaskConfig,
+ title: 'Sample Task With Single Concurrency',
+ maxConcurrency: 1,
+ timeout: '60s',
+ description: 'A sample task that can only have one concurrent instance.',
+ },
+ sampleTaskWithLimitedConcurrency: {
+ ...defaultSampleTaskConfig,
+ title: 'Sample Task With Max Concurrency of 2',
+ maxConcurrency: 2,
+ timeout: '60s',
+ description: 'A sample task that can only have two concurrent instance.',
+ },
sampleRecurringTaskTimingOut: {
title: 'Sample Recurring Task that Times Out',
description: 'A sample task that times out each run.',
diff --git a/x-pack/test/plugin_api_integration/test_suites/task_manager/health_route.ts b/x-pack/test/plugin_api_integration/test_suites/task_manager/health_route.ts
index 231150a814835..d99c1dac9a25e 100644
--- a/x-pack/test/plugin_api_integration/test_suites/task_manager/health_route.ts
+++ b/x-pack/test/plugin_api_integration/test_suites/task_manager/health_route.ts
@@ -34,6 +34,7 @@ interface MonitoringStats {
timestamp: string;
value: {
drift: Record;
+ drift_by_type: Record>;
load: Record;
execution: {
duration: Record>;
@@ -43,6 +44,7 @@ interface MonitoringStats {
last_successful_poll: string;
last_polling_delay: string;
duration: Record;
+ claim_duration: Record;
result_frequency_percent_as_number: Record;
};
};
@@ -174,7 +176,8 @@ export default function ({ getService }: FtrProviderContext) {
const {
runtime: {
- value: { drift, load, polling, execution },
+ // eslint-disable-next-line @typescript-eslint/naming-convention
+ value: { drift, drift_by_type, load, polling, execution },
},
} = (await getHealth()).stats;
@@ -192,11 +195,21 @@ export default function ({ getService }: FtrProviderContext) {
expect(typeof polling.duration.p95).to.eql('number');
expect(typeof polling.duration.p99).to.eql('number');
+ expect(typeof polling.claim_duration.p50).to.eql('number');
+ expect(typeof polling.claim_duration.p90).to.eql('number');
+ expect(typeof polling.claim_duration.p95).to.eql('number');
+ expect(typeof polling.claim_duration.p99).to.eql('number');
+
expect(typeof drift.p50).to.eql('number');
expect(typeof drift.p90).to.eql('number');
expect(typeof drift.p95).to.eql('number');
expect(typeof drift.p99).to.eql('number');
+ expect(typeof drift_by_type.sampleTask.p50).to.eql('number');
+ expect(typeof drift_by_type.sampleTask.p90).to.eql('number');
+ expect(typeof drift_by_type.sampleTask.p95).to.eql('number');
+ expect(typeof drift_by_type.sampleTask.p99).to.eql('number');
+
expect(typeof load.p50).to.eql('number');
expect(typeof load.p90).to.eql('number');
expect(typeof load.p95).to.eql('number');
diff --git a/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts b/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
index 353be5e872aed..26333ecabd505 100644
--- a/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
+++ b/x-pack/test/plugin_api_integration/test_suites/task_manager/task_management.ts
@@ -51,7 +51,7 @@ type SerializedConcreteTaskInstance = Omit<
};
export default function ({ getService }: FtrProviderContext) {
- const es = getService('legacyEs');
+ const es = getService('es');
const log = getService('log');
const retry = getService('retry');
const config = getService('config');
@@ -59,30 +59,46 @@ export default function ({ getService }: FtrProviderContext) {
const supertest = supertestAsPromised(url.format(config.get('servers.kibana')));
describe('scheduling and running tasks', () => {
- beforeEach(
- async () => await supertest.delete('/api/sample_tasks').set('kbn-xsrf', 'xxx').expect(200)
- );
+ beforeEach(async () => {
+ // clean up before each test
+ return await supertest.delete('/api/sample_tasks').set('kbn-xsrf', 'xxx').expect(200);
+ });
beforeEach(async () => {
const exists = await es.indices.exists({ index: testHistoryIndex });
- if (exists) {
+ if (exists.body) {
await es.deleteByQuery({
index: testHistoryIndex,
- q: 'type:task',
refresh: true,
+ body: { query: { term: { type: 'task' } } },
});
} else {
await es.indices.create({
index: testHistoryIndex,
body: {
mappings: {
- properties: taskManagerIndexMapping,
+ properties: {
+ type: {
+ type: 'keyword',
+ },
+ taskId: {
+ type: 'keyword',
+ },
+ params: taskManagerIndexMapping.params,
+ state: taskManagerIndexMapping.state,
+ runAt: taskManagerIndexMapping.runAt,
+ },
},
},
});
}
});
+ after(async () => {
+ // clean up after last test
+ return await supertest.delete('/api/sample_tasks').set('kbn-xsrf', 'xxx').expect(200);
+ });
+
function currentTasks(): Promise<{
docs: Array>;
}> {
@@ -98,7 +114,27 @@ export default function ({ getService }: FtrProviderContext) {
return supertest
.get(`/api/sample_tasks/task/${task}`)
.send({ task })
- .expect(200)
+ .expect((response) => {
+ expect(response.status).to.eql(200);
+ expect(typeof JSON.parse(response.text).id).to.eql(`string`);
+ })
+ .then((response) => response.body);
+ }
+
+ function currentTaskError(
+ task: string
+ ): Promise<{
+ statusCode: number;
+ error: string;
+ message: string;
+ }> {
+ return supertest
+ .get(`/api/sample_tasks/task/${task}`)
+ .send({ task })
+ .expect(function (response) {
+ expect(response.status).to.eql(200);
+ expect(typeof JSON.parse(response.text).message).to.eql(`string`);
+ })
.then((response) => response.body);
}
@@ -106,13 +142,21 @@ export default function ({ getService }: FtrProviderContext) {
return supertest.get(`/api/ensure_tasks_index_refreshed`).send({}).expect(200);
}
- function historyDocs(taskId?: string): Promise {
+ async function historyDocs(taskId?: string): Promise {
return es
.search({
index: testHistoryIndex,
- q: taskId ? `taskId:${taskId}` : 'type:task',
+ body: {
+ query: {
+ term: { type: 'task' },
+ },
+ },
})
- .then((result: SearchResults) => result.hits.hits);
+ .then((result) =>
+ ((result.body as unknown) as SearchResults).hits.hits.filter((task) =>
+ taskId ? task._source?.taskId === taskId : true
+ )
+ );
}
function scheduleTask(
@@ -123,7 +167,10 @@ export default function ({ getService }: FtrProviderContext) {
.set('kbn-xsrf', 'xxx')
.send({ task })
.expect(200)
- .then((response: { body: SerializedConcreteTaskInstance }) => response.body);
+ .then((response: { body: SerializedConcreteTaskInstance }) => {
+ log.debug(`Task Scheduled: ${response.body.id}`);
+ return response.body;
+ });
}
function runTaskNow(task: { id: string }) {
@@ -252,8 +299,7 @@ export default function ({ getService }: FtrProviderContext) {
});
await retry.try(async () => {
- const [scheduledTask] = (await currentTasks()).docs;
- expect(scheduledTask.id).to.eql(task.id);
+ const scheduledTask = await currentTask(task.id);
expect(scheduledTask.attempts).to.be.greaterThan(0);
expect(Date.parse(scheduledTask.runAt)).to.be.greaterThan(
Date.parse(task.runAt) + 5 * 60 * 1000
@@ -271,8 +317,7 @@ export default function ({ getService }: FtrProviderContext) {
});
await retry.try(async () => {
- const [scheduledTask] = (await currentTasks()).docs;
- expect(scheduledTask.id).to.eql(task.id);
+ const scheduledTask = await currentTask(task.id);
const retryAt = Date.parse(scheduledTask.retryAt!);
expect(isNaN(retryAt)).to.be(false);
@@ -296,7 +341,7 @@ export default function ({ getService }: FtrProviderContext) {
await retry.try(async () => {
expect((await historyDocs(originalTask.id)).length).to.eql(1);
- const [task] = (await currentTasks<{ count: number }>()).docs;
+ const task = await currentTask<{ count: number }>(originalTask.id);
expect(task.attempts).to.eql(0);
expect(task.state.count).to.eql(count + 1);
@@ -467,6 +512,134 @@ export default function ({ getService }: FtrProviderContext) {
});
});
+ it('should only run as many instances of a task as its maxConcurrency will allow', async () => {
+ // should run as there's only one and maxConcurrency on this TaskType is 1
+ const firstWithSingleConcurrency = await scheduleTask({
+ taskType: 'sampleTaskWithSingleConcurrency',
+ params: {
+ waitForEvent: 'releaseFirstWaveOfTasks',
+ },
+ });
+
+ // should run as there's only two and maxConcurrency on this TaskType is 2
+ const [firstLimitedConcurrency, secondLimitedConcurrency] = await Promise.all([
+ scheduleTask({
+ taskType: 'sampleTaskWithLimitedConcurrency',
+ params: {
+ waitForEvent: 'releaseFirstWaveOfTasks',
+ },
+ }),
+ scheduleTask({
+ taskType: 'sampleTaskWithLimitedConcurrency',
+ params: {
+ waitForEvent: 'releaseSecondWaveOfTasks',
+ },
+ }),
+ ]);
+
+ await retry.try(async () => {
+ expect((await historyDocs(firstWithSingleConcurrency.id)).length).to.eql(1);
+ expect((await historyDocs(firstLimitedConcurrency.id)).length).to.eql(1);
+ expect((await historyDocs(secondLimitedConcurrency.id)).length).to.eql(1);
+ });
+
+ // should not run as there's one running and maxConcurrency on this TaskType is 1
+ const secondWithSingleConcurrency = await scheduleTask({
+ taskType: 'sampleTaskWithSingleConcurrency',
+ params: {
+ waitForEvent: 'releaseSecondWaveOfTasks',
+ },
+ });
+
+ // should not run as there are two running and maxConcurrency on this TaskType is 2
+ const thirdWithLimitedConcurrency = await scheduleTask({
+ taskType: 'sampleTaskWithLimitedConcurrency',
+ params: {
+ waitForEvent: 'releaseSecondWaveOfTasks',
+ },
+ });
+
+ // schedule a task that should get picked up before the two blocked tasks
+ const taskWithUnlimitedConcurrency = await scheduleTask({
+ taskType: 'sampleTask',
+ params: {},
+ });
+
+ await retry.try(async () => {
+ expect((await historyDocs(taskWithUnlimitedConcurrency.id)).length).to.eql(1);
+ expect((await currentTask(secondWithSingleConcurrency.id)).status).to.eql('idle');
+ expect((await currentTask(thirdWithLimitedConcurrency.id)).status).to.eql('idle');
+ });
+
+ // release the running SingleConcurrency task and only one of the LimitedConcurrency tasks
+ await releaseTasksWaitingForEventToComplete('releaseFirstWaveOfTasks');
+
+ await retry.try(async () => {
+ // ensure the completed tasks were deleted
+ expect((await currentTaskError(firstWithSingleConcurrency.id)).message).to.eql(
+ `Saved object [task/${firstWithSingleConcurrency.id}] not found`
+ );
+ expect((await currentTaskError(firstLimitedConcurrency.id)).message).to.eql(
+ `Saved object [task/${firstLimitedConcurrency.id}] not found`
+ );
+
+ // ensure the blocked task is still running
+ expect((await currentTask(secondLimitedConcurrency.id)).status).to.eql('running');
+
+ // ensure the blocked tasks begin running
+ expect((await currentTask(secondWithSingleConcurrency.id)).status).to.eql('running');
+ expect((await currentTask(thirdWithLimitedConcurrency.id)).status).to.eql('running');
+ });
+
+ // release blocked task
+ await releaseTasksWaitingForEventToComplete('releaseSecondWaveOfTasks');
+ });
+
+ it('should return a task run error result when RunNow is called at a time that would cause the task to exceed its maxConcurrency', async () => {
+ // should run as there's only one and maxConcurrency on this TaskType is 1
+ const firstWithSingleConcurrency = await scheduleTask({
+ taskType: 'sampleTaskWithSingleConcurrency',
+ // include a schedule so that the task isn't deleted after completion
+ schedule: { interval: `30m` },
+ params: {
+ waitForEvent: 'releaseRunningTaskWithSingleConcurrency',
+ },
+ });
+
+ // should not run as the first is running
+ const secondWithSingleConcurrency = await scheduleTask({
+ taskType: 'sampleTaskWithSingleConcurrency',
+ params: {
+ waitForEvent: 'releaseRunningTaskWithSingleConcurrency',
+ },
+ });
+
+ // run the first task once just so that we can be sure it runs in response to our
+ // runNow call, rather than the initial execution
+ await retry.try(async () => {
+ expect((await historyDocs(firstWithSingleConcurrency.id)).length).to.eql(1);
+ });
+ await releaseTasksWaitingForEventToComplete('releaseRunningTaskWithSingleConcurrency');
+
+ // wait for second task to stall
+ await retry.try(async () => {
+ expect((await historyDocs(secondWithSingleConcurrency.id)).length).to.eql(1);
+ });
+
+ // run the first task again using runNow - should fail due to concurrency concerns
+ const failedRunNowResult = await runTaskNow({
+ id: firstWithSingleConcurrency.id,
+ });
+
+ expect(failedRunNowResult).to.eql({
+ id: firstWithSingleConcurrency.id,
+ error: `Error: Failed to run task "${firstWithSingleConcurrency.id}" as we would exceed the max concurrency of "Sample Task With Single Concurrency" which is 1. Rescheduled the task to ensure it is picked up as soon as possible.`,
+ });
+
+ // release the second task
+ await releaseTasksWaitingForEventToComplete('releaseRunningTaskWithSingleConcurrency');
+ });
+
it('should return a task run error result when running a task now fails', async () => {
const originalTask = await scheduleTask({
taskType: 'sampleTask',
From 12a06da81099b1af1b8d2bf48fe7f71a9d2650db Mon Sep 17 00:00:00 2001
From: Jonathan Budzenski
Date: Thu, 11 Feb 2021 09:29:29 -0600
Subject: [PATCH 06/72] skip grokdebugger tests. #84440
---
x-pack/test/functional/apps/grok_debugger/grok_debugger.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/x-pack/test/functional/apps/grok_debugger/grok_debugger.js b/x-pack/test/functional/apps/grok_debugger/grok_debugger.js
index b2a1c5363fcb6..c21731a2bdc8a 100644
--- a/x-pack/test/functional/apps/grok_debugger/grok_debugger.js
+++ b/x-pack/test/functional/apps/grok_debugger/grok_debugger.js
@@ -11,8 +11,8 @@ export default function ({ getService, getPageObjects }) {
const esArchiver = getService('esArchiver');
const PageObjects = getPageObjects(['grokDebugger']);
-
- describe('grok debugger app', function () {
+ // https://github.com/elastic/kibana/issues/84440
+ describe.skip('grok debugger app', function () {
this.tags('includeFirefox');
before(async () => {
await esArchiver.load('empty_kibana');
From facdd55b1d963abe795c860b86098c7e5a6e0905 Mon Sep 17 00:00:00 2001
From: Dave Snider
Date: Thu, 11 Feb 2021 07:47:44 -0800
Subject: [PATCH 07/72] Fix datagrid issue in Discover for Firefox (#90906)
* Fix datagrid issue in Discover for Firefox
* small visual cleanup while im in here
---
.../components/discover_grid/discover_grid.scss | 12 ++++++++++++
.../discover_grid/get_render_cell_value.test.tsx | 2 +-
.../discover_grid/get_render_cell_value.tsx | 7 +++++--
3 files changed, 18 insertions(+), 3 deletions(-)
diff --git a/src/plugins/discover/public/application/components/discover_grid/discover_grid.scss b/src/plugins/discover/public/application/components/discover_grid/discover_grid.scss
index 64a7eda963349..4754c1700f28d 100644
--- a/src/plugins/discover/public/application/components/discover_grid/discover_grid.scss
+++ b/src/plugins/discover/public/application/components/discover_grid/discover_grid.scss
@@ -9,6 +9,14 @@
border-bottom: $euiBorderThin;
}
+ .euiDataGridRowCell.euiDataGridRowCell--firstColumn {
+ border-left: none;
+ }
+
+ .euiDataGridRowCell.euiDataGridRowCell--lastColumn {
+ border-right: none;
+ }
+
.euiDataGridRowCell:first-of-type,
.euiDataGrid--headerShade.euiDataGrid--bordersAll .euiDataGridHeaderCell:first-of-type {
border-left: none;
@@ -66,3 +74,7 @@
.dscFormatSource {
@include euiTextTruncate;
}
+
+.dscDiscoverGrid__descriptionListDescription {
+ word-break: normal !important;
+}
diff --git a/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.test.tsx b/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.test.tsx
index 49dc43d88fa10..594aaac2168d4 100644
--- a/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.test.tsx
+++ b/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.test.tsx
@@ -56,7 +56,7 @@ describe('Discover grid cell rendering', function () {
/>
);
expect(component.html()).toMatchInlineSnapshot(
- `"- bytes
- 100
"`
+ `"- bytes
- 100
"`
);
});
diff --git a/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.tsx b/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.tsx
index 840b4d398be0e..6ed19813830c8 100644
--- a/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.tsx
+++ b/src/plugins/discover/public/application/components/discover_grid/get_render_cell_value.tsx
@@ -60,11 +60,14 @@ export const getRenderCellValueFn = (
const formatted = indexPattern.formatHit(row);
return (
-
+
{Object.keys(formatted).map((key) => (
{key}
-
+
))}
From 1fdd6ad63903adfeb692e90335451ffb32bba8fa Mon Sep 17 00:00:00 2001
From: Michael Olorunnisola
Date: Thu, 11 Feb 2021 11:15:11 -0500
Subject: [PATCH 08/72] [Security Solution][Timeline] - Open Host & Network
details in side panel (#90064)
---
.../common/types/timeline/index.ts | 36 +-
.../cases/components/case_view/index.test.tsx | 7 +-
.../cases/components/case_view/index.tsx | 12 +-
.../events_viewer/event_details_flyout.tsx | 106 --
.../events_viewer/events_viewer.test.tsx | 7 +-
.../events_viewer/events_viewer.tsx | 8 +-
.../common/components/events_viewer/index.tsx | 14 +-
.../public/common/components/links/index.tsx | 31 +-
.../overview_description_list/index.tsx | 26 +
.../public/common/mock/global_state.ts | 2 +-
.../public/common/mock/timeline_results.ts | 4 +-
.../components/alerts_table/actions.test.tsx | 2 +-
.../public/hosts/pages/details/index.tsx | 1 +
.../details/__snapshots__/index.test.tsx.snap | 152 +++
.../network/components/details/index.test.tsx | 15 +
.../network/components/details/index.tsx | 47 +-
.../network/components/ip/index.test.tsx | 7 +-
.../public/network/pages/details/index.tsx | 1 +
.../__snapshots__/index.test.tsx.snap | 205 ++++
.../host_overview/endpoint_overview/index.tsx | 139 +--
.../components/host_overview/index.test.tsx | 42 +-
.../components/host_overview/index.tsx | 46 +-
.../field_renderers/field_renderers.tsx | 34 +-
.../components/formatted_ip/index.tsx | 74 +-
.../components/open_timeline/helpers.test.ts | 16 +-
.../open_timeline/note_previews/index.tsx | 5 +-
.../__snapshots__/index.test.tsx.snap | 1029 +++++++++++++++++
.../event_details/expandable_event.tsx} | 4 +-
.../side_panel/event_details/index.tsx | 109 ++
.../event_details/translations.ts} | 14 -
.../host_details/expandable_host.tsx | 94 ++
.../side_panel/host_details/index.tsx | 116 ++
.../components/side_panel/index.test.tsx | 204 ++++
.../timelines/components/side_panel/index.tsx | 120 ++
.../network_details/expandable_network.tsx | 134 +++
.../side_panel/network_details/index.tsx | 113 ++
.../timeline/body/actions/index.test.tsx | 6 +-
.../timeline/body/actions/index.tsx | 9 +-
.../body/events/event_column_view.test.tsx | 2 +-
.../body/events/event_column_view.tsx | 10 +-
.../timeline/body/events/stateful_event.tsx | 174 +--
.../body/events/stateful_event_context.tsx | 17 +
.../components/timeline/body/index.test.tsx | 15 +-
.../components/timeline/body/index.tsx | 4 +
.../body/renderers/formatted_field.test.tsx | 8 +-
.../timeline/body/renderers/host_name.tsx | 58 +-
.../components/timeline/event_details.tsx | 85 --
.../timelines/components/timeline/index.tsx | 6 +-
.../timeline/notes_tab_content/index.tsx | 20 +-
.../timeline/notes_tab_content/selectors.ts | 2 +-
.../__snapshots__/index.test.tsx.snap | 2 +-
.../pinned_tab_content/index.test.tsx | 2 +-
.../timeline/pinned_tab_content/index.tsx | 25 +-
.../__snapshots__/index.test.tsx.snap | 4 +-
.../timeline/query_tab_content/index.test.tsx | 4 +-
.../timeline/query_tab_content/index.tsx | 42 +-
.../containers/active_timeline_context.ts | 44 +-
.../public/timelines/containers/index.tsx | 4 +-
.../timelines/store/timeline/actions.ts | 14 +-
.../timelines/store/timeline/defaults.ts | 2 +-
.../timelines/store/timeline/epic.test.ts | 2 +-
.../timeline/epic_local_storage.test.tsx | 4 +-
.../timelines/store/timeline/helpers.ts | 30 +-
.../public/timelines/store/timeline/model.ts | 7 +-
.../timelines/store/timeline/reducer.test.ts | 2 +-
.../timelines/store/timeline/reducer.ts | 32 +-
.../translations/translations/ja-JP.json | 2 -
.../translations/translations/zh-CN.json | 2 -
68 files changed, 3000 insertions(+), 616 deletions(-)
delete mode 100644 x-pack/plugins/security_solution/public/common/components/events_viewer/event_details_flyout.tsx
create mode 100644 x-pack/plugins/security_solution/public/common/components/overview_description_list/index.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/__snapshots__/index.test.tsx.snap
rename x-pack/plugins/security_solution/public/timelines/components/{timeline/expandable_event/index.tsx => side_panel/event_details/expandable_event.tsx} (96%)
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/event_details/index.tsx
rename x-pack/plugins/security_solution/public/timelines/components/{timeline/expandable_event/translations.tsx => side_panel/event_details/translations.ts} (77%)
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/host_details/expandable_host.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/host_details/index.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/index.test.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/index.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/network_details/expandable_network.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/side_panel/network_details/index.tsx
create mode 100644 x-pack/plugins/security_solution/public/timelines/components/timeline/body/events/stateful_event_context.tsx
delete mode 100644 x-pack/plugins/security_solution/public/timelines/components/timeline/event_details.tsx
diff --git a/x-pack/plugins/security_solution/common/types/timeline/index.ts b/x-pack/plugins/security_solution/common/types/timeline/index.ts
index 26a30e7c8f239..cee8ccdea3e9e 100644
--- a/x-pack/plugins/security_solution/common/types/timeline/index.ts
+++ b/x-pack/plugins/security_solution/common/types/timeline/index.ts
@@ -14,6 +14,7 @@ import {
success,
success_count as successCount,
} from '../../detection_engine/schemas/common/schemas';
+import { FlowTarget } from '../../search_strategy/security_solution/network';
import { PositiveInteger } from '../../detection_engine/schemas/types';
import { errorSchema } from '../../detection_engine/schemas/response/error_schema';
@@ -423,11 +424,38 @@ type EmptyObject = Record;
export type TimelineExpandedEventType =
| {
- eventId: string;
- indexName: string;
+ panelView?: 'eventDetail';
+ params?: {
+ eventId: string;
+ indexName: string;
+ };
}
| EmptyObject;
-export type TimelineExpandedEvent = {
- [tab in TimelineTabs]?: TimelineExpandedEventType;
+export type TimelineExpandedHostType =
+ | {
+ panelView?: 'hostDetail';
+ params?: {
+ hostName: string;
+ };
+ }
+ | EmptyObject;
+
+export type TimelineExpandedNetworkType =
+ | {
+ panelView?: 'networkDetail';
+ params?: {
+ ip: string;
+ flowTarget: FlowTarget;
+ };
+ }
+ | EmptyObject;
+
+export type TimelineExpandedDetailType =
+ | TimelineExpandedEventType
+ | TimelineExpandedHostType
+ | TimelineExpandedNetworkType;
+
+export type TimelineExpandedDetail = {
+ [tab in TimelineTabs]?: TimelineExpandedDetailType;
};
diff --git a/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx b/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx
index e74b66eeeb9f0..dc0ef9ad026a4 100644
--- a/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/cases/components/case_view/index.test.tsx
@@ -615,7 +615,7 @@ describe('CaseView ', () => {
type: 'x-pack/security_solution/local/timeline/CREATE_TIMELINE',
payload: {
columns: [],
- expandedEvent: {},
+ expandedDetail: {},
id: 'timeline-case',
indexNames: [],
show: false,
@@ -661,9 +661,10 @@ describe('CaseView ', () => {
.first()
.simulate('click');
expect(mockDispatch).toHaveBeenCalledWith({
- type: 'x-pack/security_solution/local/timeline/TOGGLE_EXPANDED_EVENT',
+ type: 'x-pack/security_solution/local/timeline/TOGGLE_DETAIL_PANEL',
payload: {
- event: { eventId: 'alert-id-1', indexName: 'alert-index-1' },
+ panelView: 'eventDetail',
+ params: { eventId: 'alert-id-1', indexName: 'alert-index-1' },
timelineId: 'timeline-case',
},
});
diff --git a/x-pack/plugins/security_solution/public/cases/components/case_view/index.tsx b/x-pack/plugins/security_solution/public/cases/components/case_view/index.tsx
index e690a01dca54b..0eaa867077a4a 100644
--- a/x-pack/plugins/security_solution/public/cases/components/case_view/index.tsx
+++ b/x-pack/plugins/security_solution/public/cases/components/case_view/index.tsx
@@ -44,7 +44,7 @@ import {
} from '../configure_cases/utils';
import { useQueryAlerts } from '../../../detections/containers/detection_engine/alerts/use_query';
import { buildAlertsQuery, getRuleIdsFromComments } from './helpers';
-import { EventDetailsFlyout } from '../../../common/components/events_viewer/event_details_flyout';
+import { DetailsPanel } from '../../../timelines/components/side_panel';
import { useSourcererScope } from '../../../common/containers/sourcerer';
import { SourcererScopeName } from '../../../common/store/sourcerer/model';
import { TimelineId } from '../../../../common/types/timeline';
@@ -368,9 +368,10 @@ export const CaseComponent = React.memo(
const showAlert = useCallback(
(alertId: string, index: string) => {
dispatch(
- timelineActions.toggleExpandedEvent({
+ timelineActions.toggleDetailPanel({
+ panelView: 'eventDetail',
timelineId: TimelineId.casePage,
- event: {
+ params: {
eventId: alertId,
indexName: index,
},
@@ -390,7 +391,7 @@ export const CaseComponent = React.memo(
id: TimelineId.casePage,
columns: [],
indexNames: [],
- expandedEvent: {},
+ expandedDetail: {},
show: false,
})
);
@@ -500,9 +501,10 @@ export const CaseComponent = React.memo(
-
diff --git a/x-pack/plugins/security_solution/public/common/components/events_viewer/event_details_flyout.tsx b/x-pack/plugins/security_solution/public/common/components/events_viewer/event_details_flyout.tsx
deleted file mode 100644
index 60418f3a2a080..0000000000000
--- a/x-pack/plugins/security_solution/public/common/components/events_viewer/event_details_flyout.tsx
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { some } from 'lodash/fp';
-import { EuiFlyout, EuiFlyoutHeader, EuiFlyoutBody } from '@elastic/eui';
-import React, { useCallback, useMemo } from 'react';
-import styled from 'styled-components';
-import deepEqual from 'fast-deep-equal';
-import { useDispatch } from 'react-redux';
-
-import { BrowserFields, DocValueFields } from '../../containers/source';
-import {
- ExpandableEvent,
- ExpandableEventTitle,
-} from '../../../timelines/components/timeline/expandable_event';
-import { useDeepEqualSelector } from '../../hooks/use_selector';
-import { useTimelineEventsDetails } from '../../../timelines/containers/details';
-import { timelineActions, timelineSelectors } from '../../../timelines/store/timeline';
-import { timelineDefaults } from '../../../timelines/store/timeline/defaults';
-
-const StyledEuiFlyout = styled(EuiFlyout)`
- z-index: ${({ theme }) => theme.eui.euiZLevel7};
-`;
-
-const StyledEuiFlyoutBody = styled(EuiFlyoutBody)`
- .euiFlyoutBody__overflow {
- display: flex;
- flex: 1;
- overflow: hidden;
-
- .euiFlyoutBody__overflowContent {
- flex: 1;
- overflow: hidden;
- padding: ${({ theme }) => `${theme.eui.paddingSizes.xs} ${theme.eui.paddingSizes.m} 64px`};
- }
- }
-`;
-
-interface EventDetailsFlyoutProps {
- browserFields: BrowserFields;
- docValueFields: DocValueFields[];
- timelineId: string;
-}
-
-const EventDetailsFlyoutComponent: React.FC = ({
- browserFields,
- docValueFields,
- timelineId,
-}) => {
- const dispatch = useDispatch();
- const getTimeline = useMemo(() => timelineSelectors.getTimelineByIdSelector(), []);
- const expandedEvent = useDeepEqualSelector(
- (state) => (getTimeline(state, timelineId) ?? timelineDefaults)?.expandedEvent?.query ?? {}
- );
-
- const handleClearSelection = useCallback(() => {
- dispatch(timelineActions.toggleExpandedEvent({ timelineId }));
- }, [dispatch, timelineId]);
-
- const [loading, detailsData] = useTimelineEventsDetails({
- docValueFields,
- indexName: expandedEvent?.indexName ?? '',
- eventId: expandedEvent?.eventId ?? '',
- skip: !expandedEvent.eventId,
- });
-
- const isAlert = useMemo(
- () => some({ category: 'signal', field: 'signal.rule.id' }, detailsData),
- [detailsData]
- );
-
- if (!expandedEvent.eventId) {
- return null;
- }
-
- return (
-
-
-
-
-
-
-
-
- );
-};
-
-export const EventDetailsFlyout = React.memo(
- EventDetailsFlyoutComponent,
- (prevProps, nextProps) =>
- deepEqual(prevProps.browserFields, nextProps.browserFields) &&
- deepEqual(prevProps.docValueFields, nextProps.docValueFields) &&
- prevProps.timelineId === nextProps.timelineId
-);
diff --git a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.test.tsx b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.test.tsx
index 6dad6c439ce46..a37528fcb24d7 100644
--- a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.test.tsx
+++ b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.test.tsx
@@ -86,7 +86,6 @@ const eventsViewerDefaultProps = {
deletedEventIds: [],
docValueFields: [],
end: to,
- expandedEvent: {},
filters: [],
id: TimelineId.detectionsPage,
indexNames: mockIndexNames,
@@ -100,7 +99,6 @@ const eventsViewerDefaultProps = {
query: '',
language: 'kql',
},
- handleCloseExpandedEvent: jest.fn(),
start: from,
sort: [
{
@@ -150,14 +148,15 @@ describe('EventsViewer', () => {
expect(mockDispatch).toBeCalledTimes(2);
expect(mockDispatch.mock.calls[1][0]).toEqual({
payload: {
- event: {
+ panelView: 'eventDetail',
+ params: {
eventId: 'yb8TkHYBRgU82_bJu_rY',
indexName: 'auditbeat-7.10.1-2020.12.18-000001',
},
tabType: 'query',
timelineId: TimelineId.test,
},
- type: 'x-pack/security_solution/local/timeline/TOGGLE_EXPANDED_EVENT',
+ type: 'x-pack/security_solution/local/timeline/TOGGLE_DETAIL_PANEL',
});
});
});
diff --git a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx
index 254309aee906b..012c9a3a450c0 100644
--- a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx
+++ b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx
@@ -40,11 +40,7 @@ import { inputsModel } from '../../store';
import { useManageTimeline } from '../../../timelines/components/manage_timeline';
import { ExitFullScreen } from '../exit_full_screen';
import { useGlobalFullScreen } from '../../containers/use_full_screen';
-import {
- TimelineExpandedEventType,
- TimelineId,
- TimelineTabs,
-} from '../../../../common/types/timeline';
+import { TimelineId, TimelineTabs } from '../../../../common/types/timeline';
import { GraphOverlay } from '../../../timelines/components/graph_overlay';
import { SELECTOR_TIMELINE_GLOBAL_CONTAINER } from '../../../timelines/components/timeline/styles';
@@ -113,7 +109,6 @@ interface Props {
deletedEventIds: Readonly;
docValueFields: DocValueFields[];
end: string;
- expandedEvent: TimelineExpandedEventType;
filters: Filter[];
headerFilterGroup?: React.ReactNode;
height?: number;
@@ -141,7 +136,6 @@ const EventsViewerComponent: React.FC = ({
deletedEventIds,
docValueFields,
end,
- expandedEvent,
filters,
headerFilterGroup,
id,
diff --git a/x-pack/plugins/security_solution/public/common/components/events_viewer/index.tsx b/x-pack/plugins/security_solution/public/common/components/events_viewer/index.tsx
index 2b5420674b89c..59dc756bb2b3e 100644
--- a/x-pack/plugins/security_solution/public/common/components/events_viewer/index.tsx
+++ b/x-pack/plugins/security_solution/public/common/components/events_viewer/index.tsx
@@ -21,7 +21,7 @@ import { InspectButtonContainer } from '../inspect';
import { useGlobalFullScreen } from '../../containers/use_full_screen';
import { SourcererScopeName } from '../../store/sourcerer/model';
import { useSourcererScope } from '../../containers/sourcerer';
-import { EventDetailsFlyout } from './event_details_flyout';
+import { DetailsPanel } from '../../../timelines/components/side_panel';
const DEFAULT_EVENTS_VIEWER_HEIGHT = 652;
@@ -46,6 +46,11 @@ export interface OwnProps {
type Props = OwnProps & PropsFromRedux;
+/**
+ * The stateful events viewer component is the highest level component that is utilized across the security_solution pages layer where
+ * timeline is used BESIDES the flyout. The flyout makes use of the `EventsViewer` component which is a subcomponent here
+ * NOTE: As of writing, it is not used in the Case_View component
+ */
const StatefulEventsViewerComponent: React.FC = ({
createTimeline,
columns,
@@ -53,7 +58,6 @@ const StatefulEventsViewerComponent: React.FC = ({
deletedEventIds,
deleteEventQuery,
end,
- expandedEvent,
excludedRowRendererIds,
filters,
headerFilterGroup,
@@ -114,7 +118,6 @@ const StatefulEventsViewerComponent: React.FC = ({
dataProviders={dataProviders!}
deletedEventIds={deletedEventIds}
end={end}
- expandedEvent={expandedEvent}
isLoadingIndexPattern={isLoadingIndexPattern}
filters={globalFilters}
headerFilterGroup={headerFilterGroup}
@@ -133,9 +136,10 @@ const StatefulEventsViewerComponent: React.FC = ({
/>
-
>
@@ -155,7 +159,6 @@ const makeMapStateToProps = () => {
dataProviders,
deletedEventIds,
excludedRowRendererIds,
- expandedEvent,
graphEventId,
itemsPerPage,
itemsPerPageOptions,
@@ -168,7 +171,6 @@ const makeMapStateToProps = () => {
columns,
dataProviders,
deletedEventIds,
- expandedEvent: expandedEvent?.query ?? {},
excludedRowRendererIds,
filters: getGlobalFiltersQuerySelector(state),
id,
diff --git a/x-pack/plugins/security_solution/public/common/components/links/index.tsx b/x-pack/plugins/security_solution/public/common/components/links/index.tsx
index 49d739b3f6679..6b4148db2b1ee 100644
--- a/x-pack/plugins/security_solution/public/common/components/links/index.tsx
+++ b/x-pack/plugins/security_solution/public/common/components/links/index.tsx
@@ -55,10 +55,11 @@ export const LinkAnchor: React.FC = ({ children, ...props }) => (
);
// Internal Links
-const HostDetailsLinkComponent: React.FC<{ children?: React.ReactNode; hostName: string }> = ({
- children,
- hostName,
-}) => {
+const HostDetailsLinkComponent: React.FC<{
+ children?: React.ReactNode;
+ hostName: string;
+ isButton?: boolean;
+}> = ({ children, hostName, isButton }) => {
const { formatUrl, search } = useFormatUrl(SecurityPageName.hosts);
const { navigateToApp } = useKibana().services.application;
const goToHostDetails = useCallback(
@@ -71,7 +72,14 @@ const HostDetailsLinkComponent: React.FC<{ children?: React.ReactNode; hostName:
[hostName, navigateToApp, search]
);
- return (
+ return isButton ? (
+
+ {children ? children : hostName}
+
+ ) : (
);
};
+
export const HostDetailsLink = React.memo(HostDetailsLinkComponent);
const allowedUrlSchemes = ['http://', 'https://'];
@@ -119,7 +128,8 @@ const NetworkDetailsLinkComponent: React.FC<{
children?: React.ReactNode;
ip: string;
flowTarget?: FlowTarget | FlowTargetSourceDest;
-}> = ({ children, ip, flowTarget = FlowTarget.source }) => {
+ isButton?: boolean;
+}> = ({ children, ip, flowTarget = FlowTarget.source, isButton }) => {
const { formatUrl, search } = useFormatUrl(SecurityPageName.network);
const { navigateToApp } = useKibana().services.application;
const goToNetworkDetails = useCallback(
@@ -132,7 +142,14 @@ const NetworkDetailsLinkComponent: React.FC<{
[flowTarget, ip, navigateToApp, search]
);
- return (
+ return isButton ? (
+
+ {children ? children : ip}
+
+ ) : (
(
+
+
+
+);
diff --git a/x-pack/plugins/security_solution/public/common/mock/global_state.ts b/x-pack/plugins/security_solution/public/common/mock/global_state.ts
index 21e4ef6a46c8c..bfd25aa469c93 100644
--- a/x-pack/plugins/security_solution/public/common/mock/global_state.ts
+++ b/x-pack/plugins/security_solution/public/common/mock/global_state.ts
@@ -214,7 +214,7 @@ export const mockGlobalState: State = {
description: '',
eventIdToNoteIds: {},
excludedRowRendererIds: [],
- expandedEvent: {},
+ expandedDetail: {},
highlightedDropAndProviderId: '',
historyIds: [],
isFavorite: false,
diff --git a/x-pack/plugins/security_solution/public/common/mock/timeline_results.ts b/x-pack/plugins/security_solution/public/common/mock/timeline_results.ts
index 79486f773b1f2..351caa2df3e31 100644
--- a/x-pack/plugins/security_solution/public/common/mock/timeline_results.ts
+++ b/x-pack/plugins/security_solution/public/common/mock/timeline_results.ts
@@ -2109,7 +2109,7 @@ export const mockTimelineModel: TimelineModel = {
eventIdToNoteIds: {},
eventType: 'all',
excludedRowRendererIds: [],
- expandedEvent: {},
+ expandedDetail: {},
filters: [
{
$state: {
@@ -2232,7 +2232,7 @@ export const defaultTimelineProps: CreateTimelineProps = {
eventIdToNoteIds: {},
eventType: 'all',
excludedRowRendererIds: [],
- expandedEvent: {},
+ expandedDetail: {},
filters: [],
highlightedDropAndProviderId: '',
historyIds: [],
diff --git a/x-pack/plugins/security_solution/public/detections/components/alerts_table/actions.test.tsx b/x-pack/plugins/security_solution/public/detections/components/alerts_table/actions.test.tsx
index a2dbeedb3f016..3c3d79c0c518f 100644
--- a/x-pack/plugins/security_solution/public/detections/components/alerts_table/actions.test.tsx
+++ b/x-pack/plugins/security_solution/public/detections/components/alerts_table/actions.test.tsx
@@ -156,7 +156,7 @@ describe('alert actions', () => {
eventIdToNoteIds: {},
eventType: 'all',
excludedRowRendererIds: [],
- expandedEvent: {},
+ expandedDetail: {},
filters: [
{
$state: {
diff --git a/x-pack/plugins/security_solution/public/hosts/pages/details/index.tsx b/x-pack/plugins/security_solution/public/hosts/pages/details/index.tsx
index 18ab93dbb340c..faa240f98e53e 100644
--- a/x-pack/plugins/security_solution/public/hosts/pages/details/index.tsx
+++ b/x-pack/plugins/security_solution/public/hosts/pages/details/index.tsx
@@ -151,6 +151,7 @@ const HostDetailsComponent: React.FC = ({ detailName, hostDeta
docValueFields={docValueFields}
id={id}
inspect={inspect}
+ isInDetailsSidePanel={false}
refetch={refetch}
setQuery={setQuery}
data={hostOverview as HostItem}
diff --git a/x-pack/plugins/security_solution/public/network/components/details/__snapshots__/index.test.tsx.snap b/x-pack/plugins/security_solution/public/network/components/details/__snapshots__/index.test.tsx.snap
index ca2ce4ee921c7..c22c3bf680781 100644
--- a/x-pack/plugins/security_solution/public/network/components/details/__snapshots__/index.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/network/components/details/__snapshots__/index.test.tsx.snap
@@ -141,6 +141,158 @@ exports[`IP Overview Component rendering it renders the default IP Overview 1`]
flowTarget="source"
id="ipOverview"
ip="10.10.10.10"
+ isInDetailsSidePanel={false}
+ isLoadingAnomaliesData={false}
+ loading={false}
+ narrowDateRange={[MockFunction]}
+ startDate="2019-06-15T06:00:00.000Z"
+ type="details"
+ updateFlowTargetAction={[MockFunction]}
+/>
+`;
+
+exports[`IP Overview Component rendering it renders the side panel IP overview 1`] = `
+ {
loading: false,
id: 'ipOverview',
ip: '10.10.10.10',
+ isInDetailsSidePanel: false,
isLoadingAnomaliesData: false,
narrowDateRange: (jest.fn() as unknown) as NarrowDateRange,
startDate: '2019-06-15T06:00:00.000Z',
@@ -76,5 +77,19 @@ describe('IP Overview Component', () => {
expect(wrapper.find('IpOverview')).toMatchSnapshot();
});
+
+ test('it renders the side panel IP overview', () => {
+ const panelViewProps = {
+ ...mockProps,
+ isInDetailsSidePanel: true,
+ };
+ const wrapper = shallow(
+
+
+
+ );
+
+ expect(wrapper.find('IpOverview')).toMatchSnapshot();
+ });
});
});
diff --git a/x-pack/plugins/security_solution/public/network/components/details/index.tsx b/x-pack/plugins/security_solution/public/network/components/details/index.tsx
index 384fffc472e21..e263d49e22fc0 100644
--- a/x-pack/plugins/security_solution/public/network/components/details/index.tsx
+++ b/x-pack/plugins/security_solution/public/network/components/details/index.tsx
@@ -5,7 +5,6 @@
* 2.0.
*/
-import { EuiFlexItem } from '@elastic/eui';
import darkTheme from '@elastic/eui/dist/eui_theme_dark.json';
import lightTheme from '@elastic/eui/dist/eui_theme_light.json';
import React from 'react';
@@ -27,39 +26,38 @@ import {
whoisRenderer,
} from '../../../timelines/components/field_renderers/field_renderers';
import * as i18n from './translations';
-import { DescriptionListStyled, OverviewWrapper } from '../../../common/components/page';
+import { OverviewWrapper } from '../../../common/components/page';
import { Loader } from '../../../common/components/loader';
import { Anomalies, NarrowDateRange } from '../../../common/components/ml/types';
import { AnomalyScores } from '../../../common/components/ml/score/anomaly_scores';
import { useMlCapabilities } from '../../../common/components/ml/hooks/use_ml_capabilities';
import { hasMlUserPermissions } from '../../../../common/machine_learning/has_ml_user_permissions';
import { InspectButton, InspectButtonContainer } from '../../../common/components/inspect';
+import { OverviewDescriptionList } from '../../../common/components/overview_description_list';
export interface IpOverviewProps {
+ anomaliesData: Anomalies | null;
+ contextID?: string; // used to provide unique draggable context when viewing in the side panel
data: NetworkDetailsStrategyResponse['networkDetails'];
+ endDate: string;
flowTarget: FlowTarget;
id: string;
ip: string;
- loading: boolean;
+ isInDetailsSidePanel: boolean;
isLoadingAnomaliesData: boolean;
- anomaliesData: Anomalies | null;
+ loading: boolean;
+ narrowDateRange: NarrowDateRange;
startDate: string;
- endDate: string;
type: networkModel.NetworkType;
- narrowDateRange: NarrowDateRange;
}
-const getDescriptionList = (descriptionList: DescriptionList[], key: number) => (
-
-
-
-);
-
export const IpOverview = React.memo(
({
+ contextID,
id,
ip,
data,
+ isInDetailsSidePanel = false, // Rather than duplicate the component, alter the structure based on its location
loading,
flowTarget,
startDate,
@@ -77,13 +75,14 @@ export const IpOverview = React.memo(
title: i18n.LOCATION,
description: locationRenderer(
[`${flowTarget}.geo.city_name`, `${flowTarget}.geo.region_name`],
- data
+ data,
+ contextID
),
},
{
title: i18n.AUTONOMOUS_SYSTEM,
description: typeData
- ? autonomousSystemRenderer(typeData.autonomousSystem, flowTarget)
+ ? autonomousSystemRenderer(typeData.autonomousSystem, flowTarget, contextID)
: getEmptyTagValue(),
},
];
@@ -123,12 +122,13 @@ export const IpOverview = React.memo(
title: i18n.HOST_ID,
description:
typeData && data.host
- ? hostIdRenderer({ host: data.host, ipFilter: ip })
+ ? hostIdRenderer({ host: data.host, ipFilter: ip, contextID })
: getEmptyTagValue(),
},
{
title: i18n.HOST_NAME,
- description: typeData && data.host ? hostNameRenderer(data.host, ip) : getEmptyTagValue(),
+ description:
+ typeData && data.host ? hostNameRenderer(data.host, ip, contextID) : getEmptyTagValue(),
},
],
[
@@ -139,12 +139,17 @@ export const IpOverview = React.memo(
return (
-
-
-
- {descriptionLists.map((descriptionList, index) =>
- getDescriptionList(descriptionList, index)
+
+ {!isInDetailsSidePanel && (
+
)}
+ {descriptionLists.map((descriptionList, index) => (
+
+ ))}
{loading && (
{
expect(wrapper.find('[data-test-subj="formatted-ip"]').first().text()).toEqual('10.1.2.3');
});
- test('it hyperlinks to the network/ip page', () => {
+ test('it displays a button which opens the network/ip side panel', () => {
const wrapper = mount(
@@ -53,8 +53,7 @@ describe('Port', () => {
);
expect(
- wrapper.find('[data-test-subj="draggable-truncatable-content"]').find('a').first().props()
- .href
- ).toEqual('/ip/10.1.2.3/source');
+ wrapper.find('[data-test-subj="draggable-truncatable-content"]').find('a').first().text()
+ ).toEqual('10.1.2.3');
});
});
diff --git a/x-pack/plugins/security_solution/public/network/pages/details/index.tsx b/x-pack/plugins/security_solution/public/network/pages/details/index.tsx
index 124b400d56e92..896eec39c125c 100644
--- a/x-pack/plugins/security_solution/public/network/pages/details/index.tsx
+++ b/x-pack/plugins/security_solution/public/network/pages/details/index.tsx
@@ -147,6 +147,7 @@ const NetworkDetailsComponent: React.FC = () => {
id={id}
inspect={inspect}
ip={ip}
+ isInDetailsSidePanel={false}
data={networkDetails}
anomaliesData={anomaliesData}
loading={loading}
diff --git a/x-pack/plugins/security_solution/public/overview/components/host_overview/__snapshots__/index.test.tsx.snap b/x-pack/plugins/security_solution/public/overview/components/host_overview/__snapshots__/index.test.tsx.snap
index 47d45ab740dcf..5d7b2d5b85af6 100644
--- a/x-pack/plugins/security_solution/public/overview/components/host_overview/__snapshots__/index.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/overview/components/host_overview/__snapshots__/index.test.tsx.snap
@@ -196,6 +196,211 @@ exports[`Host Summary Component rendering it renders the default Host Summary 1`
endDate="2019-06-18T06:00:00.000Z"
id="hostOverview"
indexNames={Array []}
+ isInDetailsSidePanel={false}
+ isLoadingAnomaliesData={false}
+ loading={false}
+ narrowDateRange={[MockFunction]}
+ startDate="2019-06-15T06:00:00.000Z"
+/>
+`;
+
+exports[`Host Summary Component rendering it renders the panel view Host Summary 1`] = `
+ (
-
-
-
-);
-
-export const EndpointOverview = React.memo(({ data }) => {
- const getDefaultRenderer = useCallback(
- (fieldName: string, fieldData: EndpointFields, attrName: string) => (
-
- ),
- []
- );
- const descriptionLists: Readonly = useMemo(
- () => [
- [
- {
- title: i18n.ENDPOINT_POLICY,
- description:
- data != null && data.endpointPolicy != null ? data.endpointPolicy : getEmptyTagValue(),
- },
- ],
- [
- {
- title: i18n.POLICY_STATUS,
- description:
- data != null && data.policyStatus != null ? (
-
- {data.policyStatus}
-
- ) : (
- getEmptyTagValue()
- ),
- },
+export const EndpointOverview = React.memo(
+ ({ contextID, data, isInDetailsSidePanel = false }) => {
+ const getDefaultRenderer = useCallback(
+ (fieldName: string, fieldData: EndpointFields, attrName: string) => (
+
+ ),
+ [contextID]
+ );
+ const descriptionLists: Readonly = useMemo(
+ () => [
+ [
+ {
+ title: i18n.ENDPOINT_POLICY,
+ description:
+ data != null && data.endpointPolicy != null
+ ? data.endpointPolicy
+ : getEmptyTagValue(),
+ },
+ ],
+ [
+ {
+ title: i18n.POLICY_STATUS,
+ description:
+ data != null && data.policyStatus != null ? (
+
+ {data.policyStatus}
+
+ ) : (
+ getEmptyTagValue()
+ ),
+ },
+ ],
+ [
+ {
+ title: i18n.SENSORVERSION,
+ description:
+ data != null && data.sensorVersion != null
+ ? getDefaultRenderer('sensorVersion', data, 'agent.version')
+ : getEmptyTagValue(),
+ },
+ ],
+ [], // needs 4 columns for design
],
- [
- {
- title: i18n.SENSORVERSION,
- description:
- data != null && data.sensorVersion != null
- ? getDefaultRenderer('sensorVersion', data, 'agent.version')
- : getEmptyTagValue(),
- },
- ],
- [], // needs 4 columns for design
- ],
- [data, getDefaultRenderer]
- );
+ [data, getDefaultRenderer]
+ );
- return (
- <>
- {descriptionLists.map((descriptionList, index) => getDescriptionList(descriptionList, index))}
- >
- );
-});
+ return (
+ <>
+ {descriptionLists.map((descriptionList, index) => (
+
+ ))}
+ >
+ );
+ }
+);
EndpointOverview.displayName = 'EndpointOverview';
diff --git a/x-pack/plugins/security_solution/public/overview/components/host_overview/index.test.tsx b/x-pack/plugins/security_solution/public/overview/components/host_overview/index.test.tsx
index 3292f0297fa2d..e1c12ac6383a6 100644
--- a/x-pack/plugins/security_solution/public/overview/components/host_overview/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/overview/components/host_overview/index.test.tsx
@@ -15,21 +15,39 @@ import { mockData } from './mock';
import { mockAnomalies } from '../../../common/components/ml/mock';
describe('Host Summary Component', () => {
describe('rendering', () => {
+ const mockProps = {
+ anomaliesData: mockAnomalies,
+ data: mockData.Hosts.edges[0].node,
+ docValueFields: [],
+ endDate: '2019-06-18T06:00:00.000Z',
+ id: 'hostOverview',
+ indexNames: [],
+ isInDetailsSidePanel: false,
+ isLoadingAnomaliesData: false,
+ loading: false,
+ narrowDateRange: jest.fn(),
+ startDate: '2019-06-15T06:00:00.000Z',
+ };
+
test('it renders the default Host Summary', () => {
const wrapper = shallow(
-
+
+
+ );
+
+ expect(wrapper.find('HostOverview')).toMatchSnapshot();
+ });
+
+ test('it renders the panel view Host Summary', () => {
+ const panelViewProps = {
+ ...mockProps,
+ isInDetailsSidePanel: true,
+ };
+
+ const wrapper = shallow(
+
+
);
diff --git a/x-pack/plugins/security_solution/public/overview/components/host_overview/index.tsx b/x-pack/plugins/security_solution/public/overview/components/host_overview/index.tsx
index 90dc681617328..de0d782b3ceb7 100644
--- a/x-pack/plugins/security_solution/public/overview/components/host_overview/index.tsx
+++ b/x-pack/plugins/security_solution/public/overview/components/host_overview/index.tsx
@@ -5,7 +5,7 @@
* 2.0.
*/
-import { EuiFlexItem, EuiHorizontalRule } from '@elastic/eui';
+import { EuiHorizontalRule } from '@elastic/eui';
import darkTheme from '@elastic/eui/dist/eui_theme_dark.json';
import lightTheme from '@elastic/eui/dist/eui_theme_light.json';
import { getOr } from 'lodash/fp';
@@ -27,7 +27,7 @@ import { hasMlUserPermissions } from '../../../../common/machine_learning/has_ml
import { useMlCapabilities } from '../../../common/components/ml/hooks/use_ml_capabilities';
import { AnomalyScores } from '../../../common/components/ml/score/anomaly_scores';
import { Anomalies, NarrowDateRange } from '../../../common/components/ml/types';
-import { DescriptionListStyled, OverviewWrapper } from '../../../common/components/page';
+import { OverviewWrapper } from '../../../common/components/page';
import {
FirstLastSeenHost,
FirstLastSeenHostType,
@@ -35,11 +35,14 @@ import {
import * as i18n from './translations';
import { EndpointOverview } from './endpoint_overview';
+import { OverviewDescriptionList } from '../../../common/components/overview_description_list';
interface HostSummaryProps {
+ contextID?: string; // used to provide unique draggable context when viewing in the side panel
data: HostItem;
docValueFields: DocValueFields[];
id: string;
+ isInDetailsSidePanel: boolean;
loading: boolean;
isLoadingAnomaliesData: boolean;
indexNames: string[];
@@ -49,19 +52,15 @@ interface HostSummaryProps {
narrowDateRange: NarrowDateRange;
}
-const getDescriptionList = (descriptionList: DescriptionList[], key: number) => (
-
-
-
-);
-
export const HostOverview = React.memo(
({
anomaliesData,
+ contextID,
data,
docValueFields,
endDate,
id,
+ isInDetailsSidePanel = false, // Rather than duplicate the component, alter the structure based on its location
isLoadingAnomaliesData,
indexNames,
loading,
@@ -77,10 +76,10 @@ export const HostOverview = React.memo(
),
- []
+ [contextID]
);
const column: DescriptionList[] = useMemo(
@@ -162,7 +161,7 @@ export const HostOverview = React.memo(
(ip != null ? : getEmptyTagValue())}
/>
),
@@ -198,17 +197,22 @@ export const HostOverview = React.memo(
},
],
],
- [data, firstColumn, getDefaultRenderer]
+ [contextID, data, firstColumn, getDefaultRenderer]
);
return (
<>
-
-
-
- {descriptionLists.map((descriptionList, index) =>
- getDescriptionList(descriptionList, index)
+
+ {!isInDetailsSidePanel && (
+
)}
+ {descriptionLists.map((descriptionList, index) => (
+
+ ))}
{loading && (
(
{data.endpoint != null ? (
<>
-
-
+
+
{loading && (
fieldNames.length > 0 && fieldNames.every((fieldName) => getOr(null, fieldName, data)) ? (
@@ -52,7 +53,9 @@ export const locationRenderer = (
{index ? ',\u00A0' : ''}
@@ -71,13 +74,16 @@ export const dateRenderer = (timestamp?: string | null): React.ReactElement => (
export const autonomousSystemRenderer = (
as: AutonomousSystem,
- flowTarget: FlowTarget
+ flowTarget: FlowTarget,
+ contextID?: string
): React.ReactElement =>
as && as.organization && as.organization.name && as.number ? (
@@ -85,7 +91,9 @@ export const autonomousSystemRenderer = (
{'/'}
@@ -96,12 +104,14 @@ export const autonomousSystemRenderer = (
);
interface HostIdRendererTypes {
+ contextID?: string;
host: HostEcs;
ipFilter?: string;
noLink?: boolean;
}
export const hostIdRenderer = ({
+ contextID,
host,
ipFilter,
noLink,
@@ -110,7 +120,9 @@ export const hostIdRenderer = ({
<>
{host.name && host.name[0] != null ? (
@@ -128,14 +140,20 @@ export const hostIdRenderer = ({
getEmptyTagValue()
);
-export const hostNameRenderer = (host?: HostEcs, ipFilter?: string): React.ReactElement =>
+export const hostNameRenderer = (
+ host?: HostEcs,
+ ipFilter?: string,
+ contextID?: string
+): React.ReactElement =>
host &&
host.name &&
host.name[0] &&
host.ip &&
(!(ipFilter != null) || host.ip.includes(ipFilter)) ? (
diff --git a/x-pack/plugins/security_solution/public/timelines/components/formatted_ip/index.tsx b/x-pack/plugins/security_solution/public/timelines/components/formatted_ip/index.tsx
index a3ac543ac6682..e1331f1b496ba 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/formatted_ip/index.tsx
+++ b/x-pack/plugins/security_solution/public/timelines/components/formatted_ip/index.tsx
@@ -6,9 +6,11 @@
*/
import { isArray, isEmpty, isString, uniq } from 'lodash/fp';
-import React, { useCallback, useMemo } from 'react';
+import React, { useCallback, useMemo, useContext } from 'react';
+import { useDispatch } from 'react-redux';
import deepEqual from 'fast-deep-equal';
+import { FlowTarget } from '../../../../common/search_strategy/security_solution/network';
import {
DragEffects,
DraggableWrapper,
@@ -16,13 +18,21 @@ import {
import { escapeDataProviderId } from '../../../common/components/drag_and_drop/helpers';
import { Content } from '../../../common/components/draggables';
import { getOrEmptyTagFromValue } from '../../../common/components/empty_value';
-import { NetworkDetailsLink } from '../../../common/components/links';
import { parseQueryValue } from '../../../timelines/components/timeline/body/renderers/parse_query_value';
import {
DataProvider,
IS_OPERATOR,
} from '../../../timelines/components/timeline/data_providers/data_provider';
import { Provider } from '../../../timelines/components/timeline/data_providers/provider';
+import {
+ TimelineExpandedDetailType,
+ TimelineId,
+ TimelineTabs,
+} from '../../../../common/types/timeline';
+import { activeTimeline } from '../../containers/active_timeline_context';
+import { timelineActions } from '../../store/timeline';
+import { StatefulEventContext } from '../timeline/body/events/stateful_event_context';
+import { LinkAnchor } from '../../../common/components/links';
const getUniqueId = ({
contextId,
@@ -128,22 +138,52 @@ const AddressLinksItemComponent: React.FC = ({
fieldName,
truncate,
}) => {
- const key = useMemo(
- () =>
- `address-links-draggable-wrapper-${getUniqueId({
- contextId,
- eventId,
- fieldName,
- address,
- })}`,
- [address, contextId, eventId, fieldName]
- );
+ const key = `address-links-draggable-wrapper-${getUniqueId({
+ contextId,
+ eventId,
+ fieldName,
+ address,
+ })}`;
const dataProviderProp = useMemo(
() => getDataProvider({ contextId, eventId, fieldName, address }),
[address, contextId, eventId, fieldName]
);
+ const dispatch = useDispatch();
+ const eventContext = useContext(StatefulEventContext);
+
+ const openNetworkDetailsSidePanel = useCallback(
+ (e) => {
+ e.preventDefault();
+ if (address && eventContext?.timelineID && eventContext?.tabType) {
+ const { tabType, timelineID } = eventContext;
+ const updatedExpandedDetail: TimelineExpandedDetailType = {
+ panelView: 'networkDetail',
+ params: {
+ ip: address,
+ flowTarget: fieldName.includes(FlowTarget.destination)
+ ? FlowTarget.destination
+ : FlowTarget.source,
+ },
+ };
+
+ dispatch(
+ timelineActions.toggleDetailPanel({
+ ...updatedExpandedDetail,
+ tabType,
+ timelineId: timelineID,
+ })
+ );
+
+ if (timelineID === TimelineId.active && tabType === TimelineTabs.query) {
+ activeTimeline.toggleExpandedDetail({ ...updatedExpandedDetail });
+ }
+ }
+ },
+ [dispatch, eventContext, address, fieldName]
+ );
+
const render = useCallback(
(_props, _provided, snapshot) =>
snapshot.isDragging ? (
@@ -152,10 +192,16 @@ const AddressLinksItemComponent: React.FC = ({
) : (
-