From c9081703f316ab428c3f2969bcdebfb571837e5d Mon Sep 17 00:00:00 2001
From: Josh Dover <me@joshdover.com>
Date: Fri, 14 Aug 2020 07:31:48 -0600
Subject: [PATCH 1/5] Remove degraded state from ES status service (#75007)
 (#75015)

---
 src/core/server/elasticsearch/status.test.ts | 6 +++---
 src/core/server/elasticsearch/status.ts      | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/core/server/elasticsearch/status.test.ts b/src/core/server/elasticsearch/status.test.ts
index ef7ca7cd04608..5dfadba4c88b2 100644
--- a/src/core/server/elasticsearch/status.test.ts
+++ b/src/core/server/elasticsearch/status.test.ts
@@ -65,7 +65,7 @@ describe('calculateStatus', () => {
     });
   });
 
-  it('changes to degraded when isCompatible and warningNodes present', async () => {
+  it('changes to available with a different message when isCompatible and warningNodes present', async () => {
     expect(
       await calculateStatus$(
         of({
@@ -81,7 +81,7 @@ describe('calculateStatus', () => {
         .pipe(take(2))
         .toPromise()
     ).toEqual({
-      level: ServiceStatusLevels.degraded,
+      level: ServiceStatusLevels.available,
       summary: 'Some nodes are a different version',
       meta: {
         incompatibleNodes: [],
@@ -188,7 +188,7 @@ describe('calculateStatus', () => {
           "summary": "Incompatible with Elasticsearch",
         },
         Object {
-          "level": degraded,
+          "level": available,
           "meta": Object {
             "incompatibleNodes": Array [],
             "warningNodes": Array [
diff --git a/src/core/server/elasticsearch/status.ts b/src/core/server/elasticsearch/status.ts
index 1eaa338af1239..1be32d03c60cb 100644
--- a/src/core/server/elasticsearch/status.ts
+++ b/src/core/server/elasticsearch/status.ts
@@ -55,7 +55,7 @@ export const calculateStatus$ = (
             };
           } else if (warningNodes.length > 0) {
             return {
-              level: ServiceStatusLevels.degraded,
+              level: ServiceStatusLevels.available,
               summary:
                 // Message should always be present, but this is a safe fallback
                 message ??

From ef650a249e17d8c03f33849f8724a13c50b60de6 Mon Sep 17 00:00:00 2001
From: Lee Drengenberg <lee.drengenberg@elastic.co>
Date: Fri, 14 Aug 2020 14:41:52 -0500
Subject: [PATCH 2/5] [7.9] [functional test][saved objects] update tests for
 additional copy saved objects to space (#74907) (#75070)

Co-authored-by: Larry Gregory <lgregorydev@gmail.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
---
 .../apps/management/_import_objects.js        | 21 ++---
 .../management/saved_objects_page.ts          | 91 +++++++++++++++++--
 .../apps/spaces/copy_saved_objects.ts         |  4 +-
 .../copy_saved_objects_to_space_page.ts       | 26 +-----
 4 files changed, 99 insertions(+), 43 deletions(-)

diff --git a/test/functional/apps/management/_import_objects.js b/test/functional/apps/management/_import_objects.js
index 03db3a2b108f2..5fbeb978f9a1c 100644
--- a/test/functional/apps/management/_import_objects.js
+++ b/test/functional/apps/management/_import_objects.js
@@ -49,12 +49,13 @@ export default function ({ getService, getPageObjects }) {
         await PageObjects.savedObjects.checkImportSucceeded();
         await PageObjects.savedObjects.clickImportDone();
 
-        // get all the elements in the table, and index them by the 'title' visible text field
-        const elements = keyBy(await PageObjects.savedObjects.getElementsInTable(), 'title');
         log.debug("check that 'Log Agents' is in table as a visualization");
-        expect(elements['Log Agents'].objectType).to.eql('visualization');
+        expect(await PageObjects.savedObjects.getObjectTypeByTitle('Log Agents')).to.eql(
+          'visualization'
+        );
+
+        await PageObjects.savedObjects.clickRelationshipsByTitle('logstash-*');
 
-        await elements['logstash-*'].relationshipsElement.click();
         const flyout = keyBy(await PageObjects.savedObjects.getRelationshipFlyout(), 'title');
         log.debug(
           "check that 'Shared-Item Visualization AreaChart' shows 'logstash-*' as it's Parent"
@@ -150,8 +151,7 @@ export default function ({ getService, getPageObjects }) {
       });
 
       it('should not import saved objects linked to saved searches when saved search index pattern does not exist', async function () {
-        const elements = keyBy(await PageObjects.savedObjects.getElementsInTable(), 'title');
-        await elements['logstash-*'].checkbox.click();
+        await PageObjects.savedObjects.clickCheckboxByTitle('logstash-*');
         await PageObjects.savedObjects.clickDelete();
 
         await PageObjects.savedObjects.importFile(
@@ -182,8 +182,7 @@ export default function ({ getService, getPageObjects }) {
 
       it('should import saved objects with index patterns when index patterns does not exists', async () => {
         // First, we need to delete the index pattern
-        const elements = keyBy(await PageObjects.savedObjects.getElementsInTable(), 'title');
-        await elements['logstash-*'].checkbox.click();
+        await PageObjects.savedObjects.clickCheckboxByTitle('logstash-*');
         await PageObjects.savedObjects.clickDelete();
 
         // Then, import the objects
@@ -321,8 +320,7 @@ export default function ({ getService, getPageObjects }) {
         await PageObjects.savedObjects.clickImportDone();
 
         // Second, we need to delete the index pattern
-        const elements = keyBy(await PageObjects.savedObjects.getElementsInTable(), 'title');
-        await elements['logstash-*'].checkbox.click();
+        await PageObjects.savedObjects.clickCheckboxByTitle('logstash-*');
         await PageObjects.savedObjects.clickDelete();
 
         // Last, import a saved object connected to the saved search
@@ -353,8 +351,7 @@ export default function ({ getService, getPageObjects }) {
 
       it('should import saved objects with index patterns when index patterns does not exists', async () => {
         // First, we need to delete the index pattern
-        const elements = keyBy(await PageObjects.savedObjects.getElementsInTable(), 'title');
-        await elements['logstash-*'].checkbox.click();
+        await PageObjects.savedObjects.clickCheckboxByTitle('logstash-*');
         await PageObjects.savedObjects.clickDelete();
 
         // Then, import the objects
diff --git a/test/functional/page_objects/management/saved_objects_page.ts b/test/functional/page_objects/management/saved_objects_page.ts
index 03d21aa4aa52f..ad82ea9b6fbc1 100644
--- a/test/functional/page_objects/management/saved_objects_page.ts
+++ b/test/functional/page_objects/management/saved_objects_page.ts
@@ -17,6 +17,7 @@
  * under the License.
  */
 
+import { keyBy } from 'lodash';
 import { map as mapAsync } from 'bluebird';
 import { FtrProviderContext } from '../../ftr_provider_context';
 
@@ -34,6 +35,8 @@ export function SavedObjectsPageProvider({ getService, getPageObjects }: FtrProv
       await searchBox.clearValue();
       await searchBox.type(objectName);
       await searchBox.pressKeys(browser.keys.ENTER);
+      await PageObjects.header.waitUntilLoadingHasFinished();
+      await this.waitTableIsLoaded();
     }
 
     async importFile(path: string, overwriteAll = true) {
@@ -99,6 +102,56 @@ export function SavedObjectsPageProvider({ getService, getPageObjects }: FtrProv
       });
     }
 
+    async clickRelationshipsByTitle(title: string) {
+      const table = keyBy(await this.getElementsInTable(), 'title');
+      // should we check if table size > 0 and log error if not?
+      if (table[title].menuElement) {
+        log.debug(`we found a context menu element for (${title}) so click it`);
+        await table[title].menuElement?.click();
+        // Wait for context menu to render
+        const menuPanel = await find.byCssSelector('.euiContextMenuPanel');
+        await (await menuPanel.findByTestSubject('savedObjectsTableAction-relationships')).click();
+      } else {
+        log.debug(
+          `we didn't find a menu element so should be a relationships element for (${title}) to click`
+        );
+        // or the action elements are on the row without the menu
+        await table[title].relationshipsElement?.click();
+      }
+    }
+
+    async clickCopyToSpaceByTitle(title: string) {
+      const table = keyBy(await this.getElementsInTable(), 'title');
+      // should we check if table size > 0 and log error if not?
+      if (table[title].menuElement) {
+        log.debug(`we found a context menu element for (${title}) so click it`);
+        await table[title].menuElement?.click();
+        // Wait for context menu to render
+        const menuPanel = await find.byCssSelector('.euiContextMenuPanel');
+        await (
+          await menuPanel.findByTestSubject('savedObjectsTableAction-copy_saved_objects_to_space')
+        ).click();
+      } else {
+        log.debug(
+          `we didn't find a menu element so should be a "copy to space" element for (${title}) to click`
+        );
+        // or the action elements are on the row without the menu
+        await table[title].copySaveObjectsElement?.click();
+      }
+    }
+
+    async clickCheckboxByTitle(title: string) {
+      const table = keyBy(await this.getElementsInTable(), 'title');
+      // should we check if table size > 0 and log error if not?
+      await table[title].checkbox.click();
+    }
+
+    async getObjectTypeByTitle(title: string) {
+      const table = keyBy(await this.getElementsInTable(), 'title');
+      // should we check if table size > 0 and log error if not?
+      return table[title].objectType;
+    }
+
     async getElementsInTable() {
       const rows = await testSubjects.findAll('~savedObjectsTableRow');
       return mapAsync(rows, async (row) => {
@@ -107,23 +160,45 @@ export function SavedObjectsPageProvider({ getService, getPageObjects }: FtrProv
         const objectType = await row.findByTestSubject('objectType');
         const titleElement = await row.findByTestSubject('savedObjectsTableRowTitle');
         // not all rows have inspect button - Advanced Settings objects don't
-        let inspectElement;
-        const innerHtml = await row.getAttribute('innerHTML');
-        if (innerHtml.includes('Inspect')) {
+        // Advanced Settings has 2 actions,
+        //   data-test-subj="savedObjectsTableAction-relationships"
+        //   data-test-subj="savedObjectsTableAction-copy_saved_objects_to_space"
+        // Some other objects have the ...
+        //   data-test-subj="euiCollapsedItemActionsButton"
+        // Maybe some objects still have the inspect element visible?
+        // !!! Also note that since we don't have spaces on OSS, the actions for the same object can be different depending on OSS or not
+        let menuElement = null;
+        let inspectElement = null;
+        let relationshipsElement = null;
+        let copySaveObjectsElement = null;
+        const actions = await row.findByClassName('euiTableRowCell--hasActions');
+        // getting the innerHTML and checking if it 'includes' a string is faster than a timeout looking for each element
+        const actionsHTML = await actions.getAttribute('innerHTML');
+        if (actionsHTML.includes('euiCollapsedItemActionsButton')) {
+          menuElement = await row.findByTestSubject('euiCollapsedItemActionsButton');
+        }
+        if (actionsHTML.includes('savedObjectsTableAction-inspect')) {
           inspectElement = await row.findByTestSubject('savedObjectsTableAction-inspect');
-        } else {
-          inspectElement = null;
         }
-        const relationshipsElement = await row.findByTestSubject(
-          'savedObjectsTableAction-relationships'
-        );
+        if (actionsHTML.includes('savedObjectsTableAction-relationships')) {
+          relationshipsElement = await row.findByTestSubject(
+            'savedObjectsTableAction-relationships'
+          );
+        }
+        if (actionsHTML.includes('savedObjectsTableAction-copy_saved_objects_to_space')) {
+          copySaveObjectsElement = await row.findByTestSubject(
+            'savedObjectsTableAction-copy_saved_objects_to_space'
+          );
+        }
         return {
           checkbox,
           objectType: await objectType.getAttribute('aria-label'),
           titleElement,
           title: await titleElement.getVisibleText(),
+          menuElement,
           inspectElement,
           relationshipsElement,
+          copySaveObjectsElement,
         };
       });
     }
diff --git a/x-pack/test/functional/apps/spaces/copy_saved_objects.ts b/x-pack/test/functional/apps/spaces/copy_saved_objects.ts
index 074b6fc528157..05d497c235dad 100644
--- a/x-pack/test/functional/apps/spaces/copy_saved_objects.ts
+++ b/x-pack/test/functional/apps/spaces/copy_saved_objects.ts
@@ -15,8 +15,7 @@ export default function spaceSelectorFunctonalTests({
   const testSubjects = getService('testSubjects');
   const PageObjects = getPageObjects(['security', 'settings', 'copySavedObjectsToSpace']);
 
-  // TODO: Flakey again https://github.com/elastic/kibana/issues/44575#issuecomment-528864287
-  describe.skip('Copy Saved Objects to Space', function () {
+  describe('Copy Saved Objects to Space', function () {
     before(async () => {
       await esArchiver.load('spaces/copy_saved_objects');
 
@@ -32,6 +31,7 @@ export default function spaceSelectorFunctonalTests({
         disabledFeatures: [],
       });
 
+      await PageObjects.security.forceLogout();
       await PageObjects.security.login(undefined, undefined, {
         expectSpaceSelector: true,
       });
diff --git a/x-pack/test/functional/page_objects/copy_saved_objects_to_space_page.ts b/x-pack/test/functional/page_objects/copy_saved_objects_to_space_page.ts
index 03596aa68dbc6..629a86520389d 100644
--- a/x-pack/test/functional/page_objects/copy_saved_objects_to_space_page.ts
+++ b/x-pack/test/functional/page_objects/copy_saved_objects_to_space_page.ts
@@ -15,31 +15,15 @@ export function CopySavedObjectsToSpacePageProvider({
   getPageObjects,
 }: FtrProviderContext) {
   const testSubjects = getService('testSubjects');
-  const find = getService('find');
-  const { savedObjects } = getPageObjects(['savedObjects']);
+  const { savedObjects, common } = getPageObjects(['savedObjects', 'common']);
 
   return {
     async openCopyToSpaceFlyoutForObject(objectName: string) {
+      // This searchForObject narrows down the objects to those matching ANY of the words in the objectName.
+      // Hopefully the one we want is on the first page of results.
       await savedObjects.searchForObject(objectName);
-
-      // Click action button to show context menu
-      await find.clickByCssSelector(
-        'table.euiTable tbody tr.euiTableRow td.euiTableRowCell:last-child .euiButtonIcon'
-      );
-
-      // Wait for context menu to render
-      await find.existsByCssSelector('.euiContextMenuPanel');
-
-      const actions = await find.allByCssSelector('.euiContextMenuItem');
-
-      for (const action of actions) {
-        const actionText = await action.getVisibleText();
-        if (actionText === 'Copy to space') {
-          await action.click();
-          break;
-        }
-      }
-
+      await common.sleep(1000);
+      await savedObjects.clickCopyToSpaceByTitle(objectName);
       await testSubjects.existOrFail('copy-to-space-flyout');
     },
 

From 5ba2020fc769e37065694241fbdf5c0f97f29bdb Mon Sep 17 00:00:00 2001
From: Brian Seeders <brian.seeders@elastic.co>
Date: Fri, 14 Aug 2020 16:43:09 -0400
Subject: [PATCH 3/5] [7.9] [CI] Add pipeline task queue framework and merge
 workers into one (#71268) (#74542)

---
 .ci/Dockerfile                                |  38 +++
 .ci/Jenkinsfile_baseline_capture              |  28 +-
 .ci/runbld_no_junit.yml                       |   2 +-
 .gitignore                                    |   2 +
 Jenkinsfile                                   |  44 +---
 packages/kbn-dev-utils/src/run/help.test.ts   |   6 +-
 src/dev/ci_setup/checkout_sibling_es.sh       |  12 +-
 src/dev/ci_setup/setup_env.sh                 |   4 +-
 src/dev/notice/generate_notice_from_source.ts |  11 +-
 tasks/test_jest.js                            |   6 +-
 test/scripts/checks/doc_api_changes.sh        |   5 +
 test/scripts/checks/file_casing.sh            |   5 +
 test/scripts/checks/i18n.sh                   |   5 +
 test/scripts/checks/licenses.sh               |   5 +
 test/scripts/checks/lock_file_symlinks.sh     |   5 +
 test/scripts/checks/telemetry.sh              |   5 +
 test/scripts/checks/test_hardening.sh         |   5 +
 test/scripts/checks/test_projects.sh          |   5 +
 test/scripts/checks/ts_projects.sh            |   5 +
 test/scripts/checks/type_check.sh             |   5 +
 .../checks/verify_dependency_versions.sh      |   5 +
 test/scripts/checks/verify_notice.sh          |   5 +
 test/scripts/jenkins_accessibility.sh         |   2 +-
 .../jenkins_build_kbn_sample_panel_action.sh  |   0
 test/scripts/jenkins_build_kibana.sh          |  13 +-
 test/scripts/jenkins_build_plugins.sh         |  12 +
 test/scripts/jenkins_ci_group.sh              |   2 +-
 test/scripts/jenkins_firefox_smoke.sh         |   2 +-
 test/scripts/jenkins_plugin_functional.sh     |  15 ++
 .../jenkins_security_solution_cypress.sh      |   8 +-
 .../jenkins_setup_parallel_workspace.sh       |  32 +++
 test/scripts/jenkins_test_setup.sh            |   6 +
 test/scripts/jenkins_test_setup_oss.sh        |  15 +-
 test/scripts/jenkins_test_setup_xpack.sh      |  15 +-
 test/scripts/jenkins_xpack_accessibility.sh   |   2 +-
 test/scripts/jenkins_xpack_build_kibana.sh    |  17 +-
 test/scripts/jenkins_xpack_build_plugins.sh   |  14 +
 ...nkins_xpack_saved_objects_field_metrics.sh |   2 +-
 .../jenkins_xpack_visual_regression.sh        |   5 +-
 test/scripts/lint/eslint.sh                   |   5 +
 test/scripts/lint/sasslint.sh                 |   5 +
 test/scripts/test/api_integration.sh          |   5 +
 test/scripts/test/jest_integration.sh         |   5 +
 test/scripts/test/jest_unit.sh                |   5 +
 test/scripts/test/karma_ci.sh                 |   5 +
 test/scripts/test/mocha.sh                    |   5 +
 test/scripts/test/safer_lodash_set.sh         |   5 +
 test/scripts/test/xpack_jest_unit.sh          |   6 +
 test/scripts/test/xpack_karma.sh              |   6 +
 .../test/xpack_list_cyclic_dependency.sh      |   6 +
 .../test/xpack_siem_cyclic_dependency.sh      |   6 +
 vars/catchErrors.groovy                       |  11 +-
 vars/kibanaPipeline.groovy                    | 241 +++++++++++++++---
 vars/task.groovy                              |   5 +
 vars/tasks.groovy                             | 119 +++++++++
 vars/withTaskQueue.groovy                     | 154 +++++++++++
 vars/workers.groovy                           |  12 +-
 .../canvas/.storybook/storyshots.test.js      |  10 +-
 58 files changed, 842 insertions(+), 149 deletions(-)
 create mode 100644 .ci/Dockerfile
 create mode 100755 test/scripts/checks/doc_api_changes.sh
 create mode 100755 test/scripts/checks/file_casing.sh
 create mode 100755 test/scripts/checks/i18n.sh
 create mode 100755 test/scripts/checks/licenses.sh
 create mode 100755 test/scripts/checks/lock_file_symlinks.sh
 create mode 100755 test/scripts/checks/telemetry.sh
 create mode 100755 test/scripts/checks/test_hardening.sh
 create mode 100755 test/scripts/checks/test_projects.sh
 create mode 100755 test/scripts/checks/ts_projects.sh
 create mode 100755 test/scripts/checks/type_check.sh
 create mode 100755 test/scripts/checks/verify_dependency_versions.sh
 create mode 100755 test/scripts/checks/verify_notice.sh
 mode change 100644 => 100755 test/scripts/jenkins_build_kbn_sample_panel_action.sh
 create mode 100755 test/scripts/jenkins_build_plugins.sh
 create mode 100755 test/scripts/jenkins_plugin_functional.sh
 mode change 100644 => 100755 test/scripts/jenkins_security_solution_cypress.sh
 create mode 100755 test/scripts/jenkins_setup_parallel_workspace.sh
 mode change 100644 => 100755 test/scripts/jenkins_test_setup.sh
 mode change 100644 => 100755 test/scripts/jenkins_test_setup_oss.sh
 mode change 100644 => 100755 test/scripts/jenkins_test_setup_xpack.sh
 create mode 100755 test/scripts/jenkins_xpack_build_plugins.sh
 create mode 100755 test/scripts/lint/eslint.sh
 create mode 100755 test/scripts/lint/sasslint.sh
 create mode 100755 test/scripts/test/api_integration.sh
 create mode 100755 test/scripts/test/jest_integration.sh
 create mode 100755 test/scripts/test/jest_unit.sh
 create mode 100755 test/scripts/test/karma_ci.sh
 create mode 100755 test/scripts/test/mocha.sh
 create mode 100755 test/scripts/test/safer_lodash_set.sh
 create mode 100755 test/scripts/test/xpack_jest_unit.sh
 create mode 100755 test/scripts/test/xpack_karma.sh
 create mode 100755 test/scripts/test/xpack_list_cyclic_dependency.sh
 create mode 100755 test/scripts/test/xpack_siem_cyclic_dependency.sh
 create mode 100644 vars/task.groovy
 create mode 100644 vars/tasks.groovy
 create mode 100644 vars/withTaskQueue.groovy

diff --git a/.ci/Dockerfile b/.ci/Dockerfile
new file mode 100644
index 0000000000000..d90d9f4710b5b
--- /dev/null
+++ b/.ci/Dockerfile
@@ -0,0 +1,38 @@
+# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
+# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts
+
+ARG NODE_VERSION=10.21.0
+
+FROM node:${NODE_VERSION} AS base
+
+RUN apt-get update && \
+    apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
+      libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
+      libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
+      libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
+      libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
+  && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
+  && apt-get update \
+  && apt-get install -y rsync jq bsdtar google-chrome-stable \
+  --no-install-recommends \
+  && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
+  && curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
+  && unzip vault.zip \
+  && rm vault.zip \
+  && chmod +x vault \
+  && mv vault /usr/local/bin/vault
+
+RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana
+
+COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
+RUN chmod +x /usr/local/bin/bash_standard_lib.sh
+
+COPY ./runbld /usr/local/bin/runbld
+RUN chmod +x /usr/local/bin/runbld
+
+USER kibana
diff --git a/.ci/Jenkinsfile_baseline_capture b/.ci/Jenkinsfile_baseline_capture
index b0d3591821642..9a49c19b94df2 100644
--- a/.ci/Jenkinsfile_baseline_capture
+++ b/.ci/Jenkinsfile_baseline_capture
@@ -7,18 +7,22 @@ kibanaPipeline(timeoutMinutes: 120) {
   githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
     ciStats.trackBuild {
       catchError {
-        parallel([
-          'oss-visualRegression': {
-            workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
-              kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')(1)
-            }
-          },
-          'xpack-visualRegression': {
-            workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
-              kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')(1)
-            }
-          },
-        ])
+        withEnv([
+          'CI_PARALLEL_PROCESS_NUMBER=1'
+        ]) {
+          parallel([
+            'oss-visualRegression': {
+              workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
+                kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')()
+              }
+            },
+            'xpack-visualRegression': {
+              workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
+                kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')()
+              }
+            },
+          ])
+        }
       }
 
       kibanaPipeline.sendMail()
diff --git a/.ci/runbld_no_junit.yml b/.ci/runbld_no_junit.yml
index 67b5002c1c437..1bcb7e22a2648 100644
--- a/.ci/runbld_no_junit.yml
+++ b/.ci/runbld_no_junit.yml
@@ -3,4 +3,4 @@
 profiles:
 - ".*": # Match any job
     tests:
-      junit-filename-pattern: "8d8bd494-d909-4e67-a052-7e8b5aaeb5e4" # A bogus path that should never exist
+      junit-filename-pattern: false
diff --git a/.gitignore b/.gitignore
index dfd02de7b1186..1d12ef2a9cff3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,6 +49,8 @@ npm-debug.log*
 .tern-project
 .nyc_output
 .ci/pipeline-library/build/
+.ci/runbld
+.ci/bash_standard_lib.sh
 .gradle
 
 # apm plugin
diff --git a/Jenkinsfile b/Jenkinsfile
index 00529e834bd25..3b68cde206573 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -9,49 +9,7 @@ kibanaPipeline(timeoutMinutes: 155, checkPrChanges: true, setCommitStatus: true)
       ciStats.trackBuild {
         catchError {
           retryable.enable()
-          parallel([
-            'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
-            'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
-            'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
-              'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
-              'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
-              'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
-              'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
-              'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
-              'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
-              'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
-              'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
-              'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
-              'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
-              'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
-              'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
-              'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
-              'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
-              // 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
-            ]),
-            'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
-              'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
-              'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
-              'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
-              'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
-              'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
-              'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
-              'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
-              'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
-              'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
-              'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
-              'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
-              'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
-              'xpack-savedObjectsFieldMetrics': kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'),
-              // 'xpack-pageLoadMetrics': kibanaPipeline.functionalTestProcess('xpack-pageLoadMetrics', './test/scripts/jenkins_xpack_page_load_metrics.sh'),
-              'xpack-securitySolutionCypress': { processNumber ->
-                whenChanged(['x-pack/plugins/security_solution/', 'x-pack/test/security_solution_cypress/', 'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/', 'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx']) {
-                  kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh')(processNumber)
-                }
-              },
-              // 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
-            ]),
-          ])
+          kibanaPipeline.allCiTasks()
         }
       }
     }
diff --git a/packages/kbn-dev-utils/src/run/help.test.ts b/packages/kbn-dev-utils/src/run/help.test.ts
index 27be7ad28b81a..300f1cba7eb7d 100644
--- a/packages/kbn-dev-utils/src/run/help.test.ts
+++ b/packages/kbn-dev-utils/src/run/help.test.ts
@@ -57,7 +57,7 @@ const barCommand: Command<any> = {
   usage: 'bar [...names]',
 };
 
-describe('getHelp()', () => {
+describe.skip('getHelp()', () => {
   it('returns the expected output', () => {
     expect(
       getHelp({
@@ -95,7 +95,7 @@ describe('getHelp()', () => {
   });
 });
 
-describe('getCommandLevelHelp()', () => {
+describe.skip('getCommandLevelHelp()', () => {
   it('returns the expected output', () => {
     expect(
       getCommandLevelHelp({
@@ -141,7 +141,7 @@ describe('getCommandLevelHelp()', () => {
   });
 });
 
-describe('getHelpForAllCommands()', () => {
+describe.skip('getHelpForAllCommands()', () => {
   it('returns the expected output', () => {
     expect(
       getHelpForAllCommands({
diff --git a/src/dev/ci_setup/checkout_sibling_es.sh b/src/dev/ci_setup/checkout_sibling_es.sh
index 915759d4214f9..3832ec9b4076a 100755
--- a/src/dev/ci_setup/checkout_sibling_es.sh
+++ b/src/dev/ci_setup/checkout_sibling_es.sh
@@ -7,10 +7,11 @@ function checkout_sibling {
   targetDir=$2
   useExistingParamName=$3
   useExisting="$(eval "echo "\$$useExistingParamName"")"
+  repoAddress="https://github.com/"
 
   if [ -z ${useExisting:+x} ]; then
     if [ -d "$targetDir" ]; then
-      echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$PARENT_DIR]!"
+      echo "I expected a clean workspace but an '${project}' sibling directory already exists in [$WORKSPACE]!"
       echo
       echo "Either define '${useExistingParamName}' or remove the existing '${project}' sibling."
       exit 1
@@ -21,8 +22,9 @@ function checkout_sibling {
     cloneBranch=""
 
     function clone_target_is_valid {
+
       echo " -> checking for '${cloneBranch}' branch at ${cloneAuthor}/${project}"
-      if [[ -n "$(git ls-remote --heads "git@github.com:${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
+      if [[ -n "$(git ls-remote --heads "${repoAddress}${cloneAuthor}/${project}.git" ${cloneBranch} 2>/dev/null)" ]]; then
         return 0
       else
         return 1
@@ -71,7 +73,7 @@ function checkout_sibling {
       fi
 
       echo " -> checking out '${cloneBranch}' branch from ${cloneAuthor}/${project}..."
-      git clone -b "$cloneBranch" "git@github.com:${cloneAuthor}/${project}.git" "$targetDir" --depth=1
+      git clone -b "$cloneBranch" "${repoAddress}${cloneAuthor}/${project}.git" "$targetDir" --depth=1
       echo " -> checked out ${project} revision: $(git -C "${targetDir}" rev-parse HEAD)"
       echo
     }
@@ -87,12 +89,12 @@ function checkout_sibling {
   fi
 }
 
-checkout_sibling "elasticsearch" "${PARENT_DIR}/elasticsearch" "USE_EXISTING_ES"
+checkout_sibling "elasticsearch" "${WORKSPACE}/elasticsearch" "USE_EXISTING_ES"
 export TEST_ES_FROM=${TEST_ES_FROM:-snapshot}
 
 # Set the JAVA_HOME based on the Java property file in the ES repo
 # This assumes the naming convention used on CI (ex: ~/.java/java10)
-ES_DIR="$PARENT_DIR/elasticsearch"
+ES_DIR="$WORKSPACE/elasticsearch"
 ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
 
 
diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh
index 343ff47199375..f96a2240917e2 100644
--- a/src/dev/ci_setup/setup_env.sh
+++ b/src/dev/ci_setup/setup_env.sh
@@ -53,6 +53,8 @@ export PARENT_DIR="$parentDir"
 kbnBranch="$(jq -r .branch "$KIBANA_DIR/package.json")"
 export KIBANA_PKG_BRANCH="$kbnBranch"
 
+export WORKSPACE="${WORKSPACE:-$PARENT_DIR}"
+
 ###
 ### download node
 ###
@@ -161,7 +163,7 @@ export -f checks-reporter-with-killswitch
 
 source "$KIBANA_DIR/src/dev/ci_setup/load_env_keys.sh"
 
-ES_DIR="$PARENT_DIR/elasticsearch"
+ES_DIR="$WORKSPACE/elasticsearch"
 ES_JAVA_PROP_PATH=$ES_DIR/.ci/java-versions.properties
 
 if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then
diff --git a/src/dev/notice/generate_notice_from_source.ts b/src/dev/notice/generate_notice_from_source.ts
index 4d516e5ac0ffc..0bef5bc5f32d4 100644
--- a/src/dev/notice/generate_notice_from_source.ts
+++ b/src/dev/notice/generate_notice_from_source.ts
@@ -47,10 +47,13 @@ export async function generateNoticeFromSource({ productName, directory, log }:
     cwd: directory,
     nodir: true,
     ignore: [
-      '{node_modules,build,target,dist,optimize,built_assets}/**',
-      'packages/*/{node_modules,build,target,dist}/**',
-      'x-pack/{node_modules,build,target,dist,optimize}/**',
-      'x-pack/packages/*/{node_modules,build,target,dist}/**',
+      '{node_modules,build,dist,data,built_assets}/**',
+      'packages/*/{node_modules,build,dist}/**',
+      'src/plugins/*/{node_modules,build,dist}/**',
+      'x-pack/{node_modules,build,dist,data}/**',
+      'x-pack/packages/*/{node_modules,build,dist}/**',
+      'x-pack/plugins/*/{node_modules,build,dist}/**',
+      '**/target/**',
     ],
   };
 
diff --git a/tasks/test_jest.js b/tasks/test_jest.js
index d8f51806e8ddc..810ed42324840 100644
--- a/tasks/test_jest.js
+++ b/tasks/test_jest.js
@@ -22,7 +22,7 @@ const { resolve } = require('path');
 module.exports = function (grunt) {
   grunt.registerTask('test:jest', function () {
     const done = this.async();
-    runJest(resolve(__dirname, '../scripts/jest.js')).then(done, done);
+    runJest(resolve(__dirname, '../scripts/jest.js'), ['--maxWorkers=10']).then(done, done);
   });
 
   grunt.registerTask('test:jest_integration', function () {
@@ -30,10 +30,10 @@ module.exports = function (grunt) {
     runJest(resolve(__dirname, '../scripts/jest_integration.js')).then(done, done);
   });
 
-  function runJest(jestScript) {
+  function runJest(jestScript, args = []) {
     const serverCmd = {
       cmd: 'node',
-      args: [jestScript, '--ci'],
+      args: [jestScript, '--ci', ...args],
       opts: { stdio: 'inherit' },
     };
 
diff --git a/test/scripts/checks/doc_api_changes.sh b/test/scripts/checks/doc_api_changes.sh
new file mode 100755
index 0000000000000..503d12b2f6d73
--- /dev/null
+++ b/test/scripts/checks/doc_api_changes.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:checkDocApiChanges
diff --git a/test/scripts/checks/file_casing.sh b/test/scripts/checks/file_casing.sh
new file mode 100755
index 0000000000000..513664263791b
--- /dev/null
+++ b/test/scripts/checks/file_casing.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:checkFileCasing
diff --git a/test/scripts/checks/i18n.sh b/test/scripts/checks/i18n.sh
new file mode 100755
index 0000000000000..7a6fd46c46c76
--- /dev/null
+++ b/test/scripts/checks/i18n.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:i18nCheck
diff --git a/test/scripts/checks/licenses.sh b/test/scripts/checks/licenses.sh
new file mode 100755
index 0000000000000..a08d7d07a24a1
--- /dev/null
+++ b/test/scripts/checks/licenses.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:licenses
diff --git a/test/scripts/checks/lock_file_symlinks.sh b/test/scripts/checks/lock_file_symlinks.sh
new file mode 100755
index 0000000000000..1d43d32c9feb8
--- /dev/null
+++ b/test/scripts/checks/lock_file_symlinks.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:checkLockfileSymlinks
diff --git a/test/scripts/checks/telemetry.sh b/test/scripts/checks/telemetry.sh
new file mode 100755
index 0000000000000..c74ec295b385c
--- /dev/null
+++ b/test/scripts/checks/telemetry.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:telemetryCheck
diff --git a/test/scripts/checks/test_hardening.sh b/test/scripts/checks/test_hardening.sh
new file mode 100755
index 0000000000000..9184758577654
--- /dev/null
+++ b/test/scripts/checks/test_hardening.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_hardening
diff --git a/test/scripts/checks/test_projects.sh b/test/scripts/checks/test_projects.sh
new file mode 100755
index 0000000000000..5f9aafe80e10e
--- /dev/null
+++ b/test/scripts/checks/test_projects.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_projects
diff --git a/test/scripts/checks/ts_projects.sh b/test/scripts/checks/ts_projects.sh
new file mode 100755
index 0000000000000..d667c753baec2
--- /dev/null
+++ b/test/scripts/checks/ts_projects.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:checkTsProjects
diff --git a/test/scripts/checks/type_check.sh b/test/scripts/checks/type_check.sh
new file mode 100755
index 0000000000000..07c49638134be
--- /dev/null
+++ b/test/scripts/checks/type_check.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:typeCheck
diff --git a/test/scripts/checks/verify_dependency_versions.sh b/test/scripts/checks/verify_dependency_versions.sh
new file mode 100755
index 0000000000000..b73a71e7ff7fd
--- /dev/null
+++ b/test/scripts/checks/verify_dependency_versions.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:verifyDependencyVersions
diff --git a/test/scripts/checks/verify_notice.sh b/test/scripts/checks/verify_notice.sh
new file mode 100755
index 0000000000000..9f8343e540861
--- /dev/null
+++ b/test/scripts/checks/verify_notice.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:verifyNotice
diff --git a/test/scripts/jenkins_accessibility.sh b/test/scripts/jenkins_accessibility.sh
index c122d71b58edb..fa7cbd41d7078 100755
--- a/test/scripts/jenkins_accessibility.sh
+++ b/test/scripts/jenkins_accessibility.sh
@@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_oss.sh
 checks-reporter-with-killswitch "Kibana accessibility tests" \
   node scripts/functional_tests \
     --debug --bail \
-    --kibana-install-dir "$installDir" \
+    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
     --config test/accessibility/config.ts;
diff --git a/test/scripts/jenkins_build_kbn_sample_panel_action.sh b/test/scripts/jenkins_build_kbn_sample_panel_action.sh
old mode 100644
new mode 100755
diff --git a/test/scripts/jenkins_build_kibana.sh b/test/scripts/jenkins_build_kibana.sh
index 0960e12ed99e9..55eb06a43864c 100755
--- a/test/scripts/jenkins_build_kibana.sh
+++ b/test/scripts/jenkins_build_kibana.sh
@@ -2,13 +2,9 @@
 
 source src/dev/ci_setup/setup_env.sh
 
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
-  --oss \
-  --filter '!alertingExample' \
-  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
-  --verbose;
+if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
+  ./test/scripts/jenkins_build_plugins.sh
+fi
 
 # doesn't persist, also set in kibanaPipeline.groovy
 export KBN_NP_PLUGINS_BUILT=true
@@ -18,3 +14,6 @@ yarn run grunt functionalTests:ensureAllTestsInCiGroup;
 
 echo " -> building and extracting OSS Kibana distributable for use in functional tests"
 node scripts/build --debug --oss
+
+mkdir -p "$WORKSPACE/kibana-build-oss"
+cp -pR build/oss/kibana-*-SNAPSHOT-linux-x86_64/. $WORKSPACE/kibana-build-oss/
diff --git a/test/scripts/jenkins_build_plugins.sh b/test/scripts/jenkins_build_plugins.sh
new file mode 100755
index 0000000000000..0c3ee4e3f261f
--- /dev/null
+++ b/test/scripts/jenkins_build_plugins.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+echo " -> building kibana platform plugins"
+node scripts/build_kibana_platform_plugins \
+  --oss \
+  --filter '!alertingExample' \
+  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \
+  --workers 6 \
+  --verbose
diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh
index 9110a13c452c3..2bda9dc367852 100755
--- a/test/scripts/jenkins_ci_group.sh
+++ b/test/scripts/jenkins_ci_group.sh
@@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup_oss.sh
 
 checks-reporter-with-killswitch "Functional tests / Group ${CI_GROUP}" yarn run grunt "run:functionalTests_ciGroup${CI_GROUP}";
 
-if [ "$CI_GROUP" == "1" ]; then
+if [[ ! "$TASK_QUEUE_PROCESS_ID" && "$CI_GROUP" == "1" ]]; then
   source test/scripts/jenkins_build_kbn_sample_panel_action.sh
   yarn run grunt run:pluginFunctionalTestsRelease --from=source;
   yarn run grunt run:exampleFunctionalTestsRelease --from=source;
diff --git a/test/scripts/jenkins_firefox_smoke.sh b/test/scripts/jenkins_firefox_smoke.sh
index 2bba6e06d76d7..247ab360b7912 100755
--- a/test/scripts/jenkins_firefox_smoke.sh
+++ b/test/scripts/jenkins_firefox_smoke.sh
@@ -5,6 +5,6 @@ source test/scripts/jenkins_test_setup_oss.sh
 checks-reporter-with-killswitch "Firefox smoke test" \
   node scripts/functional_tests \
     --bail --debug \
-    --kibana-install-dir "$installDir" \
+    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
     --include-tag "includeFirefox" \
     --config test/functional/config.firefox.js;
diff --git a/test/scripts/jenkins_plugin_functional.sh b/test/scripts/jenkins_plugin_functional.sh
new file mode 100755
index 0000000000000..1d691d98982de
--- /dev/null
+++ b/test/scripts/jenkins_plugin_functional.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+source test/scripts/jenkins_test_setup_oss.sh
+
+cd test/plugin_functional/plugins/kbn_sample_panel_action;
+if [[ ! -d "target" ]]; then
+  yarn build;
+fi
+cd -;
+
+pwd
+
+yarn run grunt run:pluginFunctionalTestsRelease --from=source;
+yarn run grunt run:exampleFunctionalTestsRelease --from=source;
+yarn run grunt run:interpreterFunctionalTestsRelease;
diff --git a/test/scripts/jenkins_security_solution_cypress.sh b/test/scripts/jenkins_security_solution_cypress.sh
old mode 100644
new mode 100755
index 204911a3eedaa..a5a1a2103801f
--- a/test/scripts/jenkins_security_solution_cypress.sh
+++ b/test/scripts/jenkins_security_solution_cypress.sh
@@ -1,12 +1,6 @@
 #!/usr/bin/env bash
 
-source test/scripts/jenkins_test_setup.sh
-
-installDir="$PARENT_DIR/install/kibana"
-destDir="${installDir}-${CI_WORKER_NUMBER}"
-cp -R "$installDir" "$destDir"
-
-export KIBANA_INSTALL_DIR="$destDir"
+source test/scripts/jenkins_test_setup_xpack.sh
 
 echo " -> Running security solution cypress tests"
 cd "$XPACK_DIR"
diff --git a/test/scripts/jenkins_setup_parallel_workspace.sh b/test/scripts/jenkins_setup_parallel_workspace.sh
new file mode 100755
index 0000000000000..5274d05572e71
--- /dev/null
+++ b/test/scripts/jenkins_setup_parallel_workspace.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+set -e
+
+CURRENT_DIR=$(pwd)
+
+# Copy everything except node_modules into the current workspace
+rsync -a ${WORKSPACE}/kibana/* . --exclude node_modules
+rsync -a ${WORKSPACE}/kibana/.??* .
+
+# Symlink all non-root, non-fixture node_modules into our new workspace
+cd ${WORKSPACE}/kibana
+find . -type d -name node_modules -not -path '*__fixtures__*' -not -path './node_modules*' -prune -print0 | xargs -0I % ln -s "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
+find . -type d -wholename '*__fixtures__*node_modules' -not -path './node_modules*' -prune -print0 | xargs -0I % cp -R "${WORKSPACE}/kibana/%" "${CURRENT_DIR}/%"
+cd "${CURRENT_DIR}"
+
+# Symlink all of the individual root-level node_modules into the node_modules/ directory
+mkdir -p node_modules
+ln -s ${WORKSPACE}/kibana/node_modules/* node_modules/
+ln -s ${WORKSPACE}/kibana/node_modules/.??* node_modules/
+
+# Copy a few node_modules instead of symlinking them. They don't work correctly if symlinked
+unlink node_modules/@kbn
+unlink node_modules/css-loader
+unlink node_modules/style-loader
+
+# packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts will fail if this is a symlink
+unlink node_modules/val-loader
+
+cp -R ${WORKSPACE}/kibana/node_modules/@kbn node_modules/
+cp -R ${WORKSPACE}/kibana/node_modules/css-loader node_modules/
+cp -R ${WORKSPACE}/kibana/node_modules/style-loader node_modules/
+cp -R ${WORKSPACE}/kibana/node_modules/val-loader node_modules/
diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh
old mode 100644
new mode 100755
index 49ee8a6b526ca..05b88aa2dd0a2
--- a/test/scripts/jenkins_test_setup.sh
+++ b/test/scripts/jenkins_test_setup.sh
@@ -14,3 +14,9 @@ trap 'post_work' EXIT
 export TEST_BROWSER_HEADLESS=1
 
 source src/dev/ci_setup/setup_env.sh
+
+# For parallel workspaces, we should copy the .es directory from the root, because it should already have downloaded snapshots in it
+# This isn't part of jenkins_setup_parallel_workspace.sh just because not all tasks require ES
+if [[ ! -d .es && -d "$WORKSPACE/kibana/.es" ]]; then
+  cp -R $WORKSPACE/kibana/.es ./
+fi
diff --git a/test/scripts/jenkins_test_setup_oss.sh b/test/scripts/jenkins_test_setup_oss.sh
old mode 100644
new mode 100755
index 7bbb867526384..b7eac33f35176
--- a/test/scripts/jenkins_test_setup_oss.sh
+++ b/test/scripts/jenkins_test_setup_oss.sh
@@ -2,10 +2,17 @@
 
 source test/scripts/jenkins_test_setup.sh
 
-if [[ -z "$CODE_COVERAGE" ]] ; then
-  installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
-  destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER}
-  cp -R "$installDir" "$destDir"
+if [[ -z "$CODE_COVERAGE" ]]; then
+
+  destDir="build/kibana-build-oss"
+  if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
+    destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
+  fi
+
+  if [[ ! -d $destDir ]]; then
+    mkdir -p $destDir
+    cp -pR "$WORKSPACE/kibana-build-oss/." $destDir/
+  fi
 
   export KIBANA_INSTALL_DIR="$destDir"
 fi
diff --git a/test/scripts/jenkins_test_setup_xpack.sh b/test/scripts/jenkins_test_setup_xpack.sh
old mode 100644
new mode 100755
index a72e9749ebbd5..74a3de77e3a76
--- a/test/scripts/jenkins_test_setup_xpack.sh
+++ b/test/scripts/jenkins_test_setup_xpack.sh
@@ -3,11 +3,18 @@
 source test/scripts/jenkins_test_setup.sh
 
 if [[ -z "$CODE_COVERAGE" ]]; then
-  installDir="$PARENT_DIR/install/kibana"
-  destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
-  cp -R "$installDir" "$destDir"
 
-  export KIBANA_INSTALL_DIR="$destDir"
+  destDir="build/kibana-build-xpack"
+  if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
+    destDir="${destDir}-${CI_PARALLEL_PROCESS_NUMBER}"
+  fi
+
+  if [[ ! -d $destDir ]]; then
+    mkdir -p $destDir
+    cp -pR "$WORKSPACE/kibana-build-xpack/." $destDir/
+  fi
+
+  export KIBANA_INSTALL_DIR="$(realpath $destDir)"
 
   cd "$XPACK_DIR"
 fi
diff --git a/test/scripts/jenkins_xpack_accessibility.sh b/test/scripts/jenkins_xpack_accessibility.sh
index a3c03dd780886..3afd4bfb76396 100755
--- a/test/scripts/jenkins_xpack_accessibility.sh
+++ b/test/scripts/jenkins_xpack_accessibility.sh
@@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh
 checks-reporter-with-killswitch "X-Pack accessibility tests" \
   node scripts/functional_tests \
     --debug --bail \
-    --kibana-install-dir "$installDir" \
+    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
     --config test/accessibility/config.ts;
diff --git a/test/scripts/jenkins_xpack_build_kibana.sh b/test/scripts/jenkins_xpack_build_kibana.sh
index def6f1f4346ff..912130d973eb4 100755
--- a/test/scripts/jenkins_xpack_build_kibana.sh
+++ b/test/scripts/jenkins_xpack_build_kibana.sh
@@ -3,15 +3,9 @@
 cd "$KIBANA_DIR"
 source src/dev/ci_setup/setup_env.sh
 
-echo " -> building kibana platform plugins"
-node scripts/build_kibana_platform_plugins \
-  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
-  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
-  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
-  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
-  --verbose;
+if [[ ! "$TASK_QUEUE_PROCESS_ID" ]]; then
+  ./test/scripts/jenkins_xpack_build_plugins.sh
+fi
 
 # doesn't persist, also set in kibanaPipeline.groovy
 export KBN_NP_PLUGINS_BUILT=true
@@ -34,6 +28,9 @@ echo " -> building and extracting default Kibana distributable for use in functi
 cd "$KIBANA_DIR"
 node scripts/build --debug --no-oss
 linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
-installDir="$PARENT_DIR/install/kibana"
+installDir="$KIBANA_DIR/install/kibana"
 mkdir -p "$installDir"
 tar -xzf "$linuxBuild" -C "$installDir" --strip=1
+
+mkdir -p "$WORKSPACE/kibana-build-xpack"
+cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/
diff --git a/test/scripts/jenkins_xpack_build_plugins.sh b/test/scripts/jenkins_xpack_build_plugins.sh
new file mode 100755
index 0000000000000..3fd3d02de1304
--- /dev/null
+++ b/test/scripts/jenkins_xpack_build_plugins.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+echo " -> building kibana platform plugins"
+node scripts/build_kibana_platform_plugins \
+  --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \
+  --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \
+  --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \
+  --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \
+  --workers 12 \
+  --verbose
diff --git a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh b/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh
index d3ca8839a7dab..e3b0fe778bdfb 100755
--- a/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh
+++ b/test/scripts/jenkins_xpack_saved_objects_field_metrics.sh
@@ -5,5 +5,5 @@ source test/scripts/jenkins_test_setup_xpack.sh
 checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \
   node scripts/functional_tests \
     --debug --bail \
-    --kibana-install-dir "$installDir" \
+    --kibana-install-dir "$KIBANA_INSTALL_DIR" \
     --config test/saved_objects_field_count/config.ts;
diff --git a/test/scripts/jenkins_xpack_visual_regression.sh b/test/scripts/jenkins_xpack_visual_regression.sh
index 06a53277b8688..55d4a524820c5 100755
--- a/test/scripts/jenkins_xpack_visual_regression.sh
+++ b/test/scripts/jenkins_xpack_visual_regression.sh
@@ -7,10 +7,13 @@ echo " -> building and extracting default Kibana distributable"
 cd "$KIBANA_DIR"
 node scripts/build --debug --no-oss
 linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')"
-installDir="$PARENT_DIR/install/kibana"
+installDir="$KIBANA_DIR/install/kibana"
 mkdir -p "$installDir"
 tar -xzf "$linuxBuild" -C "$installDir" --strip=1
 
+mkdir -p "$WORKSPACE/kibana-build-xpack"
+cp -pR install/kibana/. $WORKSPACE/kibana-build-xpack/
+
 # cd "$KIBANA_DIR"
 # source "test/scripts/jenkins_xpack_page_load_metrics.sh"
 
diff --git a/test/scripts/lint/eslint.sh b/test/scripts/lint/eslint.sh
new file mode 100755
index 0000000000000..c3211300b96c5
--- /dev/null
+++ b/test/scripts/lint/eslint.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:eslint
diff --git a/test/scripts/lint/sasslint.sh b/test/scripts/lint/sasslint.sh
new file mode 100755
index 0000000000000..b9c683bcb049e
--- /dev/null
+++ b/test/scripts/lint/sasslint.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:sasslint
diff --git a/test/scripts/test/api_integration.sh b/test/scripts/test/api_integration.sh
new file mode 100755
index 0000000000000..152c97a3ca7df
--- /dev/null
+++ b/test/scripts/test/api_integration.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:apiIntegrationTests
diff --git a/test/scripts/test/jest_integration.sh b/test/scripts/test/jest_integration.sh
new file mode 100755
index 0000000000000..73dbbddfb38f6
--- /dev/null
+++ b/test/scripts/test/jest_integration.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_jest_integration
diff --git a/test/scripts/test/jest_unit.sh b/test/scripts/test/jest_unit.sh
new file mode 100755
index 0000000000000..e25452698cebc
--- /dev/null
+++ b/test/scripts/test/jest_unit.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_jest
diff --git a/test/scripts/test/karma_ci.sh b/test/scripts/test/karma_ci.sh
new file mode 100755
index 0000000000000..e9985300ba19d
--- /dev/null
+++ b/test/scripts/test/karma_ci.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_karma_ci
diff --git a/test/scripts/test/mocha.sh b/test/scripts/test/mocha.sh
new file mode 100755
index 0000000000000..43c00f0a09dcf
--- /dev/null
+++ b/test/scripts/test/mocha.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:mocha
diff --git a/test/scripts/test/safer_lodash_set.sh b/test/scripts/test/safer_lodash_set.sh
new file mode 100755
index 0000000000000..4d7f9c28210d1
--- /dev/null
+++ b/test/scripts/test/safer_lodash_set.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+yarn run grunt run:test_package_safer_lodash_set
diff --git a/test/scripts/test/xpack_jest_unit.sh b/test/scripts/test/xpack_jest_unit.sh
new file mode 100755
index 0000000000000..93d70ec355391
--- /dev/null
+++ b/test/scripts/test/xpack_jest_unit.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+cd x-pack
+checks-reporter-with-killswitch "X-Pack Jest" node --max-old-space-size=6144 scripts/jest --ci --verbose --maxWorkers=10
diff --git a/test/scripts/test/xpack_karma.sh b/test/scripts/test/xpack_karma.sh
new file mode 100755
index 0000000000000..9078f01f1b870
--- /dev/null
+++ b/test/scripts/test/xpack_karma.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+cd x-pack
+checks-reporter-with-killswitch "X-Pack Karma Tests" yarn test:karma
diff --git a/test/scripts/test/xpack_list_cyclic_dependency.sh b/test/scripts/test/xpack_list_cyclic_dependency.sh
new file mode 100755
index 0000000000000..493fe9f58d322
--- /dev/null
+++ b/test/scripts/test/xpack_list_cyclic_dependency.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+cd x-pack
+checks-reporter-with-killswitch "X-Pack List cyclic dependency test" node plugins/lists/scripts/check_circular_deps
diff --git a/test/scripts/test/xpack_siem_cyclic_dependency.sh b/test/scripts/test/xpack_siem_cyclic_dependency.sh
new file mode 100755
index 0000000000000..b21301f25ad08
--- /dev/null
+++ b/test/scripts/test/xpack_siem_cyclic_dependency.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+source src/dev/ci_setup/setup_env.sh
+
+cd x-pack
+checks-reporter-with-killswitch "X-Pack SIEM cyclic dependency test" node plugins/security_solution/scripts/check_circular_deps
diff --git a/vars/catchErrors.groovy b/vars/catchErrors.groovy
index 460a90b8ec0c0..2a1b55d832606 100644
--- a/vars/catchErrors.groovy
+++ b/vars/catchErrors.groovy
@@ -1,8 +1,15 @@
 // Basically, this is a shortcut for catchError(catchInterruptions: false) {}
 // By default, catchError will swallow aborts/timeouts, which we almost never want
+// Also, by wrapping it in an additional try/catch, we cut down on spam in Pipeline Steps
 def call(Map params = [:], Closure closure) {
-  params.catchInterruptions = false
-  return catchError(params, closure)
+  try {
+    closure()
+  } catch (ex) {
+    params.catchInterruptions = false
+    catchError(params) {
+      throw ex
+    }
+  }
 }
 
 return this
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
index 0f1e11a1fb70a..736b9e6ad84aa 100644
--- a/vars/kibanaPipeline.groovy
+++ b/vars/kibanaPipeline.groovy
@@ -2,18 +2,61 @@ def withPostBuildReporting(Closure closure) {
   try {
     closure()
   } finally {
-    catchErrors {
-      runErrorReporter()
+    def parallelWorkspaces = []
+    try {
+      parallelWorkspaces = getParallelWorkspaces()
+    } catch(ex) {
+      print ex
     }
 
     catchErrors {
-      runbld.junit()
+      runErrorReporter([pwd()] + parallelWorkspaces)
     }
 
     catchErrors {
       publishJunit()
     }
+
+    catchErrors {
+      def parallelWorkspace = "${env.WORKSPACE}/parallel"
+      if (fileExists(parallelWorkspace)) {
+        dir(parallelWorkspace) {
+          def workspaceTasks = [:]
+
+          parallelWorkspaces.each { workspaceDir ->
+            workspaceTasks[workspaceDir] = {
+              dir(workspaceDir) {
+                catchErrors {
+                  runbld.junit()
+                }
+              }
+            }
+          }
+
+          if (workspaceTasks) {
+            parallel(workspaceTasks)
+          }
+        }
+      }
+    }
+  }
+}
+
+def getParallelWorkspaces() {
+  def workspaces = []
+  def parallelWorkspace = "${env.WORKSPACE}/parallel"
+  if (fileExists(parallelWorkspace)) {
+    dir(parallelWorkspace) {
+      // findFiles only returns files if you use glob, so look for a file that should be in every valid workspace
+      workspaces = findFiles(glob: '*/kibana/package.json')
+        .collect {
+          // get the paths to the kibana directories for the parallel workspaces
+          return parallelWorkspace + '/' + it.path.tokenize('/').dropRight(1).join('/')
+        }
+    }
   }
+
+  return workspaces
 }
 
 def notifyOnError(Closure closure) {
@@ -35,36 +78,43 @@ def notifyOnError(Closure closure) {
   }
 }
 
-def functionalTestProcess(String name, Closure closure) {
-  return { processNumber ->
-    def kibanaPort = "61${processNumber}1"
-    def esPort = "61${processNumber}2"
-    def esTransportPort = "61${processNumber}3"
-    def ingestManagementPackageRegistryPort = "61${processNumber}4"
+def withFunctionalTestEnv(List additionalEnvs = [], Closure closure) {
+  // This can go away once everything that uses the deprecated workers.parallelProcesses() is moved to task queue
+  def parallelId = env.TASK_QUEUE_PROCESS_ID ?: env.CI_PARALLEL_PROCESS_NUMBER
 
-    withEnv([
-      "CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
-      "TEST_KIBANA_HOST=localhost",
-      "TEST_KIBANA_PORT=${kibanaPort}",
-      "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
-      "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
-      "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
-      "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
-      "IS_PIPELINE_JOB=1",
-      "JOB=${name}",
-      "KBN_NP_PLUGINS_BUILT=true",
-    ]) {
-      notifyOnError {
-        closure()
-      }
-    }
+  def kibanaPort = "61${parallelId}1"
+  def esPort = "61${parallelId}2"
+  def esTransportPort = "61${parallelId}3"
+  def ingestManagementPackageRegistryPort = "61${parallelId}4"
+
+  withEnv([
+    "CI_GROUP=${parallelId}",
+    "REMOVE_KIBANA_INSTALL_DIR=1",
+    "CI_PARALLEL_PROCESS_NUMBER=${parallelId}",
+    "TEST_KIBANA_HOST=localhost",
+    "TEST_KIBANA_PORT=${kibanaPort}",
+    "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
+    "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
+    "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
+    "KBN_NP_PLUGINS_BUILT=true",
+    "INGEST_MANAGEMENT_PACKAGE_REGISTRY_PORT=${ingestManagementPackageRegistryPort}",
+  ] + additionalEnvs) {
+    closure()
+  }
+}
+
+def functionalTestProcess(String name, Closure closure) {
+  return {
+    withFunctionalTestEnv(["JOB=${name}"], closure)
   }
 }
 
 def functionalTestProcess(String name, String script) {
   return functionalTestProcess(name) {
-    retryable(name) {
-      runbld(script, "Execute ${name}")
+    notifyOnError {
+      retryable(name) {
+        runbld(script, "Execute ${name}")
+      }
     }
   }
 }
@@ -109,11 +159,17 @@ def withGcsArtifactUpload(workerName, closure) {
   def uploadPrefix = "kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}"
   def ARTIFACT_PATTERNS = [
     'target/kibana-*',
+    'target/test-metrics/*',
     'target/kibana-security-solution/**/*.png',
     'target/junit/**/*',
-    'test/**/screenshots/**/*.png',
+    'target/test-suites-ci-plan.json',
+    'test/**/screenshots/session/*.png',
+    'test/**/screenshots/failure/*.png',
+    'test/**/screenshots/diff/*.png',
     'test/functional/failure_debug/html/*.html',
-    'x-pack/test/**/screenshots/**/*.png',
+    'x-pack/test/**/screenshots/session/*.png',
+    'x-pack/test/**/screenshots/failure/*.png',
+    'x-pack/test/**/screenshots/diff/*.png',
     'x-pack/test/functional/failure_debug/html/*.html',
     'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
   ]
@@ -128,6 +184,12 @@ def withGcsArtifactUpload(workerName, closure) {
         ARTIFACT_PATTERNS.each { pattern ->
           uploadGcsArtifact(uploadPrefix, pattern)
         }
+
+        dir(env.WORKSPACE) {
+          ARTIFACT_PATTERNS.each { pattern ->
+            uploadGcsArtifact(uploadPrefix, "parallel/*/kibana/${pattern}")
+          }
+        }
       }
     }
   })
@@ -135,6 +197,10 @@ def withGcsArtifactUpload(workerName, closure) {
 
 def publishJunit() {
   junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
+
+  dir(env.WORKSPACE) {
+    junit(testResults: 'parallel/*/kibana/target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
+  }
 }
 
 def sendMail() {
@@ -200,26 +266,36 @@ def doSetup() {
   }
 }
 
-def buildOss() {
+def buildOss(maxWorkers = '') {
   notifyOnError {
-    runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
+    withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
+      runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
+    }
   }
 }
 
-def buildXpack() {
+def buildXpack(maxWorkers = '') {
   notifyOnError {
-    runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
+    withEnv(["KBN_OPTIMIZER_MAX_WORKERS=${maxWorkers}"]) {
+      runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
+    }
   }
 }
 
 def runErrorReporter() {
+  return runErrorReporter([pwd()])
+}
+
+def runErrorReporter(workspaces) {
   def status = buildUtils.getBuildStatus()
   def dryRun = status != "ABORTED" ? "" : "--no-github-update"
 
+  def globs = workspaces.collect { "'${it}/target/junit/**/*.xml'" }.join(" ")
+
   bash(
     """
       source src/dev/ci_setup/setup_env.sh
-      node scripts/report_failed_tests ${dryRun} target/junit/**/*.xml
+      node scripts/report_failed_tests ${dryRun} ${globs}
     """,
     "Report failed tests, if necessary"
   )
@@ -258,6 +334,102 @@ def call(Map params = [:], Closure closure) {
   }
 }
 
+// Creates a task queue using withTaskQueue, and copies the bootstrapped kibana repo into each process's workspace
+// Note that node_modules are mostly symlinked to save time/space. See test/scripts/jenkins_setup_parallel_workspace.sh
+def withCiTaskQueue(Map options = [:], Closure closure) {
+  def setupClosure = {
+    // This can't use runbld, because it expects the source to be there, which isn't yet
+    bash("${env.WORKSPACE}/kibana/test/scripts/jenkins_setup_parallel_workspace.sh", "Set up duplicate workspace for parallel process")
+  }
+
+  def config = [parallel: 24, setup: setupClosure] + options
+
+  withTaskQueue(config) {
+    closure.call()
+  }
+}
+
+def scriptTask(description, script) {
+  return {
+    withFunctionalTestEnv {
+      notifyOnError {
+        runbld(script, description)
+      }
+    }
+  }
+}
+
+def scriptTaskDocker(description, script) {
+  return {
+    withDocker(scriptTask(description, script))
+  }
+}
+
+def buildDocker() {
+  sh(
+    script: """
+      cp /usr/local/bin/runbld .ci/
+      cp /usr/local/bin/bash_standard_lib.sh .ci/
+      cd .ci
+      docker build -t kibana-ci -f ./Dockerfile .
+    """,
+    label: 'Build CI Docker image'
+  )
+}
+
+def withDocker(Closure closure) {
+  docker
+    .image('kibana-ci')
+    .inside(
+      "-v /etc/runbld:/etc/runbld:ro -v '${env.JENKINS_HOME}:${env.JENKINS_HOME}' -v '/dev/shm/workspace:/dev/shm/workspace' --shm-size 2GB --cpus 4",
+      closure
+    )
+}
+
+def buildOssPlugins() {
+  runbld('./test/scripts/jenkins_build_plugins.sh', 'Build OSS Plugins')
+}
+
+def buildXpackPlugins() {
+  runbld('./test/scripts/jenkins_xpack_build_plugins.sh', 'Build X-Pack Plugins')
+}
+
+def withTasks(Map params = [worker: [:]], Closure closure) {
+  catchErrors {
+    def config = [name: 'ci-worker', size: 'xxl', ramDisk: true] + (params.worker ?: [:])
+
+    workers.ci(config) {
+      withCiTaskQueue(parallel: 24) {
+        parallel([
+          docker: {
+            retry(2) {
+              buildDocker()
+            }
+          },
+
+          // There are integration tests etc that require the plugins to be built first, so let's go ahead and build them before setting up the parallel workspaces
+          ossPlugins: { buildOssPlugins() },
+          xpackPlugins: { buildXpackPlugins() },
+        ])
+
+        catchErrors {
+          closure()
+        }
+      }
+    }
+  }
+}
+
+def allCiTasks() {
+  withTasks {
+    tasks.check()
+    tasks.lint()
+    tasks.test()
+    tasks.functionalOss()
+    tasks.functionalXpack()
+  }
+}
+
 def pipelineLibraryTests() {
   whenChanged(['vars/', '.ci/pipeline-library/']) {
     workers.base(size: 'flyweight', bootstrapped: false, ramDisk: false) {
@@ -268,5 +440,4 @@ def pipelineLibraryTests() {
   }
 }
 
-
 return this
diff --git a/vars/task.groovy b/vars/task.groovy
new file mode 100644
index 0000000000000..0c07b519b6fef
--- /dev/null
+++ b/vars/task.groovy
@@ -0,0 +1,5 @@
+def call(Closure closure) {
+  withTaskQueue.addTask(closure)
+}
+
+return this
diff --git a/vars/tasks.groovy b/vars/tasks.groovy
new file mode 100644
index 0000000000000..52641ce31f0be
--- /dev/null
+++ b/vars/tasks.groovy
@@ -0,0 +1,119 @@
+def call(List<Closure> closures) {
+  withTaskQueue.addTasks(closures)
+}
+
+def check() {
+  tasks([
+    kibanaPipeline.scriptTask('Check Telemetry Schema', 'test/scripts/checks/telemetry.sh'),
+    kibanaPipeline.scriptTask('Check TypeScript Projects', 'test/scripts/checks/ts_projects.sh'),
+    kibanaPipeline.scriptTask('Check Doc API Changes', 'test/scripts/checks/doc_api_changes.sh'),
+    kibanaPipeline.scriptTask('Check Types', 'test/scripts/checks/type_check.sh'),
+    kibanaPipeline.scriptTask('Check i18n', 'test/scripts/checks/i18n.sh'),
+    kibanaPipeline.scriptTask('Check File Casing', 'test/scripts/checks/file_casing.sh'),
+    kibanaPipeline.scriptTask('Check Lockfile Symlinks', 'test/scripts/checks/lock_file_symlinks.sh'),
+    kibanaPipeline.scriptTask('Check Licenses', 'test/scripts/checks/licenses.sh'),
+    kibanaPipeline.scriptTask('Verify Dependency Versions', 'test/scripts/checks/verify_dependency_versions.sh'),
+    kibanaPipeline.scriptTask('Verify NOTICE', 'test/scripts/checks/verify_notice.sh'),
+    kibanaPipeline.scriptTask('Test Projects', 'test/scripts/checks/test_projects.sh'),
+    kibanaPipeline.scriptTask('Test Hardening', 'test/scripts/checks/test_hardening.sh'),
+  ])
+}
+
+def lint() {
+  tasks([
+    kibanaPipeline.scriptTask('Lint: eslint', 'test/scripts/lint/eslint.sh'),
+    kibanaPipeline.scriptTask('Lint: sasslint', 'test/scripts/lint/sasslint.sh'),
+  ])
+}
+
+def test() {
+  tasks([
+    // These 2 tasks require isolation because of hard-coded, conflicting ports and such, so let's use Docker here
+    kibanaPipeline.scriptTaskDocker('Jest Integration Tests', 'test/scripts/test/jest_integration.sh'),
+    kibanaPipeline.scriptTaskDocker('Mocha Tests', 'test/scripts/test/mocha.sh'),
+
+    kibanaPipeline.scriptTask('Jest Unit Tests', 'test/scripts/test/jest_unit.sh'),
+    kibanaPipeline.scriptTask('API Integration Tests', 'test/scripts/test/api_integration.sh'),
+    kibanaPipeline.scriptTask('@elastic/safer-lodash-set Tests', 'test/scripts/test/safer_lodash_set.sh'),
+    kibanaPipeline.scriptTask('X-Pack SIEM cyclic dependency', 'test/scripts/test/xpack_siem_cyclic_dependency.sh'),
+    kibanaPipeline.scriptTask('X-Pack List cyclic dependency', 'test/scripts/test/xpack_list_cyclic_dependency.sh'),
+    kibanaPipeline.scriptTask('X-Pack Jest Unit Tests', 'test/scripts/test/xpack_jest_unit.sh'),
+  ])
+}
+
+def functionalOss(Map params = [:]) {
+  def config = params ?: [ciGroups: true, firefox: true, accessibility: true, pluginFunctional: true, visualRegression: false]
+
+  task {
+    kibanaPipeline.buildOss(6)
+
+    if (config.ciGroups) {
+      def ciGroups = 1..12
+      tasks(ciGroups.collect { kibanaPipeline.ossCiGroupProcess(it) })
+    }
+
+    if (config.firefox) {
+      task(kibanaPipeline.functionalTestProcess('oss-firefox', './test/scripts/jenkins_firefox_smoke.sh'))
+    }
+
+    if (config.accessibility) {
+      task(kibanaPipeline.functionalTestProcess('oss-accessibility', './test/scripts/jenkins_accessibility.sh'))
+    }
+
+    if (config.pluginFunctional) {
+      task(kibanaPipeline.functionalTestProcess('oss-pluginFunctional', './test/scripts/jenkins_plugin_functional.sh'))
+    }
+
+    if (config.visualRegression) {
+      task(kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh'))
+    }
+  }
+}
+
+def functionalXpack(Map params = [:]) {
+  def config = params ?: [
+    ciGroups: true,
+    firefox: true,
+    accessibility: true,
+    pluginFunctional: true,
+    savedObjectsFieldMetrics: true,
+    pageLoadMetrics: false,
+    visualRegression: false,
+  ]
+
+  task {
+    kibanaPipeline.buildXpack(10)
+
+    if (config.ciGroups) {
+      def ciGroups = 1..10
+      tasks(ciGroups.collect { kibanaPipeline.xpackCiGroupProcess(it) })
+    }
+
+    if (config.firefox) {
+      task(kibanaPipeline.functionalTestProcess('xpack-firefox', './test/scripts/jenkins_xpack_firefox_smoke.sh'))
+    }
+
+    if (config.accessibility) {
+      task(kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'))
+    }
+
+    if (config.visualRegression) {
+      task(kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'))
+    }
+
+    if (config.savedObjectsFieldMetrics) {
+      task(kibanaPipeline.functionalTestProcess('xpack-savedObjectsFieldMetrics', './test/scripts/jenkins_xpack_saved_objects_field_metrics.sh'))
+    }
+
+    whenChanged([
+      'x-pack/plugins/security_solution/',
+      'x-pack/test/security_solution_cypress/',
+      'x-pack/plugins/triggers_actions_ui/public/application/sections/action_connector_form/',
+      'x-pack/plugins/triggers_actions_ui/public/application/context/actions_connectors_context.tsx',
+    ]) {
+      task(kibanaPipeline.functionalTestProcess('xpack-securitySolutionCypress', './test/scripts/jenkins_security_solution_cypress.sh'))
+    }
+  }
+}
+
+return this
diff --git a/vars/withTaskQueue.groovy b/vars/withTaskQueue.groovy
new file mode 100644
index 0000000000000..8132d6264744f
--- /dev/null
+++ b/vars/withTaskQueue.groovy
@@ -0,0 +1,154 @@
+import groovy.transform.Field
+
+public static @Field TASK_QUEUES = [:]
+public static @Field TASK_QUEUES_COUNTER = 0
+
+/**
+  withTaskQueue creates a queue of "tasks" (just plain closures to execute), and executes them with your desired level of concurrency.
+  This way, you can define, for example, 40 things that need to execute, then only allow 10 of them to execute at once.
+
+  Each "process" will execute in a separate, unique, empty directory.
+  If you want each process to have a bootstrapped kibana repo, check out kibanaPipeline.withCiTaskQueue
+
+  Using the queue currently requires an agent/worker.
+
+  Usage:
+
+  withTaskQueue(parallel: 10) {
+    task { print "This is a task" }
+
+    // This is the same as calling task() multiple times
+    tasks([ { print "Another task" }, { print "And another task" } ])
+
+    // Tasks can queue up subsequent tasks
+    task {
+      buildThing()
+      task { print "I depend on buildThing()" }
+    }
+  }
+
+  You can also define a setup task that each process should execute one time before executing tasks:
+  withTaskQueue(parallel: 10, setup: { sh "my-setup-script.sh" }) {
+    ...
+  }
+
+*/
+def call(Map options = [:], Closure closure) {
+  def config = [ parallel: 10 ] + options
+  def counter = ++TASK_QUEUES_COUNTER
+
+  // We're basically abusing withEnv() to create a "scope" for all steps inside of a withTaskQueue block
+  // This way, we could have multiple task queue instances in the same pipeline
+  withEnv(["TASK_QUEUE_ID=${counter}"]) {
+    withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID] = [
+      tasks: [],
+      tmpFile: sh(script: 'mktemp', returnStdout: true).trim()
+    ]
+
+    closure.call()
+
+    def processesExecuting = 0
+    def processes = [:]
+    def iterationId = 0
+
+    for(def i = 1; i <= config.parallel; i++) {
+      def j = i
+      processes["task-queue-process-${j}"] = {
+        catchErrors {
+          withEnv([
+            "TASK_QUEUE_PROCESS_ID=${j}",
+            "TASK_QUEUE_ITERATION_ID=${++iterationId}"
+          ]) {
+            dir("${WORKSPACE}/parallel/${j}/kibana") {
+              if (config.setup) {
+                config.setup.call(j)
+              }
+
+              def isDone = false
+              while(!isDone) { // TODO some kind of timeout?
+                catchErrors {
+                  if (!getTasks().isEmpty()) {
+                    processesExecuting++
+                    catchErrors {
+                      def task
+                      try {
+                        task = getTasks().pop()
+                      } catch (java.util.NoSuchElementException ex) {
+                        return
+                      }
+
+                      task.call()
+                    }
+                    processesExecuting--
+                    // If a task finishes, and no new tasks were queued up, and nothing else is executing
+                    // Then all of the processes should wake up and exit
+                    if (processesExecuting < 1 && getTasks().isEmpty()) {
+                      taskNotify()
+                    }
+                    return
+                  }
+
+                  if (processesExecuting > 0) {
+                    taskSleep()
+                    return
+                  }
+
+                  // Queue is empty, no processes are executing
+                  isDone = true
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    parallel(processes)
+  }
+}
+
+// If we sleep in a loop using Groovy code, Pipeline Steps is flooded with Sleep steps
+// So, instead, we just watch a file and `touch` it whenever something happens that could modify the queue
+// There's a 20 minute timeout just in case something goes wrong,
+//    in which case this method will get called again if the process is actually supposed to be waiting.
+def taskSleep() {
+  sh(script: """#!/bin/bash
+    TIMESTAMP=\$(date '+%s' -d "0 seconds ago")
+    for (( i=1; i<=240; i++ ))
+    do
+      if [ "\$(stat -c %Y '${getTmpFile()}')" -ge "\$TIMESTAMP" ]
+      then
+        break
+      else
+        sleep 5
+        if [[ \$i == 240 ]]; then
+          echo "Waited for new tasks for 20 minutes, exiting in case something went wrong"
+        fi
+      fi
+    done
+  """, label: "Waiting for new tasks...")
+}
+
+// Used to let the task queue processes know that either a new task has been queued up, or work is complete
+def taskNotify() {
+  sh "touch '${getTmpFile()}'"
+}
+
+def getTasks() {
+  return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tasks
+}
+
+def getTmpFile() {
+  return withTaskQueue.TASK_QUEUES[env.TASK_QUEUE_ID].tmpFile
+}
+
+def addTask(Closure closure) {
+  getTasks() << closure
+  taskNotify()
+}
+
+def addTasks(List<Closure> closures) {
+  closures.reverse().each {
+    getTasks() << it
+  }
+  taskNotify()
+}
diff --git a/vars/workers.groovy b/vars/workers.groovy
index f5a28c97c6812..e582e996a78b5 100644
--- a/vars/workers.groovy
+++ b/vars/workers.groovy
@@ -13,6 +13,8 @@ def label(size) {
       return 'docker && tests-l'
     case 'xl':
       return 'docker && tests-xl'
+    case 'xl-highmem':
+      return 'docker && tests-xl-highmem'
     case 'xxl':
       return 'docker && tests-xxl'
   }
@@ -55,6 +57,11 @@ def base(Map params, Closure closure) {
       }
     }
 
+    sh(
+      script: "mkdir -p ${env.WORKSPACE}/tmp",
+      label: "Create custom temp directory"
+    )
+
     def checkoutInfo = [:]
 
     if (config.scm) {
@@ -89,6 +96,7 @@ def base(Map params, Closure closure) {
       "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
       "TEST_BROWSER_HEADLESS=1",
       "GIT_BRANCH=${checkoutInfo.branch}",
+      "TMPDIR=${env.WORKSPACE}/tmp", // For Chrome and anything else that respects it
     ]) {
       withCredentials([
         string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
@@ -169,7 +177,9 @@ def parallelProcesses(Map params) {
           sleep(delay)
         }
 
-        processClosure(processNumber)
+        withEnv(["CI_PARALLEL_PROCESS_NUMBER=${processNumber}"]) {
+          processClosure()
+        }
       }
     }
 
diff --git a/x-pack/plugins/canvas/.storybook/storyshots.test.js b/x-pack/plugins/canvas/.storybook/storyshots.test.js
index 7195b97712464..d6ae061850661 100644
--- a/x-pack/plugins/canvas/.storybook/storyshots.test.js
+++ b/x-pack/plugins/canvas/.storybook/storyshots.test.js
@@ -4,6 +4,8 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+import fs from 'fs';
+import { ReactChildren } from 'react';
 import path from 'path';
 import moment from 'moment';
 import 'moment-timezone';
@@ -67,7 +69,7 @@ jest.mock('@elastic/eui/lib/services/accessibility/html_id_generator', () => {
 // https://github.com/elastic/eui/issues/3712
 jest.mock('@elastic/eui/lib/components/overlay_mask/overlay_mask', () => {
   return {
-    EuiOverlayMask: ({children}) => children,
+    EuiOverlayMask: ({ children }) => children,
   };
 });
 
@@ -88,6 +90,12 @@ import { EuiObserver } from '@elastic/eui/test-env/components/observer/observer'
 jest.mock('@elastic/eui/test-env/components/observer/observer');
 EuiObserver.mockImplementation(() => 'EuiObserver');
 
+// Some of the code requires that this directory exists, but the tests don't actually require any css to be present
+const cssDir = path.resolve(__dirname, '../../../../built_assets/css');
+if (!fs.existsSync(cssDir)) {
+  fs.mkdirSync(cssDir, { recursive: true });
+}
+
 addSerializer(styleSheetSerializer);
 
 // Initialize Storyshots and build the Jest Snapshots

From 7f5b9a403141c55b0dba5c8724bb50eac2641b85 Mon Sep 17 00:00:00 2001
From: spalger <spalger@users.noreply.github.com>
Date: Fri, 14 Aug 2020 17:29:07 -0700
Subject: [PATCH 4/5] skip flaky suite (#75044)

(cherry picked from commit d4f52471bfa916602dfed92d12e2aa12b3435d1c)
---
 .../functional/apps/reporting_management/report_listing.ts     | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/x-pack/test/functional/apps/reporting_management/report_listing.ts b/x-pack/test/functional/apps/reporting_management/report_listing.ts
index d97af7f49059d..ae395fc5914e1 100644
--- a/x-pack/test/functional/apps/reporting_management/report_listing.ts
+++ b/x-pack/test/functional/apps/reporting_management/report_listing.ts
@@ -29,7 +29,8 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
   const findInstance = getService('find');
   const esArchiver = getService('esArchiver');
 
-  describe('Listing of Reports', function () {
+  // FLAKY: https://github.com/elastic/kibana/issues/75044
+  describe.skip('Listing of Reports', function () {
     before(async () => {
       await security.testUser.setRoles(['kibana_admin', 'reporting_user']);
       await esArchiver.load('empty_kibana');

From 7d6525ea9fd3722a407b50c6d194e9d175ebae11 Mon Sep 17 00:00:00 2001
From: Spencer <email@spalger.com>
Date: Fri, 14 Aug 2020 22:11:46 -0700
Subject: [PATCH 5/5] [jenkins] add pipeline for hourly security solution
 cypress tests (#75087) (#75110)

* [jenkins] add pipeline for hourly security solution cypress tests

* support customizing email for status emails

* apply review feedback

Co-authored-by: spalger <spalger@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>

Co-authored-by: spalger <spalger@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
---
 .ci/Jenkinsfile_security_cypress | 21 +++++++++++++++++++++
 vars/kibanaPipeline.groovy       | 10 ++++++----
 2 files changed, 27 insertions(+), 4 deletions(-)
 create mode 100644 .ci/Jenkinsfile_security_cypress

diff --git a/.ci/Jenkinsfile_security_cypress b/.ci/Jenkinsfile_security_cypress
new file mode 100644
index 0000000000000..bdfef18024b78
--- /dev/null
+++ b/.ci/Jenkinsfile_security_cypress
@@ -0,0 +1,21 @@
+#!/bin/groovy
+
+library 'kibana-pipeline-library'
+kibanaLibrary.load()
+
+kibanaPipeline(timeoutMinutes: 180) {
+  slackNotifications.onFailure(
+    disabled: !params.NOTIFY_ON_FAILURE,
+    channel: '#security-solution-slack-testing'
+  ) {
+    catchError {
+      workers.base(size: 's', ramDisk: false) {
+        kibanaPipeline.bash('test/scripts/jenkins_security_solution_cypress.sh', 'Execute Security Solution Cypress Tests')
+      }
+    }
+  }
+
+  if (params.NOTIFY_ON_FAILURE) {
+    kibanaPipeline.sendMail(to: 'gloria.delatorre@elastic.co')
+  }
+}
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
index 736b9e6ad84aa..221641f859d7b 100644
--- a/vars/kibanaPipeline.groovy
+++ b/vars/kibanaPipeline.groovy
@@ -203,7 +203,7 @@ def publishJunit() {
   }
 }
 
-def sendMail() {
+def sendMail(Map params = [:]) {
   // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success
   // The e-mail plugin for the infra e-mail depends upon this being set
   currentBuild.result = currentBuild.result ?: 'SUCCESS'
@@ -212,7 +212,7 @@ def sendMail() {
   if (buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
     node('flyweight') {
       sendInfraMail()
-      sendKibanaMail()
+      sendKibanaMail(params)
     }
   }
 }
@@ -228,12 +228,14 @@ def sendInfraMail() {
   }
 }
 
-def sendKibanaMail() {
+def sendKibanaMail(Map params = [:]) {
+  def config = [to: 'build-kibana@elastic.co'] + params
+
   catchErrors {
     def buildStatus = buildUtils.getBuildStatus()
     if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
       emailext(
-        to: 'build-kibana@elastic.co',
+        to: config.to,
         subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}",
         body: '${SCRIPT,template="groovy-html.template"}',
         mimeType: 'text/html',