/packages/elastic-datemath'],
+ testEnvironment: 'jsdom',
+};
diff --git a/packages/kbn-apm-config-loader/src/config.ts b/packages/kbn-apm-config-loader/src/config.ts
index 6e5a830d04b17..5e3d52cfd27d1 100644
--- a/packages/kbn-apm-config-loader/src/config.ts
+++ b/packages/kbn-apm-config-loader/src/config.ts
@@ -153,8 +153,8 @@ export class ApmConfiguration {
return {
globalLabels: {
- branch: process.env.ghprbSourceBranch || '',
- targetBranch: process.env.ghprbTargetBranch || '',
+ branch: process.env.GIT_BRANCH || '',
+ targetBranch: process.env.PR_TARGET_BRANCH || '',
ciBuildNumber: process.env.BUILD_NUMBER || '',
isPr: process.env.GITHUB_PR_NUMBER ? true : false,
prId: process.env.GITHUB_PR_NUMBER || '',
diff --git a/packages/kbn-es-archiver/src/actions/empty_kibana_index.ts b/packages/kbn-es-archiver/src/actions/empty_kibana_index.ts
index d61d544deadc4..5f4e37ee35edf 100644
--- a/packages/kbn-es-archiver/src/actions/empty_kibana_index.ts
+++ b/packages/kbn-es-archiver/src/actions/empty_kibana_index.ts
@@ -20,7 +20,7 @@
import { Client } from 'elasticsearch';
import { ToolingLog, KbnClient } from '@kbn/dev-utils';
-import { migrateKibanaIndex, deleteKibanaIndices, createStats } from '../lib';
+import { migrateKibanaIndex, createStats, cleanKibanaIndices } from '../lib';
export async function emptyKibanaIndexAction({
client,
@@ -32,8 +32,9 @@ export async function emptyKibanaIndexAction({
kbnClient: KbnClient;
}) {
const stats = createStats('emptyKibanaIndex', log);
+ const kibanaPluginIds = await kbnClient.plugins.getEnabledIds();
- await deleteKibanaIndices({ client, stats, log });
+ await cleanKibanaIndices({ client, stats, log, kibanaPluginIds });
await migrateKibanaIndex({ client, kbnClient });
return stats;
}
diff --git a/packages/kbn-es-archiver/src/lib/archives/__tests__/format.ts b/packages/kbn-es-archiver/src/lib/archives/format.test.ts
similarity index 80%
rename from packages/kbn-es-archiver/src/lib/archives/__tests__/format.ts
rename to packages/kbn-es-archiver/src/lib/archives/format.test.ts
index 91c38d0dd1438..5190ea0128173 100644
--- a/packages/kbn-es-archiver/src/lib/archives/__tests__/format.ts
+++ b/packages/kbn-es-archiver/src/lib/archives/format.test.ts
@@ -20,10 +20,9 @@
import Stream, { Readable, Writable } from 'stream';
import { createGunzip } from 'zlib';
-import expect from '@kbn/expect';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';
-import { createFormatArchiveStreams } from '../format';
+import { createFormatArchiveStreams } from './format';
const INPUTS = [1, 2, { foo: 'bar' }, [1, 2]];
const INPUT_JSON = INPUTS.map((i) => JSON.stringify(i, null, 2)).join('\n\n');
@@ -32,9 +31,9 @@ describe('esArchiver createFormatArchiveStreams', () => {
describe('{ gzip: false }', () => {
it('returns an array of streams', () => {
const streams = createFormatArchiveStreams({ gzip: false });
- expect(streams).to.be.an('array');
- expect(streams.length).to.be.greaterThan(0);
- streams.forEach((s) => expect(s).to.be.a(Stream));
+ expect(streams).toBeInstanceOf(Array);
+ expect(streams.length).toBeGreaterThan(0);
+ streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});
it('streams consume js values and produces buffers', async () => {
@@ -44,8 +43,8 @@ describe('esArchiver createFormatArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);
- expect(output.length).to.be.greaterThan(0);
- output.forEach((b) => expect(b).to.be.a(Buffer));
+ expect(output.length).toBeGreaterThan(0);
+ output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
});
it('product is pretty-printed JSON separated by two newlines', async () => {
@@ -55,16 +54,16 @@ describe('esArchiver createFormatArchiveStreams', () => {
createConcatStream(''),
] as [Readable, ...Writable[]]);
- expect(json).to.be(INPUT_JSON);
+ expect(json).toBe(INPUT_JSON);
});
});
describe('{ gzip: true }', () => {
it('returns an array of streams', () => {
const streams = createFormatArchiveStreams({ gzip: true });
- expect(streams).to.be.an('array');
- expect(streams.length).to.be.greaterThan(0);
- streams.forEach((s) => expect(s).to.be.a(Stream));
+ expect(streams).toBeInstanceOf(Array);
+ expect(streams.length).toBeGreaterThan(0);
+ streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});
it('streams consume js values and produces buffers', async () => {
@@ -74,8 +73,8 @@ describe('esArchiver createFormatArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);
- expect(output.length).to.be.greaterThan(0);
- output.forEach((b) => expect(b).to.be.a(Buffer));
+ expect(output.length).toBeGreaterThan(0);
+ output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
});
it('output can be gunzipped', async () => {
@@ -85,7 +84,7 @@ describe('esArchiver createFormatArchiveStreams', () => {
createGunzip(),
createConcatStream(''),
] as [Readable, ...Writable[]]);
- expect(output).to.be(INPUT_JSON);
+ expect(output).toBe(INPUT_JSON);
});
});
@@ -97,7 +96,7 @@ describe('esArchiver createFormatArchiveStreams', () => {
createConcatStream(''),
] as [Readable, ...Writable[]]);
- expect(json).to.be(INPUT_JSON);
+ expect(json).toBe(INPUT_JSON);
});
});
});
diff --git a/packages/kbn-es-archiver/src/lib/archives/__tests__/parse.ts b/packages/kbn-es-archiver/src/lib/archives/parse.test.ts
similarity index 85%
rename from packages/kbn-es-archiver/src/lib/archives/__tests__/parse.ts
rename to packages/kbn-es-archiver/src/lib/archives/parse.test.ts
index deaea5cd4532e..70be5308ddfd4 100644
--- a/packages/kbn-es-archiver/src/lib/archives/__tests__/parse.ts
+++ b/packages/kbn-es-archiver/src/lib/archives/parse.test.ts
@@ -20,18 +20,17 @@
import Stream, { PassThrough, Readable, Writable, Transform } from 'stream';
import { createGzip } from 'zlib';
-import expect from '@kbn/expect';
import { createConcatStream, createListStream, createPromiseFromStreams } from '@kbn/utils';
-import { createParseArchiveStreams } from '../parse';
+import { createParseArchiveStreams } from './parse';
describe('esArchiver createParseArchiveStreams', () => {
describe('{ gzip: false }', () => {
it('returns an array of streams', () => {
const streams = createParseArchiveStreams({ gzip: false });
- expect(streams).to.be.an('array');
- expect(streams.length).to.be.greaterThan(0);
- streams.forEach((s) => expect(s).to.be.a(Stream));
+ expect(streams).toBeInstanceOf(Array);
+ expect(streams.length).toBeGreaterThan(0);
+ streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});
describe('streams', () => {
@@ -46,7 +45,7 @@ describe('esArchiver createParseArchiveStreams', () => {
...createParseArchiveStreams({ gzip: false }),
]);
- expect(output).to.eql({ a: 1 });
+ expect(output).toEqual({ a: 1 });
});
it('consume buffers of valid JSON separated by two newlines', async () => {
const output = await createPromiseFromStreams([
@@ -63,7 +62,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);
- expect(output).to.eql([{ a: 1 }, 1]);
+ expect(output).toEqual([{ a: 1 }, 1]);
});
it('provides each JSON object as soon as it is parsed', async () => {
@@ -87,10 +86,10 @@ describe('esArchiver createParseArchiveStreams', () => {
] as [Readable, ...Writable[]]);
input.write(Buffer.from('{"a": 1}\n\n{"a":'));
- expect(await receivedPromise).to.eql({ a: 1 });
+ expect(await receivedPromise).toEqual({ a: 1 });
input.write(Buffer.from('2}'));
input.end();
- expect(await finalPromise).to.eql([{ a: 1 }, { a: 2 }]);
+ expect(await finalPromise).toEqual([{ a: 1 }, { a: 2 }]);
});
});
@@ -108,7 +107,7 @@ describe('esArchiver createParseArchiveStreams', () => {
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {
- expect(err.message).to.contain('Unexpected number');
+ expect(err.message).toEqual(expect.stringContaining('Unexpected number'));
}
});
});
@@ -117,9 +116,9 @@ describe('esArchiver createParseArchiveStreams', () => {
describe('{ gzip: true }', () => {
it('returns an array of streams', () => {
const streams = createParseArchiveStreams({ gzip: true });
- expect(streams).to.be.an('array');
- expect(streams.length).to.be.greaterThan(0);
- streams.forEach((s) => expect(s).to.be.a(Stream));
+ expect(streams).toBeInstanceOf(Array);
+ expect(streams.length).toBeGreaterThan(0);
+ streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
});
describe('streams', () => {
@@ -135,7 +134,7 @@ describe('esArchiver createParseArchiveStreams', () => {
...createParseArchiveStreams({ gzip: true }),
]);
- expect(output).to.eql({ a: 1 });
+ expect(output).toEqual({ a: 1 });
});
it('parses valid gzipped JSON strings separated by two newlines', async () => {
@@ -146,7 +145,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);
- expect(output).to.eql([{ a: 1 }, { a: 2 }]);
+ expect(output).toEqual([{ a: 1 }, { a: 2 }]);
});
});
@@ -158,7 +157,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createConcatStream([]),
] as [Readable, ...Writable[]]);
- expect(output).to.eql([]);
+ expect(output).toEqual([]);
});
describe('stream errors', () => {
@@ -171,7 +170,7 @@ describe('esArchiver createParseArchiveStreams', () => {
] as [Readable, ...Writable[]]);
throw new Error('should have failed');
} catch (err) {
- expect(err.message).to.contain('incorrect header check');
+ expect(err.message).toEqual(expect.stringContaining('incorrect header check'));
}
});
});
@@ -183,7 +182,7 @@ describe('esArchiver createParseArchiveStreams', () => {
createListStream([Buffer.from('{"a": 1}')]),
...createParseArchiveStreams(),
]);
- expect(output).to.eql({ a: 1 });
+ expect(output).toEqual({ a: 1 });
});
});
});
diff --git a/packages/kbn-es-archiver/src/lib/docs/__tests__/stubs.ts b/packages/kbn-es-archiver/src/lib/docs/__mocks__/stubs.ts
similarity index 100%
rename from packages/kbn-es-archiver/src/lib/docs/__tests__/stubs.ts
rename to packages/kbn-es-archiver/src/lib/docs/__mocks__/stubs.ts
diff --git a/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
similarity index 71%
rename from packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts
rename to packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
index 074333eb6028f..dad6008c89824 100644
--- a/packages/kbn-es-archiver/src/lib/docs/__tests__/generate_doc_records_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
@@ -18,22 +18,21 @@
*/
import sinon from 'sinon';
-import expect from '@kbn/expect';
import { delay } from 'bluebird';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';
-import { createGenerateDocRecordsStream } from '../generate_doc_records_stream';
-import { Progress } from '../../progress';
-import { createStubStats, createStubClient } from './stubs';
+import { createGenerateDocRecordsStream } from './generate_doc_records_stream';
+import { Progress } from '../progress';
+import { createStubStats, createStubClient } from './__mocks__/stubs';
describe('esArchiver: createGenerateDocRecordsStream()', () => {
it('scolls 1000 documents at a time', async () => {
const stats = createStubStats();
const client = createStubClient([
(name, params) => {
- expect(name).to.be('search');
- expect(params).to.have.property('index', 'logstash-*');
- expect(params).to.have.property('size', 1000);
+ expect(name).toBe('search');
+ expect(params).toHaveProperty('index', 'logstash-*');
+ expect(params).toHaveProperty('size', 1000);
return {
hits: {
total: 0,
@@ -49,18 +48,18 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
createGenerateDocRecordsStream({ client, stats, progress }),
]);
- expect(progress.getTotal()).to.be(0);
- expect(progress.getComplete()).to.be(0);
+ expect(progress.getTotal()).toBe(0);
+ expect(progress.getComplete()).toBe(0);
});
it('uses a 1 minute scroll timeout', async () => {
const stats = createStubStats();
const client = createStubClient([
(name, params) => {
- expect(name).to.be('search');
- expect(params).to.have.property('index', 'logstash-*');
- expect(params).to.have.property('scroll', '1m');
- expect(params).to.have.property('rest_total_hits_as_int', true);
+ expect(name).toBe('search');
+ expect(params).toHaveProperty('index', 'logstash-*');
+ expect(params).toHaveProperty('scroll', '1m');
+ expect(params).toHaveProperty('rest_total_hits_as_int', true);
return {
hits: {
total: 0,
@@ -76,8 +75,8 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
createGenerateDocRecordsStream({ client, stats, progress }),
]);
- expect(progress.getTotal()).to.be(0);
- expect(progress.getComplete()).to.be(0);
+ expect(progress.getTotal()).toBe(0);
+ expect(progress.getComplete()).toBe(0);
});
it('consumes index names and scrolls completely before continuing', async () => {
@@ -85,8 +84,8 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
let checkpoint = Date.now();
const client = createStubClient([
async (name, params) => {
- expect(name).to.be('search');
- expect(params).to.have.property('index', 'index1');
+ expect(name).toBe('search');
+ expect(params).toHaveProperty('index', 'index1');
await delay(200);
return {
_scroll_id: 'index1ScrollId',
@@ -94,17 +93,17 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
};
},
async (name, params) => {
- expect(name).to.be('scroll');
- expect(params).to.have.property('scrollId', 'index1ScrollId');
- expect(Date.now() - checkpoint).to.not.be.lessThan(200);
+ expect(name).toBe('scroll');
+ expect(params).toHaveProperty('scrollId', 'index1ScrollId');
+ expect(Date.now() - checkpoint).not.toBeLessThan(200);
checkpoint = Date.now();
await delay(200);
return { hits: { total: 2, hits: [{ _id: 2, _index: 'foo' }] } };
},
async (name, params) => {
- expect(name).to.be('search');
- expect(params).to.have.property('index', 'index2');
- expect(Date.now() - checkpoint).to.not.be.lessThan(200);
+ expect(name).toBe('search');
+ expect(params).toHaveProperty('index', 'index2');
+ expect(Date.now() - checkpoint).not.toBeLessThan(200);
checkpoint = Date.now();
await delay(200);
return { hits: { total: 0, hits: [] } };
@@ -118,7 +117,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
createConcatStream([]),
]);
- expect(docRecords).to.eql([
+ expect(docRecords).toEqual([
{
type: 'doc',
value: {
@@ -139,7 +138,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
},
]);
sinon.assert.calledTwice(stats.archivedDoc as any);
- expect(progress.getTotal()).to.be(2);
- expect(progress.getComplete()).to.be(2);
+ expect(progress.getTotal()).toBe(2);
+ expect(progress.getComplete()).toBe(2);
});
});
diff --git a/packages/kbn-es-archiver/src/lib/docs/__tests__/index_doc_records_stream.ts b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
similarity index 77%
rename from packages/kbn-es-archiver/src/lib/docs/__tests__/index_doc_records_stream.ts
rename to packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
index 5ce1a0d434ae6..c30efaf679d5d 100644
--- a/packages/kbn-es-archiver/src/lib/docs/__tests__/index_doc_records_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
@@ -17,13 +17,12 @@
* under the License.
*/
-import expect from '@kbn/expect';
import { delay } from 'bluebird';
import { createListStream, createPromiseFromStreams } from '@kbn/utils';
-import { Progress } from '../../progress';
-import { createIndexDocRecordsStream } from '../index_doc_records_stream';
-import { createStubStats, createStubClient, createPersonDocRecords } from './stubs';
+import { Progress } from '../progress';
+import { createIndexDocRecordsStream } from './index_doc_records_stream';
+import { createStubStats, createStubClient, createPersonDocRecords } from './__mocks__/stubs';
const recordsToBulkBody = (records: any[]) => {
return records.reduce((acc, record) => {
@@ -38,8 +37,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
const records = createPersonDocRecords(1);
const client = createStubClient([
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params).to.eql({
+ expect(name).toBe('bulk');
+ expect(params).toEqual({
body: recordsToBulkBody(records),
requestTimeout: 120000,
});
@@ -55,24 +54,24 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
]);
client.assertNoPendingResponses();
- expect(progress.getComplete()).to.be(1);
- expect(progress.getTotal()).to.be(undefined);
+ expect(progress.getComplete()).toBe(1);
+ expect(progress.getTotal()).toBe(undefined);
});
it('consumes multiple doc records and sends to `_bulk` api together', async () => {
const records = createPersonDocRecords(10);
const client = createStubClient([
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params).to.eql({
+ expect(name).toBe('bulk');
+ expect(params).toEqual({
body: recordsToBulkBody(records.slice(0, 1)),
requestTimeout: 120000,
});
return { ok: true };
},
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params).to.eql({
+ expect(name).toBe('bulk');
+ expect(params).toEqual({
body: recordsToBulkBody(records.slice(1)),
requestTimeout: 120000,
});
@@ -88,8 +87,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
]);
client.assertNoPendingResponses();
- expect(progress.getComplete()).to.be(10);
- expect(progress.getTotal()).to.be(undefined);
+ expect(progress.getComplete()).toBe(10);
+ expect(progress.getTotal()).toBe(undefined);
});
it('waits until request is complete before sending more', async () => {
@@ -99,8 +98,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
const delayMs = 1234;
const client = createStubClient([
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params).to.eql({
+ expect(name).toBe('bulk');
+ expect(params).toEqual({
body: recordsToBulkBody(records.slice(0, 1)),
requestTimeout: 120000,
});
@@ -108,12 +107,12 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
return { ok: true };
},
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params).to.eql({
+ expect(name).toBe('bulk');
+ expect(params).toEqual({
body: recordsToBulkBody(records.slice(1)),
requestTimeout: 120000,
});
- expect(Date.now() - start).to.not.be.lessThan(delayMs);
+ expect(Date.now() - start).not.toBeLessThan(delayMs);
return { ok: true };
},
]);
@@ -125,8 +124,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
]);
client.assertNoPendingResponses();
- expect(progress.getComplete()).to.be(10);
- expect(progress.getTotal()).to.be(undefined);
+ expect(progress.getComplete()).toBe(10);
+ expect(progress.getTotal()).toBe(undefined);
});
it('sends a maximum of 300 documents at a time', async () => {
@@ -134,18 +133,18 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
const stats = createStubStats();
const client = createStubClient([
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params.body.length).to.eql(1 * 2);
+ expect(name).toBe('bulk');
+ expect(params.body.length).toEqual(1 * 2);
return { ok: true };
},
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params.body.length).to.eql(299 * 2);
+ expect(name).toBe('bulk');
+ expect(params.body.length).toEqual(299 * 2);
return { ok: true };
},
async (name, params) => {
- expect(name).to.be('bulk');
- expect(params.body.length).to.eql(1 * 2);
+ expect(name).toBe('bulk');
+ expect(params.body.length).toEqual(1 * 2);
return { ok: true };
},
]);
@@ -157,8 +156,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
]);
client.assertNoPendingResponses();
- expect(progress.getComplete()).to.be(301);
- expect(progress.getTotal()).to.be(undefined);
+ expect(progress.getComplete()).toBe(301);
+ expect(progress.getTotal()).toBe(undefined);
});
it('emits an error if any request fails', async () => {
@@ -177,11 +176,11 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
]);
throw new Error('expected stream to emit error');
} catch (err) {
- expect(err.message).to.match(/"forcedError":\s*true/);
+ expect(err.message).toMatch(/"forcedError":\s*true/);
}
client.assertNoPendingResponses();
- expect(progress.getComplete()).to.be(1);
- expect(progress.getTotal()).to.be(undefined);
+ expect(progress.getComplete()).toBe(1);
+ expect(progress.getTotal()).toBe(undefined);
});
});
diff --git a/packages/kbn-es-archiver/src/lib/index.ts b/packages/kbn-es-archiver/src/lib/index.ts
index 960d51e411859..ac7569ba735ac 100644
--- a/packages/kbn-es-archiver/src/lib/index.ts
+++ b/packages/kbn-es-archiver/src/lib/index.ts
@@ -25,6 +25,7 @@ export {
createGenerateIndexRecordsStream,
deleteKibanaIndices,
migrateKibanaIndex,
+ cleanKibanaIndices,
createDefaultSpace,
} from './indices';
diff --git a/packages/kbn-es-archiver/src/lib/indices/__tests__/stubs.ts b/packages/kbn-es-archiver/src/lib/indices/__mocks__/stubs.ts
similarity index 100%
rename from packages/kbn-es-archiver/src/lib/indices/__tests__/stubs.ts
rename to packages/kbn-es-archiver/src/lib/indices/__mocks__/stubs.ts
diff --git a/packages/kbn-es-archiver/src/lib/indices/__tests__/create_index_stream.ts b/packages/kbn-es-archiver/src/lib/indices/create_index_stream.test.ts
similarity index 92%
rename from packages/kbn-es-archiver/src/lib/indices/__tests__/create_index_stream.ts
rename to packages/kbn-es-archiver/src/lib/indices/create_index_stream.test.ts
index b1a83046f40d6..db3de3378eee1 100644
--- a/packages/kbn-es-archiver/src/lib/indices/__tests__/create_index_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/indices/create_index_stream.test.ts
@@ -17,12 +17,11 @@
* under the License.
*/
-import expect from '@kbn/expect';
import sinon from 'sinon';
import Chance from 'chance';
import { createPromiseFromStreams, createConcatStream, createListStream } from '@kbn/utils';
-import { createCreateIndexStream } from '../create_index_stream';
+import { createCreateIndexStream } from './create_index_stream';
import {
createStubStats,
@@ -30,7 +29,7 @@ import {
createStubDocRecord,
createStubClient,
createStubLogger,
-} from './stubs';
+} from './__mocks__/stubs';
const chance = new Chance();
@@ -49,7 +48,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
createCreateIndexStream({ client, stats, log }),
]);
- expect(stats.getTestSummary()).to.eql({
+ expect(stats.getTestSummary()).toEqual({
deletedIndex: 1,
createdIndex: 2,
});
@@ -68,13 +67,13 @@ describe('esArchiver: createCreateIndexStream()', () => {
createCreateIndexStream({ client, stats, log }),
]);
- expect((client.indices.getAlias as sinon.SinonSpy).calledOnce).to.be.ok();
- expect((client.indices.getAlias as sinon.SinonSpy).args[0][0]).to.eql({
+ expect((client.indices.getAlias as sinon.SinonSpy).calledOnce).toBe(true);
+ expect((client.indices.getAlias as sinon.SinonSpy).args[0][0]).toEqual({
name: 'existing-index',
ignore: [404],
});
- expect((client.indices.delete as sinon.SinonSpy).calledOnce).to.be.ok();
- expect((client.indices.delete as sinon.SinonSpy).args[0][0]).to.eql({
+ expect((client.indices.delete as sinon.SinonSpy).calledOnce).toBe(true);
+ expect((client.indices.delete as sinon.SinonSpy).args[0][0]).toEqual({
index: ['actual-index'],
});
sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 3); // one failed create because of existing
@@ -93,7 +92,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
createConcatStream([]),
]);
- expect(output).to.eql([createStubDocRecord('index', 1), createStubDocRecord('index', 2)]);
+ expect(output).toEqual([createStubDocRecord('index', 1), createStubDocRecord('index', 2)]);
});
it('creates aliases', async () => {
@@ -133,7 +132,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
createConcatStream([]),
]);
- expect(output).to.eql(randoms);
+ expect(output).toEqual(randoms);
});
it('passes through non-record values', async () => {
@@ -147,7 +146,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
createConcatStream([]),
]);
- expect(output).to.eql(nonRecordValues);
+ expect(output).toEqual(nonRecordValues);
});
});
@@ -169,13 +168,13 @@ describe('esArchiver: createCreateIndexStream()', () => {
}),
]);
- expect(stats.getTestSummary()).to.eql({
+ expect(stats.getTestSummary()).toEqual({
skippedIndex: 1,
createdIndex: 1,
});
sinon.assert.callCount(client.indices.delete as sinon.SinonSpy, 0);
sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 2); // one failed create because of existing
- expect((client.indices.create as sinon.SinonSpy).args[0][0]).to.have.property(
+ expect((client.indices.create as sinon.SinonSpy).args[0][0]).toHaveProperty(
'index',
'new-index'
);
@@ -203,15 +202,15 @@ describe('esArchiver: createCreateIndexStream()', () => {
createConcatStream([]),
]);
- expect(stats.getTestSummary()).to.eql({
+ expect(stats.getTestSummary()).toEqual({
skippedIndex: 1,
createdIndex: 1,
});
sinon.assert.callCount(client.indices.delete as sinon.SinonSpy, 0);
sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 2); // one failed create because of existing
- expect(output).to.have.length(2);
- expect(output).to.eql([
+ expect(output).toHaveLength(2);
+ expect(output).toEqual([
createStubDocRecord('new-index', 1),
createStubDocRecord('new-index', 2),
]);
diff --git a/packages/kbn-es-archiver/src/lib/indices/__tests__/delete_index_stream.ts b/packages/kbn-es-archiver/src/lib/indices/delete_index_stream.test.ts
similarity index 96%
rename from packages/kbn-es-archiver/src/lib/indices/__tests__/delete_index_stream.ts
rename to packages/kbn-es-archiver/src/lib/indices/delete_index_stream.test.ts
index 3c9d866700005..ec588d5e7dae2 100644
--- a/packages/kbn-es-archiver/src/lib/indices/__tests__/delete_index_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/indices/delete_index_stream.test.ts
@@ -21,14 +21,14 @@ import sinon from 'sinon';
import { createListStream, createPromiseFromStreams } from '@kbn/utils';
-import { createDeleteIndexStream } from '../delete_index_stream';
+import { createDeleteIndexStream } from './delete_index_stream';
import {
createStubStats,
createStubClient,
createStubIndexRecord,
createStubLogger,
-} from './stubs';
+} from './__mocks__/stubs';
const log = createStubLogger();
diff --git a/packages/kbn-es-archiver/src/lib/indices/__tests__/generate_index_records_stream.ts b/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.test.ts
similarity index 76%
rename from packages/kbn-es-archiver/src/lib/indices/__tests__/generate_index_records_stream.ts
rename to packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.test.ts
index d2c9f1274e60f..fc5e86217038f 100644
--- a/packages/kbn-es-archiver/src/lib/indices/__tests__/generate_index_records_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/indices/generate_index_records_stream.test.ts
@@ -18,12 +18,11 @@
*/
import sinon from 'sinon';
-import expect from '@kbn/expect';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';
-import { createStubClient, createStubStats } from './stubs';
+import { createStubClient, createStubStats } from './__mocks__/stubs';
-import { createGenerateIndexRecordsStream } from '../generate_index_records_stream';
+import { createGenerateIndexRecordsStream } from './generate_index_records_stream';
describe('esArchiver: createGenerateIndexRecordsStream()', () => {
it('consumes index names and queries for the mapping of each', async () => {
@@ -36,7 +35,7 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
createGenerateIndexRecordsStream(client, stats),
]);
- expect(stats.getTestSummary()).to.eql({
+ expect(stats.getTestSummary()).toEqual({
archivedIndex: 4,
});
@@ -56,12 +55,12 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
]);
const params = (client.indices.get as sinon.SinonSpy).args[0][0];
- expect(params).to.have.property('filterPath');
+ expect(params).toHaveProperty('filterPath');
const filters: string[] = params.filterPath;
- expect(filters.some((path) => path.includes('index.creation_date'))).to.be(true);
- expect(filters.some((path) => path.includes('index.uuid'))).to.be(true);
- expect(filters.some((path) => path.includes('index.version'))).to.be(true);
- expect(filters.some((path) => path.includes('index.provided_name'))).to.be(true);
+ expect(filters.some((path) => path.includes('index.creation_date'))).toBe(true);
+ expect(filters.some((path) => path.includes('index.uuid'))).toBe(true);
+ expect(filters.some((path) => path.includes('index.version'))).toBe(true);
+ expect(filters.some((path) => path.includes('index.provided_name'))).toBe(true);
});
it('produces one index record for each index name it receives', async () => {
@@ -74,19 +73,19 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
createConcatStream([]),
]);
- expect(indexRecords).to.have.length(3);
+ expect(indexRecords).toHaveLength(3);
- expect(indexRecords[0]).to.have.property('type', 'index');
- expect(indexRecords[0]).to.have.property('value');
- expect(indexRecords[0].value).to.have.property('index', 'index1');
+ expect(indexRecords[0]).toHaveProperty('type', 'index');
+ expect(indexRecords[0]).toHaveProperty('value');
+ expect(indexRecords[0].value).toHaveProperty('index', 'index1');
- expect(indexRecords[1]).to.have.property('type', 'index');
- expect(indexRecords[1]).to.have.property('value');
- expect(indexRecords[1].value).to.have.property('index', 'index2');
+ expect(indexRecords[1]).toHaveProperty('type', 'index');
+ expect(indexRecords[1]).toHaveProperty('value');
+ expect(indexRecords[1].value).toHaveProperty('index', 'index2');
- expect(indexRecords[2]).to.have.property('type', 'index');
- expect(indexRecords[2]).to.have.property('value');
- expect(indexRecords[2].value).to.have.property('index', 'index3');
+ expect(indexRecords[2]).toHaveProperty('type', 'index');
+ expect(indexRecords[2]).toHaveProperty('value');
+ expect(indexRecords[2].value).toHaveProperty('index', 'index3');
});
it('understands aliases', async () => {
@@ -99,7 +98,7 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
createConcatStream([]),
]);
- expect(indexRecords).to.eql([
+ expect(indexRecords).toEqual([
{
type: 'index',
value: {
diff --git a/packages/kbn-es-archiver/src/lib/indices/index.ts b/packages/kbn-es-archiver/src/lib/indices/index.ts
index 289ac87feb9a5..076582ddde8ab 100644
--- a/packages/kbn-es-archiver/src/lib/indices/index.ts
+++ b/packages/kbn-es-archiver/src/lib/indices/index.ts
@@ -20,4 +20,9 @@
export { createCreateIndexStream } from './create_index_stream';
export { createDeleteIndexStream } from './delete_index_stream';
export { createGenerateIndexRecordsStream } from './generate_index_records_stream';
-export { migrateKibanaIndex, deleteKibanaIndices, createDefaultSpace } from './kibana_index';
+export {
+ migrateKibanaIndex,
+ deleteKibanaIndices,
+ cleanKibanaIndices,
+ createDefaultSpace,
+} from './kibana_index';
diff --git a/packages/kbn-es-archiver/src/lib/indices/kibana_index.ts b/packages/kbn-es-archiver/src/lib/indices/kibana_index.ts
index 3599911735b8d..50fabad1fa26f 100644
--- a/packages/kbn-es-archiver/src/lib/indices/kibana_index.ts
+++ b/packages/kbn-es-archiver/src/lib/indices/kibana_index.ts
@@ -73,6 +73,7 @@ export async function migrateKibanaIndex({
body: {
dynamic: true,
},
+ ignore: [404],
} as any);
await kbnClient.savedObjects.migrate();
diff --git a/packages/kbn-es-archiver/src/lib/records/__tests__/filter_records_stream.ts b/packages/kbn-es-archiver/src/lib/records/filter_records_stream.test.ts
similarity index 89%
rename from packages/kbn-es-archiver/src/lib/records/__tests__/filter_records_stream.ts
rename to packages/kbn-es-archiver/src/lib/records/filter_records_stream.test.ts
index cf67ee2071c10..8fba5668e972d 100644
--- a/packages/kbn-es-archiver/src/lib/records/__tests__/filter_records_stream.ts
+++ b/packages/kbn-es-archiver/src/lib/records/filter_records_stream.test.ts
@@ -18,11 +18,10 @@
*/
import Chance from 'chance';
-import expect from '@kbn/expect';
import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';
-import { createFilterRecordsStream } from '../filter_records_stream';
+import { createFilterRecordsStream } from './filter_records_stream';
const chance = new Chance();
@@ -42,7 +41,7 @@ describe('esArchiver: createFilterRecordsStream()', () => {
createConcatStream([]),
]);
- expect(output).to.eql([]);
+ expect(output).toEqual([]);
});
it('produces record values that have a matching type', async () => {
@@ -61,7 +60,7 @@ describe('esArchiver: createFilterRecordsStream()', () => {
createConcatStream([]),
]);
- expect(output).to.have.length(3);
- expect(output.map((o) => o.type)).to.eql([type1, type1, type1]);
+ expect(output).toHaveLength(3);
+ expect(output.map((o) => o.type)).toEqual([type1, type1, type1]);
});
});
diff --git a/packages/kbn-i18n/GUIDELINE.md b/packages/kbn-i18n/GUIDELINE.md
index b7c1371d59ea4..437e73bb27019 100644
--- a/packages/kbn-i18n/GUIDELINE.md
+++ b/packages/kbn-i18n/GUIDELINE.md
@@ -387,6 +387,50 @@ Splitting sentences into several keys often inadvertently presumes a grammar, a
### Unit tests
+#### How to test `FormattedMessage` and `i18n.translate()` components.
+
+To make `FormattedMessage` component work properly, wrapping it with `I18nProvider` is required. In development/production app, this is done in the ancestor components and developers don't have to worry about that.
+
+But when unit-testing them, no other component provides that wrapping. That's why `shallowWithI18nProvider` and `mountWithI18nProvider` helpers are created.
+
+For example, there is a component that has `FormattedMessage` inside, like `SaveModal` component:
+
+```js
+// ...
+export const SaveModal = (props) => {
+ return (
+
+ {/* Other things. */}
+
+
+
+ {/* More other things. */}
+
+ )
+}
+```
+
+To test `SaveModal` component, it should be wrapped with `I18nProvider` by using `shallowWithI18nProvider`:
+
+```js
+// ...
+it('should render normally', async () => {
+ const component = shallowWithI18nProvider(
+
+ );
+
+ expect(component).toMatchSnapshot();
+});
+// ...
+```
+
+If a component uses only `i18n.translate()`, it doesn't need `I18nProvider`. In that case, you can test it with the `shallow` and `mount` functions that `enzyme` provides out of the box.
+
+#### How to test `injectI18n` HOC components.
+
Testing React component that uses the `injectI18n` higher-order component is more complicated because `injectI18n()` creates a wrapper component around the original component.
With shallow rendering only top level component is rendered, that is a wrapper itself, not the original component. Since we want to test the rendering of the original component, we need to access it via the wrapper's `WrappedComponent` property. Its value will be the component we passed into `injectI18n()`.
diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml
index 08d883a7cbb4d..67287089489e1 100644
--- a/packages/kbn-optimizer/limits.yml
+++ b/packages/kbn-optimizer/limits.yml
@@ -102,6 +102,7 @@ pageLoadAssetSize:
visualizations: 295025
visualize: 57431
watcher: 43598
- runtimeFields: 41752
+ runtimeFields: 10000
stackAlerts: 29684
presentationUtil: 28545
+ runtimeFieldEditor: 46986
diff --git a/src/core/server/http/prototype_pollution/__snapshots__/validate_object.test.ts.snap b/packages/kbn-std/src/__snapshots__/ensure_no_unsafe_properties.test.ts.snap
similarity index 100%
rename from src/core/server/http/prototype_pollution/__snapshots__/validate_object.test.ts.snap
rename to packages/kbn-std/src/__snapshots__/ensure_no_unsafe_properties.test.ts.snap
diff --git a/src/core/server/http/prototype_pollution/validate_object.test.ts b/packages/kbn-std/src/ensure_no_unsafe_properties.test.ts
similarity index 89%
rename from src/core/server/http/prototype_pollution/validate_object.test.ts
rename to packages/kbn-std/src/ensure_no_unsafe_properties.test.ts
index 23d6c4ae3b49f..c12626b8d777e 100644
--- a/src/core/server/http/prototype_pollution/validate_object.test.ts
+++ b/packages/kbn-std/src/ensure_no_unsafe_properties.test.ts
@@ -17,14 +17,14 @@
* under the License.
*/
-import { validateObject } from './validate_object';
+import { ensureNoUnsafeProperties } from './ensure_no_unsafe_properties';
test(`fails on circular references`, () => {
const foo: Record = {};
foo.myself = foo;
expect(() =>
- validateObject({
+ ensureNoUnsafeProperties({
payload: foo,
})
).toThrowErrorMatchingInlineSnapshot(`"circular reference detected"`);
@@ -57,7 +57,7 @@ test(`fails on circular references`, () => {
[property]: value,
};
test(`can submit ${JSON.stringify(obj)}`, () => {
- expect(() => validateObject(obj)).not.toThrowError();
+ expect(() => ensureNoUnsafeProperties(obj)).not.toThrowError();
});
});
});
@@ -74,6 +74,6 @@ test(`fails on circular references`, () => {
JSON.parse(`{ "foo": { "bar": { "constructor": { "prototype" : null } } } }`),
].forEach((value) => {
test(`can't submit ${JSON.stringify(value)}`, () => {
- expect(() => validateObject(value)).toThrowErrorMatchingSnapshot();
+ expect(() => ensureNoUnsafeProperties(value)).toThrowErrorMatchingSnapshot();
});
});
diff --git a/src/core/server/http/prototype_pollution/validate_object.ts b/packages/kbn-std/src/ensure_no_unsafe_properties.ts
similarity index 97%
rename from src/core/server/http/prototype_pollution/validate_object.ts
rename to packages/kbn-std/src/ensure_no_unsafe_properties.ts
index cab6ce295ce92..47cbea5ecf3ee 100644
--- a/src/core/server/http/prototype_pollution/validate_object.ts
+++ b/packages/kbn-std/src/ensure_no_unsafe_properties.ts
@@ -31,7 +31,7 @@ const hasOwnProperty = (obj: any, property: string) =>
const isObject = (obj: any) => typeof obj === 'object' && obj !== null;
// we're using a stack instead of recursion so we aren't limited by the call stack
-export function validateObject(obj: any) {
+export function ensureNoUnsafeProperties(obj: any) {
if (!isObject(obj)) {
return;
}
diff --git a/packages/kbn-std/src/index.ts b/packages/kbn-std/src/index.ts
index c111428017539..a5b5088f9105f 100644
--- a/packages/kbn-std/src/index.ts
+++ b/packages/kbn-std/src/index.ts
@@ -27,4 +27,5 @@ export { withTimeout } from './promise';
export { isRelativeUrl, modifyUrl, getUrlOrigin, URLMeaningfulParts } from './url';
export { unset } from './unset';
export { getFlattenedObject } from './get_flattened_object';
+export { ensureNoUnsafeProperties } from './ensure_no_unsafe_properties';
export * from './rxjs_7';
diff --git a/packages/kbn-test/src/functional_test_runner/cli.ts b/packages/kbn-test/src/functional_test_runner/cli.ts
index 8f53d6f7cf58b..2dfc9ded66201 100644
--- a/packages/kbn-test/src/functional_test_runner/cli.ts
+++ b/packages/kbn-test/src/functional_test_runner/cli.ts
@@ -141,22 +141,27 @@ export function runFtrCli() {
config: 'test/functional/config.js',
},
help: `
- --config=path path to a config file
- --bail stop tests after the first failure
- --grep pattern used to select which tests to run
- --invert invert grep to exclude tests
- --include=file a test file to be included, pass multiple times for multiple files
- --exclude=file a test file to be excluded, pass multiple times for multiple files
- --include-tag=tag a tag to be included, pass multiple times for multiple tags
- --exclude-tag=tag a tag to be excluded, pass multiple times for multiple tags
- --test-stats print the number of tests (included and excluded) to STDERR
- --updateBaselines replace baseline screenshots with whatever is generated from the test
- --updateSnapshots replace inline and file snapshots with whatever is generated from the test
- -u replace both baseline screenshots and snapshots
- --kibana-install-dir directory where the Kibana install being tested resides
- --throttle enable network throttling in Chrome browser
- --headless run browser in headless mode
- `,
+ --config=path path to a config file
+ --bail stop tests after the first failure
+ --grep pattern used to select which tests to run
+ --invert invert grep to exclude tests
+ --include=file a test file to be included, pass multiple times for multiple files
+ --exclude=file a test file to be excluded, pass multiple times for multiple files
+ --include-tag=tag a tag to be included, pass multiple times for multiple tags. Only
+ suites which have one of the passed include-tag tags will be executed.
+ When combined with the --exclude-tag flag both conditions must be met
+ for a suite to run.
+ --exclude-tag=tag a tag to be excluded, pass multiple times for multiple tags. Any suite
+ which has any of the exclude-tags will be excluded. When combined with
+ the --include-tag flag both conditions must be met for a suite to run.
+ --test-stats print the number of tests (included and excluded) to STDERR
+ --updateBaselines replace baseline screenshots with whatever is generated from the test
+ --updateSnapshots replace inline and file snapshots with whatever is generated from the test
+ -u replace both baseline screenshots and snapshots
+ --kibana-install-dir directory where the Kibana install being tested resides
+ --throttle enable network throttling in Chrome browser
+ --headless run browser in headless mode
+ `,
},
}
);
diff --git a/rfcs/images/background_sessions_client.png b/rfcs/images/search_sessions_client.png
similarity index 100%
rename from rfcs/images/background_sessions_client.png
rename to rfcs/images/search_sessions_client.png
diff --git a/rfcs/images/background_sessions_server.png b/rfcs/images/search_sessions_server.png
similarity index 100%
rename from rfcs/images/background_sessions_server.png
rename to rfcs/images/search_sessions_server.png
diff --git a/rfcs/text/0013_background_sessions.md b/rfcs/text/0013_search_sessions.md
similarity index 81%
rename from rfcs/text/0013_background_sessions.md
rename to rfcs/text/0013_search_sessions.md
index 056149e770448..659f1933a86f9 100644
--- a/rfcs/text/0013_background_sessions.md
+++ b/rfcs/text/0013_search_sessions.md
@@ -5,19 +5,19 @@
- Architecture diagram: https://app.lucidchart.com/documents/edit/cf35b512-616a-4734-bc72-43dde70dbd44/0_0
- Mockups: https://www.figma.com/proto/FD2M7MUpLScJKOyYjfbmev/ES-%2F-Query-Management-v4?node-id=440%3A1&viewport=984%2C-99%2C0.09413627535104752&scaling=scale-down
- Old issue: https://github.com/elastic/kibana/issues/53335
-- Background search roadmap: https://github.com/elastic/kibana/issues/61738
+- Search Sessions roadmap: https://github.com/elastic/kibana/issues/61738
- POC: https://github.com/elastic/kibana/pull/64641
# Summary
-Background Sessions will enable Kibana applications and solutions to start a group of related search requests (such as those coming from a single load of a dashboard or SIEM timeline), navigate away or close the browser, then retrieve the results when they have completed.
+Search Sessions will enable Kibana applications and solutions to start a group of related search requests (such as those coming from a single load of a dashboard or SIEM timeline), navigate away or close the browser, then retrieve the results when they have completed.
# Basic example
-At its core, background sessions are enabled via several new APIs, that:
+At its core, search sessions are enabled via several new APIs, that:
- Start a session, associating multiple search requests with a single entity
- Store the session (and continue search requests in the background)
-- Restore the background session
+- Restore the saved search session
```ts
const searchService = dataPluginStart.search;
@@ -26,7 +26,7 @@ if (appState.sessionId) {
// If we are restoring a session, set the session ID in the search service
searchService.session.restore(sessionId);
} else {
- // Otherwise, start a new background session to associate our search requests
+ // Otherwise, start a new search session to associate our search requests
appState.sessionId = searchService.session.start();
}
@@ -41,7 +41,7 @@ const response$ = await searchService.search(request);
// Calling `session.store()`, creates a saved object for this session, allowing the user to navigate away.
// The session object will be saved with all async search IDs that were executed so far.
// Any follow up searches executed with this sessionId will be saved into this object as well.
-const backgroundSession = await searchService.session.store();
+const searchSession = await searchService.session.store();
```
# Motivation
@@ -73,20 +73,20 @@ We call this entity a `session`, and when a user decides that they want to conti
This diagram matches any case where `data.search` is called from the front end:
-![image](../images/background_sessions_client.png)
+![image](../images/search_sessions_client.png)
### Server side search
This case happens if the server is the one to invoke the `data.search` endpoint, for example with TSVB.
-![image](../images/background_sessions_server.png)
+![image](../images/search_sessions_server.png)
## Data and Saved Objects
-### Background Session Status
+### Search Session Status
```ts
-export enum BackgroundSessionStatus {
+export enum SearchSessionStatus {
Running, // The session has at least one running search ID associated with it.
Done, // All search IDs associated with this session have completed.
Error, // At least one search ID associated with this session had an error.
@@ -96,27 +96,27 @@ export enum BackgroundSessionStatus {
### Saved Object Structure
-The saved object created for a background session will be scoped to a single space, and will be a `hidden` saved object
+The saved object created for a search session will be scoped to a single space, and will be a `hidden` saved object
(so that it doesn't show in the management listings). We will provide a separate interface for users to manage their own
-background sessions (which will use the `list`, `expire`, and `extend` methods described below, which will be restricted
+saved search sessions (which will use the `list`, `expire`, and `extend` methods described below, which will be restricted
per-user).
```ts
-interface BackgroundSessionAttributes extends SavedObjectAttributes {
+interface SearchSessionAttributes extends SavedObjectAttributes {
sessionId: string;
userId: string; // Something unique to the user who generated this session, like username/realm-name/realm-type
- status: BackgroundSessionStatus;
+ status: SearchSessionStatus;
name: string;
creation: Date;
expiration: Date;
idMapping: { [key: string]: string };
- url: string; // A URL relative to the Kibana root to retrieve the results of a completed background session (and/or to return to an incomplete view)
- metadata: { [key: string]: any } // Any data the specific application requires to restore a background session view
+ url: string; // A URL relative to the Kibana root to retrieve the results of a completed search session (and/or to return to an incomplete view)
+ metadata: { [key: string]: any } // Any data the specific application requires to restore a search session view
}
```
-The URL that is provided will need to be generated by the specific application implementing background sessions. We
-recommend using the URL generator to ensure that URLs are backwards-compatible since background sessions may exist as
+The URL that is provided will need to be generated by the specific application implementing search sessions. We
+recommend using the URL generator to ensure that URLs are backwards-compatible since search sessions may exist as
long as a user continues to extend the expiration.
## Frontend Services
@@ -153,10 +153,10 @@ interface ISessionService {
* @param sessionId Session ID to store. Probably retrieved from `sessionService.get()`.
* @param name A display name for the session.
* @param url TODO: is the URL provided here? How?
- * @returns The stored `BackgroundSessionAttributes` object
+ * @returns The stored `SearchSessionAttributes` object
* @throws Throws an error in OSS.
*/
- store: (sessionId: string, name: string, url: string) => Promise
+ store: (sessionId: string, name: string, url: string) => Promise
/**
* @returns Is the current session stored (i.e. is there a saved object corresponding with this sessionId).
@@ -188,17 +188,17 @@ interface ISessionService {
/**
* @param sessionId the ID of the session to retrieve the saved object.
- * @returns a filtered list of BackgroundSessionAttributes objects.
+ * @returns a filtered list of SearchSessionAttributes objects.
* @throws Throws an error in OSS.
*/
- get: (sessionId: string) => Promise
+ get: (sessionId: string) => Promise
/**
- * @param options The options to query for specific background session saved objects.
- * @returns a filtered list of BackgroundSessionAttributes objects.
+ * @param options The options to query for specific search session saved objects.
+ * @returns a filtered list of SearchSessionAttributes objects.
* @throws Throws an error in OSS.
*/
- list: (options: SavedObjectsFindOptions) => Promise
+ list: (options: SavedObjectsFindOptions) => Promise
/**
* Clears out any session info as well as the current session. Called internally whenever the user navigates
@@ -241,12 +241,12 @@ attempt to find the correct id within the saved object, and use it to retrieve t
```ts
interface ISessionService {
/**
- * Adds a search ID to a Background Session, if it exists.
+ * Adds a search ID to a Search Session, if it exists.
* Also extends the expiration of the search ID to match the session's expiration.
* @param request
* @param sessionId
* @param searchId
- * @returns true if id was added, false if Background Session doesn't exist or if there was an error while updating.
+ * @returns true if id was added, false if Search Session doesn't exist or if there was an error while updating.
* @throws an error if `searchId` already exists in the mapping for this `sessionId`
*/
trackSearchId: (
@@ -256,21 +256,21 @@ interface ISessionService {
) => Promise
/**
- * Get a Background Session object.
+ * Get a Search Session object.
* @param request
* @param sessionId
- * @returns the Background Session object if exists, or undefined.
+ * @returns the Search Session object if exists, or undefined.
*/
get: async (
request: KibanaRequest,
sessionId: string
- ) => Promise
+ ) => Promise
/**
- * Get a searchId from a Background Session object.
+ * Get a searchId from a Search Session object.
* @param request
* @param sessionId
- * @returns the searchID if exists on the Background Session, or undefined.
+ * @returns the searchID if exists on the Search Session, or undefined.
*/
getSearchId: async (
request: KibanaRequest,
@@ -283,7 +283,7 @@ interface ISessionService {
* @param sessionId Session ID to store. Probably retrieved from `sessionService.get()`.
* @param searchIdMap A mapping of hashed requests mapped to the corresponding searchId.
* @param url TODO: is the URL provided here? How?
- * @returns The stored `BackgroundSessionAttributes` object
+ * @returns The stored `SearchSessionAttributes` object
* @throws Throws an error in OSS.
* @internal (Consumers should use searchInterceptor.sendToBackground())
*/
@@ -293,7 +293,7 @@ interface ISessionService {
name: string,
url: string,
searchIdMapping?: Record
- ) => Promise
+ ) => Promise
/**
* Mark a session as and all associated searchIds as expired.
@@ -322,7 +322,7 @@ interface ISessionService {
) => Promise
/**
- * Get a list of background session objects.
+ * Get a list of Search Session objects.
* @param request
* @param sessionId
* @returns success status
@@ -330,7 +330,7 @@ interface ISessionService {
*/
list: async (
request: KibanaRequest,
- ) => Promise
+ ) => Promise
/**
* Update the status of a given session
@@ -343,7 +343,7 @@ interface ISessionService {
updateStatus: async (
request: KibanaRequest,
sessionId: string,
- status: BackgroundSessionStatus
+ status: SearchSessionStatus
) => Promise
}
@@ -381,13 +381,13 @@ Each route exposes the corresponding method from the Session Service (used only
### Search Strategy Integration
-If the `EnhancedEsSearchStrategy` receives a `restore` option, it will attempt reloading data using the Background Session saved object matching the provided `sessionId`. If there are any errors during that process, the strategy will return an error response and *not attempt to re-run the request.
+If the `EnhancedEsSearchStrategy` receives a `restore` option, it will attempt reloading data using the Search Session saved object matching the provided `sessionId`. If there are any errors during that process, the strategy will return an error response and *not* attempt to re-run the request.
The strategy will track the asyncId on the server side, if `trackId` option is provided.
### Monitoring Service
-The `data` plugin will register a task with the task manager, periodically monitoring the status of incomplete background sessions.
+The `data` plugin will register a task with the task manager, periodically monitoring the status of incomplete search sessions.
It will query the list of all incomplete sessions, and check the status of each search that is executing. If the search requests are all complete, it will update the corresponding saved object to have a `status` of `complete`. If any of the searches return an error, it will update the saved object to an `error` state. If the search requests have expired, it will update the saved object to an `expired` state. Expired sessions will be purged once they are older than the time defined by the `EXPIRED_SESSION_TTL` advanced setting.
@@ -405,23 +405,23 @@ There are two potential scenarios:
Both scenarios require careful attention during the UI design and implementation.
-The former can be resolved by clearly displaying the creation time of the restored Background Session. We could also attempt translating relative dates to absolute one's, but this might be challenging as relative dates may appear deeply nested within the DSL.
+The former can be resolved by clearly displaying the creation time of the restored Search Session. We could also attempt translating relative dates to absolute ones, but this might be challenging as relative dates may appear deeply nested within the DSL.
The latter case happens at the moment for the timepicker only: The relative date is being translated each time into an absolute one, before being sent to Elasticsearch. In order to avoid issues, we'll have to make sure that restore URLs are generated with an absolute date, to make sure they are restored correctly.
#### Changing a restored session
-If you have restored a Background Session, making any type of change to it (time range, filters, etc.) will trigger new (potentially long) searches. There should be a clear indication in the UI that the data is no longer stored. A user then may choose to send it to background, resulting in a new Background Session being saved.
+If you have restored a Search Session, making any type of change to it (time range, filters, etc.) will trigger new (potentially long) searches. There should be a clear indication in the UI that the data is no longer stored. A user then may choose to send it to background, resulting in a new Search Session being saved.
#### Loading an errored \ expired \ canceled session
-When trying to restore a Background Session, if any of the requests hashes don't match the ones saved, or if any of the saved async search IDs are expired, a meaningful error code will be returned by the server **by those requests**. It is each application's responsibility to handle these errors appropriately.
+When trying to restore a Search Session, if any of the requests hashes don't match the ones saved, or if any of the saved async search IDs are expired, a meaningful error code will be returned by the server **by those requests**. It is each application's responsibility to handle these errors appropriately.
In such a scenario, the session will be partially restored.
#### Extending Expiration
-Sessions are given an expiration date defined in an advanced setting (5 days by default). This expiration date is measured from the time the Background Session is saved, and it includes the time it takes to generate the results.
+Sessions are given an expiration date defined in an advanced setting (5 days by default). This expiration date is measured from the time the Search Session is saved, and it includes the time it takes to generate the results.
A session's expiration date may be extended indefinitely. However, if a session was canceled or has already expired, it needs to be re-run.
@@ -444,7 +444,7 @@ so we feel comfortable moving forward with this approach.
Two potential drawbacks stem from storing things in server memory. If a Kibana server is restarted, in-memory results
will be lost. (This can be an issue if a search request has started, and the user has sent to background, but the
-background session saved object has not yet been updated with the search request ID.) In such cases, the user interface
+search session saved object has not yet been updated with the search request ID.) In such cases, the user interface
will need to indicate errors for requests that were not stored in the saved object.
There is also the consideration of the memory footprint of the Kibana server; however, since
@@ -452,7 +452,7 @@ we are only storing a hash of the request and search request ID, and are periodi
Services and Routes), we do not anticipate the footprint to increase significantly.
The results of search requests that have been sent to the background will be stored in Elasticsearch for several days,
-even if they will only be retrieved once. This will be mitigated by allowing the user manually delete a background
+even if they will only be retrieved once. This will be mitigated by allowing the user to manually delete a search
session object after it has been accessed.
# Alternatives
@@ -463,7 +463,7 @@ What other designs have been considered? What is the impact of not doing this?
(See "Basic example" above.)
-Any application or solution that uses the `data` plugin `search` services will be able to facilitate background sessions
+Any application or solution that uses the `data` plugin `search` services will be able to facilitate search sessions
fairly simply. The public side will need to create/clear sessions when appropriate, and ensure the `sessionId` is sent
with all search requests. It will also need to ensure that any necessary application data, as well as a `restoreUrl` is
sent when creating the saved object.
diff --git a/scripts/ensure_all_tests_in_ci_group.js b/scripts/ensure_all_tests_in_ci_group.js
new file mode 100644
index 0000000000000..d189aac8f62e8
--- /dev/null
+++ b/scripts/ensure_all_tests_in_ci_group.js
@@ -0,0 +1,21 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+require('../src/setup_node_env');
+require('../src/dev/run_ensure_all_tests_in_ci_group');
diff --git a/src/core/public/application/capabilities/capabilities_service.test.ts b/src/core/public/application/capabilities/capabilities_service.test.ts
index 286a93fdc2398..aa9c10ecfb2b2 100644
--- a/src/core/public/application/capabilities/capabilities_service.test.ts
+++ b/src/core/public/application/capabilities/capabilities_service.test.ts
@@ -41,11 +41,36 @@ describe('#start', () => {
http.post.mockReturnValue(Promise.resolve(mockedCapabilities));
});
+ it('requests default capabilities on anonymous paths', async () => {
+ http.anonymousPaths.isAnonymous.mockReturnValue(true);
+ const service = new CapabilitiesService();
+ const appIds = ['app1', 'app2', 'legacyApp1', 'legacyApp2'];
+ const { capabilities } = await service.start({
+ http,
+ appIds,
+ });
+
+ expect(http.post).toHaveBeenCalledWith('/api/core/capabilities', {
+ query: {
+ useDefaultCapabilities: true,
+ },
+ body: JSON.stringify({ applications: appIds }),
+ });
+
+ // @ts-expect-error TypeScript knows this shouldn't be possible
+ expect(() => (capabilities.foo = 'foo')).toThrowError();
+ });
+
it('only returns capabilities for given appIds', async () => {
const service = new CapabilitiesService();
+ const appIds = ['app1', 'app2', 'legacyApp1', 'legacyApp2'];
const { capabilities } = await service.start({
http,
- appIds: ['app1', 'app2', 'legacyApp1', 'legacyApp2'],
+ appIds,
+ });
+
+ expect(http.post).toHaveBeenCalledWith('/api/core/capabilities', {
+ body: JSON.stringify({ applications: appIds }),
});
// @ts-expect-error TypeScript knows this shouldn't be possible
diff --git a/src/core/public/application/capabilities/capabilities_service.tsx b/src/core/public/application/capabilities/capabilities_service.tsx
index 1164164aec4c5..156b75b2d8abe 100644
--- a/src/core/public/application/capabilities/capabilities_service.tsx
+++ b/src/core/public/application/capabilities/capabilities_service.tsx
@@ -38,7 +38,9 @@ export interface CapabilitiesStart {
*/
export class CapabilitiesService {
public async start({ appIds, http }: StartDeps): Promise {
+ const useDefaultCapabilities = http.anonymousPaths.isAnonymous(window.location.pathname);
const capabilities = await http.post('/api/core/capabilities', {
+ query: useDefaultCapabilities ? { useDefaultCapabilities } : undefined,
body: JSON.stringify({ applications: appIds }),
});
diff --git a/src/core/public/chrome/ui/header/_index.scss b/src/core/public/chrome/ui/header/_index.scss
index 44cd864278325..b11e7e47f4ae7 100644
--- a/src/core/public/chrome/ui/header/_index.scss
+++ b/src/core/public/chrome/ui/header/_index.scss
@@ -1,5 +1,19 @@
@include euiHeaderAffordForFixed;
+.euiDataGrid__restrictBody {
+ .headerGlobalNav,
+ .kbnQueryBar {
+ display: none;
+ }
+}
+
+.euiDataGrid__restrictBody.euiBody--headerIsFixed {
+ .euiFlyout {
+ top: 0;
+ height: 100%;
+ }
+}
+
.chrHeaderHelpMenu__version {
text-transform: none;
}
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index b8843b5c85595..12266ec8de2e4 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -43,6 +43,9 @@ export class DocLinksService {
urlDrilldownTemplateSyntax: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/url_templating-language.html`,
urlDrilldownVariables: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/url_templating-language.html#url-template-variables`,
},
+ discover: {
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/discover.html`,
+ },
filebeat: {
base: `${ELASTIC_WEBSITE_URL}guide/en/beats/filebeat/${DOC_LINK_VERSION}`,
installation: `${ELASTIC_WEBSITE_URL}guide/en/beats/filebeat/${DOC_LINK_VERSION}/filebeat-installation-configuration.html`,
@@ -72,6 +75,7 @@ export class DocLinksService {
aggs: {
date_histogram: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-datehistogram-aggregation.html`,
date_range: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-daterange-aggregation.html`,
+ date_format_pattern: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-daterange-aggregation.html#date-format-pattern`,
filter: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-filter-aggregation.html`,
filters: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-filters-aggregation.html`,
geohash_grid: `${ELASTICSEARCH_DOCS}search-aggregations-bucket-geohashgrid-aggregation.html`,
@@ -101,12 +105,14 @@ export class DocLinksService {
sum: `${ELASTICSEARCH_DOCS}search-aggregations-metrics-sum-aggregation.html`,
top_hits: `${ELASTICSEARCH_DOCS}search-aggregations-metrics-top-hits-aggregation.html`,
},
+ runtimeFields: `${ELASTICSEARCH_DOCS}runtime.html`,
scriptedFields: {
scriptFields: `${ELASTICSEARCH_DOCS}search-request-script-fields.html`,
scriptAggs: `${ELASTICSEARCH_DOCS}search-aggregations.html#_values_source`,
painless: `${ELASTICSEARCH_DOCS}modules-scripting-painless.html`,
painlessApi: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-api-reference.html`,
painlessSyntax: `${ELASTICSEARCH_DOCS}modules-scripting-painless-syntax.html`,
+ painlessLanguage: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-lang-spec.html`,
luceneExpressions: `${ELASTICSEARCH_DOCS}modules-scripting-expression.html`,
},
indexPatterns: {
@@ -115,6 +121,13 @@ export class DocLinksService {
},
addData: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/connect-to-elasticsearch.html`,
kibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/index.html`,
+ elasticsearch: {
+ remoteClusters: `${ELASTICSEARCH_DOCS}modules-remote-clusters.html`,
+ remoteClustersProxy: `${ELASTICSEARCH_DOCS}modules-remote-clusters.html#proxy-mode`,
+ remoteClusersProxySettings: `${ELASTICSEARCH_DOCS}modules-remote-clusters.html#remote-cluster-proxy-settings`,
+ scriptParameters: `${ELASTICSEARCH_DOCS}modules-scripting-using.html#prefer-params`,
+ transportSettings: `${ELASTICSEARCH_DOCS}modules-transport.html`,
+ },
siem: {
guide: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`,
gettingStarted: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`,
@@ -147,16 +160,76 @@ export class DocLinksService {
featureImportance: `${ELASTIC_WEBSITE_URL}guide/en/machine-learning/${DOC_LINK_VERSION}/ml-feature-importance.html`,
outlierDetectionRoc: `${ELASTIC_WEBSITE_URL}guide/en/machine-learning/${DOC_LINK_VERSION}/ml-dfanalytics-evaluate.html#ml-dfanalytics-roc`,
regressionEvaluation: `${ELASTIC_WEBSITE_URL}guide/en/machine-learning/${DOC_LINK_VERSION}/ml-dfanalytics-evaluate.html#ml-dfanalytics-regression-evaluation`,
+ classificationAucRoc: `${ELASTIC_WEBSITE_URL}guide/en/machine-learning/${DOC_LINK_VERSION}/ml-dfanalytics-evaluate.html#ml-dfanalytics-class-aucroc`,
},
transforms: {
- guide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/reference/${DOC_LINK_VERSION}/transforms.html`,
+ guide: `${ELASTICSEARCH_DOCS}transforms.html`,
},
visualize: {
- guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/visualize.html`,
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/dashboard.html`,
timelionDeprecation: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/dashboard.html#timelion-deprecation`,
lens: `${ELASTIC_WEBSITE_URL}what-is/kibana-lens`,
+ lensPanels: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/dashboard.html#create-panels-with-lens`,
maps: `${ELASTIC_WEBSITE_URL}maps`,
},
+ observability: {
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/observability/${DOC_LINK_VERSION}/index.html`,
+ },
+ alerting: {
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/managing-alerts-and-actions.html`,
+ actionTypes: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/action-types.html`,
+ emailAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/email-action-type.html`,
+ generalSettings: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/alert-action-settings-kb.html#general-alert-action-settings`,
+ indexAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/index-action-type.html`,
+ indexThreshold: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/alert-types.html#alert-type-index-threshold`,
+ pagerDutyAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/pagerduty-action-type.html`,
+ preconfiguredConnectors: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/pre-configured-action-types-and-connectors.html`,
+ serviceNowAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/servicenow-action-type.html`,
+ setupPrerequisites: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/alerting-getting-started.html#alerting-setup-prerequisites`,
+ slackAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/slack-action-type.html`,
+ teamsAction: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/teams-action-type.html`,
+ },
+ maps: {
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kibana-maps.html`,
+ },
+ monitoring: {
+ alertsKibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kibana-alerts.html`,
+ monitorElasticsearch: `${ELASTICSEARCH_DOCS}configuring-metricbeat.html`,
+ monitorKibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/monitoring-metricbeat.html`,
+ },
+ security: {
+ apiKeyServiceSettings: `${ELASTICSEARCH_DOCS}security-settings.html#api-key-service-settings`,
+ clusterPrivileges: `${ELASTICSEARCH_DOCS}security-privileges.html#privileges-list-cluster`,
+ elasticsearchSettings: `${ELASTICSEARCH_DOCS}security-settings.html`,
+ indicesPrivileges: `${ELASTICSEARCH_DOCS}security-privileges.html#privileges-list-indices`,
+ kibanaTLS: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/configuring-tls.html`,
+ kibanaPrivileges: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kibana-privileges.html`,
+ mappingRoles: `${ELASTICSEARCH_DOCS}mapping-roles.html`,
+ },
+ watcher: {
+ jiraAction: `${ELASTICSEARCH_DOCS}actions-jira.html`,
+ pagerDutyAction: `${ELASTICSEARCH_DOCS}actions-pagerduty.html`,
+ slackAction: `${ELASTICSEARCH_DOCS}actions-slack.html`,
+ ui: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/watcher-ui.html`,
+ },
+ ccs: {
+ guide: `${ELASTICSEARCH_DOCS}modules-cross-cluster-search.html`,
+ },
+ apis: {
+ createIndex: `${ELASTICSEARCH_DOCS}indices-create-index.html`,
+ createSnapshotLifecylePolicy: `${ELASTICSEARCH_DOCS}slm-api-put-policy.html`,
+ createRoleMapping: `${ELASTICSEARCH_DOCS}security-api-put-role-mapping.html`,
+ createApiKey: `${ELASTICSEARCH_DOCS}security-api-create-api-key.html`,
+ createPipeline: `${ELASTICSEARCH_DOCS}put-pipeline-api.html`,
+ createTransformRequest: `${ELASTICSEARCH_DOCS}put-transform.html#put-transform-request-body`,
+ executeWatchActionModes: `${ELASTICSEARCH_DOCS}watcher-api-execute-watch.html#watcher-api-execute-watch-action-mode`,
+ openIndex: `${ELASTICSEARCH_DOCS}indices-open-close.html`,
+ putComponentTemplate: `${ELASTICSEARCH_DOCS}indices-component-template.html`,
+ painlessExecute: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-execute-api.html`,
+ putComponentTemplateMetadata: `${ELASTICSEARCH_DOCS}indices-component-template.html#component-templates-metadata`,
+ putWatch: `${ELASTICSEARCH_DOCS}watcher-api-put-watch.html`,
+ updateTransform: `${ELASTICSEARCH_DOCS}update-transform.html`,
+ },
},
});
}
@@ -174,6 +247,7 @@ export interface DocLinksStart {
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
+ readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
@@ -203,6 +277,7 @@ export interface DocLinksStart {
readonly aggs: {
readonly date_histogram: string;
readonly date_range: string;
+ readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
@@ -232,6 +307,7 @@ export interface DocLinksStart {
readonly sum: string;
readonly top_hits: string;
};
+ readonly runtimeFields: string;
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
@@ -246,6 +322,7 @@ export interface DocLinksStart {
};
readonly addData: string;
readonly kibana: string;
+ readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
@@ -263,5 +340,13 @@ export interface DocLinksStart {
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
+ readonly apis: Record<string, string>;
+ readonly observability: Record<string, string>;
+ readonly alerting: Record<string, string>;
+ readonly maps: Record<string, string>;
+ readonly monitoring: Record<string, string>;
+ readonly security: Record<string, string>;
+ readonly watcher: Record<string, string>;
+ readonly ccs: Record<string, string>;
};
}
diff --git a/src/core/public/index.ts b/src/core/public/index.ts
index 51375072d3e5a..ea83674ed9d9c 100644
--- a/src/core/public/index.ts
+++ b/src/core/public/index.ts
@@ -148,7 +148,7 @@ export {
SavedObjectsImportUnsupportedTypeError,
SavedObjectsImportMissingReferencesError,
SavedObjectsImportUnknownError,
- SavedObjectsImportError,
+ SavedObjectsImportFailure,
SavedObjectsImportRetry,
SavedObjectsNamespaceType,
} from './saved_objects';
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 0303eb62b6419..c5b49519ef7b2 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -494,6 +494,7 @@ export interface DocLinksStart {
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
+ readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
@@ -523,6 +524,7 @@ export interface DocLinksStart {
readonly aggs: {
readonly date_histogram: string;
readonly date_range: string;
+ readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
@@ -552,6 +554,7 @@ export interface DocLinksStart {
readonly sum: string;
readonly top_hits: string;
};
+ readonly runtimeFields: string;
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
@@ -566,6 +569,7 @@ export interface DocLinksStart {
};
readonly addData: string;
readonly kibana: string;
+ readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
@@ -583,6 +587,14 @@ export interface DocLinksStart {
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
+ readonly apis: Record<string, string>;
+ readonly observability: Record<string, string>;
+ readonly alerting: Record<string, string>;
+ readonly maps: Record<string, string>;
+ readonly monitoring: Record<string, string>;
+ readonly security: Record<string, string>;
+ readonly watcher: Record<string, string>;
+ readonly ccs: Record<string, string>;
};
}
@@ -1234,7 +1246,7 @@ export interface SavedObjectsImportConflictError {
}
// @public
-export interface SavedObjectsImportError {
+export interface SavedObjectsImportFailure {
// (undocumented)
error: SavedObjectsImportConflictError | SavedObjectsImportAmbiguousConflictError | SavedObjectsImportUnsupportedTypeError | SavedObjectsImportMissingReferencesError | SavedObjectsImportUnknownError;
// (undocumented)
@@ -1265,7 +1277,7 @@ export interface SavedObjectsImportMissingReferencesError {
// @public
export interface SavedObjectsImportResponse {
// (undocumented)
- errors?: SavedObjectsImportError[];
+ errors?: SavedObjectsImportFailure[];
// (undocumented)
success: boolean;
// (undocumented)
diff --git a/src/core/public/saved_objects/index.ts b/src/core/public/saved_objects/index.ts
index cc8fce0884ddf..54427638e9154 100644
--- a/src/core/public/saved_objects/index.ts
+++ b/src/core/public/saved_objects/index.ts
@@ -43,7 +43,7 @@ export {
SavedObjectsImportUnsupportedTypeError,
SavedObjectsImportMissingReferencesError,
SavedObjectsImportUnknownError,
- SavedObjectsImportError,
+ SavedObjectsImportFailure,
SavedObjectsImportRetry,
SavedObjectsNamespaceType,
} from '../../server/types';
diff --git a/src/core/server/capabilities/capabilities_service.ts b/src/core/server/capabilities/capabilities_service.ts
index f0be9743d4d60..9af945d17b2ad 100644
--- a/src/core/server/capabilities/capabilities_service.ts
+++ b/src/core/server/capabilities/capabilities_service.ts
@@ -76,7 +76,19 @@ export interface CapabilitiesSetup {
* ```ts
* // my-plugin/server/plugin.ts
* public setup(core: CoreSetup, deps: {}) {
- * core.capabilities.registerSwitcher((request, capabilities) => {
+ * core.capabilities.registerSwitcher((request, capabilities, useDefaultCapabilities) => {
+ * // useDefaultCapabilities is a special case that switchers typically don't have to concern themselves with.
+ * // The default capabilities are typically the ones you provide in your CapabilitiesProvider, but this flag
+ * // gives each switcher an opportunity to change the default capabilities of other plugins' capabilities.
+ * // For example, you may decide to flip another plugin's capability to false if today is Tuesday,
+ * // but you wouldn't want to do this when we are requesting the default set of capabilities.
+ * if (useDefaultCapabilities) {
+ * return {
+ * somePlugin: {
+ * featureEnabledByDefault: true
+ * }
+ * }
+ * }
* if(myPluginApi.shouldRestrictSomePluginBecauseOf(request)) {
* return {
* somePlugin: {
@@ -150,7 +162,7 @@ export class CapabilitiesService {
public start(): CapabilitiesStart {
return {
- resolveCapabilities: (request) => this.resolveCapabilities(request, []),
+ resolveCapabilities: (request) => this.resolveCapabilities(request, [], false),
};
}
}
diff --git a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
index 17f2c77bbf660..4217dd98ae735 100644
--- a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
+++ b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
@@ -72,17 +72,57 @@ describe('CapabilitiesService', () => {
`);
});
- it('uses the service capabilities providers', async () => {
- serviceSetup.registerProvider(() => ({
+ it('uses the service capabilities providers and switchers', async () => {
+ const getInitialCapabilities = () => ({
catalogue: {
something: true,
},
- }));
+ management: {},
+ navLinks: {},
+ });
+ serviceSetup.registerProvider(() => getInitialCapabilities());
+
+ const switcher = jest.fn((_, capabilities) => capabilities);
+ serviceSetup.registerSwitcher(switcher);
const result = await supertest(httpSetup.server.listener)
.post('/api/core/capabilities')
.send({ applications: [] })
.expect(200);
+
+ expect(switcher).toHaveBeenCalledTimes(1);
+ expect(switcher).toHaveBeenCalledWith(expect.anything(), getInitialCapabilities(), false);
+ expect(result.body).toMatchInlineSnapshot(`
+ Object {
+ "catalogue": Object {
+ "something": true,
+ },
+ "management": Object {},
+ "navLinks": Object {},
+ }
+ `);
+ });
+
+ it('passes useDefaultCapabilities to registered switchers', async () => {
+ const getInitialCapabilities = () => ({
+ catalogue: {
+ something: true,
+ },
+ management: {},
+ navLinks: {},
+ });
+ serviceSetup.registerProvider(() => getInitialCapabilities());
+
+ const switcher = jest.fn((_, capabilities) => capabilities);
+ serviceSetup.registerSwitcher(switcher);
+
+ const result = await supertest(httpSetup.server.listener)
+ .post('/api/core/capabilities?useDefaultCapabilities=true')
+ .send({ applications: [] })
+ .expect(200);
+
+ expect(switcher).toHaveBeenCalledTimes(1);
+ expect(switcher).toHaveBeenCalledWith(expect.anything(), getInitialCapabilities(), true);
expect(result.body).toMatchInlineSnapshot(`
Object {
"catalogue": Object {
diff --git a/src/core/server/capabilities/resolve_capabilities.test.ts b/src/core/server/capabilities/resolve_capabilities.test.ts
index 372efeff21ae2..21c723ea1ddc3 100644
--- a/src/core/server/capabilities/resolve_capabilities.test.ts
+++ b/src/core/server/capabilities/resolve_capabilities.test.ts
@@ -36,7 +36,7 @@ describe('resolveCapabilities', () => {
});
it('returns the initial capabilities if no switcher are used', async () => {
- const result = await resolveCapabilities(defaultCaps, [], request, []);
+ const result = await resolveCapabilities(defaultCaps, [], request, [], true);
expect(result).toEqual(defaultCaps);
});
@@ -55,7 +55,7 @@ describe('resolveCapabilities', () => {
A: false,
},
});
- const result = await resolveCapabilities(caps, [switcher], request, []);
+ const result = await resolveCapabilities(caps, [switcher], request, [], true);
expect(result).toMatchInlineSnapshot(`
Object {
"catalogue": Object {
@@ -83,7 +83,7 @@ describe('resolveCapabilities', () => {
A: false,
},
});
- await resolveCapabilities(caps, [switcher], request, []);
+ await resolveCapabilities(caps, [switcher], request, [], true);
expect(caps.catalogue).toEqual({
A: true,
B: true,
@@ -105,7 +105,7 @@ describe('resolveCapabilities', () => {
C: false,
},
});
- const result = await resolveCapabilities(caps, [switcher], request, []);
+ const result = await resolveCapabilities(caps, [switcher], request, [], true);
expect(result.catalogue).toEqual({
A: true,
B: true,
@@ -127,7 +127,7 @@ describe('resolveCapabilities', () => {
.filter(([key]) => key !== 'B')
.reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}),
});
- const result = await resolveCapabilities(caps, [switcher], request, []);
+ const result = await resolveCapabilities(caps, [switcher], request, [], true);
expect(result.catalogue).toEqual({
A: true,
B: true,
@@ -153,7 +153,7 @@ describe('resolveCapabilities', () => {
record: false,
},
});
- const result = await resolveCapabilities(caps, [switcher], request, []);
+ const result = await resolveCapabilities(caps, [switcher], request, [], true);
expect(result.section).toEqual({
boolean: true,
record: {
diff --git a/src/core/server/capabilities/resolve_capabilities.ts b/src/core/server/capabilities/resolve_capabilities.ts
index 1be504d4bc314..6f4eff6b882d0 100644
--- a/src/core/server/capabilities/resolve_capabilities.ts
+++ b/src/core/server/capabilities/resolve_capabilities.ts
@@ -23,7 +23,8 @@ import { KibanaRequest } from '../http';
export type CapabilitiesResolver = (
request: KibanaRequest,
- applications: string[]
+ applications: string[],
+ useDefaultCapabilities: boolean
) => Promise<Capabilities>;
export const getCapabilitiesResolver = (
@@ -31,16 +32,24 @@ export const getCapabilitiesResolver = (
switchers: () => CapabilitiesSwitcher[]
): CapabilitiesResolver => async (
request: KibanaRequest,
- applications: string[]
+ applications: string[],
+ useDefaultCapabilities: boolean
): Promise<Capabilities> => {
- return resolveCapabilities(capabilities(), switchers(), request, applications);
+ return resolveCapabilities(
+ capabilities(),
+ switchers(),
+ request,
+ applications,
+ useDefaultCapabilities
+ );
};
export const resolveCapabilities = async (
capabilities: Capabilities,
switchers: CapabilitiesSwitcher[],
request: KibanaRequest,
- applications: string[]
+ applications: string[],
+ useDefaultCapabilities: boolean
): Promise<Capabilities> => {
const mergedCaps = cloneDeep({
...capabilities,
@@ -54,7 +63,7 @@ export const resolveCapabilities = async (
});
return switchers.reduce(async (caps, switcher) => {
const resolvedCaps = await caps;
- const changes = await switcher(request, resolvedCaps);
+ const changes = await switcher(request, resolvedCaps, useDefaultCapabilities);
return recursiveApplyChanges(resolvedCaps, changes);
}, Promise.resolve(mergedCaps));
};
diff --git a/src/core/server/capabilities/routes/resolve_capabilities.ts b/src/core/server/capabilities/routes/resolve_capabilities.ts
index 3fb1bb3d13d0b..3694c4b894684 100644
--- a/src/core/server/capabilities/routes/resolve_capabilities.ts
+++ b/src/core/server/capabilities/routes/resolve_capabilities.ts
@@ -29,14 +29,18 @@ export function registerCapabilitiesRoutes(router: IRouter, resolver: Capabiliti
authRequired: 'optional',
},
validate: {
+ query: schema.object({
+ useDefaultCapabilities: schema.boolean({ defaultValue: false }),
+ }),
body: schema.object({
applications: schema.arrayOf(schema.string()),
}),
},
},
async (ctx, req, res) => {
+ const { useDefaultCapabilities } = req.query;
const { applications } = req.body;
- const capabilities = await resolver(req, applications);
+ const capabilities = await resolver(req, applications, useDefaultCapabilities);
return res.ok({
body: capabilities,
});
diff --git a/src/core/server/capabilities/types.ts b/src/core/server/capabilities/types.ts
index 105233761a437..efef31dcc8417 100644
--- a/src/core/server/capabilities/types.ts
+++ b/src/core/server/capabilities/types.ts
@@ -34,5 +34,6 @@ export type CapabilitiesProvider = () => Partial;
*/
export type CapabilitiesSwitcher = (
request: KibanaRequest,
- uiCapabilities: Capabilities
+ uiCapabilities: Capabilities,
+ useDefaultCapabilities: boolean
) => Partial<Capabilities> | Promise<Partial<Capabilities>>;
diff --git a/src/core/server/core_app/assets/favicons/android-chrome-192x192.png b/src/core/server/core_app/assets/favicons/android-chrome-192x192.png
deleted file mode 100644
index 18a86e5b95c46..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/android-chrome-192x192.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/android-chrome-256x256.png b/src/core/server/core_app/assets/favicons/android-chrome-256x256.png
deleted file mode 100644
index 8238d772ce40b..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/android-chrome-256x256.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/apple-touch-icon.png b/src/core/server/core_app/assets/favicons/apple-touch-icon.png
deleted file mode 100644
index 1ffeb0852a170..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/apple-touch-icon.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/browserconfig.xml b/src/core/server/core_app/assets/favicons/browserconfig.xml
deleted file mode 100644
index b3930d0f04718..0000000000000
--- a/src/core/server/core_app/assets/favicons/browserconfig.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<browserconfig>
-  <msapplication>
-    <tile>
-      <square150x150logo src="/mstile-150x150.png"/>
-      <TileColor>#da532c</TileColor>
-    </tile>
-  </msapplication>
-</browserconfig>
diff --git a/src/core/server/core_app/assets/favicons/favicon-16x16.png b/src/core/server/core_app/assets/favicons/favicon-16x16.png
deleted file mode 100644
index 631f5b7c7d74b..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/favicon-16x16.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/favicon-32x32.png b/src/core/server/core_app/assets/favicons/favicon-32x32.png
deleted file mode 100644
index bf94dfa995f37..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/favicon-32x32.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/favicon.distribution.png b/src/core/server/core_app/assets/favicons/favicon.distribution.png
new file mode 100644
index 0000000000000..9be046aba59b6
Binary files /dev/null and b/src/core/server/core_app/assets/favicons/favicon.distribution.png differ
diff --git a/src/core/server/core_app/assets/favicons/favicon.distribution.svg b/src/core/server/core_app/assets/favicons/favicon.distribution.svg
new file mode 100644
index 0000000000000..2d02461a0b8f9
--- /dev/null
+++ b/src/core/server/core_app/assets/favicons/favicon.distribution.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/src/core/server/core_app/assets/favicons/favicon.ico b/src/core/server/core_app/assets/favicons/favicon.ico
deleted file mode 100644
index db30798a6cf32..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/favicon.ico and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/favicon.png b/src/core/server/core_app/assets/favicons/favicon.png
new file mode 100644
index 0000000000000..cba7a268c6c59
Binary files /dev/null and b/src/core/server/core_app/assets/favicons/favicon.png differ
diff --git a/src/core/server/core_app/assets/favicons/favicon.svg b/src/core/server/core_app/assets/favicons/favicon.svg
new file mode 100644
index 0000000000000..4ae6524bf0d18
--- /dev/null
+++ b/src/core/server/core_app/assets/favicons/favicon.svg
@@ -0,0 +1,4 @@
+
\ No newline at end of file
diff --git a/src/core/server/core_app/assets/favicons/manifest.json b/src/core/server/core_app/assets/favicons/manifest.json
deleted file mode 100644
index de65106f489b7..0000000000000
--- a/src/core/server/core_app/assets/favicons/manifest.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "name": "",
- "short_name": "",
- "icons": [
- {
- "src": "/android-chrome-192x192.png",
- "sizes": "192x192",
- "type": "image/png"
- },
- {
- "src": "/android-chrome-256x256.png",
- "sizes": "256x256",
- "type": "image/png"
- }
- ],
- "theme_color": "#ffffff",
- "background_color": "#ffffff",
- "display": "standalone"
-}
diff --git a/src/core/server/core_app/assets/favicons/mstile-150x150.png b/src/core/server/core_app/assets/favicons/mstile-150x150.png
deleted file mode 100644
index 82769c1ef242b..0000000000000
Binary files a/src/core/server/core_app/assets/favicons/mstile-150x150.png and /dev/null differ
diff --git a/src/core/server/core_app/assets/favicons/safari-pinned-tab.svg b/src/core/server/core_app/assets/favicons/safari-pinned-tab.svg
deleted file mode 100644
index 38a64142be0b7..0000000000000
--- a/src/core/server/core_app/assets/favicons/safari-pinned-tab.svg
+++ /dev/null
@@ -1,34 +0,0 @@
-
-
-
diff --git a/src/core/server/core_app/integration_tests/static_assets.test.ts b/src/core/server/core_app/integration_tests/static_assets.test.ts
index ca03c4228221f..45e7b79b5d5e6 100644
--- a/src/core/server/core_app/integration_tests/static_assets.test.ts
+++ b/src/core/server/core_app/integration_tests/static_assets.test.ts
@@ -34,11 +34,11 @@ describe('Platform assets', function () {
});
it('exposes static assets', async () => {
- await kbnTestServer.request.get(root, '/ui/favicons/favicon.ico').expect(200);
+ await kbnTestServer.request.get(root, '/ui/favicons/favicon.svg').expect(200);
});
it('returns 404 if not found', async function () {
- await kbnTestServer.request.get(root, '/ui/favicons/not-a-favicon.ico').expect(404);
+ await kbnTestServer.request.get(root, '/ui/favicons/not-a-favicon.svg').expect(404);
});
it('does not expose folder content', async function () {
diff --git a/src/core/server/core_route_handler_context.ts b/src/core/server/core_route_handler_context.ts
index 520c5bd3f685b..ffb1c762b00ef 100644
--- a/src/core/server/core_route_handler_context.ts
+++ b/src/core/server/core_route_handler_context.ts
@@ -21,7 +21,12 @@
import { InternalCoreStart } from './internal_types';
import { KibanaRequest } from './http/router';
import { SavedObjectsClientContract } from './saved_objects/types';
-import { InternalSavedObjectsServiceStart, ISavedObjectTypeRegistry } from './saved_objects';
+import {
+ InternalSavedObjectsServiceStart,
+ ISavedObjectTypeRegistry,
+ ISavedObjectsExporter,
+ ISavedObjectsImporter,
+} from './saved_objects';
import {
InternalElasticsearchServiceStart,
IScopedClusterClient,
@@ -64,6 +69,8 @@ class CoreSavedObjectsRouteHandlerContext {
) {}
#scopedSavedObjectsClient?: SavedObjectsClientContract;
#typeRegistry?: ISavedObjectTypeRegistry;
+ #exporter?: ISavedObjectsExporter;
+ #importer?: ISavedObjectsImporter;
public get client() {
if (this.#scopedSavedObjectsClient == null) {
@@ -78,6 +85,20 @@ class CoreSavedObjectsRouteHandlerContext {
}
return this.#typeRegistry;
}
+
+ public get exporter() {
+ if (this.#exporter == null) {
+ this.#exporter = this.savedObjectsStart.createExporter(this.client);
+ }
+ return this.#exporter;
+ }
+
+ public get importer() {
+ if (this.#importer == null) {
+ this.#importer = this.savedObjectsStart.createImporter(this.client);
+ }
+ return this.#importer;
+ }
}
class CoreUiSettingsRouteHandlerContext {
diff --git a/src/core/server/core_usage_data/core_usage_stats_client.ts b/src/core/server/core_usage_data/core_usage_stats_client.ts
index c8d48597fae88..7c3047ecd96e4 100644
--- a/src/core/server/core_usage_data/core_usage_stats_client.ts
+++ b/src/core/server/core_usage_data/core_usage_stats_client.ts
@@ -24,7 +24,6 @@ import {
ISavedObjectsRepository,
SavedObjectsImportOptions,
SavedObjectsResolveImportErrorsOptions,
- SavedObjectsExportOptions,
KibanaRequest,
IBasePath,
} from '..';
@@ -40,8 +39,10 @@ export type IncrementSavedObjectsImportOptions = BaseIncrementOptions &
export type IncrementSavedObjectsResolveImportErrorsOptions = BaseIncrementOptions &
Pick<SavedObjectsResolveImportErrorsOptions, 'createNewCopies'>;
/** @internal */
-export type IncrementSavedObjectsExportOptions = BaseIncrementOptions &
- Pick<SavedObjectsExportOptions, 'types'> & { supportedTypes: string[] };
+export type IncrementSavedObjectsExportOptions = BaseIncrementOptions & {
+ types?: string[];
+ supportedTypes: string[];
+};
export const BULK_CREATE_STATS_PREFIX = 'apiCalls.savedObjectsBulkCreate';
export const BULK_GET_STATS_PREFIX = 'apiCalls.savedObjectsBulkGet';
diff --git a/src/core/server/http/__snapshots__/http_config.test.ts.snap b/src/core/server/http/__snapshots__/http_config.test.ts.snap
index 9b667f888771e..4545396c27b5e 100644
--- a/src/core/server/http/__snapshots__/http_config.test.ts.snap
+++ b/src/core/server/http/__snapshots__/http_config.test.ts.snap
@@ -24,6 +24,12 @@ Object {
}
`;
+exports[`accepts valid hostnames 5`] = `
+Object {
+ "host": "0.0.0.0",
+}
+`;
+
exports[`basePath throws if appends a slash 1`] = `"[basePath]: must start with a slash, don't end with one"`;
exports[`basePath throws if is an empty string 1`] = `"[basePath]: must start with a slash, don't end with one"`;
@@ -105,6 +111,8 @@ Object {
exports[`throws if invalid hostname 1`] = `"[host]: value must be a valid hostname (see RFC 1123)."`;
+exports[`throws if invalid hostname 2`] = `"[host]: value 0 is not a valid hostname (use \\"0.0.0.0\\" to bind to all interfaces)"`;
+
exports[`with TLS throws if TLS is enabled but \`redirectHttpFromPort\` is equal to \`port\` 1`] = `"Kibana does not accept http traffic to [port] when ssl is enabled (only https is allowed), so [ssl.redirectHttpFromPort] cannot be configured to the same value. Both are [1234]."`;
exports[`with compression accepts valid referrer whitelist 1`] = `
@@ -113,6 +121,7 @@ Array [
"8.8.8.8",
"::1",
"localhost",
+ "0.0.0.0",
]
`;
diff --git a/src/core/server/http/base_path_proxy_server.test.ts b/src/core/server/http/base_path_proxy_server.test.ts
new file mode 100644
index 0000000000000..9f4ffdcf8e081
--- /dev/null
+++ b/src/core/server/http/base_path_proxy_server.test.ts
@@ -0,0 +1,1052 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { BasePathProxyServer, BasePathProxyServerOptions } from './base_path_proxy_server';
+import { loggingSystemMock } from '../logging/logging_system.mock';
+import { DevConfig } from '../dev/dev_config';
+import { EMPTY } from 'rxjs';
+import { HttpConfig } from './http_config';
+import { ByteSizeValue, schema } from '@kbn/config-schema';
+import {
+ KibanaRequest,
+ KibanaResponseFactory,
+ Router,
+ RouteValidationFunction,
+ RouteValidationResultFactory,
+} from './router';
+import { HttpServer } from './http_server';
+import supertest from 'supertest';
+import { RequestHandlerContext } from 'kibana/server';
+import { readFileSync } from 'fs';
+import { KBN_CERT_PATH, KBN_KEY_PATH } from '@kbn/dev-utils';
+import { omit } from 'lodash';
+import { Readable } from 'stream';
+
+/**
+ * Most of these tests are inspired by:
+ * src/core/server/http/http_server.test.ts
+ * and copied for completeness from that file. The modifications are that these tests use the developer proxy.
+ */
+describe('BasePathProxyServer', () => {
+ let server: HttpServer;
+ let proxyServer: BasePathProxyServer;
+ let config: HttpConfig;
+ let configWithSSL: HttpConfig;
+ let basePath: string;
+ let certificate: string;
+ let key: string;
+ let proxySupertest: supertest.SuperTest<supertest.Test>;
+ const logger = loggingSystemMock.createLogger();
+ const enhanceWithContext = (fn: (...args: any[]) => any) => fn.bind(null, {});
+
+ beforeAll(() => {
+ certificate = readFileSync(KBN_CERT_PATH, 'utf8');
+ key = readFileSync(KBN_KEY_PATH, 'utf8');
+ });
+
+ beforeEach(async () => {
+ // setup the server but don't start it until each individual test so that routes can be dynamically configured per unit test.
+ server = new HttpServer(logger, 'tests');
+ config = ({
+ name: 'kibana',
+ host: '127.0.0.1',
+ port: 10012,
+ compression: { enabled: true },
+ requestId: {
+ allowFromAnyIp: true,
+ ipAllowlist: [],
+ },
+ autoListen: true,
+ keepaliveTimeout: 1000,
+ socketTimeout: 1000,
+ cors: {
+ enabled: false,
+ allowCredentials: false,
+ allowOrigin: [],
+ },
+ ssl: { enabled: false },
+ customResponseHeaders: {},
+ maxPayload: new ByteSizeValue(1024),
+ rewriteBasePath: true,
+ } as unknown) as HttpConfig;
+
+ configWithSSL = {
+ ...config,
+ ssl: {
+ enabled: true,
+ certificate,
+ cipherSuites: ['TLS_AES_256_GCM_SHA384'],
+ getSecureOptions: () => 0,
+ key,
+ redirectHttpFromPort: config.port + 1,
+ },
+ } as HttpConfig;
+
+ // setup and start the proxy server
+ const proxyConfig: HttpConfig = { ...config, port: 10013 };
+ const devConfig = new DevConfig({ basePathProxyTarget: config.port });
+ proxyServer = new BasePathProxyServer(logger, proxyConfig, devConfig);
+ const options: Readonly<BasePathProxyServerOptions> = {
+ shouldRedirectFromOldBasePath: () => true,
+ delayUntil: () => EMPTY,
+ };
+ await proxyServer.start(options);
+
+ // set the base path or throw if for some unknown reason it is not setup
+ if (proxyServer.basePath == null) {
+ throw new Error('Invalid null base path, all tests will fail');
+ } else {
+ basePath = proxyServer.basePath;
+ }
+ proxySupertest = supertest(`http://127.0.0.1:${proxyConfig.port}`);
+ });
+
+ afterEach(async () => {
+ await server.stop();
+ await proxyServer.stop();
+ jest.clearAllMocks();
+ });
+
+ test('root URL will return a 302 redirect', async () => {
+ await proxySupertest.get('/').expect(302);
+ });
+
+ test('root URL will return a redirect location with exactly 3 characters that are a-z', async () => {
+ const res = await proxySupertest.get('/');
+ const location = res.header.location;
+ expect(location).toMatch(/[a-z]{3}/);
+ });
+
+ test('valid params', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+ router.get(
+ {
+ path: '/{test}',
+ validate: {
+ params: schema.object({
+ test: schema.string(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.params.test });
+ }
+ );
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+ await server.start();
+
+ await proxySupertest
+ .get(`${basePath}/foo/some-string`)
+ .expect(200)
+ .then((res) => {
+ expect(res.text).toBe('some-string');
+ });
+ });
+
+ test('invalid params', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.get(
+ {
+ path: '/{test}',
+ validate: {
+ params: schema.object({
+ test: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: String(req.params.test) });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .get(`${basePath}/foo/some-string`)
+ .expect(400)
+ .then((res) => {
+ expect(res.body).toEqual({
+ error: 'Bad Request',
+ statusCode: 400,
+ message: '[request params.test]: expected value of type [number] but got [string]',
+ });
+ });
+ });
+
+ test('valid query', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.get(
+ {
+ path: '/',
+ validate: {
+ query: schema.object({
+ bar: schema.string(),
+ quux: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.query });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .get(`${basePath}/foo/?bar=test&quux=123`)
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'test', quux: 123 });
+ });
+ });
+
+ test('invalid query', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.get(
+ {
+ path: '/',
+ validate: {
+ query: schema.object({
+ bar: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.query });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .get(`${basePath}/foo/?bar=test`)
+ .expect(400)
+ .then((res) => {
+ expect(res.body).toEqual({
+ error: 'Bad Request',
+ statusCode: 400,
+ message: '[request query.bar]: expected value of type [number] but got [string]',
+ });
+ });
+ });
+
+ test('valid body', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: schema.object({
+ bar: schema.string(),
+ baz: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({
+ bar: 'test',
+ baz: 123,
+ })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'test', baz: 123 });
+ });
+ });
+
+ test('valid body with validate function', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: ({ bar, baz } = {}, { ok, badRequest }) => {
+ if (typeof bar === 'string' && typeof baz === 'number') {
+ return ok({ bar, baz });
+ } else {
+ return badRequest('Wrong payload', ['body']);
+ }
+ },
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({
+ bar: 'test',
+ baz: 123,
+ })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'test', baz: 123 });
+ });
+ });
+
+ test('not inline validation - specifying params', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ const bodyValidation = (
+ { bar, baz }: any = {},
+ { ok, badRequest }: RouteValidationResultFactory
+ ) => {
+ if (typeof bar === 'string' && typeof baz === 'number') {
+ return ok({ bar, baz });
+ } else {
+ return badRequest('Wrong payload', ['body']);
+ }
+ };
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: bodyValidation,
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({
+ bar: 'test',
+ baz: 123,
+ })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'test', baz: 123 });
+ });
+ });
+
+ test('not inline validation - specifying validation handler', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ const bodyValidation: RouteValidationFunction<{ bar: string; baz: number }> = (
+ { bar, baz } = {},
+ { ok, badRequest }
+ ) => {
+ if (typeof bar === 'string' && typeof baz === 'number') {
+ return ok({ bar, baz });
+ } else {
+ return badRequest('Wrong payload', ['body']);
+ }
+ };
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: bodyValidation,
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({
+ bar: 'test',
+ baz: 123,
+ })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'test', baz: 123 });
+ });
+ });
+
+ test('not inline handler - KibanaRequest', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ const handler = (
+ context: RequestHandlerContext,
+ req: KibanaRequest,
+ res: KibanaResponseFactory
+ ) => {
+ const body = {
+ bar: req.body.bar.toUpperCase(),
+ baz: req.body.baz.toString(),
+ };
+
+ return res.ok({ body });
+ };
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: ({ bar, baz } = {}, { ok, badRequest }) => {
+ if (typeof bar === 'string' && typeof baz === 'number') {
+ return ok({ bar, baz });
+ } else {
+ return badRequest('Wrong payload', ['body']);
+ }
+ },
+ },
+ },
+ handler
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({
+ bar: 'test',
+ baz: 123,
+ })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ bar: 'TEST', baz: '123' });
+ });
+ });
+
+ test('invalid body', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.post(
+ {
+ path: '/',
+ validate: {
+ body: schema.object({
+ bar: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .post(`${basePath}/foo/`)
+ .send({ bar: 'test' })
+ .expect(400)
+ .then((res) => {
+ expect(res.body).toEqual({
+ error: 'Bad Request',
+ statusCode: 400,
+ message: '[request body.bar]: expected value of type [number] but got [string]',
+ });
+ });
+ });
+
+ test('handles putting', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.put(
+ {
+ path: '/',
+ validate: {
+ body: schema.object({
+ key: schema.string(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: req.body });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .put(`${basePath}/foo/`)
+ .send({ key: 'new value' })
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ key: 'new value' });
+ });
+ });
+
+ test('handles deleting', async () => {
+ const router = new Router(`${basePath}/foo`, logger, enhanceWithContext);
+
+ router.delete(
+ {
+ path: '/{id}',
+ validate: {
+ params: schema.object({
+ id: schema.number(),
+ }),
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: { key: req.params.id } });
+ }
+ );
+
+ const { registerRouter } = await server.setup(config);
+ registerRouter(router);
+
+ await server.start();
+
+ await proxySupertest
+ .delete(`${basePath}/foo/3`)
+ .expect(200)
+ .then((res) => {
+ expect(res.body).toEqual({ key: 3 });
+ });
+ });
+
+ describe('with `basepath: /bar` and `rewriteBasePath: false`', () => {
+ let configWithBasePath: HttpConfig;
+
+ beforeEach(async () => {
+ configWithBasePath = {
+ ...config,
+ basePath: '/bar',
+ rewriteBasePath: false,
+ } as HttpConfig;
+
+ const router = new Router(`${basePath}/`, logger, enhanceWithContext);
+ router.get({ path: '/', validate: false }, (_, __, res) => res.ok({ body: 'value:/' }));
+ router.get({ path: '/foo', validate: false }, (_, __, res) => res.ok({ body: 'value:/foo' }));
+
+ const { registerRouter } = await server.setup(configWithBasePath);
+ registerRouter(router);
+
+ await server.start();
+ });
+
+ test('/bar => 404', async () => {
+ await proxySupertest.get(`${basePath}/bar`).expect(404);
+ });
+
+ test('/bar/ => 404', async () => {
+ await proxySupertest.get(`${basePath}/bar/`).expect(404);
+ });
+
+ test('/bar/foo => 404', async () => {
+ await proxySupertest.get(`${basePath}/bar/foo`).expect(404);
+ });
+
+ test('/ => /', async () => {
+ await proxySupertest
+ .get(`${basePath}/`)
+ .expect(200)
+ .then((res) => {
+ expect(res.text).toBe('value:/');
+ });
+ });
+
+ test('/foo => /foo', async () => {
+ await proxySupertest
+ .get(`${basePath}/foo`)
+ .expect(200)
+ .then((res) => {
+ expect(res.text).toBe('value:/foo');
+ });
+ });
+ });
+
+ test('with defined `redirectHttpFromPort`', async () => {
+ const router = new Router(`${basePath}/`, logger, enhanceWithContext);
+ router.get({ path: '/', validate: false }, (_, __, res) => res.ok({ body: 'value:/' }));
+
+ const { registerRouter } = await server.setup(configWithSSL);
+ registerRouter(router);
+
+ await server.start();
+ });
+
+ test('allows attaching metadata to attach meta-data tag strings to a route', async () => {
+ const tags = ['my:tag'];
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.get({ path: '/with-tags', validate: false, options: { tags } }, (_, req, res) =>
+ res.ok({ body: { tags: req.route.options.tags } })
+ );
+ router.get({ path: '/without-tags', validate: false }, (_, req, res) =>
+ res.ok({ body: { tags: req.route.options.tags } })
+ );
+ registerRouter(router);
+
+ await server.start();
+ await proxySupertest.get(`${basePath}/with-tags`).expect(200, { tags });
+
+ await proxySupertest.get(`${basePath}/without-tags`).expect(200, { tags: [] });
+ });
+
+ describe('response headers', () => {
+ test('default headers', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.get({ path: '/', validate: false }, (_, req, res) => res.ok({ body: req.route }));
+ registerRouter(router);
+
+ await server.start();
+ const response = await proxySupertest.get(`${basePath}/`).expect(200);
+
+ const restHeaders = omit(response.header, ['date', 'content-length']);
+ expect(restHeaders).toMatchInlineSnapshot(`
+ Object {
+ "accept-ranges": "bytes",
+ "cache-control": "private, no-cache, no-store, must-revalidate",
+ "connection": "close",
+ "content-type": "application/json; charset=utf-8",
+ }
+ `);
+ });
+ });
+
+ test('exposes route details of incoming request to a route handler (POST + payload options)', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.post(
+ {
+ path: '/',
+ validate: { body: schema.object({ test: schema.number() }) },
+ options: { body: { accepts: 'application/json' } },
+ },
+ (_, req, res) => res.ok({ body: req.route })
+ );
+ registerRouter(router);
+
+ await server.start();
+ await proxySupertest
+ .post(`${basePath}/`)
+ .send({ test: 1 })
+ .expect(200, {
+ method: 'post',
+ path: `${basePath}/`,
+ options: {
+ authRequired: true,
+ xsrfRequired: true,
+ tags: [],
+ timeout: {
+ payload: 10000,
+ idleSocket: 1000,
+ },
+ body: {
+ parse: true, // hapi populates the default
+ maxBytes: 1024, // hapi populates the default
+ accepts: ['application/json'],
+ output: 'data',
+ },
+ },
+ });
+ });
+
+ test('should return a stream in the body', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.put(
+ {
+ path: '/',
+ validate: { body: schema.stream() },
+ options: { body: { output: 'stream' } },
+ },
+ (_, req, res) => {
+ try {
+ expect(req.body).toBeInstanceOf(Readable);
+ return res.ok({ body: req.route.options.body });
+ } catch (err) {
+ return res.internalError({ body: err.message });
+ }
+ }
+ );
+ registerRouter(router);
+
+ await server.start();
+ await proxySupertest.put(`${basePath}/`).send({ test: 1 }).expect(200, {
+ parse: true,
+ maxBytes: 1024, // hapi populates the default
+ output: 'stream',
+ });
+ });
+
+ describe('timeout options', () => {
+ describe('payload timeout', () => {
+ test('POST routes set the payload timeout', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.post(
+ {
+ path: '/',
+ validate: false,
+ options: {
+ timeout: {
+ payload: 300000,
+ },
+ },
+ },
+ (_, req, res) => {
+ try {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ } catch (err) {
+ return res.internalError({ body: err.message });
+ }
+ }
+ );
+ registerRouter(router);
+ await server.start();
+ await proxySupertest
+ .post(`${basePath}/`)
+ .send({ test: 1 })
+ .expect(200, {
+ timeout: {
+ payload: 300000,
+ idleSocket: 1000, // This is an extra option added by the proxy
+ },
+ });
+ });
+
+ test('DELETE routes set the payload timeout', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.delete(
+ {
+ path: '/',
+ validate: false,
+ options: {
+ timeout: {
+ payload: 300000,
+ },
+ },
+ },
+ (context, req, res) => {
+ try {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ } catch (err) {
+ return res.internalError({ body: err.message });
+ }
+ }
+ );
+ registerRouter(router);
+ await server.start();
+ await proxySupertest.delete(`${basePath}/`).expect(200, {
+ timeout: {
+ payload: 300000,
+ idleSocket: 1000, // This is an extra option added by the proxy
+ },
+ });
+ });
+
+ test('PUT routes set the payload timeout and automatically adjusts the idle socket timeout', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.put(
+ {
+ path: '/',
+ validate: false,
+ options: {
+ timeout: {
+ payload: 300000,
+ },
+ },
+ },
+ (_, req, res) => {
+ try {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ } catch (err) {
+ return res.internalError({ body: err.message });
+ }
+ }
+ );
+ registerRouter(router);
+ await server.start();
+ await proxySupertest.put(`${basePath}/`).expect(200, {
+ timeout: {
+ payload: 300000,
+ idleSocket: 1000, // This is an extra option added by the proxy
+ },
+ });
+ });
+
+ test('PATCH routes set the payload timeout and automatically adjusts the idle socket timeout', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.patch(
+ {
+ path: '/',
+ validate: false,
+ options: {
+ timeout: {
+ payload: 300000,
+ },
+ },
+ },
+ (_, req, res) => {
+ try {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ } catch (err) {
+ return res.internalError({ body: err.message });
+ }
+ }
+ );
+ registerRouter(router);
+ await server.start();
+ await proxySupertest.patch(`${basePath}/`).expect(200, {
+ timeout: {
+ payload: 300000,
+ idleSocket: 1000, // This is an extra option added by the proxy
+ },
+ });
+ });
+ });
+
+ describe('idleSocket timeout', () => {
+ test('uses server socket timeout when not specified in the route', async () => {
+ const { registerRouter } = await server.setup({
+ ...config,
+ socketTimeout: 11000,
+ });
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.get(
+ {
+ path: '/',
+ validate: { body: schema.maybe(schema.any()) },
+ },
+ (_, req, res) => {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ }
+ );
+ registerRouter(router);
+
+ await server.start();
+ await proxySupertest
+ .get(`${basePath}/`)
+ .send()
+ .expect(200, {
+ timeout: {
+ idleSocket: 11000,
+ },
+ });
+ });
+
+ test('sets the socket timeout when specified in the route', async () => {
+ const { registerRouter } = await server.setup({
+ ...config,
+ socketTimeout: 11000,
+ });
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.get(
+ {
+ path: '/',
+ validate: { body: schema.maybe(schema.any()) },
+ options: { timeout: { idleSocket: 12000 } },
+ },
+ (context, req, res) => {
+ return res.ok({
+ body: {
+ timeout: req.route.options.timeout,
+ },
+ });
+ }
+ );
+ registerRouter(router);
+
+ await server.start();
+ await proxySupertest
+ .get(`${basePath}/`)
+ .send()
+ .expect(200, {
+ timeout: {
+ idleSocket: 12000,
+ },
+ });
+ });
+
+ test('idleSocket timeout can be smaller than the payload timeout', async () => {
+ const { registerRouter } = await server.setup(config);
+
+ const router = new Router(basePath, logger, enhanceWithContext);
+ router.post(
+ {
+ path: `${basePath}/`,
+ validate: { body: schema.any() },
+ options: {
+ timeout: {
+ payload: 1000,
+ idleSocket: 10,
+ },
+ },
+ },
+ (_, req, res) => {
+ return res.ok({ body: { timeout: req.route.options.timeout } });
+ }
+ );
+
+ registerRouter(router);
+
+ await server.start();
+ });
+ });
+ });
+
+ describe('shouldRedirect', () => {
+ let proxyServerWithoutShouldRedirect: BasePathProxyServer;
+ let proxyWithoutShouldRedirectSupertest: supertest.SuperTest<supertest.Test>;
+
+ beforeEach(async () => {
+ // setup and start a proxy server which does not use "shouldRedirectFromOldBasePath"
+ const proxyConfig: HttpConfig = { ...config, port: 10004 };
+ const devConfig = new DevConfig({ basePathProxyTarget: config.port });
+ proxyServerWithoutShouldRedirect = new BasePathProxyServer(logger, proxyConfig, devConfig);
+ const options: Readonly<BasePathProxyServerOptions> = {
+ shouldRedirectFromOldBasePath: () => false, // Return false to not redirect
+ delayUntil: () => EMPTY,
+ };
+ await proxyServerWithoutShouldRedirect.start(options);
+ proxyWithoutShouldRedirectSupertest = supertest(`http://127.0.0.1:${proxyConfig.port}`);
+ });
+
+ afterEach(async () => {
+ await proxyServerWithoutShouldRedirect.stop();
+ });
+
+ test('it will do a redirect if it detects what looks like a stale or previously used base path', async () => {
+ const fakeBasePath = basePath !== 'abc' ? 'abc' : 'efg';
+ const res = await proxySupertest.get(`/${fakeBasePath}`).expect(302);
+ const location = res.header.location;
+ expect(location).toEqual(`${basePath}/`);
+ });
+
+ test('it will NOT do a redirect if it detects what looks like a stale or previously used base path if we intentionally turn it off', async () => {
+ const fakeBasePath = basePath !== 'abc' ? 'abc' : 'efg';
+ await proxyWithoutShouldRedirectSupertest.get(`/${fakeBasePath}`).expect(404);
+ });
+
+ test('it will NOT redirect if it detects a larger path than 3 characters', async () => {
+ await proxySupertest.get('/abcde').expect(404);
+ });
+
+ test('it will NOT redirect if it is not a GET verb', async () => {
+ const fakeBasePath = basePath !== 'abc' ? 'abc' : 'efg';
+ await proxySupertest.put(`/${fakeBasePath}`).expect(404);
+ });
+ });
+
+ describe('constructor option for sending in a custom basePath', () => {
+ let proxyServerWithFooBasePath: BasePathProxyServer;
+ let proxyWithFooBasePath: supertest.SuperTest<supertest.Test>;
+
+ beforeEach(async () => {
+ // setup and start a proxy server which uses a basePath of "foo"
+ const proxyConfig: HttpConfig = { ...config, port: 10004, basePath: '/foo' }; // <-- "foo" here in basePath
+ const devConfig = new DevConfig({ basePathProxyTarget: config.port });
+ proxyServerWithFooBasePath = new BasePathProxyServer(logger, proxyConfig, devConfig);
+ const options: Readonly<BasePathProxyServerOptions> = {
+ shouldRedirectFromOldBasePath: () => true,
+ delayUntil: () => EMPTY,
+ };
+ await proxyServerWithFooBasePath.start(options);
+ proxyWithFooBasePath = supertest(`http://127.0.0.1:${proxyConfig.port}`);
+ });
+
+ afterEach(async () => {
+ await proxyServerWithFooBasePath.stop();
+ });
+
+ test('it will do a redirect to foo which is our passed in value for the configuration', async () => {
+ const res = await proxyWithFooBasePath.get('/bar').expect(302);
+ const location = res.header.location;
+ expect(location).toEqual('/foo/');
+ });
+ });
+});
diff --git a/src/core/server/http/base_path_proxy_server.ts b/src/core/server/http/base_path_proxy_server.ts
index d461abe54ccbd..dfcd0757c2d1e 100644
--- a/src/core/server/http/base_path_proxy_server.ts
+++ b/src/core/server/http/base_path_proxy_server.ts
@@ -143,12 +143,25 @@ export class BasePathProxyServer {
handler: {
proxy: {
agent: this.httpsAgent,
- host: this.server.info.host,
passThrough: true,
- port: this.devConfig.basePathProxyTargetPort,
- // typings mismatch. h2o2 doesn't support "socket"
- protocol: this.server.info.protocol as HapiProxy.ProxyHandlerOptions['protocol'],
xforward: true,
+ mapUri: async (request) => {
+ return {
+ // Passing in this header to merge it is a workaround until this is fixed:
+ // https://github.com/hapijs/h2o2/issues/124
+ headers:
+ request.headers['content-length'] != null
+ ? { 'content-length': request.headers['content-length'] }
+ : undefined,
+ uri: Url.format({
+ hostname: request.server.info.host,
+ port: this.devConfig.basePathProxyTargetPort,
+ protocol: request.server.info.protocol,
+ pathname: request.path,
+ query: request.query,
+ }),
+ };
+ },
},
},
method: '*',
diff --git a/src/core/server/http/cookie_session_storage.ts b/src/core/server/http/cookie_session_storage.ts
index 1ff0670d78f4e..40bca89c21cb3 100644
--- a/src/core/server/http/cookie_session_storage.ts
+++ b/src/core/server/http/cookie_session_storage.ts
@@ -19,8 +19,6 @@
import { Request, Server } from '@hapi/hapi';
import hapiAuthCookie from '@hapi/cookie';
-// @ts-expect-error no TS definitions
-import Statehood from '@hapi/statehood';
import { KibanaRequest, ensureRawRequest } from './router';
import { SessionStorageFactory, SessionStorage } from './session_storage';
@@ -148,7 +146,7 @@ export async function createCookieSessionStorageFactory(
path: basePath === undefined ? '/' : basePath,
clearInvalid: false,
isHttpOnly: true,
- isSameSite: cookieOptions.sameSite === 'None' ? false : cookieOptions.sameSite ?? false,
+ isSameSite: cookieOptions.sameSite ?? false,
},
validateFunc: async (req: Request, session: T | T[]) => {
const result = cookieOptions.validate(session);
@@ -159,23 +157,6 @@ export async function createCookieSessionStorageFactory(
},
});
- // A hack to support SameSite: 'None'.
- // Remove it after update Hapi to v19 that supports SameSite: 'None' out of the box.
- if (cookieOptions.sameSite === 'None') {
- log.debug('Patching Statehood.prepareValue');
- const originalPrepareValue = Statehood.prepareValue;
- Statehood.prepareValue = function kibanaStatehoodPrepareValueWrapper(
- name: string,
- value: unknown,
- options: any
- ) {
- if (name === cookieOptions.name) {
- options.isSameSite = cookieOptions.sameSite;
- }
- return originalPrepareValue(name, value, options);
- };
- }
-
return {
asScoped(request: KibanaRequest) {
return new ScopedCookieSessionStorage(log, server, ensureRawRequest(request));
diff --git a/src/core/server/http/http_config.test.ts b/src/core/server/http/http_config.test.ts
index b71763e8a2e14..b1b2ba5b295a7 100644
--- a/src/core/server/http/http_config.test.ts
+++ b/src/core/server/http/http_config.test.ts
@@ -22,8 +22,8 @@ import { config, HttpConfig } from './http_config';
import { CspConfig } from '../csp';
import { ExternalUrlConfig } from '../external_url';
-const validHostnames = ['www.example.com', '8.8.8.8', '::1', 'localhost'];
-const invalidHostname = 'asdf$%^';
+const validHostnames = ['www.example.com', '8.8.8.8', '::1', 'localhost', '0.0.0.0'];
+const invalidHostnames = ['asdf$%^', '0'];
jest.mock('os', () => {
const original = jest.requireActual('os');
@@ -48,11 +48,10 @@ test('accepts valid hostnames', () => {
});
test('throws if invalid hostname', () => {
- const httpSchema = config.schema;
- const obj = {
- host: invalidHostname,
- };
- expect(() => httpSchema.validate(obj)).toThrowErrorMatchingSnapshot();
+ for (const host of invalidHostnames) {
+ const httpSchema = config.schema;
+ expect(() => httpSchema.validate({ host })).toThrowErrorMatchingSnapshot();
+ }
});
describe('requestId', () => {
@@ -304,9 +303,9 @@ describe('with compression', () => {
test('throws if invalid referrer whitelist', () => {
const httpSchema = config.schema;
- const invalidHostnames = {
+ const nonEmptyArray = {
compression: {
- referrerWhitelist: [invalidHostname],
+ referrerWhitelist: invalidHostnames,
},
};
const emptyArray = {
@@ -314,7 +313,7 @@ describe('with compression', () => {
referrerWhitelist: [],
},
};
- expect(() => httpSchema.validate(invalidHostnames)).toThrowErrorMatchingSnapshot();
+ expect(() => httpSchema.validate(nonEmptyArray)).toThrowErrorMatchingSnapshot();
expect(() => httpSchema.validate(emptyArray)).toThrowErrorMatchingSnapshot();
});
diff --git a/src/core/server/http/http_config.ts b/src/core/server/http/http_config.ts
index 2bd296fe338ab..aa4db6f88d338 100644
--- a/src/core/server/http/http_config.ts
+++ b/src/core/server/http/http_config.ts
@@ -73,6 +73,11 @@ export const config = {
host: schema.string({
defaultValue: 'localhost',
hostname: true,
+ validate(value) {
+ if (value === '0') {
+ return 'value 0 is not a valid hostname (use "0.0.0.0" to bind to all interfaces)';
+ }
+ },
}),
maxPayload: schema.byteSize({
defaultValue: '1048576b',
diff --git a/src/core/server/http/http_server.test.ts b/src/core/server/http/http_server.test.ts
index cbb60480c4cf1..70c346a5333cc 100644
--- a/src/core/server/http/http_server.test.ts
+++ b/src/core/server/http/http_server.test.ts
@@ -888,52 +888,48 @@ describe('conditional compression', () => {
expect(response.header).not.toHaveProperty('content-encoding');
});
});
+});
- describe('response headers', () => {
- it('allows to configure "keep-alive" header', async () => {
- const { registerRouter, server: innerServer } = await server.setup({
- ...config,
- keepaliveTimeout: 100_000,
- });
+describe('response headers', () => {
+ test('allows to configure "keep-alive" header', async () => {
+ const { registerRouter, server: innerServer } = await server.setup({
+ ...config,
+ keepaliveTimeout: 100_000,
+ });
- const router = new Router('', logger, enhanceWithContext);
- router.get({ path: '/', validate: false }, (context, req, res) =>
- res.ok({ body: req.route })
- );
- registerRouter(router);
+ const router = new Router('', logger, enhanceWithContext);
+ router.get({ path: '/', validate: false }, (context, req, res) => res.ok({ body: req.route }));
+ registerRouter(router);
- await server.start();
- const response = await supertest(innerServer.listener)
- .get('/')
- .set('Connection', 'keep-alive')
- .expect(200);
+ await server.start();
+ const response = await supertest(innerServer.listener)
+ .get('/')
+ .set('Connection', 'keep-alive')
+ .expect(200);
- expect(response.header.connection).toBe('keep-alive');
- expect(response.header['keep-alive']).toBe('timeout=100');
- });
+ expect(response.header.connection).toBe('keep-alive');
+ expect(response.header['keep-alive']).toBe('timeout=100');
+ });
- it('default headers', async () => {
- const { registerRouter, server: innerServer } = await server.setup(config);
+ test('default headers', async () => {
+ const { registerRouter, server: innerServer } = await server.setup(config);
- const router = new Router('', logger, enhanceWithContext);
- router.get({ path: '/', validate: false }, (context, req, res) =>
- res.ok({ body: req.route })
- );
- registerRouter(router);
+ const router = new Router('', logger, enhanceWithContext);
+ router.get({ path: '/', validate: false }, (context, req, res) => res.ok({ body: req.route }));
+ registerRouter(router);
- await server.start();
- const response = await supertest(innerServer.listener).get('/').expect(200);
-
- const restHeaders = omit(response.header, ['date', 'content-length']);
- expect(restHeaders).toMatchInlineSnapshot(`
- Object {
- "accept-ranges": "bytes",
- "cache-control": "private, no-cache, no-store, must-revalidate",
- "connection": "close",
- "content-type": "application/json; charset=utf-8",
- }
- `);
- });
+ await server.start();
+ const response = await supertest(innerServer.listener).get('/').expect(200);
+
+ const restHeaders = omit(response.header, ['date', 'content-length']);
+ expect(restHeaders).toMatchInlineSnapshot(`
+ Object {
+ "accept-ranges": "bytes",
+ "cache-control": "private, no-cache, no-store, must-revalidate",
+ "connection": "close",
+ "content-type": "application/json; charset=utf-8",
+ }
+ `);
});
});
@@ -1270,31 +1266,31 @@ describe('timeout options', () => {
},
});
});
- });
- test(`idleSocket timeout can be smaller than the payload timeout`, async () => {
- const { registerRouter } = await server.setup(config);
+ test('idleSocket timeout can be smaller than the payload timeout', async () => {
+ const { registerRouter } = await server.setup(config);
- const router = new Router('', logger, enhanceWithContext);
- router.post(
- {
- path: '/',
- validate: { body: schema.any() },
- options: {
- timeout: {
- payload: 1000,
- idleSocket: 10,
+ const router = new Router('', logger, enhanceWithContext);
+ router.post(
+ {
+ path: '/',
+ validate: { body: schema.any() },
+ options: {
+ timeout: {
+ payload: 1000,
+ idleSocket: 10,
+ },
},
},
- },
- (context, req, res) => {
- return res.ok({ body: { timeout: req.route.options.timeout } });
- }
- );
+ (context, req, res) => {
+ return res.ok({ body: { timeout: req.route.options.timeout } });
+ }
+ );
- registerRouter(router);
+ registerRouter(router);
- await server.start();
+ await server.start();
+ });
});
});
@@ -1329,13 +1325,14 @@ test('should return a stream in the body', async () => {
describe('setup contract', () => {
describe('#createSessionStorage', () => {
- it('creates session storage factory', async () => {
+ test('creates session storage factory', async () => {
const { createCookieSessionStorageFactory } = await server.setup(config);
const sessionStorageFactory = await createCookieSessionStorageFactory(cookieOptions);
expect(sessionStorageFactory.asScoped).toBeDefined();
});
- it('creates session storage factory only once', async () => {
+
+ test('creates session storage factory only once', async () => {
const { createCookieSessionStorageFactory } = await server.setup(config);
const create = async () => await createCookieSessionStorageFactory(cookieOptions);
@@ -1343,7 +1340,7 @@ describe('setup contract', () => {
expect(create()).rejects.toThrowError('A cookieSessionStorageFactory was already created');
});
- it('does not throw if called after stop', async () => {
+ test('does not throw if called after stop', async () => {
const { createCookieSessionStorageFactory } = await server.setup(config);
await server.stop();
expect(() => {
@@ -1353,7 +1350,7 @@ describe('setup contract', () => {
});
describe('#getServerInfo', () => {
- it('returns correct information', async () => {
+ test('returns correct information', async () => {
let { getServerInfo } = await server.setup(config);
expect(getServerInfo()).toEqual({
@@ -1378,7 +1375,7 @@ describe('setup contract', () => {
});
});
- it('returns correct protocol when ssl is enabled', async () => {
+ test('returns correct protocol when ssl is enabled', async () => {
const { getServerInfo } = await server.setup(configWithSSL);
expect(getServerInfo().protocol).toEqual('https');
@@ -1386,7 +1383,7 @@ describe('setup contract', () => {
});
describe('#registerStaticDir', () => {
- it('does not throw if called after stop', async () => {
+ test('does not throw if called after stop', async () => {
const { registerStaticDir } = await server.setup(config);
await server.stop();
expect(() => {
diff --git a/src/core/server/http/http_server.ts b/src/core/server/http/http_server.ts
index 42e89b66d9c51..81f7c9c45ba50 100644
--- a/src/core/server/http/http_server.ts
+++ b/src/core/server/http/http_server.ts
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-import { Server, ServerRoute } from '@hapi/hapi';
+import { Server } from '@hapi/hapi';
import HapiStaticFiles from '@hapi/inert';
import url from 'url';
import uuid from 'uuid';
@@ -167,6 +167,8 @@ export class HttpServer {
for (const router of this.registeredRouters) {
for (const route of router.getRoutes()) {
this.log.debug(`registering route handler for [${route.path}]`);
+ // Hapi does not allow payload validation to be specified for 'head' or 'get' requests
+ const validate = isSafeMethod(route.method) ? undefined : { payload: true };
const { authRequired, tags, body = {}, timeout } = route.options;
const { accepts: allow, maxBytes, output, parse } = body;
@@ -174,7 +176,7 @@ export class HttpServer {
xsrfRequired: route.options.xsrfRequired ?? !isSafeMethod(route.method),
};
- const routeOpts: ServerRoute = {
+ this.server.route({
handler: route.handler,
method: route.method,
path: route.path,
@@ -182,6 +184,11 @@ export class HttpServer {
auth: this.getAuthOption(authRequired),
app: kibanaRouteOptions,
tags: tags ? Array.from(tags) : undefined,
+ // TODO: This 'validate' section can be removed once the legacy platform is completely removed.
+ // We are telling Hapi that NP routes can accept any payload, so that it can bypass the default
+ // validation applied in ./http_tools#getServerOptions
+ // (All NP routes are already required to specify their own validation in order to access the payload)
+ validate,
// @ts-expect-error Types are outdated and doesn't allow `payload.multipart` to be `true`
payload: [allow, maxBytes, output, parse, timeout?.payload].some((x) => x !== undefined)
? {
@@ -197,22 +204,7 @@ export class HttpServer {
socket: timeout?.idleSocket ?? this.config!.socketTimeout,
},
},
- };
-
- // Hapi does not allow payload validation to be specified for 'head' or 'get' requests
- if (!isSafeMethod(route.method)) {
- // TODO: This 'validate' section can be removed once the legacy platform is completely removed.
- // We are telling Hapi that NP routes can accept any payload, so that it can bypass the default
- // validation applied in ./http_tools#getServerOptions
- // (All NP routes are already required to specify their own validation in order to access the payload)
- // TODO: Move the setting of the validate option back up to being set at `routeOpts` creation-time once
- // https://github.com/hapijs/hoek/pull/365 is merged and released in @hapi/hoek v9.1.1. At that point I
- // imagine the ts-error below will go away as well.
- // @ts-expect-error "Property 'validate' does not exist on type 'RouteOptions'" <-- ehh?!? yes it does!
- routeOpts.options!.validate = { payload: true };
- }
-
- this.server.route(routeOpts);
+ });
}
}
diff --git a/src/core/server/http/http_tools.ts b/src/core/server/http/http_tools.ts
index 8bec26f31fa26..f09f3dc2730a1 100644
--- a/src/core/server/http/http_tools.ts
+++ b/src/core/server/http/http_tools.ts
@@ -29,8 +29,8 @@ import Hoek from '@hapi/hoek';
import type { ServerOptions as TLSOptions } from 'https';
import type { ValidationError } from 'joi';
import uuid from 'uuid';
+import { ensureNoUnsafeProperties } from '@kbn/std';
import { HttpConfig } from './http_config';
-import { validateObject } from './prototype_pollution';
const corsAllowedHeaders = ['Accept', 'Authorization', 'Content-Type', 'If-None-Match', 'kbn-xsrf'];
/**
@@ -69,7 +69,7 @@ export function getServerOptions(config: HttpConfig, { configureTLS = true } = {
// This is a default payload validation which applies to all LP routes which do not specify their own
// `validate.payload` handler, in order to reduce the likelyhood of prototype pollution vulnerabilities.
// (All NP routes are already required to specify their own validation in order to access the payload)
- payload: (value) => Promise.resolve(validateObject(value)),
+ payload: (value) => Promise.resolve(ensureNoUnsafeProperties(value)),
},
},
state: {
diff --git a/src/core/server/index.ts b/src/core/server/index.ts
index 0f2761b67437d..0dae17b4c211e 100644
--- a/src/core/server/index.ts
+++ b/src/core/server/index.ts
@@ -58,6 +58,8 @@ import {
ISavedObjectTypeRegistry,
SavedObjectsServiceSetup,
SavedObjectsServiceStart,
+ ISavedObjectsExporter,
+ ISavedObjectsImporter,
} from './saved_objects';
import { CapabilitiesSetup, CapabilitiesStart } from './capabilities';
import { MetricsServiceSetup, MetricsServiceStart } from './metrics';
@@ -265,13 +267,12 @@ export {
SavedObjectsClientFactoryProvider,
SavedObjectsCreateOptions,
SavedObjectsErrorHelpers,
- SavedObjectsExportOptions,
SavedObjectsExportResultDetails,
SavedObjectsFindResult,
SavedObjectsFindResponse,
SavedObjectsImportConflictError,
SavedObjectsImportAmbiguousConflictError,
- SavedObjectsImportError,
+ SavedObjectsImportFailure,
SavedObjectsImportMissingReferencesError,
SavedObjectsImportOptions,
SavedObjectsImportResponse,
@@ -317,9 +318,15 @@ export {
SavedObjectMigrationMap,
SavedObjectMigrationFn,
SavedObjectsUtils,
- exportSavedObjectsToStream,
- importSavedObjectsFromStream,
- resolveSavedObjectsImportErrors,
+ SavedObjectsExporter,
+ ISavedObjectsExporter,
+ SavedObjectExportBaseOptions,
+ SavedObjectsExportByObjectOptions,
+ SavedObjectsExportByTypeOptions,
+ SavedObjectsExportError,
+ SavedObjectsImporter,
+ ISavedObjectsImporter,
+ SavedObjectsImportError,
} from './saved_objects';
export {
@@ -399,6 +406,8 @@ export interface RequestHandlerContext {
savedObjects: {
client: SavedObjectsClientContract;
typeRegistry: ISavedObjectTypeRegistry;
+ exporter: ISavedObjectsExporter;
+ importer: ISavedObjectsImporter;
};
elasticsearch: {
client: IScopedClusterClient;
diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts
index 669286ccb2318..609555e4e34c1 100644
--- a/src/core/server/legacy/legacy_service.ts
+++ b/src/core/server/legacy/legacy_service.ts
@@ -211,6 +211,8 @@ export class LegacyService implements CoreService {
createScopedRepository: startDeps.core.savedObjects.createScopedRepository,
createInternalRepository: startDeps.core.savedObjects.createInternalRepository,
createSerializer: startDeps.core.savedObjects.createSerializer,
+ createExporter: startDeps.core.savedObjects.createExporter,
+ createImporter: startDeps.core.savedObjects.createImporter,
getTypeRegistry: startDeps.core.savedObjects.getTypeRegistry,
},
metrics: {
@@ -265,7 +267,6 @@ export class LegacyService implements CoreService {
setClientFactoryProvider: setupDeps.core.savedObjects.setClientFactoryProvider,
addClientWrapper: setupDeps.core.savedObjects.addClientWrapper,
registerType: setupDeps.core.savedObjects.registerType,
- getImportExportObjectLimit: setupDeps.core.savedObjects.getImportExportObjectLimit,
},
status: {
isStatusPageAnonymous: setupDeps.core.status.isStatusPageAnonymous,
diff --git a/src/core/server/logging/layouts/json_layout.ts b/src/core/server/logging/layouts/json_layout.ts
index 7573d0b837416..34c3c325e7328 100644
--- a/src/core/server/logging/layouts/json_layout.ts
+++ b/src/core/server/logging/layouts/json_layout.ts
@@ -18,7 +18,7 @@
*/
import moment from 'moment-timezone';
-import { merge } from 'lodash';
+import { merge } from '@kbn/std';
import { schema } from '@kbn/config-schema';
import { LogRecord, Layout } from '@kbn/logging';
@@ -53,22 +53,19 @@ export class JsonLayout implements Layout {
}
public format(record: LogRecord): string {
- return JSON.stringify(
- merge(
- {
- '@timestamp': moment(record.timestamp).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
- message: record.message,
- error: JsonLayout.errorToSerializableObject(record.error),
- log: {
- level: record.level.id.toUpperCase(),
- logger: record.context,
- },
- process: {
- pid: record.pid,
- },
- },
- record.meta
- )
- );
+ const log = {
+ '@timestamp': moment(record.timestamp).format('YYYY-MM-DDTHH:mm:ss.SSSZ'),
+ message: record.message,
+ error: JsonLayout.errorToSerializableObject(record.error),
+ log: {
+ level: record.level.id.toUpperCase(),
+ logger: record.context,
+ },
+ process: {
+ pid: record.pid,
+ },
+ };
+ const output = record.meta ? merge(log, record.meta) : log;
+ return JSON.stringify(output);
}
}
diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts
index 03a0ae2d6443a..c4f0cea428ea5 100644
--- a/src/core/server/mocks.ts
+++ b/src/core/server/mocks.ts
@@ -203,6 +203,8 @@ function createCoreRequestHandlerContextMock() {
savedObjects: {
client: savedObjectsClientMock.create(),
typeRegistry: savedObjectsTypeRegistryMock.create(),
+ exporter: savedObjectsServiceMock.createExporter(),
+ importer: savedObjectsServiceMock.createImporter(),
},
elasticsearch: {
client: elasticsearchServiceMock.createScopedClusterClient(),
diff --git a/src/core/server/plugins/plugin_context.ts b/src/core/server/plugins/plugin_context.ts
index 3b2634ddbe315..42f44e4405443 100644
--- a/src/core/server/plugins/plugin_context.ts
+++ b/src/core/server/plugins/plugin_context.ts
@@ -188,7 +188,6 @@ export function createPluginSetupContext(
setClientFactoryProvider: deps.savedObjects.setClientFactoryProvider,
addClientWrapper: deps.savedObjects.addClientWrapper,
registerType: deps.savedObjects.registerType,
- getImportExportObjectLimit: deps.savedObjects.getImportExportObjectLimit,
},
status: {
core$: deps.status.core$,
@@ -241,6 +240,8 @@ export function createPluginStartContext(
createInternalRepository: deps.savedObjects.createInternalRepository,
createScopedRepository: deps.savedObjects.createScopedRepository,
createSerializer: deps.savedObjects.createSerializer,
+ createExporter: deps.savedObjects.createExporter,
+ createImporter: deps.savedObjects.createImporter,
getTypeRegistry: deps.savedObjects.getTypeRegistry,
},
metrics: {
diff --git a/src/core/server/rendering/views/template.tsx b/src/core/server/rendering/views/template.tsx
index 76af229ac02ba..e4787ee26e12c 100644
--- a/src/core/server/rendering/views/template.tsx
+++ b/src/core/server/rendering/views/template.tsx
@@ -76,33 +76,11 @@ export const Template: FunctionComponent = ({
Elastic
- {/* Favicons (generated from http://realfavicongenerator.net/) */}
-
-
-
-
-
-
-
+ {/* The alternate icon is a fallback for Safari which does not yet support SVG favicons */}
+
+
+
{/* Inject stylesheets into the before scripts so that KP plugins with bundled styles will override them */}
diff --git a/src/core/server/saved_objects/export/errors.ts b/src/core/server/saved_objects/export/errors.ts
new file mode 100644
index 0000000000000..3a26b092ab489
--- /dev/null
+++ b/src/core/server/saved_objects/export/errors.ts
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SavedObject } from '../../../types';
+
+/**
+ * @public
+ */
+export class SavedObjectsExportError extends Error {
+ constructor(
+ public readonly type: string,
+ message: string,
+ public readonly attributes?: Record
+ ) {
+ super(message);
+
+ // Set the prototype explicitly, see:
+ // https://github.com/Microsoft/TypeScript/wiki/Breaking-Changes#extending-built-ins-like-error-array-and-map-may-no-longer-work
+ Object.setPrototypeOf(this, SavedObjectsExportError.prototype);
+ }
+
+ static exportSizeExceeded(limit: number) {
+ return new SavedObjectsExportError(
+ 'export-size-exceeded',
+ `Can't export more than ${limit} objects`
+ );
+ }
+
+ static objectFetchError(objects: SavedObject[]) {
+ return new SavedObjectsExportError('object-fetch-error', 'Error fetching objects to export', {
+ objects,
+ });
+ }
+}
diff --git a/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts b/src/core/server/saved_objects/export/fetch_nested_dependencies.test.ts
similarity index 99%
rename from src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts
rename to src/core/server/saved_objects/export/fetch_nested_dependencies.test.ts
index 862d11cfa663a..62ee402c4da92 100644
--- a/src/core/server/saved_objects/export/inject_nested_depdendencies.test.ts
+++ b/src/core/server/saved_objects/export/fetch_nested_dependencies.test.ts
@@ -19,7 +19,7 @@
import { SavedObject } from '../types';
import { savedObjectsClientMock } from '../../mocks';
-import { getObjectReferencesToFetch, fetchNestedDependencies } from './inject_nested_depdendencies';
+import { getObjectReferencesToFetch, fetchNestedDependencies } from './fetch_nested_dependencies';
import { SavedObjectsErrorHelpers } from '..';
describe('getObjectReferencesToFetch()', () => {
diff --git a/src/core/server/saved_objects/export/inject_nested_depdendencies.ts b/src/core/server/saved_objects/export/fetch_nested_dependencies.ts
similarity index 100%
rename from src/core/server/saved_objects/export/inject_nested_depdendencies.ts
rename to src/core/server/saved_objects/export/fetch_nested_dependencies.ts
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
deleted file mode 100644
index 8f397c01ffa71..0000000000000
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.test.ts
+++ /dev/null
@@ -1,955 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { exportSavedObjectsToStream } from './get_sorted_objects_for_export';
-import { savedObjectsClientMock } from '../service/saved_objects_client.mock';
-import { Readable } from 'stream';
-import { createPromiseFromStreams, createConcatStream } from '@kbn/utils';
-
-async function readStreamToCompletion(stream: Readable) {
- return createPromiseFromStreams([stream, createConcatStream([])]);
-}
-
-describe('getSortedObjectsForExport()', () => {
- const savedObjectsClient = savedObjectsClientMock.create();
-
- afterEach(() => {
- savedObjectsClient.find.mockReset();
- savedObjectsClient.bulkGet.mockReset();
- savedObjectsClient.create.mockReset();
- savedObjectsClient.bulkCreate.mockReset();
- savedObjectsClient.delete.mockReset();
- savedObjectsClient.get.mockReset();
- savedObjectsClient.update.mockReset();
- });
-
- test('exports selected types and sorts them', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- score: 1,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.find).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Object {
- "hasReference": undefined,
- "hasReferenceOperator": undefined,
- "namespaces": undefined,
- "perPage": 500,
- "search": undefined,
- "type": Array [
- "index-pattern",
- "search",
- ],
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('omits the `namespaces` property from the export', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- namespaces: ['foo', 'bar'],
- score: 0,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- namespaces: ['foo', 'bar'],
- score: 0,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.find).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Object {
- "hasReference": undefined,
- "hasReferenceOperator": undefined,
- "namespaces": undefined,
- "perPage": 500,
- "search": undefined,
- "type": Array [
- "index-pattern",
- "search",
- ],
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('exclude export details if option is specified', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- score: 1,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- excludeExportDetails: true,
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- ]
- `);
- });
-
- test('exports selected types with search string when present', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- score: 1,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- search: 'foo',
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.find).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Object {
- "hasReference": undefined,
- "hasReferenceOperator": undefined,
- "namespaces": undefined,
- "perPage": 500,
- "search": "foo",
- "type": Array [
- "index-pattern",
- "search",
- ],
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('exports selected types with references when present', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 1,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- hasReference: [
- {
- id: '1',
- type: 'index-pattern',
- },
- ],
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 1,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.find).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Object {
- "hasReference": Array [
- Object {
- "id": "1",
- "type": "index-pattern",
- },
- ],
- "hasReferenceOperator": "OR",
- "namespaces": undefined,
- "perPage": 500,
- "search": undefined,
- "type": Array [
- "index-pattern",
- "search",
- ],
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('exports from the provided namespace when present', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- name: 'name',
- type: 'index-pattern',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- score: 1,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- const exportStream = await exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 500,
- types: ['index-pattern', 'search'],
- namespace: 'foo',
- });
-
- const response = await readStreamToCompletion(exportStream);
-
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.find).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Object {
- "hasReference": undefined,
- "hasReferenceOperator": undefined,
- "namespaces": Array [
- "foo",
- ],
- "perPage": 500,
- "search": undefined,
- "type": Array [
- "index-pattern",
- "search",
- ],
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('export selected types throws error when exceeding exportSizeLimit', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 2,
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- score: 1,
- references: [
- {
- type: 'index-pattern',
- name: 'name',
- id: '1',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- score: 1,
- references: [],
- },
- ],
- per_page: 1,
- page: 0,
- });
- await expect(
- exportSavedObjectsToStream({
- savedObjectsClient,
- exportSizeLimit: 1,
- types: ['index-pattern', 'search'],
- })
- ).rejects.toThrowErrorMatchingInlineSnapshot(`"Can't export more than 1 objects"`);
- });
-
- test('sorts objects within type', async () => {
- savedObjectsClient.find.mockResolvedValueOnce({
- total: 3,
- per_page: 10000,
- page: 1,
- saved_objects: [
- {
- id: '3',
- type: 'index-pattern',
- attributes: {
- name: 'baz',
- },
- score: 1,
- references: [],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {
- name: 'foo',
- },
- score: 1,
- references: [],
- },
- {
- id: '2',
- type: 'index-pattern',
- attributes: {
- name: 'bar',
- },
- score: 1,
- references: [],
- },
- ],
- });
- const exportStream = await exportSavedObjectsToStream({
- exportSizeLimit: 10000,
- savedObjectsClient,
- types: ['index-pattern'],
- });
- const response = await readStreamToCompletion(exportStream);
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {
- "name": "foo",
- },
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {
- "name": "bar",
- },
- "id": "2",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {
- "name": "baz",
- },
- "id": "3",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "exportedCount": 3,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- });
-
- test('exports selected objects and sorts them', async () => {
- savedObjectsClient.bulkGet.mockResolvedValueOnce({
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- references: [
- {
- id: '1',
- name: 'name',
- type: 'index-pattern',
- },
- ],
- },
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- references: [],
- },
- ],
- });
- const exportStream = await exportSavedObjectsToStream({
- exportSizeLimit: 10000,
- savedObjectsClient,
- objects: [
- {
- type: 'index-pattern',
- id: '1',
- },
- {
- type: 'search',
- id: '2',
- },
- ],
- });
- const response = await readStreamToCompletion(exportStream);
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Array [
- Object {
- "id": "1",
- "type": "index-pattern",
- },
- Object {
- "id": "2",
- "type": "search",
- },
- ],
- Object {
- "namespace": undefined,
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('modifies return results to redact `namespaces` attribute', async () => {
- const createSavedObject = (obj: any) => ({ ...obj, attributes: {}, references: [] });
- savedObjectsClient.bulkGet.mockResolvedValueOnce({
- saved_objects: [
- createSavedObject({ type: 'multi', id: '1', namespaces: ['foo'] }),
- createSavedObject({ type: 'multi', id: '2', namespaces: ['bar'] }),
- createSavedObject({ type: 'other', id: '3' }),
- ],
- });
- const exportStream = await exportSavedObjectsToStream({
- exportSizeLimit: 10000,
- savedObjectsClient,
- objects: [
- { type: 'multi', id: '1' },
- { type: 'multi', id: '2' },
- { type: 'other', id: '3' },
- ],
- });
- const response = await readStreamToCompletion(exportStream);
- expect(response).toEqual([
- createSavedObject({ type: 'multi', id: '1' }),
- createSavedObject({ type: 'multi', id: '2' }),
- createSavedObject({ type: 'other', id: '3' }),
- expect.objectContaining({ exportedCount: 3 }),
- ]);
- });
-
- test('includes nested dependencies when passed in', async () => {
- savedObjectsClient.bulkGet.mockResolvedValueOnce({
- saved_objects: [
- {
- id: '2',
- type: 'search',
- attributes: {},
- references: [
- {
- type: 'index-pattern',
- name: 'name',
- id: '1',
- },
- ],
- },
- ],
- });
- savedObjectsClient.bulkGet.mockResolvedValueOnce({
- saved_objects: [
- {
- id: '1',
- type: 'index-pattern',
- attributes: {},
- references: [],
- },
- ],
- });
- const exportStream = await exportSavedObjectsToStream({
- exportSizeLimit: 10000,
- savedObjectsClient,
- objects: [
- {
- type: 'search',
- id: '2',
- },
- ],
- includeReferencesDeep: true,
- });
- const response = await readStreamToCompletion(exportStream);
- expect(response).toMatchInlineSnapshot(`
- Array [
- Object {
- "attributes": Object {},
- "id": "1",
- "references": Array [],
- "type": "index-pattern",
- },
- Object {
- "attributes": Object {},
- "id": "2",
- "references": Array [
- Object {
- "id": "1",
- "name": "name",
- "type": "index-pattern",
- },
- ],
- "type": "search",
- },
- Object {
- "exportedCount": 2,
- "missingRefCount": 0,
- "missingReferences": Array [],
- },
- ]
- `);
- expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
- [MockFunction] {
- "calls": Array [
- Array [
- Array [
- Object {
- "id": "2",
- "type": "search",
- },
- ],
- Object {
- "namespace": undefined,
- },
- ],
- Array [
- Array [
- Object {
- "id": "1",
- "type": "index-pattern",
- },
- ],
- Object {
- "namespace": undefined,
- },
- ],
- ],
- "results": Array [
- Object {
- "type": "return",
- "value": Promise {},
- },
- Object {
- "type": "return",
- "value": Promise {},
- },
- ],
- }
- `);
- });
-
- test('export selected objects throws error when exceeding exportSizeLimit', async () => {
- const exportOpts = {
- exportSizeLimit: 1,
- savedObjectsClient,
- objects: [
- {
- type: 'index-pattern',
- id: '1',
- },
- {
- type: 'search',
- id: '2',
- },
- ],
- };
- await expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Can't export more than 1 objects"`
- );
- });
-
- test('rejects when neither type nor objects paramaters are passed in', () => {
- const exportOpts = {
- exportSizeLimit: 1,
- savedObjectsClient,
- types: undefined,
- objects: undefined,
- };
-
- expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Either \`type\` or \`objects\` are required."`
- );
- });
-
- test('rejects when both objects and search are passed in', () => {
- const exportOpts = {
- exportSizeLimit: 1,
- savedObjectsClient,
- objects: [{ type: 'index-pattern', id: '1' }],
- search: 'foo',
- };
-
- expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Can't specify both \\"search\\" and \\"objects\\" properties when exporting"`
- );
- });
-
- test('rejects when both objects and references are passed in', () => {
- const exportOpts = {
- exportSizeLimit: 1,
- savedObjectsClient,
- objects: [{ type: 'index-pattern', id: '1' }],
- hasReference: [{ type: 'index-pattern', id: '1' }],
- };
-
- expect(exportSavedObjectsToStream(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
- `"Can't specify both \\"references\\" and \\"objects\\" properties when exporting"`
- );
- });
-});
diff --git a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts b/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
deleted file mode 100644
index 84b14d0a5f02c..0000000000000
--- a/src/core/server/saved_objects/export/get_sorted_objects_for_export.ts
+++ /dev/null
@@ -1,266 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import Boom from '@hapi/boom';
-import { createListStream } from '@kbn/utils';
-import {
- SavedObjectsClientContract,
- SavedObject,
- SavedObjectsFindOptionsReference,
-} from '../types';
-import { fetchNestedDependencies } from './inject_nested_depdendencies';
-import { sortObjects } from './sort_objects';
-
-/**
- * Options controlling the export operation.
- * @public
- */
-export interface SavedObjectsExportOptions {
- /** optional array of saved object types. */
- types?: string[];
- /** optional array of references to search object for when exporting by types */
- hasReference?: SavedObjectsFindOptionsReference[];
- /** optional array of objects to export. */
- objects?: Array<{
- /** the saved object id. */
- id: string;
- /** the saved object type. */
- type: string;
- }>;
- /** optional query string to filter exported objects. */
- search?: string;
- /** an instance of the SavedObjectsClient. */
- savedObjectsClient: SavedObjectsClientContract;
- /** the maximum number of objects to export. */
- exportSizeLimit: number;
- /** flag to also include all related saved objects in the export stream. */
- includeReferencesDeep?: boolean;
- /** flag to not append {@link SavedObjectsExportResultDetails | export details} to the end of the export stream. */
- excludeExportDetails?: boolean;
- /** optional namespace to override the namespace used by the savedObjectsClient. */
- namespace?: string;
-}
-
-interface SavedObjectsFetchByTypeOptions {
- /** array of saved object types. */
- types: string[];
- /** optional array of references to search object for when exporting by types */
- hasReference?: SavedObjectsFindOptionsReference[];
- /** optional query string to filter exported objects. */
- search?: string;
- /** an instance of the SavedObjectsClient. */
- savedObjectsClient: SavedObjectsClientContract;
- /** the maximum number of objects to export. */
- exportSizeLimit: number;
- /** optional namespace to override the namespace used by the savedObjectsClient. */
- namespace?: string;
-}
-
-interface SavedObjectsFetchByObjectOptions {
- /** optional array of objects to export. */
- objects: Array<{
- /** the saved object id. */
- id: string;
- /** the saved object type. */
- type: string;
- }>;
- /** an instance of the SavedObjectsClient. */
- savedObjectsClient: SavedObjectsClientContract;
- /** the maximum number of objects to export. */
- exportSizeLimit: number;
- /** optional namespace to override the namespace used by the savedObjectsClient. */
- namespace?: string;
-}
-
-const isFetchByTypeOptions = (
- options: SavedObjectsFetchByTypeOptions | SavedObjectsFetchByObjectOptions
-): options is SavedObjectsFetchByTypeOptions => {
- return Boolean((options as SavedObjectsFetchByTypeOptions).types);
-};
-
-/**
- * Structure of the export result details entry
- * @public
- */
-export interface SavedObjectsExportResultDetails {
- /** number of successfully exported objects */
- exportedCount: number;
- /** number of missing references */
- missingRefCount: number;
- /** missing references details */
- missingReferences: Array<{
- /** the missing reference id. */
- id: string;
- /** the missing reference type. */
- type: string;
- }>;
-}
-
-async function fetchByType({
- types,
- namespace,
- exportSizeLimit,
- hasReference,
- search,
- savedObjectsClient,
-}: SavedObjectsFetchByTypeOptions) {
- const findResponse = await savedObjectsClient.find({
- type: types,
- hasReference,
- hasReferenceOperator: hasReference ? 'OR' : undefined,
- search,
- perPage: exportSizeLimit,
- namespaces: namespace ? [namespace] : undefined,
- });
- if (findResponse.total > exportSizeLimit) {
- throw Boom.badRequest(`Can't export more than ${exportSizeLimit} objects`);
- }
-
- // sorts server-side by _id, since it's only available in fielddata
- return (
- findResponse.saved_objects
- // exclude the find-specific `score` property from the exported objects
- .map(({ score, ...obj }) => obj)
- .sort((a: SavedObject, b: SavedObject) => (a.id > b.id ? 1 : -1))
- );
-}
-
-async function fetchByObjects({
- objects,
- exportSizeLimit,
- namespace,
- savedObjectsClient,
-}: SavedObjectsFetchByObjectOptions) {
- if (objects.length > exportSizeLimit) {
- throw Boom.badRequest(`Can't export more than ${exportSizeLimit} objects`);
- }
- const bulkGetResult = await savedObjectsClient.bulkGet(objects, { namespace });
- const erroredObjects = bulkGetResult.saved_objects.filter((obj) => !!obj.error);
- if (erroredObjects.length) {
- const err = Boom.badRequest();
- err.output.payload.attributes = {
- objects: erroredObjects,
- };
- throw err;
- }
- return bulkGetResult.saved_objects;
-}
-
-const validateOptions = ({
- objects,
- search,
- hasReference,
- exportSizeLimit,
- namespace,
- savedObjectsClient,
- types,
-}: SavedObjectsExportOptions):
- | SavedObjectsFetchByTypeOptions
- | SavedObjectsFetchByObjectOptions => {
- if ((types?.length ?? 0) > 0 && (objects?.length ?? 0) > 0) {
- throw Boom.badRequest(`Can't specify both "types" and "objects" properties when exporting`);
- }
- if (objects && objects.length > 0) {
- if (objects.length > exportSizeLimit) {
- throw Boom.badRequest(`Can't export more than ${exportSizeLimit} objects`);
- }
- if (typeof search === 'string') {
- throw Boom.badRequest(`Can't specify both "search" and "objects" properties when exporting`);
- }
- if (hasReference && hasReference.length) {
- throw Boom.badRequest(
- `Can't specify both "references" and "objects" properties when exporting`
- );
- }
- return {
- objects,
- exportSizeLimit,
- savedObjectsClient,
- namespace,
- } as SavedObjectsFetchByObjectOptions;
- } else if (types && types.length > 0) {
- return {
- types,
- hasReference,
- search,
- exportSizeLimit,
- savedObjectsClient,
- namespace,
- } as SavedObjectsFetchByTypeOptions;
- } else {
- throw Boom.badRequest('Either `type` or `objects` are required.');
- }
-};
-
-/**
- * Generates sorted saved object stream to be used for export.
- * See the {@link SavedObjectsExportOptions | options} for more detailed information.
- *
- * @public
- */
-export async function exportSavedObjectsToStream({
- types,
- hasReference,
- objects,
- search,
- savedObjectsClient,
- exportSizeLimit,
- includeReferencesDeep = false,
- excludeExportDetails = false,
- namespace,
-}: SavedObjectsExportOptions) {
- const fetchOptions = validateOptions({
- savedObjectsClient,
- namespace,
- exportSizeLimit,
- hasReference,
- search,
- objects,
- excludeExportDetails,
- includeReferencesDeep,
- types,
- });
-
- const rootObjects = isFetchByTypeOptions(fetchOptions)
- ? await fetchByType(fetchOptions)
- : await fetchByObjects(fetchOptions);
-
-  let exportedObjects: Array<SavedObject<unknown>> = [];
- let missingReferences: SavedObjectsExportResultDetails['missingReferences'] = [];
-
- if (includeReferencesDeep) {
- const fetchResult = await fetchNestedDependencies(rootObjects, savedObjectsClient, namespace);
- exportedObjects = sortObjects(fetchResult.objects);
- missingReferences = fetchResult.missingRefs;
- } else {
- exportedObjects = sortObjects(rootObjects);
- }
-
- // redact attributes that should not be exported
-  const redactedObjects = exportedObjects.map<SavedObject<unknown>>(
- ({ namespaces, ...object }) => object
- );
-
- const exportDetails: SavedObjectsExportResultDetails = {
- exportedCount: exportedObjects.length,
- missingRefCount: missingReferences.length,
- missingReferences,
- };
- return createListStream([...redactedObjects, ...(excludeExportDetails ? [] : [exportDetails])]);
-}
diff --git a/src/core/server/saved_objects/export/index.ts b/src/core/server/saved_objects/export/index.ts
index 37824cceb18cb..5166f20b3d1c1 100644
--- a/src/core/server/saved_objects/export/index.ts
+++ b/src/core/server/saved_objects/export/index.ts
@@ -18,7 +18,10 @@
*/
export {
- exportSavedObjectsToStream,
- SavedObjectsExportOptions,
+ SavedObjectsExportByObjectOptions,
+ SavedObjectExportBaseOptions,
+ SavedObjectsExportByTypeOptions,
SavedObjectsExportResultDetails,
-} from './get_sorted_objects_for_export';
+} from './types';
+export { ISavedObjectsExporter, SavedObjectsExporter } from './saved_objects_exporter';
+export { SavedObjectsExportError } from './errors';
diff --git a/src/core/server/saved_objects/export/saved_objects_exporter.mock.ts b/src/core/server/saved_objects/export/saved_objects_exporter.mock.ts
new file mode 100644
index 0000000000000..71f08a17e3251
--- /dev/null
+++ b/src/core/server/saved_objects/export/saved_objects_exporter.mock.ts
@@ -0,0 +1,33 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { ISavedObjectsExporter } from './saved_objects_exporter';
+
+const createExporterMock = () => {
+  const mock: jest.Mocked<ISavedObjectsExporter> = {
+ exportByObjects: jest.fn(),
+ exportByTypes: jest.fn(),
+ };
+
+ return mock;
+};
+
+export const savedObjectsExporterMock = {
+ create: createExporterMock,
+};
diff --git a/src/core/server/saved_objects/export/saved_objects_exporter.test.ts b/src/core/server/saved_objects/export/saved_objects_exporter.test.ts
new file mode 100644
index 0000000000000..b382a36a35ef7
--- /dev/null
+++ b/src/core/server/saved_objects/export/saved_objects_exporter.test.ts
@@ -0,0 +1,936 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SavedObjectsExporter } from './saved_objects_exporter';
+import { savedObjectsClientMock } from '../service/saved_objects_client.mock';
+import { Readable } from 'stream';
+import { createPromiseFromStreams, createConcatStream } from '@kbn/utils';
+
+async function readStreamToCompletion(stream: Readable) {
+ return createPromiseFromStreams([stream, createConcatStream([])]);
+}
+
+const exportSizeLimit = 500;
+
+describe('getSortedObjectsForExport()', () => {
+  let savedObjectsClient: ReturnType<typeof savedObjectsClientMock.create>;
+ let exporter: SavedObjectsExporter;
+
+ beforeEach(() => {
+ savedObjectsClient = savedObjectsClientMock.create();
+ exporter = new SavedObjectsExporter({ savedObjectsClient, exportSizeLimit });
+ });
+
+ describe('#exportByTypes', () => {
+ test('exports selected types and sorts them', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ score: 1,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.find).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Object {
+ "hasReference": undefined,
+ "hasReferenceOperator": undefined,
+ "namespaces": undefined,
+ "perPage": 500,
+ "search": undefined,
+ "type": Array [
+ "index-pattern",
+ "search",
+ ],
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('omits the `namespaces` property from the export', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ namespaces: ['foo', 'bar'],
+ score: 0,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ namespaces: ['foo', 'bar'],
+ score: 0,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.find).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Object {
+ "hasReference": undefined,
+ "hasReferenceOperator": undefined,
+ "namespaces": undefined,
+ "perPage": 500,
+ "search": undefined,
+ "type": Array [
+ "index-pattern",
+ "search",
+ ],
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('exclude export details if option is specified', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ score: 1,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ excludeExportDetails: true,
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ ]
+ `);
+ });
+
+ test('exports selected types with search string when present', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ score: 1,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ search: 'foo',
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.find).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Object {
+ "hasReference": undefined,
+ "hasReferenceOperator": undefined,
+ "namespaces": undefined,
+ "perPage": 500,
+ "search": "foo",
+ "type": Array [
+ "index-pattern",
+ "search",
+ ],
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('exports selected types with references when present', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 1,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ hasReference: [
+ {
+ id: '1',
+ type: 'index-pattern',
+ },
+ ],
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 1,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.find).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Object {
+ "hasReference": Array [
+ Object {
+ "id": "1",
+ "type": "index-pattern",
+ },
+ ],
+ "hasReferenceOperator": "OR",
+ "namespaces": undefined,
+ "perPage": 500,
+ "search": undefined,
+ "type": Array [
+ "index-pattern",
+ "search",
+ ],
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('exports from the provided namespace when present', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ name: 'name',
+ type: 'index-pattern',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ score: 1,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ namespace: 'foo',
+ });
+
+ const response = await readStreamToCompletion(exportStream);
+
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.find).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Object {
+ "hasReference": undefined,
+ "hasReferenceOperator": undefined,
+ "namespaces": Array [
+ "foo",
+ ],
+ "perPage": 500,
+ "search": undefined,
+ "type": Array [
+ "index-pattern",
+ "search",
+ ],
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('export selected types throws error when exceeding exportSizeLimit', async () => {
+ exporter = new SavedObjectsExporter({ savedObjectsClient, exportSizeLimit: 1 });
+
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 2,
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ score: 1,
+ references: [
+ {
+ type: 'index-pattern',
+ name: 'name',
+ id: '1',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ score: 1,
+ references: [],
+ },
+ ],
+ per_page: 1,
+ page: 0,
+ });
+ await expect(
+ exporter.exportByTypes({
+ types: ['index-pattern', 'search'],
+ })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"Can't export more than 1 objects"`);
+ });
+
+ test('sorts objects within type', async () => {
+ savedObjectsClient.find.mockResolvedValueOnce({
+ total: 3,
+ per_page: 10000,
+ page: 1,
+ saved_objects: [
+ {
+ id: '3',
+ type: 'index-pattern',
+ attributes: {
+ name: 'baz',
+ },
+ score: 1,
+ references: [],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {
+ name: 'foo',
+ },
+ score: 1,
+ references: [],
+ },
+ {
+ id: '2',
+ type: 'index-pattern',
+ attributes: {
+ name: 'bar',
+ },
+ score: 1,
+ references: [],
+ },
+ ],
+ });
+ const exportStream = await exporter.exportByTypes({
+ types: ['index-pattern'],
+ });
+ const response = await readStreamToCompletion(exportStream);
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {
+ "name": "foo",
+ },
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {
+ "name": "bar",
+ },
+ "id": "2",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {
+ "name": "baz",
+ },
+ "id": "3",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "exportedCount": 3,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ });
+ });
+
+ describe('#exportByObjects', () => {
+ test('exports selected objects and sorts them', async () => {
+ savedObjectsClient.bulkGet.mockResolvedValueOnce({
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ references: [
+ {
+ id: '1',
+ name: 'name',
+ type: 'index-pattern',
+ },
+ ],
+ },
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ references: [],
+ },
+ ],
+ });
+ const exportStream = await exporter.exportByObjects({
+ objects: [
+ {
+ type: 'index-pattern',
+ id: '1',
+ },
+ {
+ type: 'search',
+ id: '2',
+ },
+ ],
+ });
+ const response = await readStreamToCompletion(exportStream);
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Array [
+ Object {
+ "id": "1",
+ "type": "index-pattern",
+ },
+ Object {
+ "id": "2",
+ "type": "search",
+ },
+ ],
+ Object {
+ "namespace": undefined,
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+
+ test('throws when `bulkGet` returns any errored object', async () => {
+ savedObjectsClient.bulkGet.mockResolvedValueOnce({
+ saved_objects: [
+ {
+ id: '1',
+ type: 'search',
+ attributes: {},
+ references: [],
+ },
+ {
+ id: '2',
+ type: 'index-pattern',
+ error: {
+ error: 'NotFound',
+ message: 'NotFound',
+ statusCode: 404,
+ },
+ attributes: {},
+ references: [],
+ },
+ ],
+ });
+ await expect(
+ exporter.exportByObjects({
+ objects: [
+ {
+ type: 'index-pattern',
+ id: '1',
+ },
+ {
+ type: 'search',
+ id: '2',
+ },
+ ],
+ })
+ ).rejects.toThrowErrorMatchingInlineSnapshot(`"Error fetching objects to export"`);
+ });
+
+ test('export selected objects throws error when exceeding exportSizeLimit', async () => {
+ exporter = new SavedObjectsExporter({ savedObjectsClient, exportSizeLimit: 1 });
+
+ const exportOpts = {
+ objects: [
+ {
+ type: 'index-pattern',
+ id: '1',
+ },
+ {
+ type: 'search',
+ id: '2',
+ },
+ ],
+ };
+ await expect(exporter.exportByObjects(exportOpts)).rejects.toThrowErrorMatchingInlineSnapshot(
+ `"Can't export more than 1 objects"`
+ );
+ });
+
+ test('modifies return results to redact `namespaces` attribute', async () => {
+ const createSavedObject = (obj: any) => ({ ...obj, attributes: {}, references: [] });
+ savedObjectsClient.bulkGet.mockResolvedValueOnce({
+ saved_objects: [
+ createSavedObject({ type: 'multi', id: '1', namespaces: ['foo'] }),
+ createSavedObject({ type: 'multi', id: '2', namespaces: ['bar'] }),
+ createSavedObject({ type: 'other', id: '3' }),
+ ],
+ });
+ const exportStream = await exporter.exportByObjects({
+ objects: [
+ { type: 'multi', id: '1' },
+ { type: 'multi', id: '2' },
+ { type: 'other', id: '3' },
+ ],
+ });
+ const response = await readStreamToCompletion(exportStream);
+ expect(response).toEqual([
+ createSavedObject({ type: 'multi', id: '1' }),
+ createSavedObject({ type: 'multi', id: '2' }),
+ createSavedObject({ type: 'other', id: '3' }),
+ expect.objectContaining({ exportedCount: 3 }),
+ ]);
+ });
+
+ test('includes nested dependencies when passed in', async () => {
+ savedObjectsClient.bulkGet.mockResolvedValueOnce({
+ saved_objects: [
+ {
+ id: '2',
+ type: 'search',
+ attributes: {},
+ references: [
+ {
+ type: 'index-pattern',
+ name: 'name',
+ id: '1',
+ },
+ ],
+ },
+ ],
+ });
+ savedObjectsClient.bulkGet.mockResolvedValueOnce({
+ saved_objects: [
+ {
+ id: '1',
+ type: 'index-pattern',
+ attributes: {},
+ references: [],
+ },
+ ],
+ });
+ const exportStream = await exporter.exportByObjects({
+ objects: [
+ {
+ type: 'search',
+ id: '2',
+ },
+ ],
+ includeReferencesDeep: true,
+ });
+ const response = await readStreamToCompletion(exportStream);
+ expect(response).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "attributes": Object {},
+ "id": "1",
+ "references": Array [],
+ "type": "index-pattern",
+ },
+ Object {
+ "attributes": Object {},
+ "id": "2",
+ "references": Array [
+ Object {
+ "id": "1",
+ "name": "name",
+ "type": "index-pattern",
+ },
+ ],
+ "type": "search",
+ },
+ Object {
+ "exportedCount": 2,
+ "missingRefCount": 0,
+ "missingReferences": Array [],
+ },
+ ]
+ `);
+ expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
+ [MockFunction] {
+ "calls": Array [
+ Array [
+ Array [
+ Object {
+ "id": "2",
+ "type": "search",
+ },
+ ],
+ Object {
+ "namespace": undefined,
+ },
+ ],
+ Array [
+ Array [
+ Object {
+ "id": "1",
+ "type": "index-pattern",
+ },
+ ],
+ Object {
+ "namespace": undefined,
+ },
+ ],
+ ],
+ "results": Array [
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ Object {
+ "type": "return",
+ "value": Promise {},
+ },
+ ],
+ }
+ `);
+ });
+ });
+});
diff --git a/src/core/server/saved_objects/export/saved_objects_exporter.ts b/src/core/server/saved_objects/export/saved_objects_exporter.ts
new file mode 100644
index 0000000000000..94b21dda56be1
--- /dev/null
+++ b/src/core/server/saved_objects/export/saved_objects_exporter.ts
@@ -0,0 +1,162 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { createListStream } from '@kbn/utils';
+import { PublicMethodsOf } from '@kbn/utility-types';
+import { SavedObject, SavedObjectsClientContract } from '../types';
+import { fetchNestedDependencies } from './fetch_nested_dependencies';
+import { sortObjects } from './sort_objects';
+import {
+ SavedObjectsExportResultDetails,
+ SavedObjectExportBaseOptions,
+ SavedObjectsExportByObjectOptions,
+ SavedObjectsExportByTypeOptions,
+} from './types';
+import { SavedObjectsExportError } from './errors';
+
+/**
+ * @public
+ */
+export type ISavedObjectsExporter = PublicMethodsOf<SavedObjectsExporter>;
+
+/**
+ * @public
+ */
+export class SavedObjectsExporter {
+ readonly #savedObjectsClient: SavedObjectsClientContract;
+ readonly #exportSizeLimit: number;
+
+ constructor({
+ savedObjectsClient,
+ exportSizeLimit,
+ }: {
+ savedObjectsClient: SavedObjectsClientContract;
+ exportSizeLimit: number;
+ }) {
+ this.#savedObjectsClient = savedObjectsClient;
+ this.#exportSizeLimit = exportSizeLimit;
+ }
+
+ /**
+ * Generates an export stream for given types.
+ *
+ * See the {@link SavedObjectsExportByTypeOptions | options} for more detailed information.
+ *
+ * @throws SavedObjectsExportError
+ */
+ public async exportByTypes(options: SavedObjectsExportByTypeOptions) {
+ const objects = await this.fetchByTypes(options);
+ return this.processObjects(objects, {
+ includeReferencesDeep: options.includeReferencesDeep,
+ excludeExportDetails: options.excludeExportDetails,
+ namespace: options.namespace,
+ });
+ }
+
+ /**
+ * Generates an export stream for given object references.
+ *
+ * See the {@link SavedObjectsExportByObjectOptions | options} for more detailed information.
+ *
+ * @throws SavedObjectsExportError
+ */
+ public async exportByObjects(options: SavedObjectsExportByObjectOptions) {
+ if (options.objects.length > this.#exportSizeLimit) {
+ throw SavedObjectsExportError.exportSizeExceeded(this.#exportSizeLimit);
+ }
+ const objects = await this.fetchByObjects(options);
+ return this.processObjects(objects, {
+ includeReferencesDeep: options.includeReferencesDeep,
+ excludeExportDetails: options.excludeExportDetails,
+ namespace: options.namespace,
+ });
+ }
+
+ private async processObjects(
+ savedObjects: SavedObject[],
+ {
+ excludeExportDetails = false,
+ includeReferencesDeep = false,
+ namespace,
+ }: SavedObjectExportBaseOptions
+ ) {
+    let exportedObjects: Array<SavedObject<unknown>>;
+ let missingReferences: SavedObjectsExportResultDetails['missingReferences'] = [];
+
+ if (includeReferencesDeep) {
+ const fetchResult = await fetchNestedDependencies(
+ savedObjects,
+ this.#savedObjectsClient,
+ namespace
+ );
+ exportedObjects = sortObjects(fetchResult.objects);
+ missingReferences = fetchResult.missingRefs;
+ } else {
+ exportedObjects = sortObjects(savedObjects);
+ }
+
+ // redact attributes that should not be exported
+    const redactedObjects = exportedObjects.map<SavedObject<unknown>>(
+ ({ namespaces, ...object }) => object
+ );
+
+ const exportDetails: SavedObjectsExportResultDetails = {
+ exportedCount: exportedObjects.length,
+ missingRefCount: missingReferences.length,
+ missingReferences,
+ };
+ return createListStream([...redactedObjects, ...(excludeExportDetails ? [] : [exportDetails])]);
+ }
+
+ private async fetchByObjects({ objects, namespace }: SavedObjectsExportByObjectOptions) {
+ const bulkGetResult = await this.#savedObjectsClient.bulkGet(objects, { namespace });
+ const erroredObjects = bulkGetResult.saved_objects.filter((obj) => !!obj.error);
+ if (erroredObjects.length) {
+ throw SavedObjectsExportError.objectFetchError(erroredObjects);
+ }
+ return bulkGetResult.saved_objects;
+ }
+
+ private async fetchByTypes({
+ types,
+ namespace,
+ hasReference,
+ search,
+ }: SavedObjectsExportByTypeOptions) {
+ const findResponse = await this.#savedObjectsClient.find({
+ type: types,
+ hasReference,
+ hasReferenceOperator: hasReference ? 'OR' : undefined,
+ search,
+ perPage: this.#exportSizeLimit,
+ namespaces: namespace ? [namespace] : undefined,
+ });
+ if (findResponse.total > this.#exportSizeLimit) {
+ throw SavedObjectsExportError.exportSizeExceeded(this.#exportSizeLimit);
+ }
+
+ // sorts server-side by _id, since it's only available in fielddata
+ return (
+ findResponse.saved_objects
+ // exclude the find-specific `score` property from the exported objects
+ .map(({ score, ...obj }) => obj)
+ .sort((a: SavedObject, b: SavedObject) => (a.id > b.id ? 1 : -1))
+ );
+ }
+}
diff --git a/src/core/server/saved_objects/export/types.ts b/src/core/server/saved_objects/export/types.ts
new file mode 100644
index 0000000000000..0ddcdc361c896
--- /dev/null
+++ b/src/core/server/saved_objects/export/types.ts
@@ -0,0 +1,77 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SavedObjectsFindOptionsReference } from '../types';
+
+/** @public */
+export interface SavedObjectExportBaseOptions {
+ /** flag to also include all related saved objects in the export stream. */
+ includeReferencesDeep?: boolean;
+ /** flag to not append {@link SavedObjectsExportResultDetails | export details} to the end of the export stream. */
+ excludeExportDetails?: boolean;
+ /** optional namespace to override the namespace used by the savedObjectsClient. */
+ namespace?: string;
+}
+
+/**
+ * Options for the {@link SavedObjectsExporter.exportByTypes | export by type API}
+ *
+ * @public
+ */
+export interface SavedObjectsExportByTypeOptions extends SavedObjectExportBaseOptions {
+ /** array of saved object types. */
+ types: string[];
+ /** optional array of references to search object for. */
+ hasReference?: SavedObjectsFindOptionsReference[];
+ /** optional query string to filter exported objects. */
+ search?: string;
+}
+
+/**
+ * Options for the {@link SavedObjectsExporter.exportByObjects | export by objects API}
+ *
+ * @public
+ */
+export interface SavedObjectsExportByObjectOptions extends SavedObjectExportBaseOptions {
+ /** optional array of objects to export. */
+ objects: Array<{
+ /** the saved object id. */
+ id: string;
+ /** the saved object type. */
+ type: string;
+ }>;
+}
+
+/**
+ * Structure of the export result details entry
+ * @public
+ */
+export interface SavedObjectsExportResultDetails {
+ /** number of successfully exported objects */
+ exportedCount: number;
+ /** number of missing references */
+ missingRefCount: number;
+ /** missing references details */
+ missingReferences: Array<{
+ /** the missing reference id. */
+ id: string;
+ /** the missing reference type. */
+ type: string;
+ }>;
+}
diff --git a/src/core/server/saved_objects/import/errors.ts b/src/core/server/saved_objects/import/errors.ts
new file mode 100644
index 0000000000000..eab39fa848523
--- /dev/null
+++ b/src/core/server/saved_objects/import/errors.ts
@@ -0,0 +1,75 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { SavedObject } from '../../../types';
+
+/**
+ * @public
+ */
+export class SavedObjectsImportError extends Error {
+ private constructor(
+ public readonly type: string,
+ message: string,
+    public readonly attributes?: Record<string, any>
+ ) {
+ super(message);
+
+ // Set the prototype explicitly, see:
+ // https://github.com/Microsoft/TypeScript/wiki/Breaking-Changes#extending-built-ins-like-error-array-and-map-may-no-longer-work
+ Object.setPrototypeOf(this, SavedObjectsImportError.prototype);
+ }
+
+ static importSizeExceeded(limit: number) {
+ return new SavedObjectsImportError(
+ 'import-size-exceeded',
+ `Can't import more than ${limit} objects`
+ );
+ }
+
+ static nonUniqueImportObjects(nonUniqueEntries: string[]) {
+ return new SavedObjectsImportError(
+ 'non-unique-entries',
+ `Non-unique import objects detected: [${nonUniqueEntries.join()}]`
+ );
+ }
+
+ static nonUniqueRetryObjects(nonUniqueRetryObjects: string[]) {
+ return new SavedObjectsImportError(
+ 'non-unique-retry-objects',
+ `Non-unique retry objects: [${nonUniqueRetryObjects.join()}]`
+ );
+ }
+
+ static nonUniqueRetryDestinations(nonUniqueRetryDestinations: string[]) {
+ return new SavedObjectsImportError(
+ 'non-unique-retry-destination',
+ `Non-unique retry destinations: [${nonUniqueRetryDestinations.join()}]`
+ );
+ }
+
+ static referencesFetchError(objects: SavedObject[]) {
+ return new SavedObjectsImportError(
+ 'references-fetch-error',
+ 'Error fetching references for imported objects',
+ {
+ objects,
+ }
+ );
+ }
+}
diff --git a/src/core/server/saved_objects/import/import_saved_objects.test.ts b/src/core/server/saved_objects/import/import_saved_objects.test.ts
index 77f49e336a7b9..d9f6ffc280078 100644
--- a/src/core/server/saved_objects/import/import_saved_objects.test.ts
+++ b/src/core/server/saved_objects/import/import_saved_objects.test.ts
@@ -23,26 +23,28 @@ import {
SavedObjectsClientContract,
SavedObjectsType,
SavedObject,
- SavedObjectsImportError,
+ SavedObjectsImportFailure,
} from '../types';
import { savedObjectsClientMock } from '../../mocks';
-import { SavedObjectsImportOptions, ISavedObjectTypeRegistry } from '..';
+import { ISavedObjectTypeRegistry } from '..';
import { typeRegistryMock } from '../saved_objects_type_registry.mock';
-import { importSavedObjectsFromStream } from './import_saved_objects';
-
-import { collectSavedObjects } from './collect_saved_objects';
-import { regenerateIds } from './regenerate_ids';
-import { validateReferences } from './validate_references';
-import { checkConflicts } from './check_conflicts';
-import { checkOriginConflicts } from './check_origin_conflicts';
-import { createSavedObjects } from './create_saved_objects';
-
-jest.mock('./collect_saved_objects');
-jest.mock('./regenerate_ids');
-jest.mock('./validate_references');
-jest.mock('./check_conflicts');
-jest.mock('./check_origin_conflicts');
-jest.mock('./create_saved_objects');
+import { importSavedObjectsFromStream, ImportSavedObjectsOptions } from './import_saved_objects';
+
+import {
+ collectSavedObjects,
+ regenerateIds,
+ validateReferences,
+ checkConflicts,
+ checkOriginConflicts,
+ createSavedObjects,
+} from './lib';
+
+jest.mock('./lib/collect_saved_objects');
+jest.mock('./lib/regenerate_ids');
+jest.mock('./lib/validate_references');
+jest.mock('./lib/check_conflicts');
+jest.mock('./lib/check_origin_conflicts');
+jest.mock('./lib/create_saved_objects');
 const getMockFn = <T extends (...args: any[]) => any, U>(fn: (...args: Parameters<T>) => U) =>
fn as jest.MockedFunction<(...args: Parameters