diff --git a/docs/api/saved-objects/find.asciidoc b/docs/api/saved-objects/find.asciidoc
index c43b58d3aa989..f04aeb8420620 100644
--- a/docs/api/saved-objects/find.asciidoc
+++ b/docs/api/saved-objects/find.asciidoc
@@ -53,9 +53,14 @@ experimental[] Retrieve a paginated set of {kib} saved objects by various condit
(Optional, object) Filters to objects that have a relationship with the type and ID combination.
`filter`::
- (Optional, string) The filter is a KQL string with the caveat that if you filter with an attribute from your type saved object.
- It should look like that savedObjectType.attributes.title: "myTitle". However, If you used a direct attribute of a saved object like `updatedAt`,
- you will have to define your filter like that savedObjectType.updatedAt > 2018-12-22.
+ (Optional, string) The filter is a KQL string with the caveat that if you filter with an attribute from your saved object type,
+ it should look like this: `savedObjectType.attributes.title: "myTitle"`. However, if you use a root attribute of a saved
+ object such as `updated_at`, you will have to define your filter like this: `savedObjectType.updated_at > 2018-12-22`.
+
+`aggs`::
+ (Optional, string) experimental[] An aggregation structure, serialized as a string. The field format is similar to `filter`, meaning
+ that to use a saved object type attribute in the aggregation, the `savedObjectType.attributes.title: "myTitle"` format
+ must be used. For root fields, the syntax is `savedObjectType.rootField`.
NOTE: As objects change in {kib}, the results on each page of the response also
change. Use the find API for traditional paginated results, but avoid using it to export large amounts of data.
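For illustration, a request using the new `aggs` parameter could look like the following sketch (the `dashboard` type and `bytes` attribute are invented for the example; the endpoint and the string serialization of `aggs` are the behaviors documented above):

```ts
// Hypothetical example: a `max` aggregation over a saved object attribute,
// using the `savedObjectType.attributes.*` syntax described above.
// `aggs` is a string parameter, so the structure is serialized with JSON.stringify.
const aggs = { maxBytes: { max: { field: 'dashboard.attributes.bytes' } } };

const response = await fetch(
  `/api/saved_objects/_find?type=dashboard&aggs=${encodeURIComponent(JSON.stringify(aggs))}`
);
const { aggregations } = await response.json();
```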
diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.find.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.find.md
index ddd8b207e3d78..fc9652b96450f 100644
--- a/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.find.md
+++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.find.md
@@ -9,5 +9,5 @@ Search for objects
Signature:
```typescript
-find: <T = unknown>(options: SavedObjectsFindOptions) => Promise<SavedObjectsFindResponsePublic<T>>;
+find: <T = unknown, A = unknown>(options: SavedObjectsFindOptions) => Promise<SavedObjectsFindResponsePublic<T, unknown>>;
```
diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.md
index 6e53b169b8bed..1ec756f8d743d 100644
--- a/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.md
+++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsclient.md
@@ -24,7 +24,7 @@ The constructor for this class is marked as internal. Third-party code should no
| [bulkGet](./kibana-plugin-core-public.savedobjectsclient.bulkget.md) |  | (objects?: Array<{ id: string; type: string; }>) => Promise<SavedObjectsBatchResponse<unknown>> | Returns an array of objects by id |
| [create](./kibana-plugin-core-public.savedobjectsclient.create.md) |  | <T = unknown>(type: string, attributes: T, options?: SavedObjectsCreateOptions) => Promise<SimpleSavedObject<T>> | Persists an object |
| [delete](./kibana-plugin-core-public.savedobjectsclient.delete.md) |  | (type: string, id: string, options?: SavedObjectsDeleteOptions \| undefined) => ReturnType<SavedObjectsApi['delete']> | Deletes an object |
-| [find](./kibana-plugin-core-public.savedobjectsclient.find.md) |  | <T = unknown>(options: SavedObjectsFindOptions) => Promise<SavedObjectsFindResponsePublic<T>> | Search for objects |
+| [find](./kibana-plugin-core-public.savedobjectsclient.find.md) |  | <T = unknown, A = unknown>(options: SavedObjectsFindOptions) => Promise<SavedObjectsFindResponsePublic<T, unknown>> | Search for objects |
| [get](./kibana-plugin-core-public.savedobjectsclient.get.md) |  | <T = unknown>(type: string, id: string) => Promise<SimpleSavedObject<T>> | Fetches a single object |
## Methods
diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.aggregations.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.aggregations.md
new file mode 100644
index 0000000000000..14401b02f25c7
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.aggregations.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [SavedObjectsFindResponsePublic](./kibana-plugin-core-public.savedobjectsfindresponsepublic.md) > [aggregations](./kibana-plugin-core-public.savedobjectsfindresponsepublic.aggregations.md)
+
+## SavedObjectsFindResponsePublic.aggregations property
+
+Signature:
+
+```typescript
+aggregations?: A;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.md b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.md
index 7d75878041264..6f2276194f054 100644
--- a/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.md
+++ b/docs/development/core/public/kibana-plugin-core-public.savedobjectsfindresponsepublic.md
@@ -11,13 +11,14 @@ Return type of the Saved Objects `find()` method.
Signature:
```typescript
-export interface SavedObjectsFindResponsePublic<T = unknown> extends SavedObjectsBatchResponse<T>
+export interface SavedObjectsFindResponsePublic<T = unknown, A = unknown> extends SavedObjectsBatchResponse<T>
```
## Properties
| Property | Type | Description |
| --- | --- | --- |
+| [aggregations](./kibana-plugin-core-public.savedobjectsfindresponsepublic.aggregations.md) | A |  |
| [page](./kibana-plugin-core-public.savedobjectsfindresponsepublic.page.md) | number |  |
| [perPage](./kibana-plugin-core-public.savedobjectsfindresponsepublic.perpage.md) | number |  |
| [total](./kibana-plugin-core-public.savedobjectsfindresponsepublic.total.md) | number |  |
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.find.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.find.md
index 9a4c3df5d2d92..56d76125108d1 100644
--- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.find.md
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.find.md
@@ -9,7 +9,7 @@ Find all SavedObjects matching the search query
Signature:
```typescript
-find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>>;
+find<T = unknown, A = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T, A>>;
```
## Parameters
@@ -20,5 +20,5 @@ find(options: SavedObjectsFindOptions): Promise

Returns:

-`Promise<SavedObjectsFindResponse<T>>`
+`Promise<SavedObjectsFindResponse<T, A>>`
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.aggregations.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.aggregations.md
new file mode 100644
index 0000000000000..17a899f4c8280
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.aggregations.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [SavedObjectsFindResponse](./kibana-plugin-core-server.savedobjectsfindresponse.md) > [aggregations](./kibana-plugin-core-server.savedobjectsfindresponse.aggregations.md)
+
+## SavedObjectsFindResponse.aggregations property
+
+Signature:
+
+```typescript
+aggregations?: A;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.md
index fd56e8ce40e24..8176baf44acbd 100644
--- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.md
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsfindresponse.md
@@ -11,13 +11,14 @@ Return type of the Saved Objects `find()` method.
Signature:
```typescript
-export interface SavedObjectsFindResponse<T = unknown>
+export interface SavedObjectsFindResponse<T = unknown, A = unknown>
```
## Properties
| Property | Type | Description |
| --- | --- | --- |
+| [aggregations](./kibana-plugin-core-server.savedobjectsfindresponse.aggregations.md) | A |  |
| [page](./kibana-plugin-core-server.savedobjectsfindresponse.page.md) | number |  |
| [per\_page](./kibana-plugin-core-server.savedobjectsfindresponse.per_page.md) | number |  |
| [pit\_id](./kibana-plugin-core-server.savedobjectsfindresponse.pit_id.md) | string |  |
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsrepository.find.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsrepository.find.md
index d3e93e7af2aa0..5c823b7567918 100644
--- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsrepository.find.md
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsrepository.find.md
@@ -7,7 +7,7 @@
Signature:
```typescript
-find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>>;
+find<T = unknown, A = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T, A>>;
```
## Parameters
@@ -18,7 +18,7 @@ find(options: SavedObjectsFindOptions): Promise

Returns:

-`Promise<SavedObjectsFindResponse<T>>`
+`Promise<SavedObjectsFindResponse<T, A>>`
{promise} - { saved\_objects: \[{ id, type, version, attributes }\], total, per\_page, page }
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md
index 40e865cb02ce8..23cbebf22aa21 100644
--- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md
@@ -9,5 +9,5 @@ Creates an empty response for a find operation. This is only intended to be used
Signature:
```typescript
-static createEmptyFindResponse: <T>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T>;
+static createEmptyFindResponse: <T, A>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T, A>;
```
diff --git a/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.md b/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.md
index 8c787364c4cbe..0148621e757b7 100644
--- a/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.md
+++ b/docs/development/core/server/kibana-plugin-core-server.savedobjectsutils.md
@@ -15,7 +15,7 @@ export declare class SavedObjectsUtils
| Property | Modifiers | Type | Description |
| --- | --- | --- | --- |
-| [createEmptyFindResponse](./kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md) | static | <T>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T> | Creates an empty response for a find operation. This is only intended to be used by saved objects client wrappers. |
+| [createEmptyFindResponse](./kibana-plugin-core-server.savedobjectsutils.createemptyfindresponse.md) | static | <T, A>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T, A> | Creates an empty response for a find operation. This is only intended to be used by saved objects client wrappers. |
| [namespaceIdToString](./kibana-plugin-core-server.savedobjectsutils.namespaceidtostring.md) | static | (namespace?: string \| undefined) => string | Converts a given saved object namespace ID to its string representation. All namespace IDs have an identical string representation, with the exception of the undefined namespace ID (which has a namespace string of 'default'). |
| [namespaceStringToId](./kibana-plugin-core-server.savedobjectsutils.namespacestringtoid.md) | static | (namespace: string) => string \| undefined | Converts a given saved object namespace string to its ID representation. All namespace strings have an identical ID representation, with the exception of the 'default' namespace string (which has a namespace ID of undefined). |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.getserializableoptions.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.getserializableoptions.md
new file mode 100644
index 0000000000000..984f99004ebe8
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.getserializableoptions.md
@@ -0,0 +1,22 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [SearchInterceptor](./kibana-plugin-plugins-data-public.searchinterceptor.md) > [getSerializableOptions](./kibana-plugin-plugins-data-public.searchinterceptor.getserializableoptions.md)
+
+## SearchInterceptor.getSerializableOptions() method
+
+Signature:
+
+```typescript
+protected getSerializableOptions(options?: ISearchOptions): Pick<ISearchOptions, 'sessionId' | 'isStored' | 'isRestore' | 'strategy'>;
+```
+
+## Parameters
+
+| Parameter | Type | Description |
+| --- | --- | --- |
+| options | ISearchOptions |  |
+
+Returns:
+
+`Pick<ISearchOptions, 'sessionId' | 'isStored' | 'isRestore' | 'strategy'>`
+
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md
index 9d18309fc07be..653f052dd5a3a 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.searchinterceptor.md
@@ -26,6 +26,7 @@ export declare class SearchInterceptor
| Method | Modifiers | Description |
| --- | --- | --- |
+| [getSerializableOptions(options)](./kibana-plugin-plugins-data-public.searchinterceptor.getserializableoptions.md) | | |
| [getTimeoutMode()](./kibana-plugin-plugins-data-public.searchinterceptor.gettimeoutmode.md) | | |
| [handleSearchError(e, options, isTimeout)](./kibana-plugin-plugins-data-public.searchinterceptor.handlesearcherror.md) | | |
| [search(request, options)](./kibana-plugin-plugins-data-public.searchinterceptor.search.md) |  | Searches using the given search method. Overrides the AbortSignal with one that will abort either when the request times out, or when the original AbortSignal is aborted. Updates pendingCount$ when the request is started/finalized. |
diff --git a/examples/search_examples/common/index.ts b/examples/search_examples/common/index.ts
index dd953b1ec8982..cc47c0f575973 100644
--- a/examples/search_examples/common/index.ts
+++ b/examples/search_examples/common/index.ts
@@ -16,6 +16,7 @@ export interface IMyStrategyRequest extends IEsSearchRequest {
}
export interface IMyStrategyResponse extends IEsSearchResponse {
cool: string;
+ executed_at: number;
}
export const SERVER_SEARCH_ROUTE_PATH = '/api/examples/search';
diff --git a/examples/search_examples/public/search/app.tsx b/examples/search_examples/public/search/app.tsx
index 3bac445581ae7..8f31d242faf5e 100644
--- a/examples/search_examples/public/search/app.tsx
+++ b/examples/search_examples/public/search/app.tsx
@@ -111,7 +111,7 @@ export const SearchExamplesApp = ({
setSelectedNumericField(fields?.length ? getNumeric(fields)[0] : null);
}, [fields]);
- const doAsyncSearch = async (strategy?: string) => {
+ const doAsyncSearch = async (strategy?: string, sessionId?: string) => {
if (!indexPattern || !selectedNumericField) return;
// Construct the query portion of the search request
@@ -138,6 +138,7 @@ export const SearchExamplesApp = ({
const searchSubscription$ = data.search
.search(req, {
strategy,
+ sessionId,
})
.subscribe({
next: (res) => {
@@ -148,19 +149,30 @@ export const SearchExamplesApp = ({
? // @ts-expect-error @elastic/elasticsearch no way to declare a type for aggregation in the search response
res.rawResponse.aggregations[1].value
: undefined;
+ const isCool = (res as IMyStrategyResponse).cool;
+ const executedAt = (res as IMyStrategyResponse).executed_at;
            const message = (
              <EuiText>
                Searched {res.rawResponse.hits.total} documents.
                <br />
                The average of {selectedNumericField!.name} is{' '}
                {avgResult ? Math.floor(avgResult) : 0}.
                <br />
-               Is it Cool? {String((res as IMyStrategyResponse).cool)}
+               {isCool ? `Is it Cool? ${isCool}` : undefined}
+               <br />
+               {executedAt ? `Executed at? ${executedAt}` : undefined}
              </EuiText>
            );
- notifications.toasts.addSuccess({
- title: 'Query result',
- text: mountReactNode(message),
- });
+ notifications.toasts.addSuccess(
+ {
+ title: 'Query result',
+ text: mountReactNode(message),
+ },
+ {
+ toastLifeTimeMs: 300000,
+ }
+ );
searchSubscription$.unsubscribe();
} else if (isErrorResponse(res)) {
// TODO: Make response error status clearer
@@ -227,6 +239,10 @@ export const SearchExamplesApp = ({
doAsyncSearch('myStrategy');
};
+ const onClientSideSessionCacheClickHandler = () => {
+ doAsyncSearch('myStrategy', data.search.session.getSessionId());
+ };
+
const onServerClickHandler = async () => {
if (!indexPattern || !selectedNumericField) return;
try {
@@ -374,6 +390,45 @@ export const SearchExamplesApp = ({
+          <EuiTitle size="s">
+            <h3>Client side search session caching</h3>
+          </EuiTitle>
+          <EuiButtonEmpty
+            onClick={() => data.search.session.start()}
+            iconType="alert"
+            data-test-subj="searchExamplesStartSession"
+          >
+            Start a new session
+          </EuiButtonEmpty>
+          <EuiButtonEmpty
+            onClick={() => data.search.session.clear()}
+            iconType="alert"
+            data-test-subj="searchExamplesClearSession"
+          >
+            Clear the session
+          </EuiButtonEmpty>
+          <EuiButton onClick={onClientSideSessionCacheClickHandler}>
+            Search with the current session
+          </EuiButton>
Using search on the server
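To make the session wiring in this example easier to follow, here is a condensed sketch of the flow it adds (names are the ones used by the example app; treat this as illustrative rather than a public contract):

```ts
// Start (or restart) a client-side search session and keep its id. Searches
// issued with the same sessionId can be resolved from the session cache
// instead of re-running the search strategy.
const sessionId = data.search.session.start();

data.search
  .search(req, { strategy: 'myStrategy', sessionId })
  .subscribe({ next: (res) => console.log(res) });

// data.search.session.clear() ends the session, so subsequent searches run fresh.
```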
diff --git a/examples/search_examples/server/my_strategy.ts b/examples/search_examples/server/my_strategy.ts
index 2cf039e99f6e9..0a64788960091 100644
--- a/examples/search_examples/server/my_strategy.ts
+++ b/examples/search_examples/server/my_strategy.ts
@@ -20,6 +20,7 @@ export const mySearchStrategyProvider = (
map((esSearchRes) => ({
...esSearchRes,
cool: request.get_cool ? 'YES' : 'NOPE',
+ executed_at: new Date().getTime(),
}))
),
cancel: async (id, options, deps) => {
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 8c1753c2cabab..18133ebec3353 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -1224,7 +1224,7 @@ export class SavedObjectsClient {
// Warning: (ae-forgotten-export) The symbol "SavedObjectsClientContract" needs to be exported by the entry point index.d.ts
delete: (type: string, id: string, options?: SavedObjectsDeleteOptions | undefined) => ReturnType<SavedObjectsApi['delete']>;
// Warning: (ae-forgotten-export) The symbol "SavedObjectsFindOptions" needs to be exported by the entry point index.d.ts
- find: <T = unknown>(options: SavedObjectsFindOptions_2) => Promise<SavedObjectsFindResponsePublic<T>>;
+ find: <T = unknown, A = unknown>(options: SavedObjectsFindOptions_2) => Promise<SavedObjectsFindResponsePublic<T, unknown>>;
get: <T = unknown>(type: string, id: string) => Promise<SimpleSavedObject<T>>;
update<T = unknown>(type: string, id: string, attributes: T, { version, migrationVersion, references }?: SavedObjectsUpdateOptions): Promise<SimpleSavedObject<T>>;
}
@@ -1244,6 +1244,8 @@ export interface SavedObjectsCreateOptions {
// @public (undocumented)
export interface SavedObjectsFindOptions {
+ // @alpha
+ aggs?: Record<string, estypes.AggregationContainer>;
defaultSearchOperator?: 'AND' | 'OR';
fields?: string[];
// Warning: (ae-forgotten-export) The symbol "KueryNode" needs to be exported by the entry point index.d.ts
@@ -1284,7 +1286,9 @@ export interface SavedObjectsFindOptionsReference {
}
// @public
-export interface SavedObjectsFindResponsePublic<T = unknown> extends SavedObjectsBatchResponse<T> {
+export interface SavedObjectsFindResponsePublic<T = unknown, A = unknown> extends SavedObjectsBatchResponse<T> {
+ // (undocumented)
+ aggregations?: A;
// (undocumented)
page: number;
// (undocumented)
diff --git a/src/core/public/saved_objects/saved_objects_client.ts b/src/core/public/saved_objects/saved_objects_client.ts
index 44466025de7e3..782ffa6897048 100644
--- a/src/core/public/saved_objects/saved_objects_client.ts
+++ b/src/core/public/saved_objects/saved_objects_client.ts
@@ -103,7 +103,9 @@ export interface SavedObjectsDeleteOptions {
*
* @public
*/
-export interface SavedObjectsFindResponsePublic<T = unknown> extends SavedObjectsBatchResponse<T> {
+export interface SavedObjectsFindResponsePublic<T = unknown, A = unknown>
+  extends SavedObjectsBatchResponse<T> {
+ aggregations?: A;
total: number;
perPage: number;
page: number;
@@ -310,7 +312,7 @@ export class SavedObjectsClient {
* @property {object} [options.hasReference] - { type, id }
* @returns A find result with objects matching the specified search.
*/
-  public find = <T = unknown>(
+  public find = <T = unknown, A = unknown>(
     options: SavedObjectsFindOptions
   ): Promise<SavedObjectsFindResponsePublic<T, A>> => {
const path = this.getPath(['_find']);
@@ -326,6 +328,7 @@ export class SavedObjectsClient {
sortField: 'sort_field',
type: 'type',
filter: 'filter',
+ aggs: 'aggs',
namespaces: 'namespaces',
preference: 'preference',
};
@@ -342,6 +345,12 @@ export class SavedObjectsClient {
query.has_reference = JSON.stringify(query.has_reference);
}
+ // `aggs` is a structured object. We need to stringify it before sending it, as `fetch`
+ // does not do it implicitly.
+ if (query.aggs) {
+ query.aggs = JSON.stringify(query.aggs);
+ }
+
const request: ReturnType<SavedObjectsApi['find']> = this.savedObjectsFetch(path, {
method: 'GET',
query,
@@ -349,6 +358,7 @@ export class SavedObjectsClient {
return request.then((resp) => {
return renameKeys(
{
+ aggregations: 'aggregations',
saved_objects: 'savedObjects',
total: 'total',
per_page: 'perPage',
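A hedged sketch of how a consumer would use the new `aggs` option through this client (the `dashboard` type, `bytes` field, and `maxBytes` name are invented for the example). The client stringifies `aggs` before the fetch, and `renameKeys` maps the response's `aggregations` key through unchanged, so it lands directly on the typed result:

```ts
interface MaxBytesAggs {
  maxBytes: { value: number };
}

const result = await savedObjectsClient.find<unknown, MaxBytesAggs>({
  type: 'dashboard',
  aggs: { maxBytes: { max: { field: 'dashboard.attributes.bytes' } } },
});

console.log(result.aggregations?.maxBytes.value);
```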
diff --git a/src/core/server/saved_objects/routes/find.ts b/src/core/server/saved_objects/routes/find.ts
index 6ba23747cf374..d21039db30e5f 100644
--- a/src/core/server/saved_objects/routes/find.ts
+++ b/src/core/server/saved_objects/routes/find.ts
@@ -44,6 +44,7 @@ export const registerFindRoute = (router: IRouter, { coreUsageData }: RouteDepen
has_reference_operator: searchOperatorSchema,
fields: schema.maybe(schema.oneOf([schema.string(), schema.arrayOf(schema.string())])),
filter: schema.maybe(schema.string()),
+ aggs: schema.maybe(schema.string()),
namespaces: schema.maybe(
schema.oneOf([schema.string(), schema.arrayOf(schema.string())])
),
@@ -59,6 +60,20 @@ export const registerFindRoute = (router: IRouter, { coreUsageData }: RouteDepen
const usageStatsClient = coreUsageData.getClient();
usageStatsClient.incrementSavedObjectsFind({ request: req }).catch(() => {});
+      // manual validation to avoid calling JSON.parse twice
+ let aggs;
+ if (query.aggs) {
+ try {
+ aggs = JSON.parse(query.aggs);
+ } catch (e) {
+ return res.badRequest({
+ body: {
+ message: 'invalid aggs value',
+ },
+ });
+ }
+ }
+
const result = await context.core.savedObjects.client.find({
perPage: query.per_page,
page: query.page,
@@ -72,6 +87,7 @@ export const registerFindRoute = (router: IRouter, { coreUsageData }: RouteDepen
hasReferenceOperator: query.has_reference_operator,
fields: typeof query.fields === 'string' ? [query.fields] : query.fields,
filter: query.filter,
+ aggs,
namespaces,
});
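For completeness, a sketch of the failure path handled by the manual validation above (the client code is illustrative; the 400 response is the one returned by this route):

```ts
// A malformed `aggs` value fails the single JSON.parse and is rejected with
// a 400 "invalid aggs value" before ever reaching the saved objects client.
const res = await fetch('/api/saved_objects/_find?type=dashboard&aggs=not-json');
console.log(res.status); // 400
```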
diff --git a/src/core/server/saved_objects/service/lib/aggregations/aggs_types/bucket_aggs.ts b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/bucket_aggs.ts
new file mode 100644
index 0000000000000..1508cab69a048
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/bucket_aggs.ts
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { schema as s, ObjectType } from '@kbn/config-schema';
+
+/**
+ * Schemas for the Bucket aggregations.
+ *
+ * Currently supported:
+ * - filter
+ * - histogram
+ * - terms
+ *
+ * Not implemented:
+ * - adjacency_matrix
+ * - auto_date_histogram
+ * - children
+ * - composite
+ * - date_histogram
+ * - date_range
+ * - diversified_sampler
+ * - filters
+ * - geo_distance
+ * - geohash_grid
+ * - geotile_grid
+ * - global
+ * - ip_range
+ * - missing
+ * - multi_terms
+ * - nested
+ * - parent
+ * - range
+ * - rare_terms
+ * - reverse_nested
+ * - sampler
+ * - significant_terms
+ * - significant_text
+ * - variable_width_histogram
+ */
+export const bucketAggsSchemas: Record<string, ObjectType> = {
+ filter: s.object({
+ term: s.recordOf(s.string(), s.oneOf([s.string(), s.boolean(), s.number()])),
+ }),
+ histogram: s.object({
+ field: s.maybe(s.string()),
+ interval: s.maybe(s.number()),
+ min_doc_count: s.maybe(s.number()),
+ extended_bounds: s.maybe(
+ s.object({
+ min: s.number(),
+ max: s.number(),
+ })
+ ),
+ hard_bounds: s.maybe(
+ s.object({
+ min: s.number(),
+ max: s.number(),
+ })
+ ),
+ missing: s.maybe(s.number()),
+ keyed: s.maybe(s.boolean()),
+ order: s.maybe(
+ s.object({
+ _count: s.string(),
+ _key: s.string(),
+ })
+ ),
+ }),
+ terms: s.object({
+ field: s.maybe(s.string()),
+ collect_mode: s.maybe(s.string()),
+ exclude: s.maybe(s.oneOf([s.string(), s.arrayOf(s.string())])),
+ include: s.maybe(s.oneOf([s.string(), s.arrayOf(s.string())])),
+ execution_hint: s.maybe(s.string()),
+ missing: s.maybe(s.number()),
+ min_doc_count: s.maybe(s.number()),
+ size: s.maybe(s.number()),
+ show_term_doc_count_error: s.maybe(s.boolean()),
+ order: s.maybe(s.oneOf([s.literal('asc'), s.literal('desc')])),
+ }),
+};
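A minimal sketch of how one of these schemas behaves, assuming `@kbn/config-schema`'s `validate()` (this mirrors how `validation.ts` consumes them):

```ts
import { bucketAggsSchemas } from './bucket_aggs';

// Passes: only declared keys, with the declared types.
bucketAggsSchemas.terms.validate({ field: 'foo.bytes', size: 10 });

// Throws: `script` is not declared in the schema, and @kbn/config-schema
// rejects unknown keys by default; this is what keeps script-based
// aggregations out of the saved objects find API.
// bucketAggsSchemas.terms.validate({ field: 'foo.bytes', script: '...' });
```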
diff --git a/src/core/server/saved_objects/service/lib/aggregations/aggs_types/index.ts b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/index.ts
new file mode 100644
index 0000000000000..7967fad0185fb
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/index.ts
@@ -0,0 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { bucketAggsSchemas } from './bucket_aggs';
+import { metricsAggsSchemas } from './metrics_aggs';
+
+export const aggregationSchemas = {
+ ...metricsAggsSchemas,
+ ...bucketAggsSchemas,
+};
diff --git a/src/core/server/saved_objects/service/lib/aggregations/aggs_types/metrics_aggs.ts b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/metrics_aggs.ts
new file mode 100644
index 0000000000000..c05ae67cd2164
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/aggs_types/metrics_aggs.ts
@@ -0,0 +1,94 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { schema as s, ObjectType } from '@kbn/config-schema';
+
+/**
+ * Schemas for the metrics Aggregations
+ *
+ * Currently supported:
+ * - avg
+ * - cardinality
+ * - min
+ * - max
+ * - sum
+ * - top_hits
+ * - weighted_avg
+ *
+ * Not implemented:
+ * - boxplot
+ * - extended_stats
+ * - geo_bounds
+ * - geo_centroid
+ * - geo_line
+ * - matrix_stats
+ * - median_absolute_deviation
+ * - percentile_ranks
+ * - percentiles
+ * - rate
+ * - scripted_metric
+ * - stats
+ * - string_stats
+ * - t_test
+ * - value_count
+ */
+export const metricsAggsSchemas: Record<string, ObjectType> = {
+ avg: s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.oneOf([s.string(), s.number(), s.boolean()])),
+ }),
+ cardinality: s.object({
+ field: s.maybe(s.string()),
+ precision_threshold: s.maybe(s.number()),
+ rehash: s.maybe(s.boolean()),
+ missing: s.maybe(s.oneOf([s.string(), s.number(), s.boolean()])),
+ }),
+ min: s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.oneOf([s.string(), s.number(), s.boolean()])),
+ format: s.maybe(s.string()),
+ }),
+ max: s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.oneOf([s.string(), s.number(), s.boolean()])),
+ format: s.maybe(s.string()),
+ }),
+ sum: s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.oneOf([s.string(), s.number(), s.boolean()])),
+ }),
+ top_hits: s.object({
+ explain: s.maybe(s.boolean()),
+ docvalue_fields: s.maybe(s.oneOf([s.string(), s.arrayOf(s.string())])),
+ stored_fields: s.maybe(s.oneOf([s.string(), s.arrayOf(s.string())])),
+ from: s.maybe(s.number()),
+ size: s.maybe(s.number()),
+ sort: s.maybe(s.oneOf([s.literal('asc'), s.literal('desc')])),
+ seq_no_primary_term: s.maybe(s.boolean()),
+ version: s.maybe(s.boolean()),
+ track_scores: s.maybe(s.boolean()),
+ highlight: s.maybe(s.any()),
+ _source: s.maybe(s.oneOf([s.boolean(), s.string(), s.arrayOf(s.string())])),
+ }),
+ weighted_avg: s.object({
+ format: s.maybe(s.string()),
+ value_type: s.maybe(s.string()),
+ value: s.maybe(
+ s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.number()),
+ })
+ ),
+ weight: s.maybe(
+ s.object({
+ field: s.maybe(s.string()),
+ missing: s.maybe(s.number()),
+ })
+ ),
+ }),
+};
diff --git a/src/core/server/saved_objects/service/lib/aggregations/index.ts b/src/core/server/saved_objects/service/lib/aggregations/index.ts
new file mode 100644
index 0000000000000..f71d3e8daea9d
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/index.ts
@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { validateAndConvertAggregations } from './validation';
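This merged registry is what `validation.ts` indexes into by aggregation name; a quick sketch:

```ts
import { aggregationSchemas } from './aggs_types';

const maxSchema = aggregationSchemas.max; // defined, so its structure can be validated
const composite = aggregationSchemas.composite; // undefined, which triggers the
// "aggregation is not valid (or not registered yet)" error upstream
```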
diff --git a/src/core/server/saved_objects/service/lib/aggregations/validation.test.ts b/src/core/server/saved_objects/service/lib/aggregations/validation.test.ts
new file mode 100644
index 0000000000000..8a7c1c3719eb0
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/validation.test.ts
@@ -0,0 +1,431 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { estypes } from '@elastic/elasticsearch';
+import { validateAndConvertAggregations } from './validation';
+
+type AggsMap = Record<string, estypes.AggregationContainer>;
+
+const mockMappings = {
+ properties: {
+ updated_at: {
+ type: 'date',
+ },
+ foo: {
+ properties: {
+ title: {
+ type: 'text',
+ },
+ description: {
+ type: 'text',
+ },
+ bytes: {
+ type: 'number',
+ },
+ },
+ },
+ bean: {
+ properties: {
+ canned: {
+ fields: {
+ text: {
+ type: 'text',
+ },
+ },
+ type: 'keyword',
+ },
+ },
+ },
+ alert: {
+ properties: {
+ actions: {
+ type: 'nested',
+ properties: {
+ group: {
+ type: 'keyword',
+ },
+ actionRef: {
+ type: 'keyword',
+ },
+ actionTypeId: {
+ type: 'keyword',
+ },
+ params: {
+ enabled: false,
+ type: 'object',
+ },
+ },
+ },
+ params: {
+ type: 'flattened',
+ },
+ },
+ },
+ },
+};
+
+describe('validateAndConvertAggregations', () => {
+  it('validates a simple aggregation', () => {
+ expect(
+ validateAndConvertAggregations(
+ ['foo'],
+ { aggName: { max: { field: 'foo.attributes.bytes' } } },
+ mockMappings
+ )
+ ).toEqual({
+ aggName: {
+ max: {
+ field: 'foo.bytes',
+ },
+ },
+ });
+ });
+
+ it('validates a nested field in simple aggregations', () => {
+ expect(
+ validateAndConvertAggregations(
+ ['alert'],
+ { aggName: { cardinality: { field: 'alert.attributes.actions.group' } } },
+ mockMappings
+ )
+ ).toEqual({
+ aggName: {
+ cardinality: {
+ field: 'alert.actions.group',
+ },
+ },
+ });
+ });
+
+  it('validates a nested aggregation', () => {
+ expect(
+ validateAndConvertAggregations(
+ ['alert'],
+ {
+ aggName: {
+ cardinality: {
+ field: 'alert.attributes.actions.group',
+ },
+ aggs: {
+ aggName: {
+ max: { field: 'alert.attributes.actions.group' },
+ },
+ },
+ },
+ },
+ mockMappings
+ )
+ ).toEqual({
+ aggName: {
+ cardinality: {
+ field: 'alert.actions.group',
+ },
+ aggs: {
+ aggName: {
+ max: {
+ field: 'alert.actions.group',
+ },
+ },
+ },
+ },
+ });
+ });
+
+  it('validates deeply nested aggregations', () => {
+ expect(
+ validateAndConvertAggregations(
+ ['alert'],
+ {
+ first: {
+ cardinality: {
+ field: 'alert.attributes.actions.group',
+ },
+ aggs: {
+ second: {
+ max: { field: 'alert.attributes.actions.group' },
+ aggs: {
+ third: {
+ min: {
+ field: 'alert.attributes.actions.actionTypeId',
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ mockMappings
+ )
+ ).toEqual({
+ first: {
+ cardinality: {
+ field: 'alert.actions.group',
+ },
+ aggs: {
+ second: {
+ max: { field: 'alert.actions.group' },
+ aggs: {
+ third: {
+ min: {
+ field: 'alert.actions.actionTypeId',
+ },
+ },
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('rewrites type attributes when valid', () => {
+ const aggregations: AggsMap = {
+ average: {
+ avg: {
+ field: 'alert.attributes.actions.group',
+ missing: 10,
+ },
+ },
+ };
+ expect(validateAndConvertAggregations(['alert'], aggregations, mockMappings)).toEqual({
+ average: {
+ avg: {
+ field: 'alert.actions.group',
+ missing: 10,
+ },
+ },
+ });
+ });
+
+ it('rewrites root attributes when valid', () => {
+ const aggregations: AggsMap = {
+ average: {
+ avg: {
+ field: 'alert.updated_at',
+ missing: 10,
+ },
+ },
+ };
+ expect(validateAndConvertAggregations(['alert'], aggregations, mockMappings)).toEqual({
+ average: {
+ avg: {
+ field: 'updated_at',
+ missing: 10,
+ },
+ },
+ });
+ });
+
+  it('throws an error when the `field` name is not using the attributes path', () => {
+ const aggregations: AggsMap = {
+ average: {
+ avg: {
+ field: 'alert.actions.group',
+ missing: 10,
+ },
+ },
+ };
+ expect(() =>
+ validateAndConvertAggregations(['alert'], aggregations, mockMappings)
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[average.avg.field] Invalid attribute path: alert.actions.group"`
+ );
+ });
+
+ it('throws an error when the `field` name is referencing an invalid field', () => {
+ const aggregations: AggsMap = {
+ average: {
+ avg: {
+ field: 'alert.attributes.actions.non_existing',
+ missing: 10,
+ },
+ },
+ };
+ expect(() =>
+ validateAndConvertAggregations(['alert'], aggregations, mockMappings)
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[average.avg.field] Invalid attribute path: alert.attributes.actions.non_existing"`
+ );
+ });
+
+ it('throws an error when the attribute path is referencing an invalid root field', () => {
+ const aggregations: AggsMap = {
+ average: {
+ avg: {
+ field: 'alert.bad_root',
+ missing: 10,
+ },
+ },
+ };
+ expect(() =>
+ validateAndConvertAggregations(['alert'], aggregations, mockMappings)
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[average.avg.field] Invalid attribute path: alert.bad_root"`
+ );
+ });
+
+ it('rewrites the `field` name even when nested', () => {
+ const aggregations: AggsMap = {
+ average: {
+ weighted_avg: {
+ value: {
+ field: 'alert.attributes.actions.group',
+ missing: 10,
+ },
+ weight: {
+ field: 'alert.attributes.actions.actionRef',
+ },
+ },
+ },
+ };
+ expect(validateAndConvertAggregations(['alert'], aggregations, mockMappings)).toEqual({
+ average: {
+ weighted_avg: {
+ value: {
+ field: 'alert.actions.group',
+ missing: 10,
+ },
+ weight: {
+ field: 'alert.actions.actionRef',
+ },
+ },
+ },
+ });
+ });
+
+ it('rewrites the entries of a filter term record', () => {
+ const aggregations: AggsMap = {
+ myFilter: {
+ filter: {
+ term: {
+ 'foo.attributes.description': 'hello',
+ 'foo.attributes.bytes': 10,
+ },
+ },
+ },
+ };
+ expect(validateAndConvertAggregations(['foo'], aggregations, mockMappings)).toEqual({
+ myFilter: {
+ filter: {
+ term: { 'foo.description': 'hello', 'foo.bytes': 10 },
+ },
+ },
+ });
+ });
+
+ it('throws an error when referencing non-allowed types', () => {
+ const aggregations: AggsMap = {
+ myFilter: {
+ max: {
+ field: 'foo.attributes.bytes',
+ },
+ },
+ };
+
+ expect(() => {
+ validateAndConvertAggregations(['alert'], aggregations, mockMappings);
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"[myFilter.max.field] Invalid attribute path: foo.attributes.bytes"`
+ );
+ });
+
+  it('throws an error when an attribute does not respect its schema definition', () => {
+ const aggregations: AggsMap = {
+ someAgg: {
+ terms: {
+ missing: 'expecting a number',
+ },
+ },
+ };
+
+ expect(() =>
+ validateAndConvertAggregations(['alert'], aggregations, mockMappings)
+ ).toThrowErrorMatchingInlineSnapshot(
+ `"[someAgg.terms.missing]: expected value of type [number] but got [string]"`
+ );
+ });
+
+ it('throws an error when trying to validate an unknown aggregation type', () => {
+ const aggregations: AggsMap = {
+ someAgg: {
+ auto_date_histogram: {
+ field: 'foo.attributes.bytes',
+ },
+ },
+ };
+
+ expect(() => {
+ validateAndConvertAggregations(['foo'], aggregations, mockMappings);
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"[someAgg.auto_date_histogram] auto_date_histogram aggregation is not valid (or not registered yet)"`
+ );
+ });
+
+ it('throws an error when a child aggregation is unknown', () => {
+ const aggregations: AggsMap = {
+ someAgg: {
+ max: {
+ field: 'foo.attributes.bytes',
+ },
+ aggs: {
+ unknownAgg: {
+ cumulative_cardinality: {
+ format: 'format',
+ },
+ },
+ },
+ },
+ };
+
+ expect(() => {
+ validateAndConvertAggregations(['foo'], aggregations, mockMappings);
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"[someAgg.aggs.unknownAgg.cumulative_cardinality] cumulative_cardinality aggregation is not valid (or not registered yet)"`
+ );
+ });
+
+ it('throws an error when using a script attribute', () => {
+ const aggregations: AggsMap = {
+ someAgg: {
+ max: {
+ field: 'foo.attributes.bytes',
+ script: 'This is a bad script',
+ },
+ },
+ };
+
+ expect(() => {
+ validateAndConvertAggregations(['foo'], aggregations, mockMappings);
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"[someAgg.max.script]: definition for this key is missing"`
+ );
+ });
+
+ it('throws an error when using a script attribute in a nested aggregation', () => {
+ const aggregations: AggsMap = {
+ someAgg: {
+ min: {
+ field: 'foo.attributes.bytes',
+ },
+ aggs: {
+ nested: {
+ max: {
+ field: 'foo.attributes.bytes',
+ script: 'This is a bad script',
+ },
+ },
+ },
+ },
+ };
+
+ expect(() => {
+ validateAndConvertAggregations(['foo'], aggregations, mockMappings);
+ }).toThrowErrorMatchingInlineSnapshot(
+ `"[someAgg.aggs.nested.max.script]: definition for this key is missing"`
+ );
+ });
+});
diff --git a/src/core/server/saved_objects/service/lib/aggregations/validation.ts b/src/core/server/saved_objects/service/lib/aggregations/validation.ts
new file mode 100644
index 0000000000000..a2fd392183132
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/validation.ts
@@ -0,0 +1,229 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { estypes } from '@elastic/elasticsearch';
+import { ObjectType } from '@kbn/config-schema';
+import { isPlainObject } from 'lodash';
+
+import { IndexMapping } from '../../../mappings';
+import {
+ isObjectTypeAttribute,
+ rewriteObjectTypeAttribute,
+ isRootLevelAttribute,
+ rewriteRootLevelAttribute,
+} from './validation_utils';
+import { aggregationSchemas } from './aggs_types';
+
+const aggregationKeys = ['aggs', 'aggregations'];
+
+interface ValidationContext {
+ allowedTypes: string[];
+ indexMapping: IndexMapping;
+ currentPath: string[];
+}
+
+/**
+ * Validate an aggregation structure against the declared mappings and
+ * aggregation schemas, and rewrite the attribute fields using the KQL-like syntax
+ * - `{type}.attributes.{attribute}` to `{type}.{attribute}`
+ * - `{type}.{rootField}` to `{rootField}`
+ *
+ * throws on the first validation error if any is encountered.
+ */
+export const validateAndConvertAggregations = (
+ allowedTypes: string[],
+  aggs: Record<string, estypes.AggregationContainer>,
+  indexMapping: IndexMapping
+): Record<string, estypes.AggregationContainer> => {
+ return validateAggregations(aggs, {
+ allowedTypes,
+ indexMapping,
+ currentPath: [],
+ });
+};
+
+/**
+ * Validates a record of aggregation containers,
+ * which can either be the root-level aggregations (`SearchRequest.body.aggs`)
+ * or a nested record of aggregations (`SearchRequest.body.aggs.myAggregation.aggs`)
+ */
+const validateAggregations = (
+  aggregations: Record<string, estypes.AggregationContainer>,
+ context: ValidationContext
+) => {
+ return Object.entries(aggregations).reduce((memo, [aggrName, aggrContainer]) => {
+ memo[aggrName] = validateAggregation(aggrContainer, childContext(context, aggrName));
+ return memo;
+  }, {} as Record<string, estypes.AggregationContainer>);
+};
+
+/**
+ * Validates an aggregation container, e.g. an entry of `SearchRequest.body.aggs`, or
+ * one from a nested aggregation record, including its potential nested aggregations.
+ */
+const validateAggregation = (
+ aggregation: estypes.AggregationContainer,
+ context: ValidationContext
+) => {
+ const container = validateAggregationContainer(aggregation, context);
+
+ if (aggregation.aggregations) {
+ container.aggregations = validateAggregations(
+ aggregation.aggregations,
+ childContext(context, 'aggregations')
+ );
+ }
+ if (aggregation.aggs) {
+ container.aggs = validateAggregations(aggregation.aggs, childContext(context, 'aggs'));
+ }
+
+ return container;
+};
+
+/**
+ * Validates the root-level aggregations of the given aggregation container
+ * (ignoring its nested aggregations)
+ */
+const validateAggregationContainer = (
+ container: estypes.AggregationContainer,
+ context: ValidationContext
+) => {
+ return Object.entries(container).reduce((memo, [aggName, aggregation]) => {
+ if (aggregationKeys.includes(aggName)) {
+ return memo;
+ }
+ return {
+ ...memo,
+ [aggName]: validateAggregationType(aggName, aggregation, childContext(context, aggName)),
+ };
+ }, {} as estypes.AggregationContainer);
+};
+
+const validateAggregationType = (
+ aggregationType: string,
+  aggregation: Record<string, any>,
+ context: ValidationContext
+) => {
+ const aggregationSchema = aggregationSchemas[aggregationType];
+ if (!aggregationSchema) {
+ throw new Error(
+ `[${context.currentPath.join(
+ '.'
+ )}] ${aggregationType} aggregation is not valid (or not registered yet)`
+ );
+ }
+
+ validateAggregationStructure(aggregationSchema, aggregation, context);
+ return validateAndRewriteFieldAttributes(aggregation, context);
+};
+
+/**
+ * Validate an aggregation structure against its declared schema.
+ */
+const validateAggregationStructure = (
+ schema: ObjectType,
+ aggObject: unknown,
+ context: ValidationContext
+) => {
+ return schema.validate(aggObject, {}, context.currentPath.join('.'));
+};
+
+/**
+ * List of fields that have an attribute path as value
+ *
+ * @example
+ * ```ts
+ * avg: {
+ * field: 'alert.attributes.actions.group',
+ * },
+ * ```
+ */
+const attributeFields = ['field'];
+/**
+ * List of fields that have a Record as value
+ *
+ * @example
+ * ```ts
+ * filter: {
+ * term: {
+ * 'alert.attributes.actions.group': 'value'
+ * },
+ * },
+ * ```
+ */
+const attributeMaps = ['term'];
+
+const validateAndRewriteFieldAttributes = (
+  aggregation: Record<string, any>,
+ context: ValidationContext
+) => {
+ return recursiveRewrite(aggregation, context, []);
+};
+
+const recursiveRewrite = (
+  currentLevel: Record<string, any>,
+  context: ValidationContext,
+  parents: string[]
+): Record<string, any> => {
+ return Object.entries(currentLevel).reduce((memo, [key, value]) => {
+ const rewriteKey = isAttributeKey(parents);
+ const rewriteValue = isAttributeValue(key, value);
+
+ const nestedContext = childContext(context, key);
+ const newKey = rewriteKey ? validateAndRewriteAttributePath(key, nestedContext) : key;
+ const newValue = rewriteValue
+ ? validateAndRewriteAttributePath(value, nestedContext)
+ : isPlainObject(value)
+ ? recursiveRewrite(value, nestedContext, [...parents, key])
+ : value;
+
+ return {
+ ...memo,
+ [newKey]: newValue,
+ };
+ }, {});
+};
+
+const childContext = (context: ValidationContext, path: string): ValidationContext => {
+ return {
+ ...context,
+ currentPath: [...context.currentPath, path],
+ };
+};
+
+const lastParent = (parents: string[]) => {
+ if (parents.length) {
+ return parents[parents.length - 1];
+ }
+ return undefined;
+};
+
+const isAttributeKey = (parents: string[]) => {
+ const last = lastParent(parents);
+ if (last) {
+ return attributeMaps.includes(last);
+ }
+ return false;
+};
+
+const isAttributeValue = (fieldName: string, fieldValue: unknown): boolean => {
+ return attributeFields.includes(fieldName) && typeof fieldValue === 'string';
+};
+
+const validateAndRewriteAttributePath = (
+ attributePath: string,
+ { allowedTypes, indexMapping, currentPath }: ValidationContext
+) => {
+ if (isRootLevelAttribute(attributePath, indexMapping, allowedTypes)) {
+ return rewriteRootLevelAttribute(attributePath);
+ }
+ if (isObjectTypeAttribute(attributePath, indexMapping, allowedTypes)) {
+ return rewriteObjectTypeAttribute(attributePath);
+ }
+ throw new Error(`[${currentPath.join('.')}] Invalid attribute path: ${attributePath}`);
+};
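A compact recap of the two rewrite rules implemented above, reusing the `mockMappings` fixture from the adjacent test file:

```ts
// Object type attribute: `{type}.attributes.{attr}` is rewritten to `{type}.{attr}`
validateAndConvertAggregations(
  ['alert'],
  { byGroup: { cardinality: { field: 'alert.attributes.actions.group' } } },
  mockMappings
);
// => { byGroup: { cardinality: { field: 'alert.actions.group' } } }

// Root level field: `{type}.{rootField}` is rewritten to `{rootField}`
validateAndConvertAggregations(
  ['alert'],
  { latest: { max: { field: 'alert.updated_at' } } },
  mockMappings
);
// => { latest: { max: { field: 'updated_at' } } }
```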
diff --git a/src/core/server/saved_objects/service/lib/aggregations/validation_utils.test.ts b/src/core/server/saved_objects/service/lib/aggregations/validation_utils.test.ts
new file mode 100644
index 0000000000000..25c3aea474ece
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/validation_utils.test.ts
@@ -0,0 +1,148 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { IndexMapping } from '../../../mappings';
+import {
+ isRootLevelAttribute,
+ rewriteRootLevelAttribute,
+ isObjectTypeAttribute,
+ rewriteObjectTypeAttribute,
+} from './validation_utils';
+
+const mockMappings: IndexMapping = {
+ properties: {
+ updated_at: {
+ type: 'date',
+ },
+ foo: {
+ properties: {
+ title: {
+ type: 'text',
+ },
+ description: {
+ type: 'text',
+ },
+ bytes: {
+ type: 'number',
+ },
+ },
+ },
+ bean: {
+ properties: {
+ canned: {
+ fields: {
+ text: {
+ type: 'text',
+ },
+ },
+ type: 'keyword',
+ },
+ },
+ },
+ alert: {
+ properties: {
+ actions: {
+ type: 'nested',
+ properties: {
+ group: {
+ type: 'keyword',
+ },
+ actionRef: {
+ type: 'keyword',
+ },
+ actionTypeId: {
+ type: 'keyword',
+ },
+ params: {
+ enabled: false,
+ type: 'object',
+ },
+ },
+ },
+ params: {
+ type: 'flattened',
+ },
+ },
+ },
+ },
+};
+
+describe('isRootLevelAttribute', () => {
+ it('returns true when referring to a path to a valid root level field', () => {
+ expect(isRootLevelAttribute('foo.updated_at', mockMappings, ['foo'])).toBe(true);
+ });
+ it('returns false when referring to a direct path to a valid root level field', () => {
+ expect(isRootLevelAttribute('updated_at', mockMappings, ['foo'])).toBe(false);
+ });
+  it('returns false when referring to a path to an unknown root level field', () => {
+ expect(isRootLevelAttribute('foo.not_present', mockMappings, ['foo'])).toBe(false);
+ });
+ it('returns false when referring to a path to an existing nested field', () => {
+ expect(isRootLevelAttribute('foo.properties.title', mockMappings, ['foo'])).toBe(false);
+ });
+ it('returns false when referring to a path to a valid root level field of an unknown type', () => {
+ expect(isRootLevelAttribute('bar.updated_at', mockMappings, ['foo'])).toBe(false);
+ });
+ it('returns false when referring to a path to a valid root level type field', () => {
+ expect(isRootLevelAttribute('foo.foo', mockMappings, ['foo'])).toBe(false);
+ });
+});
+
+describe('rewriteRootLevelAttribute', () => {
+ it('rewrites the attribute path to strip the type', () => {
+ expect(rewriteRootLevelAttribute('foo.references')).toEqual('references');
+ });
+  it('does not handle a real root level path', () => {
+ expect(rewriteRootLevelAttribute('references')).not.toEqual('references');
+ });
+});
+
+describe('isObjectTypeAttribute', () => {
+  it('returns true if the attribute path is valid', () => {
+ expect(isObjectTypeAttribute('foo.attributes.description', mockMappings, ['foo'])).toEqual(
+ true
+ );
+ });
+
+  it('returns true for nested attributes', () => {
+ expect(isObjectTypeAttribute('bean.attributes.canned.text', mockMappings, ['bean'])).toEqual(
+ true
+ );
+ });
+
+  it('returns false if the attribute path points to an invalid type', () => {
+ expect(isObjectTypeAttribute('foo.attributes.description', mockMappings, ['bean'])).toEqual(
+ false
+ );
+ });
+
+ it('returns false if attribute path refers to a type', () => {
+ expect(isObjectTypeAttribute('bean', mockMappings, ['bean'])).toEqual(false);
+ });
+
+  it('returns false if the key does not match the SO attribute structure', () => {
+ expect(isObjectTypeAttribute('bean.canned.text', mockMappings, ['bean'])).toEqual(false);
+ });
+
+  it('returns false if the key matches a nested type attribute parent', () => {
+ expect(isObjectTypeAttribute('alert.actions', mockMappings, ['alert'])).toEqual(false);
+ });
+
+ it('returns false if path refers to a non-existent attribute', () => {
+ expect(isObjectTypeAttribute('bean.attributes.red', mockMappings, ['bean'])).toEqual(false);
+ });
+});
+
+describe('rewriteObjectTypeAttribute', () => {
+ it('rewrites the attribute path to strip the type', () => {
+ expect(rewriteObjectTypeAttribute('foo.attributes.prop')).toEqual('foo.prop');
+ });
+ it('returns invalid input unchanged', () => {
+ expect(rewriteObjectTypeAttribute('foo.references')).toEqual('foo.references');
+ });
+});
diff --git a/src/core/server/saved_objects/service/lib/aggregations/validation_utils.ts b/src/core/server/saved_objects/service/lib/aggregations/validation_utils.ts
new file mode 100644
index 0000000000000..f817497e3759e
--- /dev/null
+++ b/src/core/server/saved_objects/service/lib/aggregations/validation_utils.ts
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { IndexMapping } from '../../../mappings';
+import { fieldDefined, hasFilterKeyError } from '../filter_utils';
+
+/**
+ * Returns true if the given attribute path is a valid root level SO attribute path
+ *
+ * @example
+ * ```ts
+ * isRootLevelAttribute('myType.updated_at', indexMapping, ['myType'])
+ * // => true
+ * ```
+ */
+export const isRootLevelAttribute = (
+ attributePath: string,
+ indexMapping: IndexMapping,
+ allowedTypes: string[]
+): boolean => {
+ const splits = attributePath.split('.');
+ if (splits.length !== 2) {
+ return false;
+ }
+
+ const [type, fieldName] = splits;
+ if (allowedTypes.includes(fieldName)) {
+ return false;
+ }
+ return allowedTypes.includes(type) && fieldDefined(indexMapping, fieldName);
+};
+
+/**
+ * Rewrites a root level attribute path to strip the type
+ *
+ * @example
+ * ```ts
+ * rewriteRootLevelAttribute('myType.updated_at')
+ * // => 'updated_at'
+ * ```
+ */
+export const rewriteRootLevelAttribute = (attributePath: string) => {
+ return attributePath.split('.')[1];
+};
+
+/**
+ * Returns true if the given attribute path is a valid object type level SO attribute path
+ *
+ * @example
+ * ```ts
+ * isObjectTypeAttribute('myType.attributes.someField', indexMapping, ['myType'])
+ * // => true
+ * ```
+ */
+export const isObjectTypeAttribute = (
+ attributePath: string,
+ indexMapping: IndexMapping,
+ allowedTypes: string[]
+): boolean => {
+ const error = hasFilterKeyError(attributePath, allowedTypes, indexMapping);
+ return error == null;
+};
+
+/**
+ * Rewrites an object type attribute path to strip the type
+ *
+ * @example
+ * ```ts
+ * rewriteObjectTypeAttribute('myType.attributes.foo')
+ * // => 'myType.foo'
+ * ```
+ */
+export const rewriteObjectTypeAttribute = (attributePath: string) => {
+ return attributePath.replace('.attributes', '');
+};
diff --git a/src/core/server/saved_objects/service/lib/filter_utils.test.ts b/src/core/server/saved_objects/service/lib/filter_utils.test.ts
index b50326627cf09..956a60b23809d 100644
--- a/src/core/server/saved_objects/service/lib/filter_utils.test.ts
+++ b/src/core/server/saved_objects/service/lib/filter_utils.test.ts
@@ -18,7 +18,7 @@ import {
const mockMappings = {
properties: {
- updatedAt: {
+ updated_at: {
type: 'date',
},
foo: {
@@ -123,12 +123,12 @@ describe('Filter Utils', () => {
expect(
validateConvertFilterToKueryNode(
['foo'],
- 'foo.updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
+ 'foo.updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
mockMappings
)
).toEqual(
esKuery.fromKueryExpression(
- '(type: foo and updatedAt: 5678654567) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or foo.description :*)'
+ '(type: foo and updated_at: 5678654567) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or foo.description :*)'
)
);
});
@@ -137,12 +137,12 @@ describe('Filter Utils', () => {
expect(
validateConvertFilterToKueryNode(
['foo', 'bar'],
- 'foo.updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
+ 'foo.updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
mockMappings
)
).toEqual(
esKuery.fromKueryExpression(
- '(type: foo and updatedAt: 5678654567) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or foo.description :*)'
+ '(type: foo and updated_at: 5678654567) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or foo.description :*)'
)
);
});
@@ -151,12 +151,12 @@ describe('Filter Utils', () => {
expect(
validateConvertFilterToKueryNode(
['foo', 'bar'],
- '(bar.updatedAt: 5678654567 OR foo.updatedAt: 5678654567) and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or bar.attributes.description :*)',
+ '(bar.updated_at: 5678654567 OR foo.updated_at: 5678654567) and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or bar.attributes.description :*)',
mockMappings
)
).toEqual(
esKuery.fromKueryExpression(
- '((type: bar and updatedAt: 5678654567) or (type: foo and updatedAt: 5678654567)) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or bar.description :*)'
+ '((type: bar and updated_at: 5678654567) or (type: foo and updated_at: 5678654567)) and foo.bytes > 1000 and foo.bytes < 8000 and foo.title: "best" and (foo.description: t* or bar.description :*)'
)
);
});
@@ -181,11 +181,11 @@ describe('Filter Utils', () => {
expect(() => {
validateConvertFilterToKueryNode(
['foo', 'bar'],
- 'updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
+ 'updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)',
mockMappings
);
}).toThrowErrorMatchingInlineSnapshot(
- `"This key 'updatedAt' need to be wrapped by a saved object type like foo,bar: Bad Request"`
+ `"This key 'updated_at' need to be wrapped by a saved object type like foo,bar: Bad Request"`
);
});
@@ -200,7 +200,7 @@ describe('Filter Utils', () => {
test('Validate filter query through KueryNode - happy path', () => {
const validationObject = validateFilterKueryNode({
astFilter: esKuery.fromKueryExpression(
- 'foo.updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
+ 'foo.updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
),
types: ['foo'],
indexMapping: mockMappings,
@@ -211,7 +211,7 @@ describe('Filter Utils', () => {
astPath: 'arguments.0',
error: null,
isSavedObjectAttr: true,
- key: 'foo.updatedAt',
+ key: 'foo.updated_at',
type: 'foo',
},
{
@@ -275,7 +275,7 @@ describe('Filter Utils', () => {
  test('Return Error if key is not wrapped by a saved object type', () => {
const validationObject = validateFilterKueryNode({
astFilter: esKuery.fromKueryExpression(
- 'updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
+ 'updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
),
types: ['foo'],
indexMapping: mockMappings,
@@ -284,9 +284,9 @@ describe('Filter Utils', () => {
expect(validationObject).toEqual([
{
astPath: 'arguments.0',
- error: "This key 'updatedAt' need to be wrapped by a saved object type like foo",
+ error: "This key 'updated_at' need to be wrapped by a saved object type like foo",
isSavedObjectAttr: true,
- key: 'updatedAt',
+ key: 'updated_at',
type: null,
},
{
@@ -330,7 +330,7 @@ describe('Filter Utils', () => {
test('Return Error if key of a saved object type is not wrapped with attributes', () => {
const validationObject = validateFilterKueryNode({
astFilter: esKuery.fromKueryExpression(
- 'foo.updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.description :*)'
+ 'foo.updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.description :*)'
),
types: ['foo'],
indexMapping: mockMappings,
@@ -341,7 +341,7 @@ describe('Filter Utils', () => {
astPath: 'arguments.0',
error: null,
isSavedObjectAttr: true,
- key: 'foo.updatedAt',
+ key: 'foo.updated_at',
type: 'foo',
},
{
@@ -387,7 +387,7 @@ describe('Filter Utils', () => {
test('Return Error if filter is not using an allowed type', () => {
const validationObject = validateFilterKueryNode({
astFilter: esKuery.fromKueryExpression(
- 'bar.updatedAt: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
+ 'bar.updated_at: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.title: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
),
types: ['foo'],
indexMapping: mockMappings,
@@ -398,7 +398,7 @@ describe('Filter Utils', () => {
astPath: 'arguments.0',
error: 'This type bar is not allowed',
isSavedObjectAttr: true,
- key: 'bar.updatedAt',
+ key: 'bar.updated_at',
type: 'bar',
},
{
@@ -442,7 +442,7 @@ describe('Filter Utils', () => {
  test('Return Error if filter is using a non-existing key in the index patterns of the saved object type', () => {
const validationObject = validateFilterKueryNode({
astFilter: esKuery.fromKueryExpression(
- 'foo.updatedAt33: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.header: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
+ 'foo.updated_at33: 5678654567 and foo.attributes.bytes > 1000 and foo.attributes.bytes < 8000 and foo.attributes.header: "best" and (foo.attributes.description: t* or foo.attributes.description :*)'
),
types: ['foo'],
indexMapping: mockMappings,
@@ -451,9 +451,9 @@ describe('Filter Utils', () => {
expect(validationObject).toEqual([
{
astPath: 'arguments.0',
- error: "This key 'foo.updatedAt33' does NOT exist in foo saved object index patterns",
+ error: "This key 'foo.updated_at33' does NOT exist in foo saved object index patterns",
isSavedObjectAttr: false,
- key: 'foo.updatedAt33',
+ key: 'foo.updated_at33',
type: 'foo',
},
{
@@ -519,6 +519,33 @@ describe('Filter Utils', () => {
},
]);
});
+
+ test('Validate multiple items nested filter query through KueryNode', () => {
+ const validationObject = validateFilterKueryNode({
+ astFilter: esKuery.fromKueryExpression(
+ 'alert.attributes.actions:{ actionTypeId: ".server-log" AND actionRef: "foo" }'
+ ),
+ types: ['alert'],
+ indexMapping: mockMappings,
+ });
+
+ expect(validationObject).toEqual([
+ {
+ astPath: 'arguments.1.arguments.0',
+ error: null,
+ isSavedObjectAttr: false,
+ key: 'alert.attributes.actions.actionTypeId',
+ type: 'alert',
+ },
+ {
+ astPath: 'arguments.1.arguments.1',
+ error: null,
+ isSavedObjectAttr: false,
+ key: 'alert.attributes.actions.actionRef',
+ type: 'alert',
+ },
+ ]);
+ });
});
describe('#hasFilterKeyError', () => {
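For reference, a hedged sketch of the nested-filter syntax the new test above exercises, as a consumer would pass it to the find API (the `alert` type comes from the test; `savedObjectsClient` in scope and the surrounding async function are assumptions):

```ts
// Nested KQL filter: the new branch in validateFilterKueryNode resolves
// `alert.attributes.actions` to a `nested`-typed field in the mappings.
const filter = 'alert.attributes.actions:{ actionTypeId: ".server-log" AND actionRef: "foo" }';
const results = await savedObjectsClient.find({ type: 'alert', filter });
```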
diff --git a/src/core/server/saved_objects/service/lib/filter_utils.ts b/src/core/server/saved_objects/service/lib/filter_utils.ts
index 688b7ad96e8ed..b3bcef9a62e13 100644
--- a/src/core/server/saved_objects/service/lib/filter_utils.ts
+++ b/src/core/server/saved_objects/service/lib/filter_utils.ts
@@ -109,7 +109,15 @@ export const validateFilterKueryNode = ({
return astFilter.arguments.reduce((kueryNode: string[], ast: KueryNode, index: number) => {
if (hasNestedKey && ast.type === 'literal' && ast.value != null) {
localNestedKeys = ast.value;
+ } else if (ast.type === 'literal' && ast.value && typeof ast.value === 'string') {
+ const key = ast.value.replace('.attributes', '');
+ const mappingKey = 'properties.' + key.split('.').join('.properties.');
+ const field = get(indexMapping, mappingKey);
+ if (field != null && field.type === 'nested') {
+ localNestedKeys = ast.value;
+ }
}
+
if (ast.arguments) {
const myPath = `${path}.${index}`;
return [
@@ -121,7 +129,7 @@ export const validateFilterKueryNode = ({
storeValue: ast.type === 'function' && astFunctionType.includes(ast.function),
path: `${myPath}.arguments`,
hasNestedKey: ast.type === 'function' && ast.function === 'nested',
- nestedKeys: localNestedKeys,
+ nestedKeys: localNestedKeys || nestedKeys,
}),
];
}
@@ -226,7 +234,7 @@ export const fieldDefined = (indexMappings: IndexMapping, key: string): boolean
return true;
}
- // If the path is for a flattned type field, we'll assume the mappings are defined.
+ // If the path is for a flattened type field, we'll assume the mappings are defined.
const keys = key.split('.');
for (let i = 0; i < keys.length; i++) {
const path = `properties.${keys.slice(0, i + 1).join('.properties.')}`;
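For context, a minimal sketch (outside the diff) of the mapping lookup the new branch in `validateFilterKueryNode` performs; the mapping literal is illustrative, and `get` is the lodash helper `filter_utils.ts` already imports:

```ts
import { get } from 'lodash';

// Illustrative index mapping: `actions` is a nested field under the `alert` type.
const indexMapping = {
  properties: {
    alert: { properties: { actions: { type: 'nested' } } },
  },
};

// 'alert.attributes.actions' -> 'alert.actions' -> 'properties.alert.properties.actions'
const key = 'alert.attributes.actions'.replace('.attributes', '');
const mappingKey = 'properties.' + key.split('.').join('.properties.');

const field = get(indexMapping, mappingKey);
if (field != null && field.type === 'nested') {
  // The literal is recorded as the nested key for the rest of the AST walk.
}
```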
diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts
index 7c719ac56a835..c0e2cdc333363 100644
--- a/src/core/server/saved_objects/service/lib/repository.ts
+++ b/src/core/server/saved_objects/service/lib/repository.ts
@@ -66,6 +66,7 @@ import {
import { LegacyUrlAlias, LEGACY_URL_ALIAS_TYPE } from '../../object_types';
import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry';
import { validateConvertFilterToKueryNode } from './filter_utils';
+import { validateAndConvertAggregations } from './aggregations';
import {
ALL_NAMESPACES_STRING,
FIND_DEFAULT_PAGE,
@@ -748,7 +749,9 @@ export class SavedObjectsRepository {
* @property {string} [options.preference]
* @returns {promise} - { saved_objects: [{ id, type, version, attributes }], total, per_page, page }
*/
-  async find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>> {
+  async find<T = unknown, A = unknown>(
+    options: SavedObjectsFindOptions
+  ): Promise<SavedObjectsFindResponse<T, A>> {
const {
search,
defaultSearchOperator = 'OR',
@@ -768,6 +771,7 @@ export class SavedObjectsRepository {
typeToNamespacesMap,
filter,
preference,
+ aggs,
} = options;
if (!type && !typeToNamespacesMap) {
@@ -799,7 +803,7 @@ export class SavedObjectsRepository {
: Array.from(typeToNamespacesMap!.keys());
const allowedTypes = types.filter((t) => this._allowedTypes.includes(t));
if (allowedTypes.length === 0) {
-      return SavedObjectsUtils.createEmptyFindResponse<T>(options);
+      return SavedObjectsUtils.createEmptyFindResponse<T, A>(options);
}
if (searchFields && !Array.isArray(searchFields)) {
@@ -811,16 +815,24 @@ export class SavedObjectsRepository {
}
let kueryNode;
-
- try {
- if (filter) {
+ if (filter) {
+ try {
kueryNode = validateConvertFilterToKueryNode(allowedTypes, filter, this._mappings);
+ } catch (e) {
+ if (e.name === 'KQLSyntaxError') {
+ throw SavedObjectsErrorHelpers.createBadRequestError(`KQLSyntaxError: ${e.message}`);
+ } else {
+ throw e;
+ }
}
- } catch (e) {
- if (e.name === 'KQLSyntaxError') {
- throw SavedObjectsErrorHelpers.createBadRequestError('KQLSyntaxError: ' + e.message);
- } else {
- throw e;
+ }
+
+ let aggsObject;
+ if (aggs) {
+ try {
+ aggsObject = validateAndConvertAggregations(allowedTypes, aggs, this._mappings);
+ } catch (e) {
+ throw SavedObjectsErrorHelpers.createBadRequestError(`Invalid aggregation: ${e.message}`);
}
}
@@ -838,6 +850,7 @@ export class SavedObjectsRepository {
seq_no_primary_term: true,
from: perPage * (page - 1),
_source: includedFields(type, fields),
+ ...(aggsObject ? { aggs: aggsObject } : {}),
...getSearchDsl(this._mappings, this._registry, {
search,
defaultSearchOperator,
@@ -872,6 +885,7 @@ export class SavedObjectsRepository {
}
return {
+ ...(body.aggregations ? { aggregations: (body.aggregations as unknown) as A } : {}),
page,
per_page: perPage,
total: body.hits.total,
@@ -885,7 +899,7 @@ export class SavedObjectsRepository {
})
),
pit_id: body.pit_id,
-    } as SavedObjectsFindResponse<T>;
+    } as SavedObjectsFindResponse<T, A>;
}
/**
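Combined with the typing changes below, a hedged sketch of how a server-side consumer would pass `aggs` and read the typed `aggregations` off the find response (the attribute interface, the aggregation name, and the `savedObjectsClient` in scope are illustrative assumptions):

```ts
interface DashboardAttributes { version: number }
interface MaxAgg { value: number | null }

// Inside an async function, with a SavedObjectsClient (or repository) in scope.
const findResponse = await savedObjectsClient.find<
  DashboardAttributes,
  { latest_version: MaxAgg }
>({
  type: 'dashboard',
  perPage: 0, // aggregations only; skip the hits
  aggs: { latest_version: { max: { field: 'dashboard.attributes.version' } } },
});

const latestVersion = findResponse.aggregations?.latest_version.value;
```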
diff --git a/src/core/server/saved_objects/service/lib/utils.ts b/src/core/server/saved_objects/service/lib/utils.ts
index ebad13e5edc25..494ac6ce9fad5 100644
--- a/src/core/server/saved_objects/service/lib/utils.ts
+++ b/src/core/server/saved_objects/service/lib/utils.ts
@@ -51,10 +51,10 @@ export class SavedObjectsUtils {
/**
* Creates an empty response for a find operation. This is only intended to be used by saved objects client wrappers.
*/
-  public static createEmptyFindResponse = <T>({
+  public static createEmptyFindResponse = <T, A>({
     page = FIND_DEFAULT_PAGE,
     perPage = FIND_DEFAULT_PER_PAGE,
-  }: SavedObjectsFindOptions): SavedObjectsFindResponse<T> => ({
+  }: SavedObjectsFindOptions): SavedObjectsFindResponse<T, A> => ({
page,
per_page: perPage,
total: 0,
diff --git a/src/core/server/saved_objects/service/saved_objects_client.ts b/src/core/server/saved_objects/service/saved_objects_client.ts
index 9a0ccb88d3555..12451ace02836 100644
--- a/src/core/server/saved_objects/service/saved_objects_client.ts
+++ b/src/core/server/saved_objects/service/saved_objects_client.ts
@@ -173,7 +173,8 @@ export interface SavedObjectsFindResult<T = unknown> extends SavedObject<T> {
*
* @public
*/
-export interface SavedObjectsFindResponse<T = unknown> {
+export interface SavedObjectsFindResponse<T = unknown, A = unknown> {
+  aggregations?: A;
   saved_objects: Array<SavedObjectsFindResult<T>>;
total: number;
per_page: number;
@@ -463,7 +464,9 @@ export class SavedObjectsClient {
*
* @param options
*/
-  async find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>> {
+  async find<T = unknown, A = unknown>(
+    options: SavedObjectsFindOptions
+  ): Promise<SavedObjectsFindResponse<T, A>> {
return await this._repository.find(options);
}
diff --git a/src/core/server/saved_objects/types.ts b/src/core/server/saved_objects/types.ts
index ecda120e025d8..d3bfdcc6923dc 100644
--- a/src/core/server/saved_objects/types.ts
+++ b/src/core/server/saved_objects/types.ts
@@ -116,6 +116,28 @@ export interface SavedObjectsFindOptions {
*/
defaultSearchOperator?: 'AND' | 'OR';
filter?: string | KueryNode;
+ /**
+ * A record of aggregations to perform.
+ * The API currently only supports a limited set of metrics and bucket aggregation types.
+ * Additional aggregation types can be contributed to Core.
+ *
+ * @example
+ * Aggregating on SO attribute field
+ * ```ts
+ * const aggs = { latest_version: { max: { field: 'dashboard.attributes.version' } } };
+ * return client.find({ type: 'dashboard', aggs })
+ * ```
+ *
+ * @example
+ * Aggregating on SO root field
+ * ```ts
+ * const aggs = { latest_update: { max: { field: 'dashboard.updated_at' } } };
+ * return client.find({ type: 'dashboard', aggs })
+ * ```
+ *
+ * @alpha
+ */
+  aggs?: Record<string, estypes.AggregationContainer>;
namespaces?: string[];
/**
* This map defines each type to search for, and the namespace(s) to search for the type in; this is only intended to be used by a saved
diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md
index 05af684053f39..e8f9dab435754 100644
--- a/src/core/server/server.api.md
+++ b/src/core/server/server.api.md
@@ -2244,7 +2244,7 @@ export class SavedObjectsClient {
static errors: typeof SavedObjectsErrorHelpers;
// (undocumented)
errors: typeof SavedObjectsErrorHelpers;
-    find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>>;
+    find<T = unknown, A = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T, A>>;
     get<T = unknown>(type: string, id: string, options?: SavedObjectsBaseOptions): Promise<SavedObject<T>>;
     openPointInTimeForType(type: string | string[], options?: SavedObjectsOpenPointInTimeOptions): Promise<SavedObjectsOpenPointInTimeResponse>;
     removeReferencesTo(type: string, id: string, options?: SavedObjectsRemoveReferencesToOptions): Promise<SavedObjectsRemoveReferencesToResponse>;
@@ -2501,6 +2501,8 @@ export type SavedObjectsFieldMapping = SavedObjectsCoreFieldMapping | SavedObjec
// @public (undocumented)
export interface SavedObjectsFindOptions {
+ // @alpha
+    aggs?: Record<string, estypes.AggregationContainer>;
defaultSearchOperator?: 'AND' | 'OR';
fields?: string[];
// Warning: (ae-forgotten-export) The symbol "KueryNode" needs to be exported by the entry point index.d.ts
@@ -2539,7 +2541,9 @@ export interface SavedObjectsFindOptionsReference {
}
// @public
-export interface SavedObjectsFindResponse<T = unknown> {
+export interface SavedObjectsFindResponse<T = unknown, A = unknown> {
+    // (undocumented)
+    aggregations?: A;
// (undocumented)
page: number;
// (undocumented)
@@ -2849,7 +2853,7 @@ export class SavedObjectsRepository {
    deleteByNamespace(namespace: string, options?: SavedObjectsDeleteByNamespaceOptions): Promise<any>;
    deleteFromNamespaces(type: string, id: string, namespaces: string[], options?: SavedObjectsDeleteFromNamespacesOptions): Promise<SavedObjectsDeleteFromNamespacesResponse>;
// (undocumented)
-    find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>>;
+    find<T = unknown, A = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T, A>>;
     get<T = unknown>(type: string, id: string, options?: SavedObjectsBaseOptions): Promise<SavedObject<T>>;
     incrementCounter<T = unknown>(type: string, id: string, counterFields: Array<string | SavedObjectsIncrementCounterField>, options?: SavedObjectsIncrementCounterOptions): Promise<SavedObject<T>>;
     openPointInTimeForType(type: string | string[], { keepAlive, preference }?: SavedObjectsOpenPointInTimeOptions): Promise<SavedObjectsOpenPointInTimeResponse>;
@@ -2970,7 +2974,7 @@ export interface SavedObjectsUpdateResponse<T = unknown> extends Omit<SavedObject<T>, 'attributes' | 'references'> {
-    static createEmptyFindResponse: <T>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T>;
+    static createEmptyFindResponse: <T, A>({ page, perPage, }: SavedObjectsFindOptions) => SavedObjectsFindResponse<T, A>;
static generateId(): string;
static isRandomId(id: string | undefined): boolean;
static namespaceIdToString: (namespace?: string | undefined) => string;
diff --git a/src/plugins/data/common/search/tabify/tabify.ts b/src/plugins/data/common/search/tabify/tabify.ts
index 9f096886491ad..4a8972d4384c2 100644
--- a/src/plugins/data/common/search/tabify/tabify.ts
+++ b/src/plugins/data/common/search/tabify/tabify.ts
@@ -139,7 +139,7 @@ export function tabifyAggResponse(
const write = new TabbedAggResponseWriter(aggConfigs, respOpts || {});
const topLevelBucket: AggResponseBucket = {
...esResponse.aggregations,
- doc_count: esResponse.hits.total,
+ doc_count: esResponse.hits?.total,
};
collectBucket(aggConfigs, write, topLevelBucket, '', 1);
diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md
index d99d754a3364d..35f13fc855e99 100644
--- a/src/plugins/data/public/public.api.md
+++ b/src/plugins/data/public/public.api.md
@@ -2353,6 +2353,8 @@ export class SearchInterceptor {
// (undocumented)
protected readonly deps: SearchInterceptorDeps;
// (undocumented)
+ protected getSerializableOptions(options?: ISearchOptions): Pick;
+ // (undocumented)
protected getTimeoutMode(): TimeoutErrorMode;
// Warning: (ae-forgotten-export) The symbol "KibanaServerError" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "AbortError" needs to be exported by the entry point index.d.ts
diff --git a/src/plugins/data/public/search/search_interceptor.ts b/src/plugins/data/public/search/search_interceptor.ts
index 3df2313f83798..e3fb31c9179fd 100644
--- a/src/plugins/data/public/search/search_interceptor.ts
+++ b/src/plugins/data/public/search/search_interceptor.ts
@@ -113,20 +113,14 @@ export class SearchInterceptor {
}
}
- /**
- * @internal
- * @throws `AbortError` | `ErrorLike`
- */
- protected runSearch(
- request: IKibanaSearchRequest,
- options?: ISearchOptions
-  ): Promise<IKibanaSearchResponse> {
- const { abortSignal, sessionId, ...requestOptions } = options || {};
+ protected getSerializableOptions(options?: ISearchOptions) {
+ const { sessionId, ...requestOptions } = options || {};
+
+ const serializableOptions: ISearchOptionsSerializable = {};
const combined = {
...requestOptions,
...this.deps.session.getSearchOptions(sessionId),
};
- const serializableOptions: ISearchOptionsSerializable = {};
if (combined.sessionId !== undefined) serializableOptions.sessionId = combined.sessionId;
if (combined.isRestore !== undefined) serializableOptions.isRestore = combined.isRestore;
@@ -135,10 +129,22 @@ export class SearchInterceptor {
if (combined.strategy !== undefined) serializableOptions.strategy = combined.strategy;
if (combined.isStored !== undefined) serializableOptions.isStored = combined.isStored;
+ return serializableOptions;
+ }
+
+ /**
+ * @internal
+ * @throws `AbortError` | `ErrorLike`
+ */
+ protected runSearch(
+ request: IKibanaSearchRequest,
+ options?: ISearchOptions
+  ): Promise<IKibanaSearchResponse> {
+ const { abortSignal } = options || {};
return this.batchedFetch(
{
request,
- options: serializableOptions,
+ options: this.getSerializableOptions(options),
},
abortSignal
);
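A hedged sketch of what the extracted helper gives subclasses (the subclass and the logging are illustrative): options are reduced to the wire-safe subset, while `abortSignal` stays local to `runSearch`:

```ts
class LoggingSearchInterceptor extends SearchInterceptor {
  protected runSearch(request: IKibanaSearchRequest, options?: ISearchOptions) {
    // Only serializable fields (sessionId, isRestore, strategy, isStored, ...)
    // survive; abortSignal never crosses the wire.
    console.log(this.getSerializableOptions(options));
    return super.runSearch(request, options);
  }
}
```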
diff --git a/src/plugins/data/public/search/session/session_service.ts b/src/plugins/data/public/search/session/session_service.ts
index 381410574ecda..71f51b4bc8d83 100644
--- a/src/plugins/data/public/search/session/session_service.ts
+++ b/src/plugins/data/public/search/session/session_service.ts
@@ -73,7 +73,7 @@ export interface SearchSessionIndicatorUiConfig {
}
/**
- * Responsible for tracking a current search session. Supports only a single session at a time.
+ * Responsible for tracking a current search session. Supports a single session at a time.
*/
export class SessionService {
  public readonly state$: Observable<SearchSessionState>;
diff --git a/src/plugins/telemetry_collection_manager/server/telemetry_saved_objects_client.ts b/src/plugins/telemetry_collection_manager/server/telemetry_saved_objects_client.ts
index d639b053565d1..01d89c5731158 100644
--- a/src/plugins/telemetry_collection_manager/server/telemetry_saved_objects_client.ts
+++ b/src/plugins/telemetry_collection_manager/server/telemetry_saved_objects_client.ts
@@ -17,7 +17,9 @@ export class TelemetrySavedObjectsClient extends SavedObjectsClient {
* Find the SavedObjects matching the search query in all the Spaces by default
* @param options
*/
-  async find<T = unknown>(options: SavedObjectsFindOptions): Promise<SavedObjectsFindResponse<T>> {
+  async find<T = unknown, A = unknown>(
+    options: SavedObjectsFindOptions
+  ): Promise<SavedObjectsFindResponse<T, A>> {
return super.find({ namespaces: ['*'], ...options });
}
}
diff --git a/test/api_integration/apis/saved_objects/find.ts b/test/api_integration/apis/saved_objects/find.ts
index 28c38ca9e0ded..a4862707e2d0e 100644
--- a/test/api_integration/apis/saved_objects/find.ts
+++ b/test/api_integration/apis/saved_objects/find.ts
@@ -9,7 +9,6 @@
import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context';
import { SavedObject } from '../../../../src/core/server';
-import { getKibanaVersion } from './lib/saved_objects_test_utils';
export default function ({ getService }: FtrProviderContext) {
const supertest = getService('supertest');
@@ -17,12 +16,6 @@ export default function ({ getService }: FtrProviderContext) {
const esDeleteAllIndices = getService('esDeleteAllIndices');
describe('find', () => {
- let KIBANA_VERSION: string;
-
- before(async () => {
- KIBANA_VERSION = await getKibanaVersion(getService);
- });
-
describe('with kibana index', () => {
before(() => esArchiver.load('saved_objects/basic'));
after(() => esArchiver.unload('saved_objects/basic'));
@@ -32,33 +25,9 @@ export default function ({ getService }: FtrProviderContext) {
.get('/api/saved_objects/_find?type=visualization&fields=title')
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 1,
- saved_objects: [
- {
- type: 'visualization',
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- version: 'WzE4LDJd',
- attributes: {
- title: 'Count of requests',
- },
- score: 0,
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- namespaces: ['default'],
- references: [
- {
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- },
- ],
- updated_at: '2017-09-21T18:51:23.794Z',
- },
- ],
- });
+ expect(resp.body.saved_objects.map((so: { id: string }) => so.id)).to.eql([
+ 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
+ ]);
expect(resp.body.saved_objects[0].migrationVersion).to.be.ok();
}));
@@ -129,33 +98,12 @@ export default function ({ getService }: FtrProviderContext) {
.get('/api/saved_objects/_find?type=visualization&fields=title&namespaces=default')
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 1,
- saved_objects: [
- {
- type: 'visualization',
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- version: 'WzE4LDJd',
- attributes: {
- title: 'Count of requests',
- },
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- namespaces: ['default'],
- score: 0,
- references: [
- {
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- },
- ],
- updated_at: '2017-09-21T18:51:23.794Z',
- },
- ],
- });
+ expect(
+ resp.body.saved_objects.map((so: { id: string; namespaces: string[] }) => ({
+ id: so.id,
+ namespaces: so.namespaces,
+ }))
+ ).to.eql([{ id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', namespaces: ['default'] }]);
expect(resp.body.saved_objects[0].migrationVersion).to.be.ok();
}));
});
@@ -166,53 +114,15 @@ export default function ({ getService }: FtrProviderContext) {
.get('/api/saved_objects/_find?type=visualization&fields=title&namespaces=*')
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 2,
- saved_objects: [
- {
- type: 'visualization',
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- version: 'WzE4LDJd',
- attributes: {
- title: 'Count of requests',
- },
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- namespaces: ['default'],
- score: 0,
- references: [
- {
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- },
- ],
- updated_at: '2017-09-21T18:51:23.794Z',
- },
- {
- attributes: {
- title: 'Count of requests',
- },
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- namespaces: ['foo-ns'],
- references: [
- {
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- },
- ],
- score: 0,
- type: 'visualization',
- updated_at: '2017-09-21T18:51:23.794Z',
- version: 'WzIyLDJd',
- },
- ],
- });
+ expect(
+ resp.body.saved_objects.map((so: { id: string; namespaces: string[] }) => ({
+ id: so.id,
+ namespaces: so.namespaces,
+ }))
+ ).to.eql([
+ { id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', namespaces: ['default'] },
+ { id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab', namespaces: ['foo-ns'] },
+ ]);
}));
});
@@ -224,42 +134,9 @@ export default function ({ getService }: FtrProviderContext) {
)
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 1,
- saved_objects: [
- {
- type: 'visualization',
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- attributes: {
- title: 'Count of requests',
- visState: resp.body.saved_objects[0].attributes.visState,
- uiStateJSON: '{"spy":{"mode":{"name":null,"fill":false}}}',
- description: '',
- version: 1,
- kibanaSavedObjectMeta: {
- searchSourceJSON:
- resp.body.saved_objects[0].attributes.kibanaSavedObjectMeta
- .searchSourceJSON,
- },
- },
- namespaces: ['default'],
- score: 0,
- references: [
- {
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- },
- ],
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- updated_at: '2017-09-21T18:51:23.794Z',
- version: 'WzE4LDJd',
- },
- ],
- });
+ expect(resp.body.saved_objects.map((so: { id: string }) => so.id)).to.eql([
+ 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
+ ]);
}));
it('wrong type should return 400 with Bad Request', async () =>
@@ -293,6 +170,75 @@ export default function ({ getService }: FtrProviderContext) {
}));
});
+ describe('using aggregations', () => {
+ it('should return 200 with valid response for a valid aggregation', async () =>
+ await supertest
+ .get(
+ `/api/saved_objects/_find?type=visualization&per_page=0&aggs=${encodeURIComponent(
+ JSON.stringify({
+ type_count: { max: { field: 'visualization.attributes.version' } },
+ })
+ )}`
+ )
+ .expect(200)
+ .then((resp) => {
+ expect(resp.body).to.eql({
+ aggregations: {
+ type_count: {
+ value: 1,
+ },
+ },
+ page: 1,
+ per_page: 0,
+ saved_objects: [],
+ total: 1,
+ });
+ }));
+
+ it('should return a 400 when referencing an invalid SO attribute', async () =>
+ await supertest
+ .get(
+ `/api/saved_objects/_find?type=visualization&per_page=0&aggs=${encodeURIComponent(
+ JSON.stringify({
+ type_count: { max: { field: 'dashboard.attributes.version' } },
+ })
+ )}`
+ )
+ .expect(400)
+ .then((resp) => {
+ expect(resp.body).to.eql({
+ error: 'Bad Request',
+ message:
+ 'Invalid aggregation: [type_count.max.field] Invalid attribute path: dashboard.attributes.version: Bad Request',
+ statusCode: 400,
+ });
+ }));
+
+ it('should return a 400 when using a forbidden aggregation option', async () =>
+ await supertest
+ .get(
+ `/api/saved_objects/_find?type=visualization&per_page=0&aggs=${encodeURIComponent(
+ JSON.stringify({
+ type_count: {
+ max: {
+ field: 'visualization.attributes.version',
+ script: 'Bad script is bad',
+ },
+ },
+ })
+ )}`
+ )
+ .expect(400)
+ .then((resp) => {
+ expect(resp.body).to.eql({
+ error: 'Bad Request',
+ message:
+ 'Invalid aggregation: [type_count.max.script]: definition for this key is missing: Bad Request',
+ statusCode: 400,
+ });
+ }));
+ });
+
describe('`has_reference` and `has_reference_operator` parameters', () => {
before(() => esArchiver.load('saved_objects/references'));
after(() => esArchiver.unload('saved_objects/references'));
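As the aggregation tests above exercise it, the HTTP API takes `aggs` as a JSON-serialized, URI-encoded query parameter; a minimal sketch of building such a request URL:

```ts
const aggs = {
  type_count: { max: { field: 'visualization.attributes.version' } },
};
const url = `/api/saved_objects/_find?type=visualization&per_page=0&aggs=${encodeURIComponent(
  JSON.stringify(aggs)
)}`;
// GET url -> { aggregations: { type_count: { value: ... } }, saved_objects: [], ... }
```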
diff --git a/test/api_integration/apis/saved_objects_management/find.ts b/test/api_integration/apis/saved_objects_management/find.ts
index 6ab2352ebb05f..8fb3884a5b37b 100644
--- a/test/api_integration/apis/saved_objects_management/find.ts
+++ b/test/api_integration/apis/saved_objects_management/find.ts
@@ -34,44 +34,9 @@ export default function ({ getService }: FtrProviderContext) {
.get('/api/kibana/management/saved_objects/_find?type=visualization&fields=title')
.expect(200)
.then((resp: Response) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 1,
- saved_objects: [
- {
- type: 'visualization',
- id: 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
- version: 'WzE4LDJd',
- attributes: {
- title: 'Count of requests',
- },
- migrationVersion: resp.body.saved_objects[0].migrationVersion,
- coreMigrationVersion: KIBANA_VERSION,
- namespaces: ['default'],
- references: [
- {
- id: '91200a00-9efd-11e7-acb3-3dab96693fab',
- name: 'kibanaSavedObjectMeta.searchSourceJSON.index',
- type: 'index-pattern',
- },
- ],
- score: 0,
- updated_at: '2017-09-21T18:51:23.794Z',
- meta: {
- editUrl:
- '/management/kibana/objects/savedVisualizations/dd7caf20-9efd-11e7-acb3-3dab96693fab',
- icon: 'visualizeApp',
- inAppUrl: {
- path: '/app/visualize#/edit/dd7caf20-9efd-11e7-acb3-3dab96693fab',
- uiCapabilitiesPath: 'visualize.show',
- },
- title: 'Count of requests',
- namespaceType: 'single',
- },
- },
- ],
- });
+ expect(resp.body.saved_objects.map((so: { id: string }) => so.id)).to.eql([
+ 'dd7caf20-9efd-11e7-acb3-3dab96693fab',
+ ]);
}));
describe('unknown type', () => {
diff --git a/test/plugin_functional/test_suites/saved_objects_management/find.ts b/test/plugin_functional/test_suites/saved_objects_management/find.ts
index 5dce8f43339a1..e5a5d69c7e4d4 100644
--- a/test/plugin_functional/test_suites/saved_objects_management/find.ts
+++ b/test/plugin_functional/test_suites/saved_objects_management/find.ts
@@ -33,28 +33,17 @@ export default function ({ getService }: PluginFunctionalProviderContext) {
.set('kbn-xsrf', 'true')
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 1,
- saved_objects: [
- {
- type: 'test-hidden-importable-exportable',
- id: 'ff3733a0-9fty-11e7-ahb3-3dcb94193fab',
- attributes: {
- title: 'Hidden Saved object type that is importable/exportable.',
- },
- references: [],
- updated_at: '2021-02-11T18:51:23.794Z',
- version: 'WzIsMl0=',
- namespaces: ['default'],
- score: 0,
- meta: {
- namespaceType: 'single',
- },
- },
- ],
- });
+ expect(
+ resp.body.saved_objects.map((so: { id: string; type: string }) => ({
+ id: so.id,
+ type: so.type,
+ }))
+ ).to.eql([
+ {
+ type: 'test-hidden-importable-exportable',
+ id: 'ff3733a0-9fty-11e7-ahb3-3dcb94193fab',
+ },
+ ]);
}));
it('returns empty response for non importableAndExportable types', async () =>
@@ -65,12 +54,7 @@ export default function ({ getService }: PluginFunctionalProviderContext) {
.set('kbn-xsrf', 'true')
.expect(200)
.then((resp) => {
- expect(resp.body).to.eql({
- page: 1,
- per_page: 20,
- total: 0,
- saved_objects: [],
- });
+ expect(resp.body.saved_objects).to.eql([]);
}));
});
});
diff --git a/x-pack/plugins/data_enhanced/public/search/search_abort_controller.test.ts b/x-pack/plugins/data_enhanced/public/search/search_abort_controller.test.ts
index 68282c1e947f7..a52fdef9819b8 100644
--- a/x-pack/plugins/data_enhanced/public/search/search_abort_controller.test.ts
+++ b/x-pack/plugins/data_enhanced/public/search/search_abort_controller.test.ts
@@ -21,13 +21,15 @@ describe('search abort controller', () => {
test('immediately aborts when passed an aborted signal in the constructor', () => {
const controller = new AbortController();
controller.abort();
- const sac = new SearchAbortController(controller.signal);
+ const sac = new SearchAbortController();
+ sac.addAbortSignal(controller.signal);
expect(sac.getSignal().aborted).toBe(true);
});
test('aborts when input signal is aborted', () => {
const controller = new AbortController();
- const sac = new SearchAbortController(controller.signal);
+ const sac = new SearchAbortController();
+ sac.addAbortSignal(controller.signal);
expect(sac.getSignal().aborted).toBe(false);
controller.abort();
expect(sac.getSignal().aborted).toBe(true);
@@ -35,7 +37,8 @@ describe('search abort controller', () => {
test('aborts when all input signals are aborted', () => {
const controller = new AbortController();
- const sac = new SearchAbortController(controller.signal);
+ const sac = new SearchAbortController();
+ sac.addAbortSignal(controller.signal);
const controller2 = new AbortController();
sac.addAbortSignal(controller2.signal);
@@ -48,7 +51,8 @@ describe('search abort controller', () => {
test('aborts explicitly even if all inputs are not aborted', () => {
const controller = new AbortController();
- const sac = new SearchAbortController(controller.signal);
+ const sac = new SearchAbortController();
+ sac.addAbortSignal(controller.signal);
const controller2 = new AbortController();
sac.addAbortSignal(controller2.signal);
@@ -60,7 +64,8 @@ describe('search abort controller', () => {
test('doesnt abort, if cleared', () => {
const controller = new AbortController();
- const sac = new SearchAbortController(controller.signal);
+ const sac = new SearchAbortController();
+ sac.addAbortSignal(controller.signal);
expect(sac.getSignal().aborted).toBe(false);
sac.cleanup();
controller.abort();
@@ -77,7 +82,7 @@ describe('search abort controller', () => {
});
test('doesnt abort on timeout, if cleared', () => {
- const sac = new SearchAbortController(undefined, 100);
+ const sac = new SearchAbortController(100);
expect(sac.getSignal().aborted).toBe(false);
sac.cleanup();
timeTravel(100);
@@ -85,7 +90,7 @@ describe('search abort controller', () => {
});
test('aborts on timeout, even if no signals passed in', () => {
- const sac = new SearchAbortController(undefined, 100);
+ const sac = new SearchAbortController(100);
expect(sac.getSignal().aborted).toBe(false);
timeTravel(100);
expect(sac.getSignal().aborted).toBe(true);
@@ -94,7 +99,8 @@ describe('search abort controller', () => {
test('aborts on timeout, even if there are unaborted signals', () => {
const controller = new AbortController();
- const sac = new SearchAbortController(controller.signal, 100);
+ const sac = new SearchAbortController(100);
+ sac.addAbortSignal(controller.signal);
expect(sac.getSignal().aborted).toBe(false);
timeTravel(100);
diff --git a/x-pack/plugins/data_enhanced/public/search/search_abort_controller.ts b/x-pack/plugins/data_enhanced/public/search/search_abort_controller.ts
index 4482a7771dc28..7bc74b56a3903 100644
--- a/x-pack/plugins/data_enhanced/public/search/search_abort_controller.ts
+++ b/x-pack/plugins/data_enhanced/public/search/search_abort_controller.ts
@@ -18,11 +18,7 @@ export class SearchAbortController {
private destroyed = false;
private reason?: AbortReason;
- constructor(abortSignal?: AbortSignal, timeout?: number) {
- if (abortSignal) {
- this.addAbortSignal(abortSignal);
- }
-
+ constructor(timeout?: number) {
if (timeout) {
this.timeoutSub = timer(timeout).subscribe(() => {
this.reason = AbortReason.Timeout;
@@ -41,6 +37,7 @@ export class SearchAbortController {
};
public cleanup() {
+ if (this.destroyed) return;
this.destroyed = true;
this.timeoutSub?.unsubscribe();
this.inputAbortSignals.forEach((abortSignal) => {
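A brief sketch of the reworked API, mirroring the test updates above: the input signal moves out of the constructor, leaving only the optional timeout, and signals are attached via `addAbortSignal`:

```ts
const sac = new SearchAbortController(100); // optional timeout in ms
const controller = new AbortController();
sac.addAbortSignal(controller.signal);

controller.abort();
// true: any attached input signal (or the timeout) aborts the output signal
sac.getSignal().aborted;
```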
diff --git a/x-pack/plugins/data_enhanced/public/search/search_interceptor.test.ts b/x-pack/plugins/data_enhanced/public/search/search_interceptor.test.ts
index 02671974e5053..0e511c545f3e2 100644
--- a/x-pack/plugins/data_enhanced/public/search/search_interceptor.test.ts
+++ b/x-pack/plugins/data_enhanced/public/search/search_interceptor.test.ts
@@ -23,9 +23,12 @@ import { bfetchPluginMock } from '../../../../../src/plugins/bfetch/public/mocks
import { BehaviorSubject } from 'rxjs';
import * as xpackResourceNotFoundException from '../../common/search/test_data/search_phase_execution_exception.json';
-const timeTravel = (msToRun = 0) => {
+const flushPromises = () => new Promise((resolve) => setImmediate(resolve));
+
+const timeTravel = async (msToRun = 0) => {
+ await flushPromises();
jest.advanceTimersByTime(msToRun);
- return new Promise((resolve) => setImmediate(resolve));
+ return flushPromises();
};
const next = jest.fn();
@@ -39,10 +42,20 @@ let fetchMock: jest.Mock;
jest.useFakeTimers();
+jest.mock('./utils', () => ({
+ createRequestHash: jest.fn().mockImplementation((input) => {
+ return Promise.resolve(JSON.stringify(input));
+ }),
+}));
+
function mockFetchImplementation(responses: any[]) {
let i = 0;
- fetchMock.mockImplementation(() => {
+ fetchMock.mockImplementation((r) => {
+ if (!r.request.id) i = 0;
const { time = 0, value = {}, isError = false } = responses[i++];
+ value.meta = {
+ size: 10,
+ };
return new Promise((resolve, reject) =>
setTimeout(() => {
return (isError ? reject : resolve)(value);
@@ -452,7 +465,7 @@ describe('EnhancedSearchInterceptor', () => {
});
});
- describe('session', () => {
+ describe('session tracking', () => {
beforeEach(() => {
const responses = [
{
@@ -559,4 +572,540 @@ describe('EnhancedSearchInterceptor', () => {
expect(sessionService.trackSearch).toBeCalledTimes(0);
});
});
+
+ describe('session client caching', () => {
+ const sessionId = 'sessionId';
+ const basicReq = {
+ params: {
+ test: 1,
+ },
+ };
+
+ const basicCompleteResponse = [
+ {
+ time: 10,
+ value: {
+ isPartial: false,
+ isRunning: false,
+ id: 1,
+ rawResponse: {
+ took: 1,
+ },
+ },
+ },
+ ];
+
+ const partialCompleteResponse = [
+ {
+ time: 10,
+ value: {
+ isPartial: true,
+ isRunning: true,
+ id: 1,
+ rawResponse: {
+ took: 1,
+ },
+ },
+ },
+ {
+ time: 20,
+ value: {
+ isPartial: false,
+ isRunning: false,
+ id: 1,
+ rawResponse: {
+ took: 1,
+ },
+ },
+ },
+ ];
+
+ beforeEach(() => {
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+ });
+
+ test('should be disabled if there is no session', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ searchInterceptor.search(basicReq, {}).subscribe({ next, error, complete });
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(basicReq, {}).subscribe({ next, error, complete });
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+ test('should fetch different requests in a single session', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ const req2 = {
+ params: {
+ test: 2,
+ },
+ };
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(req2, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+ test('should fetch the same request for two different sessions', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor
+ .search(basicReq, { sessionId: 'anotherSession' })
+ .subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+ test('should track searches that come from cache', async () => {
+ mockFetchImplementation(partialCompleteResponse);
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+
+ const untrack = jest.fn();
+ sessionService.trackSearch.mockImplementation(() => untrack);
+
+ const req = {
+ params: {
+ test: 200,
+ },
+ };
+
+ const response = searchInterceptor.search(req, { pollInterval: 1, sessionId });
+ const response2 = searchInterceptor.search(req, { pollInterval: 1, sessionId });
+ response.subscribe({ next, error, complete });
+ response2.subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ expect(sessionService.trackSearch).toBeCalledTimes(2);
+ expect(untrack).not.toBeCalled();
+ await timeTravel(300);
+ // Should be called only 2 times (once per partial response)
+ expect(fetchMock).toBeCalledTimes(2);
+ expect(sessionService.trackSearch).toBeCalledTimes(2);
+ expect(untrack).toBeCalledTimes(2);
+
+ expect(next).toBeCalledTimes(4);
+ expect(error).toBeCalledTimes(0);
+ expect(complete).toBeCalledTimes(2);
+ });
+
+ test('should cache partial responses', async () => {
+ const responses = [
+ {
+ time: 10,
+ value: {
+ isPartial: true,
+ isRunning: true,
+ id: 1,
+ },
+ },
+ ];
+
+ mockFetchImplementation(responses);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ });
+
+ test('should not cache error responses', async () => {
+ const responses = [
+ {
+ time: 10,
+ value: {
+ isPartial: true,
+ isRunning: false,
+ id: 1,
+ },
+ },
+ ];
+
+ mockFetchImplementation(responses);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+ test('should deliver error to all replays', async () => {
+ const responses = [
+ {
+ time: 10,
+ value: {
+ isPartial: true,
+ isRunning: false,
+ id: 1,
+ },
+ },
+ ];
+
+ mockFetchImplementation(responses);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ expect(error).toBeCalledTimes(2);
+ expect(error.mock.calls[0][0].message).toEqual('Received partial response');
+ expect(error.mock.calls[1][0].message).toEqual('Received partial response');
+ });
+
+ test('should ignore anything outside params when hashing', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ const req = {
+ something: 123,
+ params: {
+ test: 1,
+ },
+ };
+
+ const req2 = {
+ something: 321,
+ params: {
+ test: 1,
+ },
+ };
+
+ searchInterceptor.search(req, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(req2, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ });
+
+ test('should ignore preference when hashing', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ const req = {
+ params: {
+ test: 1,
+ preference: 123,
+ },
+ };
+
+ const req2 = {
+ params: {
+ test: 1,
+ preference: 321,
+ },
+ };
+
+ searchInterceptor.search(req, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(req2, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ });
+
+ test('should return from cache for identical requests in the same session', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ });
+
+ test('aborting a search that didnt get any response should retrigger search', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ const abortController = new AbortController();
+
+ // Start a search request
+ searchInterceptor
+ .search(basicReq, { sessionId, abortSignal: abortController.signal })
+ .subscribe({ next, error, complete });
+
+ // Abort the search request before it started
+ abortController.abort();
+
+      // Time travel to make sure nothing happens
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(0);
+ expect(next).toBeCalledTimes(0);
+ expect(error).toBeCalledTimes(1);
+ expect(complete).toBeCalledTimes(0);
+
+ const error2 = jest.fn();
+ const next2 = jest.fn();
+ const complete2 = jest.fn();
+
+ // Search for the same thing again
+ searchInterceptor
+ .search(basicReq, { sessionId })
+ .subscribe({ next: next2, error: error2, complete: complete2 });
+
+ // Should search again
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+ expect(next2).toBeCalledTimes(1);
+ expect(error2).toBeCalledTimes(0);
+ expect(complete2).toBeCalledTimes(1);
+ });
+
+ test('aborting a running first search shouldnt clear cache', async () => {
+ mockFetchImplementation(partialCompleteResponse);
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+
+ const untrack = jest.fn();
+ sessionService.trackSearch.mockImplementation(() => untrack);
+
+ const req = {
+ params: {
+ test: 200,
+ },
+ };
+
+ const abortController = new AbortController();
+
+ const response = searchInterceptor.search(req, {
+ pollInterval: 1,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response.subscribe({ next, error, complete });
+ await timeTravel(10);
+
+ expect(fetchMock).toBeCalledTimes(1);
+ expect(next).toBeCalledTimes(1);
+ expect(error).toBeCalledTimes(0);
+ expect(complete).toBeCalledTimes(0);
+ expect(sessionService.trackSearch).toBeCalledTimes(1);
+ expect(untrack).not.toBeCalled();
+
+ const next2 = jest.fn();
+ const error2 = jest.fn();
+ const complete2 = jest.fn();
+ const response2 = searchInterceptor.search(req, { pollInterval: 1, sessionId });
+ response2.subscribe({ next: next2, error: error2, complete: complete2 });
+ await timeTravel(0);
+
+ abortController.abort();
+
+ await timeTravel(300);
+ // Both searches should be tracked and untracked
+ expect(sessionService.trackSearch).toBeCalledTimes(2);
+ expect(untrack).toBeCalledTimes(2);
+
+ // First search should error
+ expect(next).toBeCalledTimes(1);
+ expect(error).toBeCalledTimes(1);
+ expect(complete).toBeCalledTimes(0);
+
+ // Second search should complete
+ expect(next2).toBeCalledTimes(2);
+ expect(error2).toBeCalledTimes(0);
+ expect(complete2).toBeCalledTimes(1);
+
+ // Should be called only 2 times (once per partial response)
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+ test('aborting a running second search shouldnt clear cache', async () => {
+ mockFetchImplementation(partialCompleteResponse);
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+
+ const untrack = jest.fn();
+ sessionService.trackSearch.mockImplementation(() => untrack);
+
+ const req = {
+ params: {
+ test: 200,
+ },
+ };
+
+ const abortController = new AbortController();
+
+ const response = searchInterceptor.search(req, { pollInterval: 1, sessionId });
+ response.subscribe({ next, error, complete });
+ await timeTravel(10);
+
+ expect(fetchMock).toBeCalledTimes(1);
+ expect(next).toBeCalledTimes(1);
+ expect(error).toBeCalledTimes(0);
+ expect(complete).toBeCalledTimes(0);
+ expect(sessionService.trackSearch).toBeCalledTimes(1);
+ expect(untrack).not.toBeCalled();
+
+ const next2 = jest.fn();
+ const error2 = jest.fn();
+ const complete2 = jest.fn();
+ const response2 = searchInterceptor.search(req, {
+ pollInterval: 0,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response2.subscribe({ next: next2, error: error2, complete: complete2 });
+ await timeTravel(0);
+
+ abortController.abort();
+
+ await timeTravel(300);
+ expect(sessionService.trackSearch).toBeCalledTimes(2);
+ expect(untrack).toBeCalledTimes(2);
+
+ expect(next).toBeCalledTimes(2);
+ expect(error).toBeCalledTimes(0);
+ expect(complete).toBeCalledTimes(1);
+
+ expect(next2).toBeCalledTimes(1);
+ expect(error2).toBeCalledTimes(1);
+ expect(complete2).toBeCalledTimes(0);
+
+ // Should be called only 2 times (once per partial response)
+ expect(fetchMock).toBeCalledTimes(2);
+ });
+
+    test('aborting both requests should cancel underlying search only once', async () => {
+ mockFetchImplementation(partialCompleteResponse);
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+ sessionService.trackSearch.mockImplementation(() => jest.fn());
+
+ const req = {
+ params: {
+ test: 200,
+ },
+ };
+
+ const abortController = new AbortController();
+
+ const response = searchInterceptor.search(req, {
+ pollInterval: 1,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response.subscribe({ next, error, complete });
+
+ const response2 = searchInterceptor.search(req, {
+ pollInterval: 1,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response2.subscribe({ next, error, complete });
+ await timeTravel(10);
+
+ abortController.abort();
+
+ await timeTravel(300);
+
+ expect(mockCoreSetup.http.delete).toHaveBeenCalledTimes(1);
+ });
+
+ test('aborting both searches should stop searching and clear cache', async () => {
+ mockFetchImplementation(partialCompleteResponse);
+ sessionService.isCurrentSession.mockImplementation((_sessionId) => _sessionId === sessionId);
+ sessionService.getSessionId.mockImplementation(() => sessionId);
+
+ const untrack = jest.fn();
+ sessionService.trackSearch.mockImplementation(() => untrack);
+
+ const req = {
+ params: {
+ test: 200,
+ },
+ };
+
+ const abortController = new AbortController();
+
+ const response = searchInterceptor.search(req, {
+ pollInterval: 1,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response.subscribe({ next, error, complete });
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ const response2 = searchInterceptor.search(req, {
+ pollInterval: 1,
+ sessionId,
+ abortSignal: abortController.signal,
+ });
+ response2.subscribe({ next, error, complete });
+ await timeTravel(0);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ abortController.abort();
+
+ await timeTravel(300);
+
+ expect(next).toBeCalledTimes(2);
+ expect(error).toBeCalledTimes(2);
+ expect(complete).toBeCalledTimes(0);
+ expect(error.mock.calls[0][0]).toBeInstanceOf(AbortError);
+ expect(error.mock.calls[1][0]).toBeInstanceOf(AbortError);
+
+      // Should be called only once (one partial response)
+ expect(fetchMock).toBeCalledTimes(1);
+
+      // Clear the mock and search again
+ fetchMock.mockReset();
+ mockFetchImplementation(partialCompleteResponse);
+ // Run the search again to see that we don't hit the cache
+ const response3 = searchInterceptor.search(req, { pollInterval: 1, sessionId });
+ response3.subscribe({ next, error, complete });
+
+ await timeTravel(10);
+ await timeTravel(10);
+ await timeTravel(300);
+
+      // Should be called 2 times (two partial responses)
+ expect(fetchMock).toBeCalledTimes(2);
+ expect(complete).toBeCalledTimes(1);
+ });
+
+    test('aborting a completed search shouldnt affect cache', async () => {
+ mockFetchImplementation(basicCompleteResponse);
+
+ const abortController = new AbortController();
+
+ // Start a search request
+ searchInterceptor
+ .search(basicReq, { sessionId, abortSignal: abortController.signal })
+ .subscribe({ next, error, complete });
+
+ // Get a final response
+ await timeTravel(10);
+ expect(fetchMock).toBeCalledTimes(1);
+
+ // Abort the search request
+ abortController.abort();
+
+ // Search for the same thing again
+ searchInterceptor.search(basicReq, { sessionId }).subscribe({ next, error, complete });
+
+ // Get the response from cache
+ expect(fetchMock).toBeCalledTimes(1);
+ });
+ });
});
diff --git a/x-pack/plugins/data_enhanced/public/search/search_interceptor.ts b/x-pack/plugins/data_enhanced/public/search/search_interceptor.ts
index b9d8553d3dc5a..3e7564933a0c6 100644
--- a/x-pack/plugins/data_enhanced/public/search/search_interceptor.ts
+++ b/x-pack/plugins/data_enhanced/public/search/search_interceptor.ts
@@ -6,8 +6,19 @@
*/
import { once } from 'lodash';
-import { throwError, Subscription } from 'rxjs';
-import { tap, finalize, catchError, filter, take, skip } from 'rxjs/operators';
+import { throwError, Subscription, from, of, fromEvent, EMPTY } from 'rxjs';
+import {
+ tap,
+ finalize,
+ catchError,
+ filter,
+ take,
+ skip,
+ switchMap,
+ shareReplay,
+ map,
+ takeUntil,
+} from 'rxjs/operators';
import {
TimeoutErrorMode,
SearchInterceptor,
@@ -16,12 +27,21 @@ import {
IKibanaSearchRequest,
SearchSessionState,
} from '../../../../../src/plugins/data/public';
+import { AbortError } from '../../../../../src/plugins/kibana_utils/public';
import { ENHANCED_ES_SEARCH_STRATEGY, IAsyncSearchOptions, pollSearch } from '../../common';
+import { SearchResponseCache } from './search_response_cache';
+import { createRequestHash } from './utils';
import { SearchAbortController } from './search_abort_controller';
+const MAX_CACHE_ITEMS = 50;
+const MAX_CACHE_SIZE_MB = 10;
export class EnhancedSearchInterceptor extends SearchInterceptor {
private uiSettingsSub: Subscription;
private searchTimeout: number;
+ private readonly responseCache: SearchResponseCache = new SearchResponseCache(
+ MAX_CACHE_ITEMS,
+ MAX_CACHE_SIZE_MB
+ );
/**
* @internal
@@ -38,6 +58,7 @@ export class EnhancedSearchInterceptor extends SearchInterceptor {
}
public stop() {
+ this.responseCache.clear();
this.uiSettingsSub.unsubscribe();
}
@@ -47,19 +68,31 @@ export class EnhancedSearchInterceptor extends SearchInterceptor {
: TimeoutErrorMode.CONTACT;
}
- public search({ id, ...request }: IKibanaSearchRequest, options: IAsyncSearchOptions = {}) {
- const searchOptions = {
- strategy: ENHANCED_ES_SEARCH_STRATEGY,
- ...options,
+ private createRequestHash$(request: IKibanaSearchRequest, options: IAsyncSearchOptions) {
+ const { sessionId, isRestore } = options;
+    // Preference is used to ensure all queries go to the same set of shards, so it doesn't need to be hashed
+ // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-shard-routing.html#shard-and-node-preference
+ const { preference, ...params } = request.params || {};
+ const hashOptions = {
+ ...params,
+ sessionId,
+ isRestore,
};
- const { sessionId, strategy, abortSignal } = searchOptions;
- const search = () => this.runSearch({ id, ...request }, searchOptions);
- const searchAbortController = new SearchAbortController(abortSignal, this.searchTimeout);
- this.pendingCount$.next(this.pendingCount$.getValue() + 1);
- const untrackSearch = this.deps.session.isCurrentSession(options.sessionId)
- ? this.deps.session.trackSearch({ abort: () => searchAbortController.abort() })
- : undefined;
+ return from(sessionId ? createRequestHash(hashOptions) : of(undefined));
+ }
+
+ /**
+ * @internal
+ * Creates a new pollSearch that share replays its results
+   * Creates a new pollSearch observable that replays its results via shareReplay
+ private runSearch$(
+ { id, ...request }: IKibanaSearchRequest,
+ options: IAsyncSearchOptions,
+ searchAbortController: SearchAbortController
+ ) {
+ const search = () => this.runSearch({ id, ...request }, options);
+ const { sessionId, strategy } = options;
    // track if this search's session will be sent to background
// if yes, then we don't need to cancel this search when it is aborted
@@ -91,18 +124,97 @@ export class EnhancedSearchInterceptor extends SearchInterceptor {
tap((response) => (id = response.id)),
catchError((e: Error) => {
cancel();
- return throwError(this.handleSearchError(e, options, searchAbortController.isTimeout()));
+ return throwError(e);
}),
finalize(() => {
- this.pendingCount$.next(this.pendingCount$.getValue() - 1);
searchAbortController.cleanup();
- if (untrackSearch && this.deps.session.isCurrentSession(options.sessionId)) {
- // untrack if this search still belongs to current session
- untrackSearch();
- }
if (savedToBackgroundSub) {
savedToBackgroundSub.unsubscribe();
}
+ }),
+ // This observable is cached in the responseCache.
+ // Using shareReplay makes sure that future subscribers will get the final response
+
+ shareReplay(1)
+ );
+ }
+
+ /**
+ * @internal
+   * Creates a new search observable and a corresponding search abort controller.
+   * If requestHash is defined, tries to return them from the cache first.
+ */
+ private getSearchResponse$(
+ request: IKibanaSearchRequest,
+ options: IAsyncSearchOptions,
+ requestHash?: string
+ ) {
+ const cached = requestHash ? this.responseCache.get(requestHash) : undefined;
+
+ const searchAbortController =
+ cached?.searchAbortController || new SearchAbortController(this.searchTimeout);
+
+    // Create a new abort signal if one was not passed. This fake signal will never be aborted,
+    // so the underlying search will not be aborted, even if the other consumers abort.
+ searchAbortController.addAbortSignal(options.abortSignal ?? new AbortController().signal);
+ const response$ = cached?.response$ || this.runSearch$(request, options, searchAbortController);
+
+ if (requestHash && !this.responseCache.has(requestHash)) {
+ this.responseCache.set(requestHash, {
+ response$,
+ searchAbortController,
+ });
+ }
+
+ return {
+ response$,
+ searchAbortController,
+ };
+ }
+
+ public search({ id, ...request }: IKibanaSearchRequest, options: IAsyncSearchOptions = {}) {
+ const searchOptions = {
+ strategy: ENHANCED_ES_SEARCH_STRATEGY,
+ ...options,
+ };
+ const { sessionId, abortSignal } = searchOptions;
+
+ return this.createRequestHash$(request, searchOptions).pipe(
+ switchMap((requestHash) => {
+ const { searchAbortController, response$ } = this.getSearchResponse$(
+ request,
+ searchOptions,
+ requestHash
+ );
+
+ this.pendingCount$.next(this.pendingCount$.getValue() + 1);
+ const untrackSearch = this.deps.session.isCurrentSession(sessionId)
+ ? this.deps.session.trackSearch({ abort: () => searchAbortController.abort() })
+ : undefined;
+
+ // Abort the replay if the abortSignal is aborted.
+ // The underlying search will not abort unless searchAbortController fires.
+ const aborted$ = (abortSignal ? fromEvent(abortSignal, 'abort') : EMPTY).pipe(
+ map(() => {
+ throw new AbortError();
+ })
+ );
+
+ return response$.pipe(
+ takeUntil(aborted$),
+ catchError((e) => {
+ return throwError(
+ this.handleSearchError(e, searchOptions, searchAbortController.isTimeout())
+ );
+ }),
+ finalize(() => {
+ this.pendingCount$.next(this.pendingCount$.getValue() - 1);
+ if (untrackSearch && this.deps.session.isCurrentSession(sessionId)) {
+ // untrack if this search still belongs to current session
+ untrackSearch();
+ }
+ })
+ );
})
);
}
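The dedup behavior above hinges on `shareReplay(1)`: the cached `response$` replays its latest emission to late subscribers without re-running the search. A minimal standalone RxJS sketch of that behavior (hypothetical values, not interceptor code):

```ts
import { interval } from 'rxjs';
import { map, take, shareReplay } from 'rxjs/operators';

// Emits three fake partial/final responses, 100ms apart, caching the latest.
const response$ = interval(100).pipe(
  take(3),
  map((i) => ({ isRunning: i < 2, rawResponse: { t: i } })),
  shareReplay(1)
);

response$.subscribe((r) => console.log('first subscriber', r));

// Subscribes mid-flight: immediately receives the latest cached emission,
// then continues with live updates; no second "search" is started.
setTimeout(() => {
  response$.subscribe((r) => console.log('second subscriber', r));
}, 150);
```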
diff --git a/x-pack/plugins/data_enhanced/public/search/search_response_cache.test.ts b/x-pack/plugins/data_enhanced/public/search/search_response_cache.test.ts
new file mode 100644
index 0000000000000..e985de5e23f7d
--- /dev/null
+++ b/x-pack/plugins/data_enhanced/public/search/search_response_cache.test.ts
@@ -0,0 +1,318 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { interval, Observable, of, throwError } from 'rxjs';
+import { shareReplay, switchMap, take } from 'rxjs/operators';
+import { IKibanaSearchResponse } from 'src/plugins/data/public';
+import { SearchAbortController } from './search_abort_controller';
+import { SearchResponseCache } from './search_response_cache';
+
+describe('SearchResponseCache', () => {
+ let cache: SearchResponseCache;
+ let searchAbortController: SearchAbortController;
+ const r: Array<IKibanaSearchResponse<any>> = [
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 1,
+ },
+ },
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 2,
+ },
+ },
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 3,
+ },
+ },
+ {
+ isPartial: false,
+ isRunning: false,
+ rawResponse: {
+ t: 4,
+ },
+ },
+ ];
+
+ function getSearchObservable$(responses: Array<IKibanaSearchResponse<any>> = r) {
+ return interval(100).pipe(
+ take(responses.length),
+ switchMap((value: number, i: number) => {
+ if (responses[i].rawResponse.throw === true) {
+ return throwError('nooo');
+ } else {
+ return of(responses[i]);
+ }
+ }),
+ shareReplay(1)
+ );
+ }
+
+ function wrapWithAbortController(response$: Observable<IKibanaSearchResponse<any>>) {
+ return {
+ response$,
+ searchAbortController,
+ };
+ }
+
+ beforeEach(() => {
+ cache = new SearchResponseCache(3, 0.1);
+ searchAbortController = new SearchAbortController();
+ });
+
+ describe('Cache eviction', () => {
+ test('clear evicts all', () => {
+ const finalResult = r[r.length - 1];
+ cache.set('123', wrapWithAbortController(of(finalResult)));
+ cache.set('234', wrapWithAbortController(of(finalResult)));
+
+ cache.clear();
+
+ expect(cache.get('123')).toBeUndefined();
+ expect(cache.get('234')).toBeUndefined();
+ });
+
+ test('evicts searches that threw an exception', async () => {
+ const res$ = getSearchObservable$();
+ const err$ = getSearchObservable$([
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 'a'.repeat(1000),
+ },
+ },
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ throw: true,
+ },
+ },
+ ]);
+ cache.set('123', wrapWithAbortController(err$));
+ cache.set('234', wrapWithAbortController(res$));
+
+ const errHandler = jest.fn();
+ await err$.toPromise().catch(errHandler);
+ await res$.toPromise().catch(errHandler);
+
+ expect(errHandler).toBeCalledTimes(1);
+ expect(cache.get('123')).toBeUndefined();
+ expect(cache.get('234')).not.toBeUndefined();
+ });
+
+ test('evicts searches that returned an error response', async () => {
+ const err$ = getSearchObservable$([
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 1,
+ },
+ },
+ {
+ isPartial: true,
+ isRunning: false,
+ rawResponse: {
+ t: 2,
+ },
+ },
+ ]);
+ cache.set('123', wrapWithAbortController(err$));
+
+ const errHandler = jest.fn();
+ await err$.toPromise().catch(errHandler);
+
+ expect(errHandler).toBeCalledTimes(0);
+ expect(cache.get('123')).toBeUndefined();
+ });
+
+ test('evicts the oldest item if there are too many cached items', async () => {
+ const finalResult = r[r.length - 1];
+ cache.set('123', wrapWithAbortController(of(finalResult)));
+ cache.set('234', wrapWithAbortController(of(finalResult)));
+ cache.set('345', wrapWithAbortController(of(finalResult)));
+ cache.set('456', wrapWithAbortController(of(finalResult)));
+
+ expect(cache.get('123')).toBeUndefined();
+ expect(cache.get('234')).not.toBeUndefined();
+ expect(cache.get('345')).not.toBeUndefined();
+ expect(cache.get('456')).not.toBeUndefined();
+ });
+
+ test('evicts the oldest item if the cache gets bigger than the max size', async () => {
+ const largeResult$ = getSearchObservable$([
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 'a'.repeat(1000),
+ },
+ },
+ {
+ isPartial: false,
+ isRunning: false,
+ rawResponse: {
+ t: 'a'.repeat(50000),
+ },
+ },
+ ]);
+
+ cache.set('123', wrapWithAbortController(largeResult$));
+ cache.set('234', wrapWithAbortController(largeResult$));
+ cache.set('345', wrapWithAbortController(largeResult$));
+
+ await largeResult$.toPromise();
+
+ expect(cache.get('123')).toBeUndefined();
+ expect(cache.get('234')).not.toBeUndefined();
+ expect(cache.get('345')).not.toBeUndefined();
+ });
+
+ test('evicts any single item that gets bigger than the max size from the cache', async () => {
+ const largeResult$ = getSearchObservable$([
+ {
+ isPartial: true,
+ isRunning: true,
+ rawResponse: {
+ t: 'a'.repeat(500),
+ },
+ },
+ {
+ isPartial: false,
+ isRunning: false,
+ rawResponse: {
+ t: 'a'.repeat(500000),
+ },
+ },
+ ]);
+
+ cache.set('234', wrapWithAbortController(largeResult$));
+ await largeResult$.toPromise();
+ expect(cache.get('234')).toBeUndefined();
+ });
+
+ test('get updates the insertion time of an item', async () => {
+ const finalResult = r[r.length - 1];
+ cache.set('123', wrapWithAbortController(of(finalResult)));
+ cache.set('234', wrapWithAbortController(of(finalResult)));
+ cache.set('345', wrapWithAbortController(of(finalResult)));
+
+ cache.get('123');
+ cache.get('234');
+
+ cache.set('456', wrapWithAbortController(of(finalResult)));
+
+ expect(cache.get('123')).not.toBeUndefined();
+ expect(cache.get('234')).not.toBeUndefined();
+ expect(cache.get('345')).toBeUndefined();
+ expect(cache.get('456')).not.toBeUndefined();
+ });
+ });
+
+ describe('Observable behavior', () => {
+ test('caches a response and re-emits it', async () => {
+ const s$ = getSearchObservable$();
+ cache.set('123', wrapWithAbortController(s$));
+ const finalRes = await cache.get('123')!.response$.toPromise();
+ expect(finalRes).toStrictEqual(r[r.length - 1]);
+ });
+
+ test('cached$ should emit the same responses as the original search$', async () => {
+ const s$ = getSearchObservable$();
+ cache.set('123', wrapWithAbortController(s$));
+
+ const next = jest.fn();
+ const cached$ = cache.get('123');
+
+ cached$!.response$.subscribe({
+ next,
+ });
+
+ // wait for original search to complete
+ await s$!.toPromise();
+
+ // get final response from cached$
+ const finalRes = await cached$!.response$.toPromise();
+ expect(finalRes).toStrictEqual(r[r.length - 1]);
+ expect(next).toHaveBeenCalledTimes(4);
+ });
+
+ test('cached$ should emit only the current value and keep emitting if subscribed while search$ is running', async () => {
+ const s$ = getSearchObservable$();
+ cache.set('123', wrapWithAbortController(s$));
+
+ const next = jest.fn();
+ let cached$: Observable<IKibanaSearchResponse<any>> | undefined;
+ s$.subscribe({
+ next: (res) => {
+ if (res.rawResponse.t === 3) {
+ cached$ = cache.get('123')!.response$;
+ cached$!.subscribe({
+ next,
+ });
+ }
+ },
+ });
+
+ // wait for original search to complete
+ await s$!.toPromise();
+
+ const finalRes = await cached$!.toPromise();
+
+ expect(finalRes).toStrictEqual(r[r.length - 1]);
+ expect(next).toHaveBeenCalledTimes(2);
+ });
+
+ test('cached$ should emit only the last value if subscribed after a synchronous search$ completed', async () => {
+ const finalResult = r[r.length - 1];
+ const s$ = wrapWithAbortController(of(finalResult));
+ cache.set('123', s$);
+
+ // wait for original search to complete
+ await s$!.response$.toPromise();
+
+ const next = jest.fn();
+ const cached$ = cache.get('123');
+ cached$!.response$.subscribe({
+ next,
+ });
+
+ const finalRes = await cached$!.response$.toPromise();
+
+ expect(finalRes).toStrictEqual(r[r.length - 1]);
+ expect(next).toHaveBeenCalledTimes(1);
+ });
+
+ test('cached$ should emit only the last value if subscribed after search$ completed', async () => {
+ const s$ = getSearchObservable$();
+ cache.set('123', wrapWithAbortController(s$));
+
+ // wait for original search to complete
+ await s$!.toPromise();
+
+ const next = jest.fn();
+ const cached$ = cache.get('123');
+ cached$!.response$.subscribe({
+ next,
+ });
+
+ const finalRes = await cached$!.response$.toPromise();
+
+ expect(finalRes).toStrictEqual(r[r.length - 1]);
+ expect(next).toHaveBeenCalledTimes(1);
+ });
+ });
+});
diff --git a/x-pack/plugins/data_enhanced/public/search/search_response_cache.ts b/x-pack/plugins/data_enhanced/public/search/search_response_cache.ts
new file mode 100644
index 0000000000000..1467e5bf234ff
--- /dev/null
+++ b/x-pack/plugins/data_enhanced/public/search/search_response_cache.ts
@@ -0,0 +1,136 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Observable, Subscription } from 'rxjs';
+import { IKibanaSearchResponse, isErrorResponse } from '../../../../../src/plugins/data/public';
+import { SearchAbortController } from './search_abort_controller';
+
+interface ResponseCacheItem {
+ response$: Observable<IKibanaSearchResponse<any>>;
+ searchAbortController: SearchAbortController;
+}
+
+interface ResponseCacheItemInternal {
+ response$: Observable<IKibanaSearchResponse<any>>;
+ searchAbortController: SearchAbortController;
+ size: number;
+ subs: Subscription;
+}
+
+export class SearchResponseCache {
+ private responseCache: Map<string, ResponseCacheItemInternal>;
+ private cacheSize = 0;
+
+ constructor(private maxItems: number, private maxCacheSizeMB: number) {
+ this.responseCache = new Map();
+ }
+
+ private byteToMb(size: number) {
+ return size / (1024 * 1024);
+ }
+
+ private deleteItem(key: string, clearSubs = true) {
+ const item = this.responseCache.get(key);
+ if (item) {
+ if (clearSubs) {
+ item.subs.unsubscribe();
+ }
+ this.cacheSize -= item.size;
+ this.responseCache.delete(key);
+ }
+ }
+
+ private setItem(key: string, item: ResponseCacheItemInternal) {
+ // Delete the key first so that re-inserting it moves the entry to the end of the Map (most recently used).
+ this.deleteItem(key, false);
+ this.cacheSize += item.size;
+ this.responseCache.set(key, item);
+ }
+
+ public clear() {
+ this.cacheSize = 0;
+ this.responseCache.forEach((item) => {
+ item.subs.unsubscribe();
+ });
+ this.responseCache.clear();
+ }
+
+ private shrink() {
+ while (
+ this.responseCache.size > this.maxItems ||
+ this.byteToMb(this.cacheSize) > this.maxCacheSizeMB
+ ) {
+ const [oldestKey] = this.responseCache.keys(); // Map preserves insertion order
+ this.deleteItem(oldestKey);
+ }
+ }
+
+ public has(key: string) {
+ return this.responseCache.has(key);
+ }
+
+ /**
+ * Caches a search response$ together with its abort controller.
+ * @param key key to cache the item under
+ * @param item the response$ and its corresponding search abort controller
+ * @throws error if the key already exists
+ */
+ public set(key: string, item: ResponseCacheItem) {
+ if (this.responseCache.has(key)) {
+ throw new Error('duplicate key');
+ }
+
+ const { response$, searchAbortController } = item;
+
+ const cacheItem: ResponseCacheItemInternal = {
+ response$,
+ searchAbortController,
+ subs: new Subscription(),
+ size: 0,
+ };
+
+ this.setItem(key, cacheItem);
+
+ cacheItem.subs.add(
+ response$.subscribe({
+ next: (r) => {
+ // TODO: avoid stringifying. Get the size some other way!
+ const newSize = new Blob([JSON.stringify(r)]).size;
+ if (this.byteToMb(newSize) < this.maxCacheSizeMB && !isErrorResponse(r)) {
+ this.setItem(key, {
+ ...cacheItem,
+ size: newSize,
+ });
+ this.shrink();
+ } else {
+ // Single item is too large to be cached, or an error response was returned.
+ // Evict and ignore.
+ this.deleteItem(key);
+ }
+ },
+ error: (e) => {
+ // Evict item on error
+ this.deleteItem(key);
+ },
+ })
+ );
+ this.shrink();
+ }
+
+ public get(key: string): ResponseCacheItem | undefined {
+ const item = this.responseCache.get(key);
+ if (item) {
+ // touch the item, and move it to the end of the map's entries
+ this.setItem(key, item);
+ return {
+ response$: item.response$,
+ searchAbortController: item.searchAbortController,
+ };
+ }
+ }
+}
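For orientation, a minimal usage sketch of the cache above (the key and the response value are placeholders):

```ts
import { of } from 'rxjs';
import { SearchAbortController } from './search_abort_controller';
import { SearchResponseCache } from './search_response_cache';

const cache = new SearchResponseCache(100 /* max items */, 10 /* max MB */);
const searchAbortController = new SearchAbortController();
const response$ = of({ isPartial: false, isRunning: false, rawResponse: {} });

// Caching subscribes internally to track the response size for eviction.
cache.set('requestHash', { response$, searchAbortController });

// A later identical request is served from the cache; `get` also re-inserts
// the entry so it becomes the most recently used.
cache.get('requestHash')?.response$.subscribe((r) => console.log(r));
```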
diff --git a/x-pack/plugins/data_enhanced/public/search/utils.ts b/x-pack/plugins/data_enhanced/public/search/utils.ts
new file mode 100644
index 0000000000000..c6c648dbb5488
--- /dev/null
+++ b/x-pack/plugins/data_enhanced/public/search/utils.ts
@@ -0,0 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import stringify from 'json-stable-stringify';
+
+export async function createRequestHash(keys: Record<any, any>) {
+ const msgBuffer = new TextEncoder().encode(stringify(keys));
+ const hashBuffer = await crypto.subtle.digest('SHA-256', msgBuffer);
+ const hashArray = Array.from(new Uint8Array(hashBuffer));
+ return hashArray.map((b) => ('00' + b.toString(16)).slice(-2)).join('');
+}
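Because `json-stable-stringify` sorts keys before hashing, two otherwise identical requests hash to the same cache key regardless of property order. A small sketch of that contract (browser context assumed, since `crypto.subtle` is used):

```ts
import { createRequestHash } from './utils';

(async () => {
  const a = await createRequestHash({ index: 'logs-*', sessionId: '1' });
  const b = await createRequestHash({ sessionId: '1', index: 'logs-*' });
  console.log(a === b); // true: key order does not affect the hash
})();
```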
diff --git a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/encrypted_saved_objects_client_wrapper.ts b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/encrypted_saved_objects_client_wrapper.ts
index 88a89af6be3d0..9b699d6ce007c 100644
--- a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/encrypted_saved_objects_client_wrapper.ts
+++ b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/encrypted_saved_objects_client_wrapper.ts
@@ -162,9 +162,9 @@ export class EncryptedSavedObjectsClientWrapper implements SavedObjectsClientCon
return await this.options.baseClient.delete(type, id, options);
}
- public async find(options: SavedObjectsFindOptions) {
+ public async find<T = unknown, A = unknown>(options: SavedObjectsFindOptions) {
return await this.handleEncryptedAttributesInBulkResponse(
- await this.options.baseClient.find(options),
+ await this.options.baseClient.find<T, A>(options),
undefined
);
}
diff --git a/x-pack/plugins/lens/public/app_plugin/app.tsx b/x-pack/plugins/lens/public/app_plugin/app.tsx
index 39163101fc7bd..8caa1737c00ad 100644
--- a/x-pack/plugins/lens/public/app_plugin/app.tsx
+++ b/x-pack/plugins/lens/public/app_plugin/app.tsx
@@ -82,6 +82,8 @@ export function App({
dashboardFeatureFlag,
} = useKibana().services;
+ const startSession = useCallback(() => data.search.session.start(), [data]);
+
const [state, setState] = useState(() => {
return {
query: data.query.queryString.getQuery(),
@@ -96,7 +98,7 @@ export function App({
isSaveModalVisible: false,
indicateNoData: false,
isSaveable: false,
- searchSessionId: data.search.session.start(),
+ searchSessionId: startSession(),
};
});
@@ -178,7 +180,7 @@ export function App({
setState((s) => ({
...s,
filters: data.query.filterManager.getFilters(),
- searchSessionId: data.search.session.start(),
+ searchSessionId: startSession(),
}));
trackUiEvent('app_filters_updated');
},
@@ -188,7 +190,7 @@ export function App({
next: () => {
setState((s) => ({
...s,
- searchSessionId: data.search.session.start(),
+ searchSessionId: startSession(),
}));
},
});
@@ -199,7 +201,7 @@ export function App({
tap(() => {
setState((s) => ({
...s,
- searchSessionId: data.search.session.start(),
+ searchSessionId: startSession(),
}));
}),
switchMap((done) =>
@@ -234,6 +236,7 @@ export function App({
data.query,
history,
initialContext,
+ startSession,
]);
useEffect(() => {
@@ -652,7 +655,7 @@ export function App({
// Time change will be picked up by the time subscription
setState((s) => ({
...s,
- searchSessionId: data.search.session.start(),
+ searchSessionId: startSession(),
}));
trackUiEvent('app_query_change');
}
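The `useCallback` change gives `data.search.session.start` a stable identity so it can be listed in the effect dependency array added above. A generic sketch of the pattern (not Lens code; the component and props are hypothetical):

```tsx
import React, { useCallback, useEffect, useState } from 'react';

interface DataService {
  search: { session: { start: () => string } };
}

export function SearchApp({ data }: { data: DataService }) {
  // Stable identity across renders; changes only when `data` changes.
  const startSession = useCallback(() => data.search.session.start(), [data]);
  const [searchSessionId, setSearchSessionId] = useState(() => startSession());

  useEffect(() => {
    // Safe to depend on startSession: this re-runs per `data` instance,
    // not on every render.
    setSearchSessionId(startSession());
  }, [startSession]);

  return <span>{searchSessionId}</span>;
}
```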
diff --git a/x-pack/plugins/reporting/common/types.ts b/x-pack/plugins/reporting/common/types.ts
index 5e20381e35898..2148cf983d889 100644
--- a/x-pack/plugins/reporting/common/types.ts
+++ b/x-pack/plugins/reporting/common/types.ts
@@ -40,8 +40,8 @@ export interface LayoutParams {
export interface ReportDocumentHead {
_id: string;
_index: string;
- _seq_no: unknown;
- _primary_term: unknown;
+ _seq_no: number;
+ _primary_term: number;
}
export interface TaskRunResult {
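Typing `_seq_no` and `_primary_term` as `number` matches what Elasticsearch actually returns; these values drive optimistic concurrency control on writes. A hedged sketch of how such values are typically passed back on update (index name and fields are placeholders, not reporting code):

```ts
import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' });

async function markCompleted(id: string, seqNo: number, primaryTerm: number) {
  // Rejected with a 409 conflict if another writer updated the document
  // (and bumped its sequence number) after it was read.
  await client.update({
    index: '.reporting-example',
    id,
    if_seq_no: seqNo,
    if_primary_term: primaryTerm,
    body: { doc: { status: 'completed' } },
  });
}
```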
diff --git a/x-pack/plugins/reporting/server/core.ts b/x-pack/plugins/reporting/server/core.ts
index b0f0a8c8c7ece..03c76941a6e99 100644
--- a/x-pack/plugins/reporting/server/core.ts
+++ b/x-pack/plugins/reporting/server/core.ts
@@ -10,7 +10,6 @@ import * as Rx from 'rxjs';
import { first, map, take } from 'rxjs/operators';
import {
BasePath,
- ElasticsearchServiceSetup,
IClusterClient,
KibanaRequest,
PluginInitializerContext,
@@ -38,7 +37,6 @@ export interface ReportingInternalSetup {
basePath: Pick<BasePath, 'set'>;
router: ReportingPluginRouter;
features: FeaturesPluginSetup;
- elasticsearch: ElasticsearchServiceSetup;
licensing: LicensingPluginSetup;
security?: SecurityPluginSetup;
spaces?: SpacesPluginSetup;
@@ -212,11 +210,6 @@ export class ReportingCore {
return this.pluginSetupDeps;
}
- // NOTE: Uses the Legacy API
- public getElasticsearchService() {
- return this.getPluginSetupDeps().elasticsearch;
- }
-
private async getSavedObjectsClient(request: KibanaRequest) {
const { savedObjects } = await this.getPluginStartDeps();
return savedObjects.getScopedClient(request) as SavedObjectsClientContract;
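Call sites that previously went through `getElasticsearchService()` now await `getEsClient()` and scope it per request, as the CSV export changes below show. A condensed sketch of the replacement pattern (hypothetical helper):

```ts
import { KibanaRequest } from 'src/core/server';
import { ReportingCore } from '../';

async function searchAsUser(reporting: ReportingCore, request: KibanaRequest) {
  // New client: awaited from start deps, then scoped to the request.
  const { asCurrentUser } = (await reporting.getEsClient()).asScoped(request);
  // Calls are typed methods with named params, not callAsCurrentUser('search', ...).
  const { body } = await asCurrentUser.search({ index: 'logs-*' });
  return body;
}
```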
diff --git a/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts b/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts
index c0235ee56e00f..f63c07e51dd03 100644
--- a/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts
+++ b/x-pack/plugins/reporting/server/export_types/csv/execute_job.test.ts
@@ -5,8 +5,9 @@
* 2.0.
*/
+import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
import nodeCrypto from '@elastic/node-crypto';
-import { ElasticsearchServiceSetup, IUiSettingsClient } from 'kibana/server';
+import { ElasticsearchClient, IUiSettingsClient } from 'kibana/server';
import moment from 'moment';
// @ts-ignore
import Puid from 'puid';
@@ -50,20 +51,12 @@ describe('CSV Execute Job', function () {
let defaultElasticsearchResponse: any;
let encryptedHeaders: any;
- let clusterStub: any;
let configGetStub: any;
+ let mockEsClient: DeeplyMockedKeys<ElasticsearchClient>;
let mockReportingConfig: ReportingConfig;
let mockReportingCore: ReportingCore;
- let callAsCurrentUserStub: any;
let cancellationToken: any;
- const mockElasticsearch = {
- legacy: {
- client: {
- asScoped: () => clusterStub,
- },
- },
- };
const mockUiSettingsClient = {
get: sinon.stub(),
};
@@ -85,10 +78,10 @@ describe('CSV Execute Job', function () {
mockReportingCore = await createMockReportingCore(mockReportingConfig);
mockReportingCore.getUiSettingsServiceFactory = () =>
Promise.resolve((mockUiSettingsClient as unknown) as IUiSettingsClient);
- mockReportingCore.getElasticsearchService = () =>
- mockElasticsearch as ElasticsearchServiceSetup;
mockReportingCore.setConfig(mockReportingConfig);
+ mockEsClient = (await mockReportingCore.getEsClient()).asScoped({} as any)
+ .asCurrentUser as typeof mockEsClient;
cancellationToken = new CancellationToken();
defaultElasticsearchResponse = {
@@ -97,14 +90,9 @@ describe('CSV Execute Job', function () {
},
_scroll_id: 'defaultScrollId',
};
- clusterStub = {
- callAsCurrentUser() {},
- };
-
- callAsCurrentUserStub = sinon
- .stub(clusterStub, 'callAsCurrentUser')
- .resolves(defaultElasticsearchResponse);
+ mockEsClient.search.mockResolvedValue({ body: defaultElasticsearchResponse } as any);
+ mockEsClient.scroll.mockResolvedValue({ body: defaultElasticsearchResponse } as any);
mockUiSettingsClient.get.withArgs(CSV_SEPARATOR_SETTING).returns(',');
mockUiSettingsClient.get.withArgs(CSV_QUOTE_VALUES_SETTING).returns(true);
@@ -127,7 +115,7 @@ describe('CSV Execute Job', function () {
});
describe('basic Elasticsearch call behavior', function () {
- it('should decrypt encrypted headers and pass to callAsCurrentUser', async function () {
+ it('should decrypt encrypted headers and pass to the elasticsearch client', async function () {
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
await runTask(
'job456',
@@ -138,8 +126,7 @@ describe('CSV Execute Job', function () {
}),
cancellationToken
);
- expect(callAsCurrentUserStub.called).toBe(true);
- expect(callAsCurrentUserStub.firstCall.args[0]).toEqual('search');
+ expect(mockEsClient.search).toHaveBeenCalled();
});
it('should pass the index and body to execute the initial search', async function () {
@@ -160,21 +147,22 @@ describe('CSV Execute Job', function () {
await runTask('job777', job, cancellationToken);
- const searchCall = callAsCurrentUserStub.firstCall;
- expect(searchCall.args[0]).toBe('search');
- expect(searchCall.args[1].index).toBe(index);
- expect(searchCall.args[1].body).toBe(body);
+ expect(mockEsClient.search).toHaveBeenCalledWith(expect.objectContaining({ body, index }));
});
it('should pass the scrollId from the initial search to the subsequent scroll', async function () {
const scrollId = getRandomScrollId();
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: scrollId,
},
- _scroll_id: scrollId,
- });
- callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse);
+ } as any);
+ mockEsClient.scroll.mockResolvedValue({ body: defaultElasticsearchResponse } as any);
+
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
await runTask(
'job456',
@@ -186,10 +174,9 @@ describe('CSV Execute Job', function () {
cancellationToken
);
- const scrollCall = callAsCurrentUserStub.secondCall;
-
- expect(scrollCall.args[0]).toBe('scroll');
- expect(scrollCall.args[1].scrollId).toBe(scrollId);
+ expect(mockEsClient.scroll).toHaveBeenCalledWith(
+ expect.objectContaining({ scroll_id: scrollId })
+ );
});
it('should not execute scroll if there are no hits from the search', async function () {
@@ -204,28 +191,27 @@ describe('CSV Execute Job', function () {
cancellationToken
);
- expect(callAsCurrentUserStub.callCount).toBe(2);
-
- const searchCall = callAsCurrentUserStub.firstCall;
- expect(searchCall.args[0]).toBe('search');
-
- const clearScrollCall = callAsCurrentUserStub.secondCall;
- expect(clearScrollCall.args[0]).toBe('clearScroll');
+ expect(mockEsClient.search).toHaveBeenCalled();
+ expect(mockEsClient.clearScroll).toHaveBeenCalled();
});
it('should stop executing scroll if there are no hits', async function () {
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
- callAsCurrentUserStub.onSecondCall().resolves({
- hits: {
- hits: [],
+ } as any);
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
await runTask(
@@ -238,33 +224,30 @@ describe('CSV Execute Job', function () {
cancellationToken
);
- expect(callAsCurrentUserStub.callCount).toBe(3);
-
- const searchCall = callAsCurrentUserStub.firstCall;
- expect(searchCall.args[0]).toBe('search');
-
- const scrollCall = callAsCurrentUserStub.secondCall;
- expect(scrollCall.args[0]).toBe('scroll');
-
- const clearScroll = callAsCurrentUserStub.thirdCall;
- expect(clearScroll.args[0]).toBe('clearScroll');
+ expect(mockEsClient.search).toHaveBeenCalled();
+ expect(mockEsClient.scroll).toHaveBeenCalled();
+ expect(mockEsClient.clearScroll).toHaveBeenCalled();
});
it('should call clearScroll with scrollId when there are no more hits', async function () {
const lastScrollId = getRandomScrollId();
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
- callAsCurrentUserStub.onSecondCall().resolves({
- hits: {
- hits: [],
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [],
+ },
+ _scroll_id: lastScrollId,
},
- _scroll_id: lastScrollId,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
await runTask(
@@ -277,26 +260,28 @@ describe('CSV Execute Job', function () {
cancellationToken
);
- const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1);
- expect(lastCall.args[0]).toBe('clearScroll');
- expect(lastCall.args[1].scrollId).toEqual([lastScrollId]);
+ expect(mockEsClient.clearScroll).toHaveBeenCalledWith(
+ expect.objectContaining({ scroll_id: lastScrollId })
+ );
});
it('calls clearScroll when there is an error iterating the hits', async function () {
const lastScrollId = getRandomScrollId();
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [
- {
- _source: {
- one: 'foo',
- two: 'bar',
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [
+ {
+ _source: {
+ one: 'foo',
+ two: 'bar',
+ },
},
- },
- ],
+ ],
+ },
+ _scroll_id: lastScrollId,
},
- _scroll_id: lastScrollId,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -309,21 +294,23 @@ describe('CSV Execute Job', function () {
`[TypeError: Cannot read property 'indexOf' of undefined]`
);
- const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1);
- expect(lastCall.args[0]).toBe('clearScroll');
- expect(lastCall.args[1].scrollId).toEqual([lastScrollId]);
+ expect(mockEsClient.clearScroll).toHaveBeenCalledWith(
+ expect.objectContaining({ scroll_id: lastScrollId })
+ );
});
});
describe('Warning when cells have formulas', () => {
it('returns `csv_contains_formulas` when cells contain formulas', async function () {
configGetStub.withArgs('csv', 'checkForFormulas').returns(true);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -343,12 +330,14 @@ describe('CSV Execute Job', function () {
it('returns warnings when headings contain formulas', async function () {
configGetStub.withArgs('csv', 'checkForFormulas').returns(true);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -369,12 +358,14 @@ describe('CSV Execute Job', function () {
it('returns no warnings when cells have no formulas', async function () {
configGetStub.withArgs('csv', 'checkForFormulas').returns(true);
configGetStub.withArgs('csv', 'escapeFormulaValues').returns(false);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -395,12 +386,14 @@ describe('CSV Execute Job', function () {
it('returns no warnings when cells have formulas but are escaped', async function () {
configGetStub.withArgs('csv', 'checkForFormulas').returns(true);
configGetStub.withArgs('csv', 'escapeFormulaValues').returns(true);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -421,12 +414,14 @@ describe('CSV Execute Job', function () {
it('returns no warnings when configured not to', async () => {
configGetStub.withArgs('csv', 'checkForFormulas').returns(false);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -448,12 +443,14 @@ describe('CSV Execute Job', function () {
describe('Byte order mark encoding', () => {
it('encodes CSVs with BOM', async () => {
configGetStub.withArgs('csv', 'useByteOrderMarkEncoding').returns(true);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: 'one', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'one', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -469,12 +466,14 @@ describe('CSV Execute Job', function () {
it('encodes CSVs without BOM', async () => {
configGetStub.withArgs('csv', 'useByteOrderMarkEncoding').returns(false);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: 'one', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'one', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -492,12 +491,14 @@ describe('CSV Execute Job', function () {
describe('Escaping cells with formulas', () => {
it('escapes values with formulas', async () => {
configGetStub.withArgs('csv', 'escapeFormulaValues').returns(true);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: `=cmd|' /C calc'!A0`, two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: `=cmd|' /C calc'!A0`, two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -513,12 +514,14 @@ describe('CSV Execute Job', function () {
it('does not escape values with formulas', async () => {
configGetStub.withArgs('csv', 'escapeFormulaValues').returns(false);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: `=cmd|' /C calc'!A0`, two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: `=cmd|' /C calc'!A0`, two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -535,7 +538,7 @@ describe('CSV Execute Job', function () {
describe('Elasticsearch call errors', function () {
it('should reject Promise if search call errors out', async function () {
- callAsCurrentUserStub.rejects(new Error());
+ mockEsClient.search.mockRejectedValueOnce(new Error());
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -548,13 +551,15 @@ describe('CSV Execute Job', function () {
});
it('should reject Promise if scroll call errors out', async function () {
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
- callAsCurrentUserStub.onSecondCall().rejects(new Error());
+ } as any);
+ mockEsClient.scroll.mockRejectedValueOnce(new Error());
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -569,12 +574,14 @@ describe('CSV Execute Job', function () {
describe('invalid responses', function () {
it('should reject Promise if search returns hits but no _scroll_id', async function () {
- callAsCurrentUserStub.resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: undefined,
},
- _scroll_id: undefined,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -588,12 +595,14 @@ describe('CSV Execute Job', function () {
});
it('should reject Promise if search returns no hits and no _scroll_id', async function () {
- callAsCurrentUserStub.resolves({
- hits: {
- hits: [],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [],
+ },
+ _scroll_id: undefined,
},
- _scroll_id: undefined,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -607,19 +616,23 @@ describe('CSV Execute Job', function () {
});
it('should reject Promise if scroll returns hits but no _scroll_id', async function () {
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
- callAsCurrentUserStub.onSecondCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: undefined,
},
- _scroll_id: undefined,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -633,19 +646,23 @@ describe('CSV Execute Job', function () {
});
it('should reject Promise if scroll returns no hits and no _scroll_id', async function () {
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
- callAsCurrentUserStub.onSecondCall().resolves({
- hits: {
- hits: [],
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [],
+ },
+ _scroll_id: undefined,
},
- _scroll_id: undefined,
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -663,21 +680,20 @@ describe('CSV Execute Job', function () {
const scrollId = getRandomScrollId();
beforeEach(function () {
- // We have to "re-stub" the callAsCurrentUser stub here so that we can use the fakeFunction
- // that delays the Promise resolution so we have a chance to call cancellationToken.cancel().
- // Otherwise, we get into an endless loop, and don't have a chance to call cancel
- callAsCurrentUserStub.restore();
- callAsCurrentUserStub = sinon
- .stub(clusterStub, 'callAsCurrentUser')
- .callsFake(async function () {
- await delay(1);
- return {
+ const searchStub = async () => {
+ await delay(1);
+ return {
+ body: {
hits: {
hits: [{}],
},
_scroll_id: scrollId,
- };
- });
+ },
+ };
+ };
+
+ mockEsClient.search.mockImplementation(searchStub as typeof mockEsClient.search);
+ mockEsClient.scroll.mockImplementation(searchStub as typeof mockEsClient.scroll);
});
it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function () {
@@ -693,10 +709,15 @@ describe('CSV Execute Job', function () {
);
await delay(250);
- const callCount = callAsCurrentUserStub.callCount;
+
+ expect(mockEsClient.search).toHaveBeenCalled();
+ expect(mockEsClient.scroll).toHaveBeenCalled();
+ expect(mockEsClient.clearScroll).not.toHaveBeenCalled();
+
cancellationToken.cancel();
await delay(250);
- expect(callAsCurrentUserStub.callCount).toBe(callCount + 1); // last call is to clear the scroll
+
+ expect(mockEsClient.clearScroll).toHaveBeenCalled();
});
it(`shouldn't call clearScroll if it never got a scrollId`, async function () {
@@ -712,9 +733,7 @@ describe('CSV Execute Job', function () {
);
cancellationToken.cancel();
- for (let i = 0; i < callAsCurrentUserStub.callCount; ++i) {
- expect(callAsCurrentUserStub.getCall(i).args[1]).not.toBe('clearScroll'); // dead code?
- }
+ expect(mockEsClient.clearScroll).not.toHaveBeenCalled();
});
it('should call clearScroll if it got a scrollId', async function () {
@@ -732,9 +751,11 @@ describe('CSV Execute Job', function () {
cancellationToken.cancel();
await delay(100);
- const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1);
- expect(lastCall.args[0]).toBe('clearScroll');
- expect(lastCall.args[1].scrollId).toEqual([scrollId]);
+ expect(mockEsClient.clearScroll).toHaveBeenCalledWith(
+ expect.objectContaining({
+ scroll_id: scrollId,
+ })
+ );
});
});
@@ -788,12 +809,14 @@ describe('CSV Execute Job', function () {
it('should write column headers to output, when there are results', async function () {
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{ one: '1', two: '2' }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ one: '1', two: '2' }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -809,12 +832,14 @@ describe('CSV Execute Job', function () {
it('should use comma separated values of non-nested fields from _source', async function () {
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -831,18 +856,22 @@ describe('CSV Execute Job', function () {
it('should concatenate the hits from multiple responses', async function () {
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
- callAsCurrentUserStub.onSecondCall().resolves({
- hits: {
- hits: [{ _source: { one: 'baz', two: 'qux' } }],
+ } as any);
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'baz', two: 'qux' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -860,12 +889,14 @@ describe('CSV Execute Job', function () {
it('should use field formatters to format fields', async function () {
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const jobParams = getBasePayload({
headers: encryptedHeaders,
@@ -962,12 +993,14 @@ describe('CSV Execute Job', function () {
beforeEach(async function () {
configGetStub.withArgs('csv', 'maxSizeBytes').returns(9);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -1002,12 +1035,14 @@ describe('CSV Execute Job', function () {
Promise.resolve((mockUiSettingsClient as unknown) as IUiSettingsClient);
configGetStub.withArgs('csv', 'maxSizeBytes').returns(18);
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{ _source: { one: 'foo', two: 'bar' } }],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -1039,12 +1074,14 @@ describe('CSV Execute Job', function () {
const scrollDuration = 'test';
configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration });
- callAsCurrentUserStub.onFirstCall().returns({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -1056,21 +1093,23 @@ describe('CSV Execute Job', function () {
await runTask('job123', jobParams, cancellationToken);
- const searchCall = callAsCurrentUserStub.firstCall;
- expect(searchCall.args[0]).toBe('search');
- expect(searchCall.args[1].scroll).toBe(scrollDuration);
+ expect(mockEsClient.search).toHaveBeenCalledWith(
+ expect.objectContaining({ scroll: scrollDuration })
+ );
});
it('passes scroll size to initial search call', async function () {
const scrollSize = 100;
configGetStub.withArgs('csv', 'scroll').returns({ size: scrollSize });
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -1082,21 +1121,23 @@ describe('CSV Execute Job', function () {
await runTask('job123', jobParams, cancellationToken);
- const searchCall = callAsCurrentUserStub.firstCall;
- expect(searchCall.args[0]).toBe('search');
- expect(searchCall.args[1].size).toBe(scrollSize);
+ expect(mockEsClient.search).toHaveBeenCalledWith(
+ expect.objectContaining({ size: scrollSize })
+ );
});
it('passes scroll duration to subsequent scroll call', async function () {
const scrollDuration = 'test';
configGetStub.withArgs('csv', 'scroll').returns({ duration: scrollDuration });
- callAsCurrentUserStub.onFirstCall().resolves({
- hits: {
- hits: [{}],
+ mockEsClient.search.mockResolvedValueOnce({
+ body: {
+ hits: {
+ hits: [{}],
+ },
+ _scroll_id: 'scrollId',
},
- _scroll_id: 'scrollId',
- });
+ } as any);
const runTask = runTaskFnFactory(mockReportingCore, mockLogger);
const jobParams = getBasePayload({
@@ -1108,9 +1149,9 @@ describe('CSV Execute Job', function () {
await runTask('job123', jobParams, cancellationToken);
- const scrollCall = callAsCurrentUserStub.secondCall;
- expect(scrollCall.args[0]).toBe('scroll');
- expect(scrollCall.args[1].scroll).toBe(scrollDuration);
+ expect(mockEsClient.scroll).toHaveBeenCalledWith(
+ expect.objectContaining({ scroll: scrollDuration })
+ );
});
});
});
diff --git a/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts b/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts
index 0e13a91649406..57559d136ff3e 100644
--- a/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts
+++ b/x-pack/plugins/reporting/server/export_types/csv/execute_job.ts
@@ -17,7 +17,7 @@ export const runTaskFnFactory: RunTaskFnFactory<
const config = reporting.getConfig();
return async function runTask(jobId, job, cancellationToken) {
- const elasticsearch = reporting.getElasticsearchService();
+ const elasticsearch = await reporting.getEsClient();
const logger = parentLogger.clone([jobId]);
const generateCsv = createGenerateCsv(logger);
@@ -25,16 +25,13 @@ export const runTaskFnFactory: RunTaskFnFactory<
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
const fakeRequest = reporting.getFakeRequest({ headers }, job.spaceId, logger);
const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest, logger);
-
- const { callAsCurrentUser } = elasticsearch.legacy.client.asScoped(fakeRequest);
- const callEndpoint = (endpoint: string, clientParams = {}, options = {}) =>
- callAsCurrentUser(endpoint, clientParams, options);
+ const { asCurrentUser: elasticsearchClient } = elasticsearch.asScoped(fakeRequest);
const { content, maxSizeReached, size, csvContainsFormulas, warnings } = await generateCsv(
job,
config,
uiSettingsClient,
- callEndpoint,
+ elasticsearchClient,
cancellationToken
);
diff --git a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.test.ts b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.test.ts
index 4baa81e8be6c9..6b1b7fc98a4b8 100644
--- a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.test.ts
+++ b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.test.ts
@@ -7,16 +7,18 @@
import expect from '@kbn/expect';
import sinon from 'sinon';
+import { elasticsearchServiceMock } from 'src/core/server/mocks';
import { CancellationToken } from '../../../../common';
import { createMockLevelLogger } from '../../../test_helpers/create_mock_levellogger';
import { ScrollConfig } from '../../../types';
import { createHitIterator } from './hit_iterator';
+const { asInternalUser: mockEsClient } = elasticsearchServiceMock.createClusterClient();
const mockLogger = createMockLevelLogger();
const debugLogStub = sinon.stub(mockLogger, 'debug');
const warnLogStub = sinon.stub(mockLogger, 'warn');
const errorLogStub = sinon.stub(mockLogger, 'error');
-const mockCallEndpoint = sinon.stub();
+
const mockSearchRequest = {};
const mockConfig: ScrollConfig = { duration: '2s', size: 123 };
let realCancellationToken = new CancellationToken();
@@ -27,10 +29,30 @@ describe('hitIterator', function () {
debugLogStub.resetHistory();
warnLogStub.resetHistory();
errorLogStub.resetHistory();
- mockCallEndpoint.resetHistory();
- mockCallEndpoint.resetBehavior();
- mockCallEndpoint.resolves({ _scroll_id: '123blah', hits: { hits: ['you found me'] } });
- mockCallEndpoint.onCall(11).resolves({ _scroll_id: '123blah', hits: {} });
+
+ mockEsClient.search.mockClear();
+ mockEsClient.search.mockResolvedValue({
+ body: {
+ _scroll_id: '123blah',
+ hits: { hits: ['you found me'] },
+ },
+ } as any);
+
+ mockEsClient.scroll.mockClear();
+ for (let i = 0; i < 10; i++) {
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ _scroll_id: '123blah',
+ hits: { hits: ['you found me'] },
+ },
+ } as any);
+ }
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ _scroll_id: '123blah',
+ hits: {},
+ },
+ } as any);
isCancelledStub = sinon.stub(realCancellationToken, 'isCancelled');
isCancelledStub.returns(false);
@@ -45,7 +67,7 @@ describe('hitIterator', function () {
const hitIterator = createHitIterator(mockLogger);
const iterator = hitIterator(
mockConfig,
- mockCallEndpoint,
+ mockEsClient,
mockSearchRequest,
realCancellationToken
);
@@ -58,7 +80,7 @@ describe('hitIterator', function () {
expect(hit).to.be('you found me');
}
- expect(mockCallEndpoint.callCount).to.be(13);
+ expect(mockEsClient.scroll.mock.calls.length).to.be(11);
expect(debugLogStub.callCount).to.be(13);
expect(warnLogStub.callCount).to.be(0);
expect(errorLogStub.callCount).to.be(0);
@@ -73,7 +95,7 @@ describe('hitIterator', function () {
const hitIterator = createHitIterator(mockLogger);
const iterator = hitIterator(
mockConfig,
- mockCallEndpoint,
+ mockEsClient,
mockSearchRequest,
realCancellationToken
);
@@ -86,7 +108,7 @@ describe('hitIterator', function () {
expect(hit).to.be('you found me');
}
- expect(mockCallEndpoint.callCount).to.be(3);
+ expect(mockEsClient.scroll.mock.calls.length).to.be(1);
expect(debugLogStub.callCount).to.be(3);
expect(warnLogStub.callCount).to.be(1);
expect(errorLogStub.callCount).to.be(0);
@@ -98,13 +120,20 @@ describe('hitIterator', function () {
it('handles time out', async () => {
// Setup
- mockCallEndpoint.onCall(2).resolves({ status: 404 });
+ mockEsClient.scroll.mockReset();
+ mockEsClient.scroll.mockResolvedValueOnce({
+ body: {
+ _scroll_id: '123blah',
+ hits: { hits: ['you found me'] },
+ },
+ } as any);
+ mockEsClient.scroll.mockResolvedValueOnce({ body: { status: 404 } } as any);
// Begin
const hitIterator = createHitIterator(mockLogger);
const iterator = hitIterator(
mockConfig,
- mockCallEndpoint,
+ mockEsClient,
mockSearchRequest,
realCancellationToken
);
@@ -125,7 +154,7 @@ describe('hitIterator', function () {
errorThrown = true;
}
- expect(mockCallEndpoint.callCount).to.be(4);
+ expect(mockEsClient.scroll.mock.calls.length).to.be(2);
expect(debugLogStub.callCount).to.be(4);
expect(warnLogStub.callCount).to.be(0);
expect(errorLogStub.callCount).to.be(1);
@@ -134,13 +163,13 @@ describe('hitIterator', function () {
it('handles scroll id could not be cleared', async () => {
// Setup
- mockCallEndpoint.withArgs('clearScroll').rejects({ status: 404 });
+ mockEsClient.clearScroll.mockRejectedValueOnce({ status: 404 });
// Begin
const hitIterator = createHitIterator(mockLogger);
const iterator = hitIterator(
mockConfig,
- mockCallEndpoint,
+ mockEsClient,
mockSearchRequest,
realCancellationToken
);
@@ -153,7 +182,7 @@ describe('hitIterator', function () {
expect(hit).to.be('you found me');
}
- expect(mockCallEndpoint.callCount).to.be(13);
+ expect(mockEsClient.scroll.mock.calls.length).to.be(11);
expect(warnLogStub.callCount).to.be(1);
expect(errorLogStub.callCount).to.be(1);
});
diff --git a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.ts b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.ts
index b00622399d691..72935e64dd6b5 100644
--- a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.ts
+++ b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/hit_iterator.ts
@@ -5,54 +5,55 @@
* 2.0.
*/
+import { UnwrapPromise } from '@kbn/utility-types';
import { i18n } from '@kbn/i18n';
-import { SearchParams, SearchResponse } from 'elasticsearch';
+import { ElasticsearchClient } from 'src/core/server';
import { CancellationToken } from '../../../../common';
import { LevelLogger } from '../../../lib';
import { ScrollConfig } from '../../../types';
-export type EndpointCaller = (method: string, params: object) => Promise<SearchResponse<any>>;
+type SearchResponse = UnwrapPromise<ReturnType<ElasticsearchClient['search']>>;
+type SearchRequest = Required<Parameters<ElasticsearchClient['search']>>[0];
-function parseResponse(request: SearchResponse) {
- const response = request;
- if (!response || !response._scroll_id) {
+function parseResponse(response: SearchResponse) {
+ if (!response?.body._scroll_id) {
throw new Error(
i18n.translate('xpack.reporting.exportTypes.csv.hitIterator.expectedScrollIdErrorMessage', {
defaultMessage: 'Expected {scrollId} in the following Elasticsearch response: {response}',
- values: { response: JSON.stringify(response), scrollId: '_scroll_id' },
+ values: { response: JSON.stringify(response?.body), scrollId: '_scroll_id' },
})
);
}
- if (!response.hits) {
+ if (!response?.body.hits) {
throw new Error(
i18n.translate('xpack.reporting.exportTypes.csv.hitIterator.expectedHitsErrorMessage', {
defaultMessage: 'Expected {hits} in the following Elasticsearch response: {response}',
- values: { response: JSON.stringify(response), hits: 'hits' },
+ values: { response: JSON.stringify(response?.body), hits: 'hits' },
})
);
}
return {
- scrollId: response._scroll_id,
- hits: response.hits.hits,
+ scrollId: response.body._scroll_id,
+ hits: response.body.hits.hits,
};
}
export function createHitIterator(logger: LevelLogger) {
return async function* hitIterator(
scrollSettings: ScrollConfig,
- callEndpoint: EndpointCaller,
- searchRequest: SearchParams,
+ elasticsearchClient: ElasticsearchClient,
+ searchRequest: SearchRequest,
cancellationToken: CancellationToken
) {
logger.debug('executing search request');
- async function search(index: string | boolean | string[] | undefined, body: object) {
+ async function search(index: SearchRequest['index'], body: SearchRequest['body']) {
return parseResponse(
- await callEndpoint('search', {
- ignore_unavailable: true, // ignores if the index pattern contains any aliases that point to closed indices
+ await elasticsearchClient.search({
index,
body,
+ ignore_unavailable: true, // do not fail if the index pattern contains aliases that point to closed indices
scroll: scrollSettings.duration,
size: scrollSettings.size,
})
@@ -62,8 +63,8 @@ export function createHitIterator(logger: LevelLogger) {
async function scroll(scrollId: string | undefined) {
logger.debug('executing scroll request');
return parseResponse(
- await callEndpoint('scroll', {
- scrollId,
+ await elasticsearchClient.scroll({
+ scroll_id: scrollId,
scroll: scrollSettings.duration,
})
);
@@ -72,8 +73,8 @@ export function createHitIterator(logger: LevelLogger) {
async function clearScroll(scrollId: string | undefined) {
logger.debug('executing clearScroll request');
try {
- await callEndpoint('clearScroll', {
- scrollId: [scrollId],
+ await elasticsearchClient.clearScroll({
+ scroll_id: scrollId,
});
} catch (err) {
// Do not throw the error, as the job can still be completed successfully
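The migrated `hitIterator` is an async generator over the scoped `ElasticsearchClient`: callers drain it with `for await`, while the scroll and `clearScroll` bookkeeping stays inside the iterator. A hedged consumption sketch (logger, scroll settings, and search request are placeholders):

```ts
import { ElasticsearchClient } from 'src/core/server';
import { CancellationToken } from '../../../../common';
import { LevelLogger } from '../../../lib';
import { createHitIterator } from './hit_iterator';

async function collectHits(
  logger: LevelLogger,
  esClient: ElasticsearchClient,
  cancellationToken: CancellationToken
) {
  const hitIterator = createHitIterator(logger);
  const iterator = hitIterator(
    { duration: '30s', size: 500 }, // ScrollConfig
    esClient,
    { index: 'logs-*', body: { query: { match_all: {} } } },
    cancellationToken
  );

  const hits = [];
  // Each iteration yields one hit; the iterator issues the initial search,
  // follows the scroll, and clears it when the consumer finishes.
  for await (const hit of iterator) {
    hits.push(hit);
  }
  return hits;
}
```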
diff --git a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/index.ts b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/index.ts
index 629a81df350be..e5ed04f4cab66 100644
--- a/x-pack/plugins/reporting/server/export_types/csv/generate_csv/index.ts
+++ b/x-pack/plugins/reporting/server/export_types/csv/generate_csv/index.ts
@@ -6,7 +6,7 @@
*/
import { i18n } from '@kbn/i18n';
-import { IUiSettingsClient } from 'src/core/server';
+import { ElasticsearchClient, IUiSettingsClient } from 'src/core/server';
import { ReportingConfig } from '../../../';
import { CancellationToken } from '../../../../../../plugins/reporting/common';
import { CSV_BOM_CHARS } from '../../../../common/constants';
@@ -24,7 +24,7 @@ import { fieldFormatMapFactory } from './field_format_map';
import { createFlattenHit } from './flatten_hit';
import { createFormatCsvValues } from './format_csv_values';
import { getUiSettings } from './get_ui_settings';
-import { createHitIterator, EndpointCaller } from './hit_iterator';
+import { createHitIterator } from './hit_iterator';
interface SearchRequest {
index: string;
@@ -56,7 +56,7 @@ export function createGenerateCsv(logger: LevelLogger) {
job: GenerateCsvParams,
config: ReportingConfig,
uiSettingsClient: IUiSettingsClient,
- callEndpoint: EndpointCaller,
+ elasticsearchClient: ElasticsearchClient,
cancellationToken: CancellationToken
): Promise<SavedSearchGeneratorResult> {
const settings = await getUiSettings(job.browserTimezone, uiSettingsClient, config, logger);
@@ -79,7 +79,7 @@ export function createGenerateCsv(logger: LevelLogger) {
const iterator = hitIterator(
settings.scroll,
- callEndpoint,
+ elasticsearchClient,
job.searchRequest,
cancellationToken
);
diff --git a/x-pack/plugins/reporting/server/export_types/png/execute_job/index.test.ts b/x-pack/plugins/reporting/server/export_types/png/execute_job/index.test.ts
index 3a5298981738d..34fe5360522b1 100644
--- a/x-pack/plugins/reporting/server/export_types/png/execute_job/index.test.ts
+++ b/x-pack/plugins/reporting/server/export_types/png/execute_job/index.test.ts
@@ -59,16 +59,6 @@ beforeEach(async () => {
mockReporting = await createMockReportingCore(mockReportingConfig);
- const mockElasticsearch = {
- legacy: {
- client: {
- asScoped: () => ({ callAsCurrentUser: jest.fn() }),
- },
- },
- };
- const mockGetElasticsearch = jest.fn();
- mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch));
- mockReporting.getElasticsearchService = mockGetElasticsearch;
// @ts-ignore over-riding config method
mockReporting.config = mockReportingConfig;
diff --git a/x-pack/plugins/reporting/server/export_types/printable_pdf/execute_job/index.test.ts b/x-pack/plugins/reporting/server/export_types/printable_pdf/execute_job/index.test.ts
index 0c6a55fb895b5..61eab18987f7c 100644
--- a/x-pack/plugins/reporting/server/export_types/printable_pdf/execute_job/index.test.ts
+++ b/x-pack/plugins/reporting/server/export_types/printable_pdf/execute_job/index.test.ts
@@ -57,16 +57,6 @@ beforeEach(async () => {
mockReporting = await createMockReportingCore(mockReportingConfig);
- const mockElasticsearch = {
- legacy: {
- client: {
- asScoped: () => ({ callAsCurrentUser: jest.fn() }),
- },
- },
- };
- const mockGetElasticsearch = jest.fn();
- mockGetElasticsearch.mockImplementation(() => Promise.resolve(mockElasticsearch));
- mockReporting.getElasticsearchService = mockGetElasticsearch;
// @ts-ignore over-riding config
mockReporting.config = mockReportingConfig;
diff --git a/x-pack/plugins/reporting/server/lib/store/report.ts b/x-pack/plugins/reporting/server/lib/store/report.ts
index 817028cab1a39..9b98650e1d984 100644
--- a/x-pack/plugins/reporting/server/lib/store/report.ts
+++ b/x-pack/plugins/reporting/server/lib/store/report.ts
@@ -25,8 +25,8 @@ const puid = new Puid();
export class Report implements Partial<ReportSource> {
public _index?: string;
public _id: string;
- public _primary_term?: unknown; // set by ES
- public _seq_no: unknown; // set by ES
+ public _primary_term?: number; // set by ES
+ public _seq_no?: number; // set by ES
public readonly kibana_name: ReportSource['kibana_name'];
public readonly kibana_id: ReportSource['kibana_id'];
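Narrowing `_seq_no` and `_primary_term` from `unknown` to `number` matches what Elasticsearch returns for these metadata fields, and the store passes them back as `if_seq_no`/`if_primary_term` guards on its `update` calls further down in this diff. A minimal sketch of that optimistic-concurrency pattern, with an illustrative index name and doc body:

```typescript
import type { ElasticsearchClient } from 'src/core/server';

// Read the document's current _seq_no/_primary_term, then make the update
// conditional on them: if another writer changed the doc in between,
// Elasticsearch answers 409 Conflict instead of silently overwriting.
async function updateStatusExample(client: ElasticsearchClient, id: string) {
  const { body: doc } = await client.get({ index: '.reporting-example', id });

  await client.update({
    index: '.reporting-example',
    id,
    if_seq_no: doc._seq_no, // numbers, matching the Report fields above
    if_primary_term: doc._primary_term,
    refresh: true,
    body: { doc: { status: 'processing' } },
  });
}
```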
diff --git a/x-pack/plugins/reporting/server/lib/store/store.test.ts b/x-pack/plugins/reporting/server/lib/store/store.test.ts
index 01d91f8bc2ac2..2af0fe7830eea 100644
--- a/x-pack/plugins/reporting/server/lib/store/store.test.ts
+++ b/x-pack/plugins/reporting/server/lib/store/store.test.ts
@@ -5,8 +5,8 @@
* 2.0.
*/
-import sinon from 'sinon';
-import { ElasticsearchServiceSetup } from 'src/core/server';
+import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
+import { ElasticsearchClient } from 'src/core/server';
import { ReportingConfig, ReportingCore } from '../../';
import {
createMockConfig,
@@ -21,9 +21,7 @@ describe('ReportingStore', () => {
const mockLogger = createMockLevelLogger();
let mockConfig: ReportingConfig;
let mockCore: ReportingCore;
-
- const callClusterStub = sinon.stub();
- const mockElasticsearch = { legacy: { client: { callAsInternalUser: callClusterStub } } };
+ let mockEsClient: DeeplyMockedKeys<ElasticsearchClient>;
beforeEach(async () => {
const reportingConfig = {
@@ -33,17 +31,14 @@ describe('ReportingStore', () => {
const mockSchema = createMockConfigSchema(reportingConfig);
mockConfig = createMockConfig(mockSchema);
mockCore = await createMockReportingCore(mockConfig);
+ mockEsClient = (await mockCore.getEsClient()).asInternalUser as typeof mockEsClient;
- callClusterStub.reset();
- callClusterStub.withArgs('indices.exists').resolves({});
- callClusterStub.withArgs('indices.create').resolves({});
- callClusterStub.withArgs('index').resolves({ _id: 'stub-id', _index: 'stub-index' });
- callClusterStub.withArgs('indices.refresh').resolves({});
- callClusterStub.withArgs('update').resolves({});
- callClusterStub.withArgs('get').resolves({});
-
- mockCore.getElasticsearchService = () =>
- (mockElasticsearch as unknown) as ElasticsearchServiceSetup;
+ mockEsClient.indices.create.mockResolvedValue({} as any);
+ mockEsClient.indices.exists.mockResolvedValue({} as any);
+ mockEsClient.indices.refresh.mockResolvedValue({} as any);
+ mockEsClient.get.mockResolvedValue({} as any);
+ mockEsClient.index.mockResolvedValue({ body: { _id: 'stub-id', _index: 'stub-index' } } as any);
+ mockEsClient.update.mockResolvedValue({} as any);
});
describe('addReport', () => {
@@ -88,14 +83,14 @@ describe('ReportingStore', () => {
meta: {},
} as any);
expect(store.addReport(mockReport)).rejects.toMatchInlineSnapshot(
- `[TypeError: this.client.callAsInternalUser is not a function]`
+ `[Error: Report object from ES has missing fields!]`
);
});
it('handles error creating the index', async () => {
// setup
- callClusterStub.withArgs('indices.exists').resolves(false);
- callClusterStub.withArgs('indices.create').rejects(new Error('horrible error'));
+ mockEsClient.indices.exists.mockResolvedValue({ body: false } as any);
+ mockEsClient.indices.create.mockRejectedValue(new Error('horrible error'));
const store = new ReportingStore(mockCore, mockLogger);
const mockReport = new Report({
@@ -117,8 +112,8 @@ describe('ReportingStore', () => {
*/
it('ignores index creation error if the index already exists and continues adding the report', async () => {
// setup
- callClusterStub.withArgs('indices.exists').resolves(false);
- callClusterStub.withArgs('indices.create').rejects(new Error('devastating error'));
+ mockEsClient.indices.exists.mockResolvedValue({ body: false } as any);
+ mockEsClient.indices.create.mockRejectedValue(new Error('devastating error'));
const store = new ReportingStore(mockCore, mockLogger);
const mockReport = new Report({
@@ -134,10 +129,9 @@ describe('ReportingStore', () => {
it('skips creating the index if already exists', async () => {
// setup
- callClusterStub.withArgs('indices.exists').resolves(false);
- callClusterStub
- .withArgs('indices.create')
- .rejects(new Error('resource_already_exists_exception')); // will be triggered but ignored
+ mockEsClient.indices.exists.mockResolvedValue({ body: false } as any);
+ // will be triggered but ignored
+ mockEsClient.indices.create.mockRejectedValue(new Error('resource_already_exists_exception'));
const store = new ReportingStore(mockCore, mockLogger);
const mockReport = new Report({
@@ -159,10 +153,9 @@ describe('ReportingStore', () => {
it('allows username string to be `false`', async () => {
// setup
- callClusterStub.withArgs('indices.exists').resolves(false);
- callClusterStub
- .withArgs('indices.create')
- .rejects(new Error('resource_already_exists_exception')); // will be triggered but ignored
+ mockEsClient.indices.exists.mockResolvedValue({ body: false } as any);
+ // will be triggered but ignored
+ mockEsClient.indices.create.mockRejectedValue(new Error('resource_already_exists_exception'));
const store = new ReportingStore(mockCore, mockLogger);
const mockReport = new Report({
@@ -192,8 +185,8 @@ describe('ReportingStore', () => {
const mockReport: ReportDocument = {
_id: '1234-foo-78',
_index: '.reporting-test-17409',
- _primary_term: 'primary_term string',
- _seq_no: 'seq_no string',
+ _primary_term: 1234,
+ _seq_no: 5678,
_source: {
kibana_name: 'test',
kibana_id: 'test123',
@@ -210,7 +203,7 @@ describe('ReportingStore', () => {
output: null,
},
};
- callClusterStub.withArgs('get').resolves(mockReport);
+ mockEsClient.get.mockResolvedValue({ body: mockReport } as any);
const store = new ReportingStore(mockCore, mockLogger);
const report = new Report({
...mockReport,
@@ -221,8 +214,8 @@ describe('ReportingStore', () => {
Report {
"_id": "1234-foo-78",
"_index": ".reporting-test-17409",
- "_primary_term": "primary_term string",
- "_seq_no": "seq_no string",
+ "_primary_term": 1234,
+ "_seq_no": 5678,
"attempts": 0,
"browser_type": "browser type string",
"completed_at": undefined,
@@ -267,10 +260,9 @@ describe('ReportingStore', () => {
await store.setReportClaimed(report, { testDoc: 'test' } as any);
- const updateCall = callClusterStub.getCalls().find((call) => call.args[0] === 'update');
- expect(updateCall && updateCall.args).toMatchInlineSnapshot(`
+ const [updateCall] = mockEsClient.update.mock.calls;
+ expect(updateCall).toMatchInlineSnapshot(`
Array [
- "update",
Object {
"body": Object {
"doc": Object {
@@ -308,10 +300,9 @@ describe('ReportingStore', () => {
await store.setReportFailed(report, { errors: 'yes' } as any);
- const updateCall = callClusterStub.getCalls().find((call) => call.args[0] === 'update');
- expect(updateCall && updateCall.args).toMatchInlineSnapshot(`
+ const [updateCall] = mockEsClient.update.mock.calls;
+ expect(updateCall).toMatchInlineSnapshot(`
Array [
- "update",
Object {
"body": Object {
"doc": Object {
@@ -349,10 +340,9 @@ describe('ReportingStore', () => {
await store.setReportCompleted(report, { certainly_completed: 'yes' } as any);
- const updateCall = callClusterStub.getCalls().find((call) => call.args[0] === 'update');
- expect(updateCall && updateCall.args).toMatchInlineSnapshot(`
+ const [updateCall] = mockEsClient.update.mock.calls;
+ expect(updateCall).toMatchInlineSnapshot(`
Array [
- "update",
Object {
"body": Object {
"doc": Object {
@@ -395,10 +385,9 @@ describe('ReportingStore', () => {
},
} as any);
- const updateCall = callClusterStub.getCalls().find((call) => call.args[0] === 'update');
- expect(updateCall && updateCall.args).toMatchInlineSnapshot(`
+ const [updateCall] = mockEsClient.update.mock.calls;
+ expect(updateCall).toMatchInlineSnapshot(`
Array [
- "update",
Object {
"body": Object {
"doc": Object {
diff --git a/x-pack/plugins/reporting/server/lib/store/store.ts b/x-pack/plugins/reporting/server/lib/store/store.ts
index fdac471c26cb0..fc7bd9c23d769 100644
--- a/x-pack/plugins/reporting/server/lib/store/store.ts
+++ b/x-pack/plugins/reporting/server/lib/store/store.ts
@@ -5,8 +5,7 @@
* 2.0.
*/
-import { SearchParams } from 'elasticsearch';
-import { ElasticsearchServiceSetup } from 'src/core/server';
+import { ElasticsearchClient } from 'src/core/server';
import { LevelLogger, statuses } from '../';
import { ReportingCore } from '../../';
import { numberToDuration } from '../../../common/schema_utils';
@@ -14,7 +13,7 @@ import { JobStatus } from '../../../common/types';
import { ReportTaskParams } from '../tasks';
import { indexTimestamp } from './index_timestamp';
import { mapping } from './mapping';
-import { Report, ReportDocument } from './report';
+import { Report, ReportDocument, ReportSource } from './report';
/*
* When searching for long-pending reports, we get a subset of fields
@@ -45,59 +44,60 @@ export class ReportingStore {
private readonly indexPrefix: string; // config setting of index prefix in system index name
private readonly indexInterval: string; // config setting of index prefix: how often to poll for pending work
private readonly queueTimeoutMins: number; // config setting of queue timeout, rounded up to nearest minute
- private client: ElasticsearchServiceSetup['legacy']['client'];
- private logger: LevelLogger;
+ private client?: ElasticsearchClient;
- constructor(reporting: ReportingCore, logger: LevelLogger) {
- const config = reporting.getConfig();
- const elasticsearch = reporting.getElasticsearchService();
+ constructor(private reportingCore: ReportingCore, private logger: LevelLogger) {
+ const config = reportingCore.getConfig();
- this.client = elasticsearch.legacy.client;
this.indexPrefix = config.get('index');
this.indexInterval = config.get('queue', 'indexInterval');
this.logger = logger.clone(['store']);
this.queueTimeoutMins = Math.ceil(numberToDuration(config.get('queue', 'timeout')).asMinutes());
}
+ private async getClient() {
+ if (!this.client) {
+ ({ asInternalUser: this.client } = await this.reportingCore.getEsClient());
+ }
+
+ return this.client;
+ }
+
private async createIndex(indexName: string) {
- return await this.client
- .callAsInternalUser('indices.exists', {
- index: indexName,
- })
- .then((exists) => {
- if (exists) {
- return exists;
- }
-
- const indexSettings = {
- number_of_shards: 1,
- auto_expand_replicas: '0-1',
- };
- const body = {
- settings: indexSettings,
- mappings: {
- properties: mapping,
- },
- };
-
- return this.client
- .callAsInternalUser('indices.create', {
- index: indexName,
- body,
- })
- .then(() => true)
- .catch((err: Error) => {
- const isIndexExistsError = err.message.match(/resource_already_exists_exception/);
- if (isIndexExistsError) {
- // Do not fail a job if the job runner hits the race condition.
- this.logger.warn(`Automatic index creation failed: index already exists: ${err}`);
- return;
- }
-
- this.logger.error(err);
- throw err;
- });
- });
+ const client = await this.getClient();
+ const { body: exists } = await client.indices.exists({ index: indexName });
+
+ if (exists) {
+ return exists;
+ }
+
+ const indexSettings = {
+ number_of_shards: 1,
+ auto_expand_replicas: '0-1',
+ };
+ const body = {
+ settings: indexSettings,
+ mappings: {
+ properties: mapping,
+ },
+ };
+
+ try {
+ await client.indices.create({ index: indexName, body });
+
+ return true;
+ } catch (error) {
+ const isIndexExistsError = error.message.match(/resource_already_exists_exception/);
+ if (isIndexExistsError) {
+ // Do not fail a job if the job runner hits the race condition.
+ this.logger.warn(`Automatic index creation failed: index already exists: ${error}`);
+ return;
+ }
+
+ this.logger.error(error);
+
+ throw error;
+ }
}
/*
@@ -105,7 +105,7 @@ export class ReportingStore {
*/
private async indexReport(report: Report) {
const doc = {
- index: report._index,
+ index: report._index!,
id: report._id,
body: {
...report.toEsDocsJSON()._source,
@@ -114,14 +114,20 @@ export class ReportingStore {
status: statuses.JOB_STATUS_PENDING,
},
};
- return await this.client.callAsInternalUser('index', doc);
+
+ const client = await this.getClient();
+ const { body } = await client.index(doc);
+
+ return body;
}
/*
* Called from addReport, which handles any errors
*/
private async refreshIndex(index: string) {
- return await this.client.callAsInternalUser('indices.refresh', { index });
+ const client = await this.getClient();
+
+ return client.indices.refresh({ index });
}
public async addReport(report: Report): Promise<Report> {
@@ -156,7 +162,8 @@ export class ReportingStore {
}
try {
- const document = await this.client.callAsInternalUser('get', {
+ const client = await this.getClient();
+ const { body: document } = await client.get({
index: taskJson.index,
id: taskJson.id,
});
@@ -166,17 +173,17 @@ export class ReportingStore {
_index: document._index,
_seq_no: document._seq_no,
_primary_term: document._primary_term,
- jobtype: document._source.jobtype,
- attempts: document._source.attempts,
- browser_type: document._source.browser_type,
- created_at: document._source.created_at,
- created_by: document._source.created_by,
- max_attempts: document._source.max_attempts,
- meta: document._source.meta,
- payload: document._source.payload,
- process_expiration: document._source.process_expiration,
- status: document._source.status,
- timeout: document._source.timeout,
+ jobtype: document._source?.jobtype,
+ attempts: document._source?.attempts,
+ browser_type: document._source?.browser_type,
+ created_at: document._source?.created_at,
+ created_by: document._source?.created_by,
+ max_attempts: document._source?.max_attempts,
+ meta: document._source?.meta,
+ payload: document._source?.payload,
+ process_expiration: document._source?.process_expiration,
+ status: document._source?.status,
+ timeout: document._source?.timeout,
});
} catch (err) {
this.logger.error('Error in finding a report! ' + JSON.stringify({ report: taskJson }));
@@ -191,14 +198,17 @@ export class ReportingStore {
try {
checkReportIsEditable(report);
- return await this.client.callAsInternalUser('update', {
+ const client = await this.getClient();
+ const { body } = await client.update({
id: report._id,
- index: report._index,
+ index: report._index!,
if_seq_no: report._seq_no,
if_primary_term: report._primary_term,
refresh: true,
body: { doc },
});
+
+ return (body as unknown) as ReportDocument;
} catch (err) {
this.logger.error('Error in setting report pending status!');
this.logger.error(err);
@@ -215,14 +225,17 @@ export class ReportingStore {
try {
checkReportIsEditable(report);
- return await this.client.callAsInternalUser('update', {
+ const client = await this.getClient();
+ const { body } = await client.update({
id: report._id,
- index: report._index,
+ index: report._index!,
if_seq_no: report._seq_no,
if_primary_term: report._primary_term,
refresh: true,
body: { doc },
});
+
+ return (body as unknown) as ReportDocument;
} catch (err) {
this.logger.error('Error in setting report processing status!');
this.logger.error(err);
@@ -239,14 +252,17 @@ export class ReportingStore {
try {
checkReportIsEditable(report);
- return await this.client.callAsInternalUser('update', {
+ const client = await this.getClient();
+ const { body } = await client.update({
id: report._id,
- index: report._index,
+ index: report._index!,
if_seq_no: report._seq_no,
if_primary_term: report._primary_term,
refresh: true,
body: { doc },
});
+
+ return (body as unknown) as ReportDocument;
} catch (err) {
this.logger.error('Error in setting report failed status!');
this.logger.error(err);
@@ -267,14 +283,17 @@ export class ReportingStore {
};
checkReportIsEditable(report);
- return await this.client.callAsInternalUser('update', {
+ const client = await this.getClient();
+ const { body } = await client.update({
id: report._id,
- index: report._index,
+ index: report._index!,
if_seq_no: report._seq_no,
if_primary_term: report._primary_term,
refresh: true,
body: { doc },
});
+
+ return (body as unknown) as ReportDocument;
} catch (err) {
this.logger.error('Error in setting report complete status!');
this.logger.error(err);
@@ -286,16 +305,17 @@ export class ReportingStore {
try {
checkReportIsEditable(report);
- const updateParams = {
+ const client = await this.getClient();
+ const { body } = await client.update({
id: report._id,
- index: report._index,
+ index: report._index!,
if_seq_no: report._seq_no,
if_primary_term: report._primary_term,
refresh: true,
body: { doc: { process_expiration: null } },
- };
+ });
- return await this.client.callAsInternalUser('update', updateParams);
+ return (body as unknown) as ReportDocument;
} catch (err) {
this.logger.error('Error in clearing expiration!');
this.logger.error(err);
@@ -312,12 +332,11 @@ export class ReportingStore {
* Pending reports are not included in this search: they may be scheduled in TM just not run yet.
* TODO Should we get a list of the reports that are pending and scheduled in TM so we can exclude them from this query?
*/
- public async findZombieReportDocuments(
- logger = this.logger
- ): Promise<ReportRecordTimeout[]> {
- const searchParams: SearchParams = {
+ public async findZombieReportDocuments(): Promise<ReportRecordTimeout[]> {
+ const client = await this.getClient();
+ const { body } = await client.search({
index: this.indexPrefix + '-*',
- filterPath: 'hits.hits',
+ filter_path: 'hits.hits',
body: {
sort: { created_at: { order: 'desc' } },
query: {
@@ -335,13 +354,8 @@ export class ReportingStore {
},
},
},
- };
-
- const result = await this.client.callAsInternalUser(
- 'search',
- searchParams
- );
+ });
- return result.hits?.hits;
+ return body.hits?.hits as ReportRecordTimeout[];
}
}
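`ReportingStore` can no longer take a client in its constructor: `getEsClient()` resolves only once core start services are ready, while the store is built during setup, hence the private memoizing `getClient()`. A condensed sketch of that lazy-getter pattern; `CoreLike` and `LazyClientExample` are illustrative stand-ins for the slice of `ReportingCore` the store uses:

```typescript
import type { ElasticsearchClient } from 'src/core/server';

// Illustrative stand-in for the slice of ReportingCore the store needs.
interface CoreLike {
  getEsClient(): Promise<{ asInternalUser: ElasticsearchClient }>;
}

export class LazyClientExample {
  private client?: ElasticsearchClient;

  constructor(private readonly core: CoreLike) {}

  // The first caller awaits core start services and caches the
  // internal-user client; later callers receive the cached instance.
  private async getClient(): Promise<ElasticsearchClient> {
    if (!this.client) {
      const { asInternalUser } = await this.core.getEsClient();
      this.client = asInternalUser;
    }
    return this.client;
  }

  async indexExists(name: string): Promise<boolean> {
    const client = await this.getClient();
    const { body: exists } = await client.indices.exists({ index: name });
    return exists;
  }
}
```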
diff --git a/x-pack/plugins/reporting/server/plugin.ts b/x-pack/plugins/reporting/server/plugin.ts
index 3dc7e7ef3df92..75411b30ec0bd 100644
--- a/x-pack/plugins/reporting/server/plugin.ts
+++ b/x-pack/plugins/reporting/server/plugin.ts
@@ -47,7 +47,7 @@ export class ReportingPlugin
registerUiSettings(core);
- const { elasticsearch, http } = core;
+ const { http } = core;
const { features, licensing, security, spaces, taskManager } = plugins;
const { initializerContext: initContext, reportingCore } = this;
@@ -56,7 +56,6 @@ export class ReportingPlugin
reportingCore.pluginSetup({
features,
- elasticsearch,
licensing,
basePath,
router,
diff --git a/x-pack/plugins/reporting/server/routes/diagnostic/config.test.ts b/x-pack/plugins/reporting/server/routes/diagnostic/config.test.ts
index f35d8f5910da0..952a33ff64190 100644
--- a/x-pack/plugins/reporting/server/routes/diagnostic/config.test.ts
+++ b/x-pack/plugins/reporting/server/routes/diagnostic/config.test.ts
@@ -6,6 +6,8 @@
*/
import { UnwrapPromise } from '@kbn/utility-types';
+import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
+import { ElasticsearchClient } from 'kibana/server';
import { setupServer } from 'src/core/server/test_utils';
import supertest from 'supertest';
import { ReportingCore } from '../..';
@@ -26,6 +28,7 @@ describe('POST /diagnose/config', () => {
let core: ReportingCore;
let mockSetupDeps: any;
let config: any;
+ let mockEsClient: DeeplyMockedKeys<ElasticsearchClient>;
const mockLogger = createMockLevelLogger();
@@ -38,9 +41,6 @@ describe('POST /diagnose/config', () => {
);
mockSetupDeps = createMockPluginSetup({
- elasticsearch: {
- legacy: { client: { callAsInternalUser: jest.fn() } },
- },
router: httpSetup.createRouter(''),
} as unknown) as any;
@@ -58,6 +58,7 @@ describe('POST /diagnose/config', () => {
};
core = await createMockReportingCore(config, mockSetupDeps);
+ mockEsClient = (await core.getEsClient()).asInternalUser as typeof mockEsClient;
});
afterEach(async () => {
@@ -65,15 +66,15 @@ describe('POST /diagnose/config', () => {
});
it('returns a 200 by default when configured properly', async () => {
- mockSetupDeps.elasticsearch.legacy.client.callAsInternalUser.mockImplementation(() =>
- Promise.resolve({
+ mockEsClient.cluster.getSettings.mockResolvedValueOnce({
+ body: {
defaults: {
http: {
max_content_length: '100mb',
},
},
- })
- );
+ },
+ } as any);
registerDiagnoseConfig(core, mockLogger);
await server.start();
@@ -94,15 +95,15 @@ describe('POST /diagnose/config', () => {
it('returns a 200 with help text when not configured properly', async () => {
config.get.mockImplementation(() => 10485760);
- mockSetupDeps.elasticsearch.legacy.client.callAsInternalUser.mockImplementation(() =>
- Promise.resolve({
+ mockEsClient.cluster.getSettings.mockResolvedValueOnce({
+ body: {
defaults: {
http: {
max_content_length: '5mb',
},
},
- })
- );
+ },
+ } as any);
registerDiagnoseConfig(core, mockLogger);
await server.start();
diff --git a/x-pack/plugins/reporting/server/routes/diagnostic/config.ts b/x-pack/plugins/reporting/server/routes/diagnostic/config.ts
index e3a01c464c36d..109849aa302f2 100644
--- a/x-pack/plugins/reporting/server/routes/diagnostic/config.ts
+++ b/x-pack/plugins/reporting/server/routes/diagnostic/config.ts
@@ -28,7 +28,7 @@ const numberToByteSizeValue = (value: number | ByteSizeValue) => {
export const registerDiagnoseConfig = (reporting: ReportingCore, logger: Logger) => {
const setupDeps = reporting.getPluginSetupDeps();
const userHandler = authorizedUserPreRoutingFactory(reporting);
- const { router, elasticsearch } = setupDeps;
+ const { router } = setupDeps;
router.post(
{
@@ -37,13 +37,13 @@ export const registerDiagnoseConfig = (reporting: ReportingCore, logger: Logger)
},
userHandler(async (user, context, req, res) => {
const warnings = [];
- const { callAsInternalUser } = elasticsearch.legacy.client;
+ const { asInternalUser: elasticsearchClient } = await reporting.getEsClient();
const config = reporting.getConfig();
- const elasticClusterSettingsResponse = await callAsInternalUser('cluster.getSettings', {
- includeDefaults: true,
+ const { body: clusterSettings } = await elasticsearchClient.cluster.getSettings({
+ include_defaults: true,
});
- const { persistent, transient, defaults: defaultSettings } = elasticClusterSettingsResponse;
+ const { persistent, transient, defaults: defaultSettings } = clusterSettings;
const elasticClusterSettings = defaults({}, persistent, transient, defaultSettings);
const elasticSearchMaxContent = get(
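In the rewritten handler, `include_defaults: true` is what makes the `defaults` section appear in the `cluster.getSettings` response; lodash `defaults` then layers the explicitly set `persistent` and `transient` values over the built-in defaults. A small standalone sketch of reading `http.max_content_length` the same way; the fallback value is illustrative:

```typescript
import { defaults, get } from 'lodash';
import type { ElasticsearchClient } from 'src/core/server';

async function getMaxContentLength(client: ElasticsearchClient): Promise<string> {
  // include_defaults adds the `defaults` section alongside persistent/transient.
  const { body: settings } = await client.cluster.getSettings({
    include_defaults: true,
  });

  // Layer explicit settings over the built-in defaults, as the route does.
  const merged = defaults({}, settings.persistent, settings.transient, settings.defaults);

  // Falls back when the setting was never overridden anywhere.
  return get(merged, 'http.max_content_length', '100mb');
}
```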
diff --git a/x-pack/plugins/reporting/server/routes/generation.test.ts b/x-pack/plugins/reporting/server/routes/generation.test.ts
index f6966a3b28ea9..0ce977e0a5431 100644
--- a/x-pack/plugins/reporting/server/routes/generation.test.ts
+++ b/x-pack/plugins/reporting/server/routes/generation.test.ts
@@ -6,8 +6,9 @@
*/
import { UnwrapPromise } from '@kbn/utility-types';
+import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
import { of } from 'rxjs';
-import sinon from 'sinon';
+import { ElasticsearchClient } from 'kibana/server';
import { setupServer } from 'src/core/server/test_utils';
import supertest from 'supertest';
import { ReportingCore } from '..';
@@ -24,8 +25,8 @@ describe('POST /api/reporting/generate', () => {
let server: SetupServerReturn['server'];
let httpSetup: SetupServerReturn['httpSetup'];
let mockExportTypesRegistry: ExportTypesRegistry;
- let callClusterStub: any;
let core: ReportingCore;
+ let mockEsClient: DeeplyMockedKeys<ElasticsearchClient>;
const config = {
get: jest.fn().mockImplementation((...args) => {
@@ -55,12 +56,7 @@ describe('POST /api/reporting/generate', () => {
() => ({})
);
- callClusterStub = sinon.stub().resolves({});
-
const mockSetupDeps = createMockPluginSetup({
- elasticsearch: {
- legacy: { client: { callAsInternalUser: callClusterStub } },
- },
security: {
license: { isEnabled: () => true },
authc: {
@@ -85,6 +81,9 @@ describe('POST /api/reporting/generate', () => {
runTaskFnFactory: () => async () => ({ runParamsTest: { test2: 'yes' } } as any),
});
core.getExportTypesRegistry = () => mockExportTypesRegistry;
+
+ mockEsClient = (await core.getEsClient()).asInternalUser as typeof mockEsClient;
+ mockEsClient.index.mockResolvedValue({ body: {} } as any);
});
afterEach(async () => {
@@ -144,7 +143,7 @@ describe('POST /api/reporting/generate', () => {
});
it('returns 500 if job handler throws an error', async () => {
- callClusterStub.withArgs('index').rejects('silly');
+ mockEsClient.index.mockRejectedValueOnce('silly');
registerJobGenerationRoutes(core, mockLogger);
@@ -157,7 +156,7 @@ describe('POST /api/reporting/generate', () => {
});
it(`returns 200 if job handler doesn't error`, async () => {
- callClusterStub.withArgs('index').resolves({ _id: 'foo', _index: 'foo-index' });
+ mockEsClient.index.mockResolvedValueOnce({ body: { _id: 'foo', _index: 'foo-index' } } as any);
registerJobGenerationRoutes(core, mockLogger);
await server.start();
diff --git a/x-pack/plugins/reporting/server/routes/jobs.test.ts b/x-pack/plugins/reporting/server/routes/jobs.test.ts
index 706a8d5dad7dd..885fc701935fe 100644
--- a/x-pack/plugins/reporting/server/routes/jobs.test.ts
+++ b/x-pack/plugins/reporting/server/routes/jobs.test.ts
@@ -6,7 +6,9 @@
*/
import { UnwrapPromise } from '@kbn/utility-types';
+import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
import { of } from 'rxjs';
+import { ElasticsearchClient } from 'kibana/server';
import { setupServer } from 'src/core/server/test_utils';
import supertest from 'supertest';
import { ReportingCore } from '..';
@@ -29,6 +31,7 @@ describe('GET /api/reporting/jobs/download', () => {
let httpSetup: SetupServerReturn['httpSetup'];
let exportTypesRegistry: ExportTypesRegistry;
let core: ReportingCore;
+ let mockEsClient: DeeplyMockedKeys<ElasticsearchClient>;
const config = createMockConfig(createMockConfigSchema());
const getHits = (...sources: any) => {
@@ -47,9 +50,6 @@ describe('GET /api/reporting/jobs/download', () => {
() => ({})
);
const mockSetupDeps = createMockPluginSetup({
- elasticsearch: {
- legacy: { client: { callAsInternalUser: jest.fn() } },
- },
security: {
license: {
isEnabled: () => true,
@@ -89,6 +89,8 @@ describe('GET /api/reporting/jobs/download', () => {
validLicenses: ['basic', 'gold'],
} as ExportTypeDefinition);
core.getExportTypesRegistry = () => exportTypesRegistry;
+
+ mockEsClient = (await core.getEsClient()).asInternalUser as typeof mockEsClient;
});
afterEach(async () => {
@@ -96,10 +98,7 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('fails on malformed download IDs', async () => {
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())),
- };
+ mockEsClient.search.mockResolvedValueOnce({ body: getHits() } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -171,11 +170,7 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('returns 404 if job not found', async () => {
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())),
- };
-
+ mockEsClient.search.mockResolvedValueOnce({ body: getHits() } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -184,12 +179,9 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('returns a 401 if not a valid job type', async () => {
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest
- .fn()
- .mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getHits({ jobtype: 'invalidJobType' }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -198,14 +190,9 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('when a job is incomplete', async () => {
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest
- .fn()
- .mockReturnValue(
- Promise.resolve(getHits({ jobtype: 'unencodedJobType', status: 'pending' }))
- ),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getHits({ jobtype: 'unencodedJobType', status: 'pending' }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -218,18 +205,13 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('when a job fails', async () => {
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(
- Promise.resolve(
- getHits({
- jobtype: 'unencodedJobType',
- status: 'failed',
- output: { content: 'job failure message' },
- })
- )
- ),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getHits({
+ jobtype: 'unencodedJobType',
+ status: 'failed',
+ output: { content: 'job failure message' },
+ }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -243,7 +225,7 @@ describe('GET /api/reporting/jobs/download', () => {
});
describe('successful downloads', () => {
- const getCompleteHits = async ({
+ const getCompleteHits = ({
jobType = 'unencodedJobType',
outputContent = 'job output content',
outputContentType = 'text/plain',
@@ -260,11 +242,7 @@ describe('GET /api/reporting/jobs/download', () => {
};
it('when a known job-type is complete', async () => {
- const hits = getCompleteHits();
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
- };
+ mockEsClient.search.mockResolvedValueOnce({ body: getCompleteHits() } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -276,11 +254,7 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('succeeds when security is not there or disabled', async () => {
- const hits = getCompleteHits();
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
- };
+ mockEsClient.search.mockResolvedValueOnce({ body: getCompleteHits() } as any);
// @ts-ignore
core.pluginSetupDeps.security = null;
@@ -297,14 +271,12 @@ describe('GET /api/reporting/jobs/download', () => {
});
it(`doesn't encode output-content for non-specified job-types`, async () => {
- const hits = getCompleteHits({
- jobType: 'unencodedJobType',
- outputContent: 'test',
- });
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getCompleteHits({
+ jobType: 'unencodedJobType',
+ outputContent: 'test',
+ }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -316,15 +288,13 @@ describe('GET /api/reporting/jobs/download', () => {
});
it(`base64 encodes output content for configured jobTypes`, async () => {
- const hits = getCompleteHits({
- jobType: 'base64EncodedJobType',
- outputContent: 'test',
- outputContentType: 'application/pdf',
- });
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getCompleteHits({
+ jobType: 'base64EncodedJobType',
+ outputContent: 'test',
+ outputContentType: 'application/pdf',
+ }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
@@ -337,15 +307,13 @@ describe('GET /api/reporting/jobs/download', () => {
});
it('refuses to return unknown content-types', async () => {
- const hits = getCompleteHits({
- jobType: 'unencodedJobType',
- outputContent: 'alert("all your base mine now");',
- outputContentType: 'application/html',
- });
- // @ts-ignore
- core.pluginSetupDeps.elasticsearch.legacy.client = {
- callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)),
- };
+ mockEsClient.search.mockResolvedValueOnce({
+ body: getCompleteHits({
+ jobType: 'unencodedJobType',
+ outputContent: 'alert("all your base mine now");',
+ outputContentType: 'application/html',
+ }),
+ } as any);
registerJobInfoRoutes(core);
await server.start();
diff --git a/x-pack/plugins/reporting/server/routes/lib/jobs_query.ts b/x-pack/plugins/reporting/server/routes/lib/jobs_query.ts
index 456c60e5c82e3..1db62f818216a 100644
--- a/x-pack/plugins/reporting/server/routes/lib/jobs_query.ts
+++ b/x-pack/plugins/reporting/server/routes/lib/jobs_query.ts
@@ -5,83 +5,59 @@
* 2.0.
*/
+import { UnwrapPromise } from '@kbn/utility-types';
import { i18n } from '@kbn/i18n';
-import { errors as elasticsearchErrors } from 'elasticsearch';
-import { get } from 'lodash';
+import { ResponseError } from '@elastic/elasticsearch/lib/errors';
+import { ElasticsearchClient } from 'src/core/server';
import { ReportingCore } from '../../';
import { ReportDocument } from '../../lib/store';
import { ReportingUser } from '../../types';
-const esErrors = elasticsearchErrors as Record<string, any>;
-const defaultSize = 10;
-
-// TODO: use SearchRequest from elasticsearch-client
-interface QueryBody {
- size?: number;
- from?: number;
- _source?: {
- excludes: string[];
- };
- query: {
- constant_score: {
- filter: {
- bool: {
- must: Array<Record<string, any>>;
- };
- };
- };
- };
-}
+type SearchRequest = Required<Parameters<ElasticsearchClient['search']>>[0];
interface GetOpts {
includeContent?: boolean;
}
-// TODO: use SearchResult from elasticsearch-client
-interface CountAggResult {
- count: number;
-}
-
+const defaultSize = 10;
const getUsername = (user: ReportingUser) => (user ? user.username : false);
-export function jobsQueryFactory(reportingCore: ReportingCore) {
- const { elasticsearch } = reportingCore.getPluginSetupDeps();
- const { callAsInternalUser } = elasticsearch.legacy.client;
-
- function execQuery(queryType: string, body: QueryBody) {
- const defaultBody: Record<string, object> = {
- search: {
- _source: {
- excludes: ['output.content'],
- },
- sort: [{ created_at: { order: 'desc' } }],
- size: defaultSize,
- },
- };
+function getSearchBody(body: SearchRequest['body']): SearchRequest['body'] {
+ return {
+ _source: {
+ excludes: ['output.content'],
+ },
+ sort: [{ created_at: { order: 'desc' } }],
+ size: defaultSize,
+ ...body,
+ };
+}
+export function jobsQueryFactory(reportingCore: ReportingCore) {
+ function getIndex() {
const config = reportingCore.getConfig();
- const index = config.get('index');
- const query = {
- index: `${index}-*`,
- body: Object.assign(defaultBody[queryType] || {}, body),
- };
-
- return callAsInternalUser(queryType, query).catch((err) => {
- if (err instanceof esErrors['401']) return;
- if (err instanceof esErrors['403']) return;
- if (err instanceof esErrors['404']) return;
- throw err;
- });
+
+ return `${config.get('index')}-*`;
}
- type Result = number;
+ async function execQuery<T extends (client: ElasticsearchClient) => any>(
+ callback: T
+ ): Promise<UnwrapPromise<ReturnType<T>> | undefined> {
+ try {
+ const { asInternalUser: client } = await reportingCore.getEsClient();
+
+ return await callback(client);
+ } catch (error) {
+ if (error instanceof ResponseError && [401, 403, 404].includes(error.statusCode)) {
+ return;
+ }
- function getHits(query: Promise) {
- return query.then((res) => get(res, 'hits.hits', []));
+ throw error;
+ }
}
return {
- list(
+ async list(
jobTypes: string[],
user: ReportingUser,
page = 0,
@@ -89,32 +65,34 @@ export function jobsQueryFactory(reportingCore: ReportingCore) {
jobIds: string[] | null
) {
const username = getUsername(user);
- const body: QueryBody = {
+ const body = getSearchBody({
size,
from: size * page,
query: {
constant_score: {
filter: {
bool: {
- must: [{ terms: { jobtype: jobTypes } }, { term: { created_by: username } }],
+ must: [
+ { terms: { jobtype: jobTypes } },
+ { term: { created_by: username } },
+ ...(jobIds ? [{ ids: { values: jobIds } }] : []),
+ ],
},
},
},
},
- };
+ });
- if (jobIds) {
- body.query.constant_score.filter.bool.must.push({
- ids: { values: jobIds },
- });
- }
+ const response = await execQuery((elasticsearchClient) =>
+ elasticsearchClient.search({ body, index: getIndex() })
+ );
- return getHits(execQuery('search', body));
+ return response?.body.hits?.hits ?? [];
},
- count(jobTypes: string[], user: ReportingUser) {
+ async count(jobTypes: string[], user: ReportingUser) {
const username = getUsername(user);
- const body: QueryBody = {
+ const body = {
query: {
constant_score: {
filter: {
@@ -126,17 +104,21 @@ export function jobsQueryFactory(reportingCore: ReportingCore) {
},
};
- return execQuery('count', body).then((doc: CountAggResult) => {
- if (!doc) return 0;
- return doc.count;
- });
+ const response = await execQuery((elasticsearchClient) =>
+ elasticsearchClient.count({ body, index: getIndex() })
+ );
+
+ return response?.body.count ?? 0;
},
- get(user: ReportingUser, id: string, opts: GetOpts = {}): Promise<ReportDocument | void> {
- if (!id) return Promise.resolve();
- const username = getUsername(user);
+ async get(user: ReportingUser, id: string, opts: GetOpts = {}): Promise<ReportDocument | void> {
+ if (!id) {
+ return;
+ }
- const body: QueryBody = {
+ const username = getUsername(user);
+ const body: SearchRequest['body'] = {
+ ...(opts.includeContent ? { _source: { excludes: [] } } : {}),
query: {
constant_score: {
filter: {
@@ -149,22 +131,23 @@ export function jobsQueryFactory(reportingCore: ReportingCore) {
size: 1,
};
- if (opts.includeContent) {
- body._source = {
- excludes: [],
- };
+ const response = await execQuery((elasticsearchClient) =>
+ elasticsearchClient.search({ body, index: getIndex() })
+ );
+
+ if (response?.body.hits?.hits?.length !== 1) {
+ return;
}
- return getHits(execQuery('search', body)).then((hits) => {
- if (hits.length !== 1) return;
- return hits[0];
- });
+ return response.body.hits.hits[0] as ReportDocument;
},
async delete(deleteIndex: string, id: string) {
try {
+ const { asInternalUser: elasticsearchClient } = await reportingCore.getEsClient();
const query = { id, index: deleteIndex, refresh: true };
- return callAsInternalUser('delete', query);
+
+ return await elasticsearchClient.delete(query);
} catch (error) {
throw new Error(
i18n.translate('xpack.reporting.jobsQuery.deleteError', {
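With the legacy `elasticsearch` error classes gone, `execQuery` above recognizes security and missing-index failures by checking the `statusCode` of the new client's `ResponseError` and treats them as empty results. A condensed sketch of that wrapper plus a `count`-style caller; `safeQuery` and `countReports` are illustrative names:

```typescript
import { ResponseError } from '@elastic/elasticsearch/lib/errors';
import type { ElasticsearchClient } from 'src/core/server';

// Run a callback against the client; auth and missing-index responses are
// swallowed so the caller can fall back to a default value.
async function safeQuery<T>(
  client: ElasticsearchClient,
  callback: (c: ElasticsearchClient) => Promise<T>
): Promise<T | undefined> {
  try {
    return await callback(client);
  } catch (error) {
    if (error instanceof ResponseError && [401, 403, 404].includes(error.statusCode)) {
      return undefined;
    }
    throw error;
  }
}

// Illustrative usage mirroring count() above: default to 0 when swallowed.
async function countReports(client: ElasticsearchClient, index: string) {
  const response = await safeQuery(client, (c) => c.count({ index }));
  return response?.body.count ?? 0;
}
```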
diff --git a/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts b/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts
index e42d87c50e118..952f801ba519d 100644
--- a/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts
+++ b/x-pack/plugins/reporting/server/test_helpers/create_mock_reportingplugin.ts
@@ -37,7 +37,6 @@ import { createMockLevelLogger } from './create_mock_levellogger';
export const createMockPluginSetup = (setupMock?: any): ReportingInternalSetup => {
return {
features: featuresPluginMock.createSetup(),
- elasticsearch: setupMock.elasticsearch || { legacy: { client: {} } },
basePath: { set: jest.fn() },
router: setupMock.router,
security: setupMock.security,
@@ -137,7 +136,7 @@ export const createMockReportingCore = async (
) => {
const mockReportingCore = ({
getConfig: () => config,
- getElasticsearchService: () => setupDepsMock?.elasticsearch,
+ getEsClient: () => startDepsMock?.esClient,
getDataService: () => startDepsMock?.data,
} as unknown) as ReportingCore;
diff --git a/x-pack/plugins/security/server/saved_objects/secure_saved_objects_client_wrapper.ts b/x-pack/plugins/security/server/saved_objects/secure_saved_objects_client_wrapper.ts
index 8378cc4d848cf..d876175a05fe8 100644
--- a/x-pack/plugins/security/server/saved_objects/secure_saved_objects_client_wrapper.ts
+++ b/x-pack/plugins/security/server/saved_objects/secure_saved_objects_client_wrapper.ts
@@ -213,7 +213,7 @@ export class SecureSavedObjectsClientWrapper implements SavedObjectsClientContra
return await this.baseClient.delete(type, id, options);
}
- public async find<T = unknown>(options: SavedObjectsFindOptions) {
+ public async find<T = unknown, A = unknown>(options: SavedObjectsFindOptions) {
if (
this.getSpacesService() == null &&
Array.isArray(options.namespaces) &&
@@ -245,7 +245,7 @@ export class SecureSavedObjectsClientWrapper implements SavedObjectsClientContra
error: new Error(status),
})
);
- return SavedObjectsUtils.createEmptyFindResponse<T>(options);
+ return SavedObjectsUtils.createEmptyFindResponse<T, A>(options);
}
const typeToNamespacesMap = Array.from(typeMap).reduce