diff --git a/.buildkite/scripts/steps/storybooks/build_and_upload.js b/.buildkite/scripts/steps/storybooks/build_and_upload.js
index 0fdf24d87ffad..89958fe08d6cc 100644
--- a/.buildkite/scripts/steps/storybooks/build_and_upload.js
+++ b/.buildkite/scripts/steps/storybooks/build_and_upload.js
@@ -8,6 +8,7 @@ const STORYBOOKS = [
'canvas',
'codeeditor',
'ci_composite',
+ 'custom_integrations',
'url_template_editor',
'dashboard',
'dashboard_enhanced',
diff --git a/.ci/.storybook/main.js b/.ci/.storybook/main.js
index e399ec087e168..37f3391337308 100644
--- a/.ci/.storybook/main.js
+++ b/.ci/.storybook/main.js
@@ -11,6 +11,12 @@ const aliases = require('../../src/dev/storybook/aliases.ts').storybookAliases;
config.refs = {};
+// Required due to https://github.com/storybookjs/storybook/issues/13834
+config.babel = async (options) => ({
+ ...options,
+ plugins: ['@babel/plugin-transform-typescript', ...options.plugins],
+});
+
for (const alias of Object.keys(aliases).filter((a) => a !== 'ci_composite')) {
// snake_case -> Title Case
const title = alias
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 149f5cd74d8c0..244689025173f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -243,7 +243,6 @@
/packages/kbn-std/ @elastic/kibana-core
/packages/kbn-config/ @elastic/kibana-core
/packages/kbn-logging/ @elastic/kibana-core
-/packages/kbn-legacy-logging/ @elastic/kibana-core
/packages/kbn-crypto/ @elastic/kibana-core
/packages/kbn-http-tools/ @elastic/kibana-core
/src/plugins/saved_objects_management/ @elastic/kibana-core
diff --git a/.i18nrc.json b/.i18nrc.json
index 45016edc38dcd..46d2f8c6a23bf 100644
--- a/.i18nrc.json
+++ b/.i18nrc.json
@@ -5,6 +5,7 @@
"kbnConfig": "packages/kbn-config/src",
"console": "src/plugins/console",
"core": "src/core",
+ "customIntegrations": "src/plugins/custom_integrations",
"discover": "src/plugins/discover",
"bfetch": "src/plugins/bfetch",
"dashboard": "src/plugins/dashboard",
diff --git a/config/kibana.yml b/config/kibana.yml
index dea9849f17b28..13a4b9bb98e85 100644
--- a/config/kibana.yml
+++ b/config/kibana.yml
@@ -84,24 +84,32 @@
# Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.
#elasticsearch.shardTimeout: 30000
-# Logs queries sent to Elasticsearch. Requires logging.verbose set to true.
-#elasticsearch.logQueries: false
-
# Specifies the path where Kibana creates the process ID file.
#pid.file: /run/kibana/kibana.pid
+# Set the value of this setting to off to suppress all logging output, or to debug to log everything.
+# logging.root.level: debug
+
# Enables you to specify a file where Kibana stores log output.
-#logging.dest: stdout
+# logging.appenders.default:
+# type: file
+# fileName: /var/logs/kibana.log
+
-# Set the value of this setting to true to suppress all logging output.
-#logging.silent: false
+# Logs queries sent to Elasticsearch.
+# logging.loggers:
+# - name: elasticsearch.queries
+# level: debug
-# Set the value of this setting to true to suppress all logging output other than error messages.
-#logging.quiet: false
+# Logs http responses.
+# logging.loggers:
+# - name: http.server.response
+# level: debug
-# Set the value of this setting to true to log all events, including system usage information
-# and all requests.
-#logging.verbose: false
+# Logs system usage information.
+# logging.loggers:
+# - name: metrics.ops
+# level: debug
# Set the interval in milliseconds to sample system and process performance
# metrics. Minimum is 100ms. Defaults to 5000.
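For reference, putting the commented examples above together, a complete configuration sketch could look like the following (the file path and log levels are illustrative, not defaults):

```yaml
# Sketch of a combined logging configuration using the settings shown above.
logging:
  appenders:
    default:
      type: file
      fileName: /var/logs/kibana.log
  root:
    level: info
  loggers:
    - name: elasticsearch.queries
      level: debug
    - name: http.server.response
      level: debug
    - name: metrics.ops
      level: debug
```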
diff --git a/dev_docs/assets/1000_ft_arch.png b/dev_docs/assets/1000_ft_arch.png
new file mode 100644
index 0000000000000..715c830606d76
Binary files /dev/null and b/dev_docs/assets/1000_ft_arch.png differ
diff --git a/dev_docs/key_concepts/kibana_platform_plugin_intro.mdx b/dev_docs/key_concepts/kibana_platform_plugin_intro.mdx
index b2255dbc8e5c4..133b96f44da88 100644
--- a/dev_docs/key_concepts/kibana_platform_plugin_intro.mdx
+++ b/dev_docs/key_concepts/kibana_platform_plugin_intro.mdx
@@ -1,7 +1,7 @@
---
id: kibPlatformIntro
slug: /kibana-dev-docs/key-concepts/platform-intro
-title: Plugins and the Kibana platform
+title: Plugins, packages, and the platform
summary: An introduction to the Kibana platform and how to use it to build a plugin.
date: 2021-01-06
tags: ['kibana', 'onboarding', 'dev', 'architecture']
@@ -17,30 +17,47 @@ already existing applications. Did you know that almost everything you see in th
Kibana UI is built inside a plugin? If you removed all plugins from Kibana, you'd be left with an empty navigation menu, and a set of
developer tools. The Kibana platform is a blank canvas, just waiting for a developer to come along and create something!
-
+
-## Platform services
+## 1,000 foot view
-Plugins have access to three kinds of public services:
+At a super high-level, Kibana is composed of **plugins**, **core**, and **Kibana packages**.
-- Platform services provided by `core` ()
-- Platform services provided by plugins ()
-- Shared services provided by plugins, that are only relevant for only a few, specific plugins (e.g. "presentation utils").
+
-The first two items are what make up "Platform services".
+**Plugins** provide the majority of all functionality in Kibana. All applications and UIs are defined here.
-
+**Core** provides the runtime and the most fundamental services.
-We try to put only the most stable and fundamental code into `Core`, while more application focused functionality goes in a plugin, but the heuristic isn't
-clear, and we haven't done a great job of sticking to it. For example, notifications and toasts are core services, but data and search are plugin services.
+**@kbn packages** provide static utilities that can be imported anywhere in Kibana.
+
+
+
+If it's stateful, it has to go in a plugin, but packages are often a good choice for stateless utilities. Stateless code exported publicly from a plugin will increase the page load bundle size of _every single page_, even if none of that plugin's services are actually needed. With packages, however, only the code that is needed for the current page is downloaded.
+
+The downside, however, is that the packages folder is far away from the plugins folder, so keeping part of your code in a plugin and the rest in a package may make it hard to find, leading to duplication.
+
+The Operations team hopes to resolve this conundrum by supporting co-located packages and plugins and automatically putting all stateless code inside a package. You can track this work by following [this issue](https://github.com/elastic/kibana/issues/112886).
+
+Until then, consider whether it makes sense to logically separate the code, and consider the size of the exports, when determining whether you should put stateless public exports in a package or a plugin.
+
+
+
+
+
+
+
+We try to put only the most stable and fundamental code into `Core`, while optional add-ons, applications, and solution-oriented functionality go in a plugin. Unfortunately, we haven't done a great job of sticking to that distinction. For example, notifications and toasts are core services, but data and search are plugin services.
Today it looks something like this.
-
+
+
+"Platform plugins" provide core-like functionality, just outside of core, and their public APIs tend to be more volatile. Other plugins may still expose shared services, but they are intended only for usage by a small subset of specific plugins, and may not be generic or "platform-like".
-
-When the Kibana platform and plugin infrastructure was built, we thought of two types of code: core services, and other plugin services. We planned to keep the most stable and fundamental
-code needed to build plugins inside core.
+**A bit of history**
+
+When the Kibana platform and plugin infrastructure was built, we thought of two types of code: core services, and other plugin services. We planned to keep the most stable and fundamental code needed to build plugins inside core.
In reality, we ended up with many platform-like services living outside of core, with no (short term) intention of moving them. We highly encourage plugin developers to use
them, so we consider them part of platform services.
@@ -49,36 +66,18 @@ When we built our platform system, we also thought we'd end up with only a handf
footprint and speed up Kibana.
In reality, our plugin model ended up being used like micro-services. Plugins are the only form of encapsulation we provide developers, and they liked it! However, we ended
-up with a ton of small plugins, that developers never intended to be uninstallable, nor tested in this manner. We are considering ways to provide developers the ability to build services
-with the encapsulation
-they desire, without the need to build a plugin.
+up with a ton of small plugins that developers never intended to be uninstallable, nor tested in this manner. We are considering ways to provide developers the ability to build services with the encapsulation they desire, without the need to build a plugin.
Another side effect of having many small plugins is that common code often ends up extracted into another plugin. Use case specific utilities are exported,
that are not meant to be used in a general manner. This makes our definition of "platform code" a bit trickier to define. We'd like to say "The platform is made up of
-every publically exposed service", but in today's world, that wouldn't be a very accurate picture.
+every publicly exposed service", but in today's world, that wouldn't be a very accurate picture.
We recognize the need to better clarify the relationship between core functionality, platform-like plugin functionality, and functionality exposed by other plugins.
It's something we will be working on!
-
-
We will continue to focus on adding clarity around these types of services and what developers can expect from each.
-
-
-### Core services
-
-Sometimes referred to just as provide the most basic and fundamental tools neccessary for building a plugin, like creating saved objects,
-routing, application registration, notifications and . The Core platform is not a plugin itself, although
-there are some plugins that provide platform functionality. We call these .
-
-### Platform plugins
-
-Plugins that provide fundamental services and functionality to extend and customize Kibana, for example, the
-
- plugin. There is no official way to tell if a plugin is a
-platform plugin or not. Platform plugins are _usually_ plugins that are managed by the Platform Group,
-but we are starting to see some exceptions.
+
## Plugins
@@ -92,7 +91,7 @@ A plugin may register many applications, or none.
Applications are top level pages in the Kibana UI. Dashboard, Canvas, Maps, App Search, etc, are all examples of applications:
-
+
A plugin can register an application by
adding it to core's application .
diff --git a/docs/apm/troubleshooting.asciidoc b/docs/apm/troubleshooting.asciidoc
index 4a62f71528676..6e0c3b1decda8 100644
--- a/docs/apm/troubleshooting.asciidoc
+++ b/docs/apm/troubleshooting.asciidoc
@@ -72,8 +72,6 @@ then the index template will not be set up automatically. Instead, you'll need t
*Using a custom index names*
This problem can also occur if you've customized the index name that you write APM data to.
-The default index name that APM writes events to can be found
-{apm-server-ref}/elasticsearch-output.html#index-option-es[here].
If you change the default, you must also configure the `setup.template.name` and `setup.template.pattern` options.
See {apm-server-ref}/configuration-template.html[Load the Elasticsearch index template].
If the Elasticsearch index template has already been successfully loaded to the index,
diff --git a/docs/dev-tools/grokdebugger/index.asciidoc b/docs/dev-tools/grokdebugger/index.asciidoc
index 934452c54ccca..6a809c13fcb93 100644
--- a/docs/dev-tools/grokdebugger/index.asciidoc
+++ b/docs/dev-tools/grokdebugger/index.asciidoc
@@ -9,21 +9,22 @@ structure it. Grok is good for parsing syslog, apache, and other
webserver logs, mysql logs, and in general, any log format that is
written for human consumption.
-Grok patterns are supported in the ingest node
-{ref}/grok-processor.html[grok processor] and the Logstash
-{logstash-ref}/plugins-filters-grok.html[grok filter]. See
-{logstash-ref}/plugins-filters-grok.html#_grok_basics[grok basics]
-for more information on the syntax for a grok pattern.
-
-The Elastic Stack ships
-with more than 120 reusable grok patterns. See
-https://github.com/elastic/elasticsearch/tree/master/libs/grok/src/main/resources/patterns[Ingest node grok patterns] and https://github.com/logstash-plugins/logstash-patterns-core/tree/master/patterns[Logstash grok patterns]
-for the complete list of patterns.
+Grok patterns are supported in {es} {ref}/runtime.html[runtime fields], the {es}
+{ref}/grok-processor.html[grok ingest processor], and the {ls}
+{logstash-ref}/plugins-filters-grok.html[grok filter]. For syntax, see
+{ref}/grok.html[Grokking grok].
+
+The {stack} ships with more than 120 reusable grok patterns. For a complete
+list of patterns, see
+https://github.com/elastic/elasticsearch/tree/master/libs/grok/src/main/resources/patterns[{es}
+grok patterns] and
+https://github.com/logstash-plugins/logstash-patterns-core/tree/master/patterns[{ls}
+grok patterns].
Because
-ingest node and Logstash share the same grok implementation and pattern
+{es} and {ls} share the same grok implementation and pattern
libraries, any grok pattern that you create in the *Grok Debugger* will work
-in ingest node and Logstash.
+in both {es} and {ls}.
[float]
[[grokdebugger-getting-started]]
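To make the debugger workflow concrete, here is a hypothetical grok pattern of the kind you would test in the *Grok Debugger*, shown as part of an ingest pipeline fragment in YAML for readability (the field name, sample pattern, and log line are illustrative):

```yaml
# Parses a simple access-log style line such as
#   "55.3.244.1 GET /index.html 15824 0.043"
processors:
  - grok:
      field: message
      patterns:
        - '%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}'
```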
diff --git a/docs/developer/architecture/core/logging-configuration-migration.asciidoc b/docs/developer/architecture/core/logging-configuration-migration.asciidoc
index 19f10a881d5e8..db02b4d4e507f 100644
--- a/docs/developer/architecture/core/logging-configuration-migration.asciidoc
+++ b/docs/developer/architecture/core/logging-configuration-migration.asciidoc
@@ -76,9 +76,5 @@ you can override the flags with:
|--verbose| --logging.root.level=debug --logging.root.appenders[0]=default --logging.root.appenders[1]=custom | --verbose
-|--quiet| --logging.root.level=error --logging.root.appenders[0]=default --logging.root.appenders[1]=custom | not supported
-
|--silent| --logging.root.level=off | --silent
|===
-
-NOTE: To preserve backwards compatibility, you are required to pass the root `default` appender until the legacy logging system is removed in `v8.0`.
diff --git a/docs/developer/getting-started/monorepo-packages.asciidoc b/docs/developer/getting-started/monorepo-packages.asciidoc
index b42bc980c8758..7754463339771 100644
--- a/docs/developer/getting-started/monorepo-packages.asciidoc
+++ b/docs/developer/getting-started/monorepo-packages.asciidoc
@@ -74,7 +74,6 @@ yarn kbn watch
- @kbn/i18n
- @kbn/interpreter
- @kbn/io-ts-utils
-- @kbn/legacy-logging
- @kbn/logging
- @kbn/mapbox-gl
- @kbn/monaco
diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc
index 7f7041f7815cd..cbf46801fa86f 100644
--- a/docs/developer/plugin-list.asciidoc
+++ b/docs/developer/plugin-list.asciidoc
@@ -458,7 +458,7 @@ the infrastructure monitoring use-case within Kibana.
|{kib-repo}blob/{branch}/x-pack/plugins/ingest_pipelines/README.md[ingestPipelines]
-|The ingest_pipelines plugin provides Kibana support for Elasticsearch's ingest nodes. Please refer to the Elasticsearch documentation for more details.
+|The ingest_pipelines plugin provides Kibana support for Elasticsearch's ingest pipelines.
|{kib-repo}blob/{branch}/x-pack/plugins/lens/readme.md[lens]
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
index f40f52db55de9..ab0f2d0ee5a17 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
@@ -234,5 +234,17 @@ readonly links: {
readonly ecs: {
readonly guide: string;
};
+ readonly clients: {
+ readonly guide: string;
+ readonly goOverview: string;
+ readonly javaIndex: string;
+ readonly jsIntro: string;
+ readonly netGuide: string;
+ readonly perlGuide: string;
+ readonly phpGuide: string;
+ readonly pythonGuide: string;
+ readonly rubyOverview: string;
+ readonly rustGuide: string;
+ };
};
```
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
index 2499227d20ad4..f0fe058c403ed 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
@@ -17,5 +17,5 @@ export interface DocLinksStart
| --- | --- | --- |
| [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string | |
| [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string | |
-| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly enterpriseSearch: { readonly base: string; readonly appSearchBase: string; readonly workplaceSearchBase: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite_missing_bucket: string; readonly date_histogram: string; readonly date_range: string; readonly date_format_pattern: string; readonly filter: string; readonly filters: string; readonly geohash_grid: string; readonly histogram: string; readonly ip_range: string; readonly range: string; readonly significant_terms: string; readonly terms: string; readonly avg: string; readonly avg_bucket: string; readonly max_bucket: string; readonly min_bucket: string; readonly sum_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative_sum: string; readonly derivative: string; readonly geo_bounds: string; readonly geo_centroid: string; readonly max: string; readonly median: string; readonly min: string; readonly moving_avg: string; readonly percentile_ranks: string; readonly serial_diff: string; readonly std_dev: string; readonly sum: string; readonly top_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: string; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; }; readonly query: { readonly 
eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; readonly autocompleteChanges: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; createIndex: string; createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Record<string, string>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: Record<string, string>; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; troubleshooting: string; elasticAgent: string; datastreams: string; datastreamsNamingScheme: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; }>; readonly ecs: { readonly guide: string; }; } | |
+| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly enterpriseSearch: { readonly base: string; readonly appSearchBase: string; readonly workplaceSearchBase: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite_missing_bucket: string; readonly date_histogram: string; readonly date_range: string; readonly date_format_pattern: string; readonly filter: string; readonly filters: string; readonly geohash_grid: string; readonly histogram: string; readonly ip_range: string; readonly range: string; readonly significant_terms: string; readonly terms: string; readonly avg: string; readonly avg_bucket: string; readonly max_bucket: string; readonly min_bucket: string; readonly sum_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative_sum: string; readonly derivative: string; readonly geo_bounds: string; readonly geo_centroid: string; readonly max: string; readonly median: string; readonly min: string; readonly moving_avg: string; readonly percentile_ranks: string; readonly serial_diff: string; readonly std_dev: string; readonly sum: string; readonly top_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: string; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; }; readonly query: { readonly 
eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; readonly autocompleteChanges: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; createIndex: string; createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Record<string, string>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: Record<string, string>; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; troubleshooting: string; elasticAgent: string; datastreams: string; datastreamsNamingScheme: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; }>; readonly ecs: { readonly guide: string; }; readonly clients: { readonly guide: string; readonly goOverview: string; readonly javaIndex: string; readonly jsIntro: string; readonly netGuide: string; readonly perlGuide: string; readonly phpGuide: string; readonly pythonGuide: string; readonly rubyOverview: string; readonly rustGuide: string; }; } | |
diff --git a/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.domainid.md b/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.domainid.md
deleted file mode 100644
index b6d1f9386be8f..0000000000000
--- a/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.domainid.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DomainDeprecationDetails](./kibana-plugin-core-public.domaindeprecationdetails.md) > [domainId](./kibana-plugin-core-public.domaindeprecationdetails.domainid.md)
-
-## DomainDeprecationDetails.domainId property
-
-Signature:
-
-```typescript
-domainId: string;
-```
diff --git a/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.md b/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.md
deleted file mode 100644
index 93d715a11c503..0000000000000
--- a/docs/development/core/public/kibana-plugin-core-public.domaindeprecationdetails.md
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DomainDeprecationDetails](./kibana-plugin-core-public.domaindeprecationdetails.md)
-
-## DomainDeprecationDetails interface
-
-Signature:
-
-```typescript
-export interface DomainDeprecationDetails extends DeprecationsDetails
-```
-
-## Properties
-
-| Property | Type | Description |
-| --- | --- | --- |
-| [domainId](./kibana-plugin-core-public.domaindeprecationdetails.domainid.md) | string | |
-
diff --git a/docs/development/core/public/kibana-plugin-core-public.md b/docs/development/core/public/kibana-plugin-core-public.md
index 08c3c376df4e8..e5fbe7c3524ed 100644
--- a/docs/development/core/public/kibana-plugin-core-public.md
+++ b/docs/development/core/public/kibana-plugin-core-public.md
@@ -60,7 +60,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [CoreStart](./kibana-plugin-core-public.corestart.md) | Core services exposed to the Plugin start lifecycle |
| [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) | DeprecationsService provides methods to fetch domain deprecation details from the Kibana server. |
| [DocLinksStart](./kibana-plugin-core-public.doclinksstart.md) | |
-| [DomainDeprecationDetails](./kibana-plugin-core-public.domaindeprecationdetails.md) | |
| [ErrorToastOptions](./kibana-plugin-core-public.errortoastoptions.md) | Options available for [IToasts](./kibana-plugin-core-public.itoasts.md) error APIs. |
| [FatalErrorInfo](./kibana-plugin-core-public.fatalerrorinfo.md) | Represents the message and stack of a fatal Error |
| [FatalErrorsSetup](./kibana-plugin-core-public.fatalerrorssetup.md) | FatalErrors stop the Kibana Public Core and displays a fatal error screen with details about the Kibana build and the error. |
diff --git a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md
index f6de959589eca..7d9772af91c38 100644
--- a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md
+++ b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md
@@ -8,5 +8,5 @@
Signature:
```typescript
-export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;
+export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;
```
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md
index 447823a5c3491..657c62a21c581 100644
--- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md
@@ -4,6 +4,8 @@
## DeprecationsDetails.correctiveActions property
+corrective action needed to fix this deprecation.
+
Signature:
```typescript
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md
index 467d6d76cf842..457cf7b61dac8 100644
--- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md
@@ -4,6 +4,8 @@
## DeprecationsDetails.documentationUrl property
+(optional) link to the documentation for more details on the deprecation.
+
Signature:
```typescript
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md
index bd0fc1e5b3713..86418a1d0c1c3 100644
--- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md
@@ -4,6 +4,7 @@
## DeprecationsDetails interface
+
Signature:
```typescript
@@ -14,11 +15,11 @@ export interface DeprecationsDetails
| Property | Type | Description |
| --- | --- | --- |
-| [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | { api?: { path: string; method: 'POST' | 'PUT'; body?: { [key: string]: any; }; }; manualSteps: string[]; } | |
+| [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | { api?: { path: string; method: 'POST' | 'PUT'; body?: { [key: string]: any; }; }; manualSteps: string[]; } | corrective action needed to fix this deprecation. |
| [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) | 'config' | 'feature' | (optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab.Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. |
-| [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | |
+| [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | (optional) link to the documentation for more details on the deprecation. |
| [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error' | levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. |
| [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | The description message to be displayed for the deprecation. Check the README for writing deprecations in src/core/server/deprecations/README.mdx |
-| [requireRestart](./kibana-plugin-core-server.deprecationsdetails.requirerestart.md) | boolean | |
+| [requireRestart](./kibana-plugin-core-server.deprecationsdetails.requirerestart.md) | boolean | (optional) Specifies whether fixing this deprecation requires a full Kibana restart. |
| [title](./kibana-plugin-core-server.deprecationsdetails.title.md) | string | The title of the deprecation. Check the README for writing deprecations in src/core/server/deprecations/README.mdx |
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.requirerestart.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.requirerestart.md
index 52c0fcf1c3001..85bddd9436e73 100644
--- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.requirerestart.md
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.requirerestart.md
@@ -4,6 +4,8 @@
## DeprecationsDetails.requireRestart property
+(optional) Specifies whether fixing this deprecation requires a full Kibana restart.
+
Signature:
```typescript
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md
index 2bc7f6cba594d..7b2cbdecd146a 100644
--- a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md
@@ -27,7 +27,6 @@ async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecations
const deprecations: DeprecationsDetails[] = [];
const count = await getFooCount(savedObjectsClient);
if (count > 0) {
- // Example of a manual correctiveAction
deprecations.push({
title: i18n.translate('xpack.foo.deprecations.title', {
defaultMessage: `Foo's are deprecated`
diff --git a/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md
index 1018444f0849a..96dd2ceb524ce 100644
--- a/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md
+++ b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md
@@ -4,6 +4,7 @@
## GetDeprecationsContext interface
+
Signature:
```typescript
diff --git a/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md
index 59e6d406f84bf..444c2653512de 100644
--- a/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md
+++ b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md
@@ -4,6 +4,7 @@
## RegisterDeprecationsConfig interface
+
Signature:
```typescript
diff --git a/docs/migration/migrate_8_0.asciidoc b/docs/migration/migrate_8_0.asciidoc
index 4c0c335b3c33e..60a65580501a6 100644
--- a/docs/migration/migrate_8_0.asciidoc
+++ b/docs/migration/migrate_8_0.asciidoc
@@ -364,4 +364,34 @@ Configuration management tools and automation will need to be updated to use the
=== `server.xsrf.token` is no longer valid
*Details:* The deprecated `server.xsrf.token` setting in the `kibana.yml` file has been removed.
+[float]
+=== `newsfeed.defaultLanguage` is no longer valid
+*Details:* Specifying a default language to retrieve newsfeed items is no longer supported.
+
+*Impact:* Newsfeed items will be retrieved based on the browser locale and fall back to 'en' if an item does not have a translation for that locale. Configure {kibana-ref}/i18n-settings-kb.html#general-i18n-settings-kb[`i18n.locale`] to override the default behavior.
+
+[float]
+=== `xpack.banners.placement` has changed value
+*Details:* The `'header'` value for the `xpack.banners.placement` setting in `kibana.yml` is no longer valid.
+
+*Impact:* Use {kibana-ref}/banners-settings-kb.html#banners-settings-kb[`xpack.banners.placement: 'top'`] instead.
+
+[float]
+=== `cpu.cgroup.path.override` is no longer valid
+*Details:* The deprecated `cpu.cgroup.path.override` setting is no longer supported.
+
+*Impact:* Configure {kibana-ref}/settings.html#ops-cGroupOverrides-cpuPath[`ops.cGroupOverrides.cpuPath`] instead.
+
+[float]
+=== `cpuacct.cgroup.path.override` is no longer valid
+*Details:* The deprecated `cpuacct.cgroup.path.override` setting is no longer supported.
+
+*Impact:* Configure {kibana-ref}/settings.html#ops-cGroupOverrides-cpuAcctPath[`ops.cGroupOverrides.cpuAcctPath`] instead.
+
+[float]
+=== `server.xsrf.whitelist` is no longer valid
+*Details:* The deprecated `server.xsrf.whitelist` setting is no longer supported.
+
+*Impact:* Use {kibana-ref}/settings.html#settings-xsrf-allowlist[`server.xsrf.allowlist`] instead.
+
// end::notable-breaking-changes[]
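Taken together, a `kibana.yml` fragment that uses the replacement settings named in this section might look like the following sketch (the values are illustrative):

```yaml
# Illustrative replacements for the removed settings described above.
i18n.locale: "en"
xpack.banners.placement: "top"
ops.cGroupOverrides.cpuPath: /mygroup
ops.cGroupOverrides.cpuAcctPath: /mygroup
server.xsrf.allowlist: ["/api/my-plugin/example-route"]
```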
diff --git a/docs/redirects.asciidoc b/docs/redirects.asciidoc
index 97a87506f2337..d5bc2ccd8ef7d 100644
--- a/docs/redirects.asciidoc
+++ b/docs/redirects.asciidoc
@@ -293,7 +293,7 @@ This content has moved. Refer to <>.
This content has moved. Refer to <>.
[role="exclude",id="ingest-node-pipelines"]
-== Ingest Node Pipelines
+== Ingest Pipelines
This content has moved. Refer to {ref}/ingest.html[Ingest pipelines].
diff --git a/docs/settings/alert-action-settings.asciidoc b/docs/settings/alert-action-settings.asciidoc
index 92357a8800d67..3a94e652d2ea0 100644
--- a/docs/settings/alert-action-settings.asciidoc
+++ b/docs/settings/alert-action-settings.asciidoc
@@ -31,11 +31,6 @@ Be sure to back up the encryption key value somewhere safe, as your alerting rul
[[action-settings]]
==== Action settings
-`xpack.actions.enabled`::
-deprecated:[7.16.0,"In 8.0 and later, this setting will no longer be supported."]
-Feature toggle that enables Actions in {kib}.
-If `false`, all features dependent on Actions are disabled, including the *Observability* and *Security* apps. Default: `true`.
-
`xpack.actions.allowedHosts` {ess-icon}::
A list of hostnames that {kib} is allowed to connect to when built-in actions are triggered. It defaults to `[*]`, allowing any host, but keep in mind the potential for SSRF attacks when hosts are not explicitly added to the allowed hosts. An empty list `[]` can be used to block built-in actions from making any external connections.
+
@@ -179,3 +174,10 @@ For example, `20m`, `24h`, `7d`, `1w`. Default: `60s`.
`xpack.alerting.maxEphemeralActionsPerAlert`::
Sets the number of actions that will be executed ephemerally. To use this, enable ephemeral tasks in task manager first with <>
+
+`xpack.alerting.defaultRuleTaskTimeout`::
+Specifies the default timeout for the tasks of all rule types. The time is formatted as:
++
+`[ms,s,m,h,d,w,M,Y]`
++
+For example, `20m`, `24h`, `7d`, `1w`. Default: `60s`.
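For example, a `kibana.yml` fragment using these alerting settings might look like this sketch (the values are illustrative, not recommendations):

```yaml
# Illustrative values for the alerting settings described above.
xpack.alerting.defaultRuleTaskTimeout: "5m"
xpack.alerting.maxEphemeralActionsPerAlert: 10
```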
diff --git a/docs/settings/logging-settings.asciidoc b/docs/settings/logging-settings.asciidoc
index 77f3bd90a911a..177d1bc8db118 100644
--- a/docs/settings/logging-settings.asciidoc
+++ b/docs/settings/logging-settings.asciidoc
@@ -12,16 +12,6 @@
Refer to the <> for common configuration use cases. To learn more about possible configuration values, go to {kibana-ref}/logging-service.html[{kib}'s Logging service].
-[[log-settings-compatibility]]
-==== Backwards compatibility
-Compatibility with the legacy logging system is assured until the end of the `v7` version.
-All log messages handled by `root` context (default) are forwarded to the legacy logging service.
-The logging configuration is validated against the predefined schema and if there are
-any issues with it, {kib} will fail to start with the detailed error message.
-
-NOTE: When you switch to the new logging configuration, you will start seeing duplicate log entries in both formats.
-These will be removed when the `default` appender is no longer required.
-
[[log-settings-examples]]
==== Examples
Here are some configuration examples for the most common logging use cases:
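One such use case, sketched here under the assumption of a file appender with the JSON layout (the appender name and file path are illustrative), is writing structured logs to a dedicated file while keeping the default console output:

```yaml
# Sketch: structured JSON logs to a file in addition to the default appender.
logging:
  appenders:
    json-file:
      type: file
      fileName: /var/log/kibana/kibana.json
      layout:
        type: json
  root:
    appenders: [default, json-file]
    level: info
```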
diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc
index 694f8c53f6745..560f2d850c6d5 100644
--- a/docs/settings/reporting-settings.asciidoc
+++ b/docs/settings/reporting-settings.asciidoc
@@ -11,7 +11,6 @@ You can configure `xpack.reporting` settings in your `kibana.yml` to:
* <>
* <>
-* <>
* <>
* <>
* <>
@@ -47,33 +46,6 @@ The static encryption key for reporting. Use an alphanumeric text string that is
xpack.reporting.encryptionKey: "something_secret"
--------------------------------------------------------------------------------
-[float]
-[[report-indices]]
-==== Reporting index setting
-
-
-
-`xpack.reporting.index`::
-deprecated:[7.11.0,This setting will be removed in 8.0.0.] Multitenancy by changing `kibana.index` is unsupported starting in 8.0.0. For more details, refer to https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes]. When you divide workspaces in an Elastic cluster using multiple {kib} instances with a different `kibana.index` setting per instance, you must set a unique `xpack.reporting.index` setting per `kibana.index`. Otherwise, report generation periodically fails if a report is queued through an instance with one `kibana.index` setting, and an instance with a different `kibana.index` attempts to claim the job. Reporting uses a weekly index in {es} to store the reporting job and the report content. The index is automatically created if it does not already exist. Configure a unique value for `xpack.reporting.index`, beginning with `.reporting-`, for every {kib} instance that has a unique <> setting. Defaults to `.reporting`.
-
-{kib} instance A:
-[source,yaml]
---------------------------------------------------------------------------------
-kibana.index: ".kibana-a"
-xpack.reporting.index: ".reporting-a"
-xpack.reporting.encryptionKey: "something_secret"
---------------------------------------------------------------------------------
-
-{kib} instance B:
-[source,yaml]
---------------------------------------------------------------------------------
-kibana.index: ".kibana-b"
-xpack.reporting.index: ".reporting-b"
-xpack.reporting.encryptionKey: "something_secret"
---------------------------------------------------------------------------------
-
-NOTE: If security is enabled, the `xpack.reporting.index` setting should begin with `.reporting-` for the `kibana_system` role to have the necessary privileges over the index.
-
[float]
[[reporting-kibana-server-settings]]
==== {kib} server settings
diff --git a/docs/setup/configuring-reporting.asciidoc b/docs/setup/configuring-reporting.asciidoc
index 6d209092d3338..38bf2955fb56e 100644
--- a/docs/setup/configuring-reporting.asciidoc
+++ b/docs/setup/configuring-reporting.asciidoc
@@ -148,56 +148,6 @@ reporting_user:
- "cn=Bill Murray,dc=example,dc=com"
--------------------------------------------------------------------------------
-[float]
-==== Grant access with a custom index
-
-If you are using a custom index, the `xpack.reporting.index` setting must begin with `.reporting-*`. The default {kib} system user has `all` privileges against the `.reporting-*` pattern of indices.
-
-If you use a different pattern for the `xpack.reporting.index` setting, you must create a custom `kibana_system` user with appropriate access to the index.
-
-NOTE: In the next major version of Kibana, granting access with a custom index is unsupported.
-
-. Create the reporting role.
-
-.. Open the main menu, then click *Stack Management*.
-
-.. Click *Roles > Create role*.
-
-. Specify the role settings.
-
-.. Enter the *Role name*. For example, `custom-reporting-user`.
-
-.. From the *Indices* dropdown, select the custom index.
-
-.. From the *Privileges* dropdown, select *all*.
-
-.. Click *Add Kibana privilege*.
-
-.. Select one or more *Spaces* that you want to grant reporting privileges to.
-
-.. Click *Customize*, then click *Analytics*.
-
-.. Next to each application you want to grant reporting privileges to, click *All*.
-
-.. Click *Add {kib} privilege*, then click *Create role*.
-
-. Assign the reporting role to a user.
-
-.. Open the main menu, then click *Stack Management*.
-
-.. Click *Users*, then click the user you want to assign the reporting role to.
-
-.. From the *Roles* dropdown, select *kibana_system* and *custom-reporting-user*.
-
-.. Click *Update user*.
-
-. Configure {kib} to use the new account.
-+
-[source,js]
---------------------------------------------------------------------------------
-elasticsearch.username: 'custom_kibana_system'
---------------------------------------------------------------------------------
-
[float]
[[securing-reporting]]
=== Secure the reporting endpoints
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc
index c098fb697de04..7a85411065db6 100644
--- a/docs/setup/settings.asciidoc
+++ b/docs/setup/settings.asciidoc
@@ -26,16 +26,6 @@ Toggling this causes the server to regenerate assets on the next startup,
which may cause a delay before pages start being served.
Set to `false` to disable Console. *Default: `true`*
-| `cpu.cgroup.path.override:`
- | deprecated:[7.10.0,"In 8.0 and later, this setting will no longer be supported."]
- This setting has been renamed to
- <>.
-
-| `cpuacct.cgroup.path.override:`
- | deprecated:[7.10.0,"In 8.0 and later, this setting will no longer be supported."]
- This setting has been renamed to
- <>.
-
| `csp.rules:`
| deprecated:[7.14.0,"In 8.0 and later, this setting will no longer be supported."]
A https://w3c.github.io/webappsec-csp/[Content Security Policy] template
diff --git a/docs/user/management.asciidoc b/docs/user/management.asciidoc
index 4e5f70db9aef6..1f38d50e2d0bd 100644
--- a/docs/user/management.asciidoc
+++ b/docs/user/management.asciidoc
@@ -17,7 +17,7 @@ Consult your administrator if you do not have the appropriate access.
[cols="50, 50"]
|===
-| {ref}/ingest.html[Ingest Node Pipelines]
+| {ref}/ingest.html[Ingest Pipelines]
| Create and manage ingest pipelines that let you perform common transformations
and enrichments on your data.
diff --git a/docs/user/monitoring/monitoring-metricbeat.asciidoc b/docs/user/monitoring/monitoring-metricbeat.asciidoc
index 5ef3b8177a9c5..101377e047588 100644
--- a/docs/user/monitoring/monitoring-metricbeat.asciidoc
+++ b/docs/user/monitoring/monitoring-metricbeat.asciidoc
@@ -189,8 +189,9 @@ If you configured the monitoring cluster to use encrypted communications, you
must access it via HTTPS. For example, use a `hosts` setting like
`https://es-mon-1:9200`.
-IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
-cluster that stores the monitoring data must have at least one ingest node.
+IMPORTANT: The {es} {monitor-features} use ingest pipelines. The
+cluster that stores the monitoring data must have at least one node with the
+`ingest` role.
If the {es} {security-features} are enabled on the monitoring cluster, you
must provide a valid user ID and password so that {metricbeat} can send metrics
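As a hypothetical illustration, a node in the monitoring cluster can be given the `ingest` role through its `elasticsearch.yml` (the role list is illustrative):

```yaml
# elasticsearch.yml on a node of the monitoring cluster.
node.roles: [ data, ingest ]
```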
diff --git a/docs/user/production-considerations/production.asciidoc b/docs/user/production-considerations/production.asciidoc
index 455e07e452807..db8d0738323ff 100644
--- a/docs/user/production-considerations/production.asciidoc
+++ b/docs/user/production-considerations/production.asciidoc
@@ -32,12 +32,21 @@ server.name
Settings unique across each host (for example, running multiple installations on the same virtual machine):
[source,js]
--------
-logging.dest
path.data
pid.file
server.port
--------
+When using a file appender, the target file must also be unique:
+[source,yaml]
+--------
+logging:
+ appenders:
+ default:
+ type: file
+ fileName: /unique/path/per/instance
+--------
+
Settings that must be the same:
[source,js]
--------
diff --git a/package.json b/package.json
index f04a8423196fd..6539189ca994d 100644
--- a/package.json
+++ b/package.json
@@ -92,6 +92,9 @@
"yarn": "^1.21.1"
},
"dependencies": {
+ "@dnd-kit/core": "^3.1.1",
+ "@dnd-kit/sortable": "^4.0.0",
+ "@dnd-kit/utilities": "^2.0.0",
"@babel/runtime": "^7.15.4",
"@elastic/apm-rum": "^5.9.1",
"@elastic/apm-rum-react": "^1.3.1",
@@ -101,7 +104,6 @@
"@elastic/ems-client": "7.15.0",
"@elastic/eui": "38.0.1",
"@elastic/filesaver": "1.1.2",
- "@elastic/good": "^9.0.1-kibana3",
"@elastic/maki": "6.3.0",
"@elastic/node-crypto": "1.2.1",
"@elastic/numeral": "^2.5.1",
@@ -113,12 +115,10 @@
"@hapi/accept": "^5.0.2",
"@hapi/boom": "^9.1.4",
"@hapi/cookie": "^11.0.2",
- "@hapi/good-squeeze": "6.0.0",
"@hapi/h2o2": "^9.1.0",
"@hapi/hapi": "^20.2.0",
"@hapi/hoek": "^9.2.0",
"@hapi/inert": "^6.0.4",
- "@hapi/podium": "^4.1.3",
"@hapi/wreck": "^17.1.0",
"@kbn/ace": "link:bazel-bin/packages/kbn-ace",
"@kbn/alerts": "link:bazel-bin/packages/kbn-alerts",
@@ -133,7 +133,6 @@
"@kbn/i18n": "link:bazel-bin/packages/kbn-i18n",
"@kbn/interpreter": "link:bazel-bin/packages/kbn-interpreter",
"@kbn/io-ts-utils": "link:bazel-bin/packages/kbn-io-ts-utils",
- "@kbn/legacy-logging": "link:bazel-bin/packages/kbn-legacy-logging",
"@kbn/logging": "link:bazel-bin/packages/kbn-logging",
"@kbn/mapbox-gl": "link:bazel-bin/packages/kbn-mapbox-gl",
"@kbn/monaco": "link:bazel-bin/packages/kbn-monaco",
diff --git a/packages/BUILD.bazel b/packages/BUILD.bazel
index 36bdee5303cb7..75c8d700e2843 100644
--- a/packages/BUILD.bazel
+++ b/packages/BUILD.bazel
@@ -29,7 +29,6 @@ filegroup(
"//packages/kbn-i18n:build",
"//packages/kbn-interpreter:build",
"//packages/kbn-io-ts-utils:build",
- "//packages/kbn-legacy-logging:build",
"//packages/kbn-logging:build",
"//packages/kbn-mapbox-gl:build",
"//packages/kbn-monaco:build",
diff --git a/packages/kbn-cli-dev-mode/src/bootstrap.ts b/packages/kbn-cli-dev-mode/src/bootstrap.ts
index 86a276c64f1f5..0428051b77e31 100644
--- a/packages/kbn-cli-dev-mode/src/bootstrap.ts
+++ b/packages/kbn-cli-dev-mode/src/bootstrap.ts
@@ -20,7 +20,7 @@ interface BootstrapArgs {
}
export async function bootstrapDevMode({ configs, cliArgs, applyConfigOverrides }: BootstrapArgs) {
- const log = new CliLog(!!cliArgs.quiet, !!cliArgs.silent);
+ const log = new CliLog(!!cliArgs.silent);
const env = Env.createDefault(REPO_ROOT, {
configs,
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
index 8937eadfa4ee3..e5e009e51e69e 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
@@ -74,7 +74,6 @@ const createCliArgs = (parts: Partial = {}): SomeCliArgs => ({
runExamples: false,
watch: true,
silent: false,
- quiet: false,
...parts,
});
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
index 28f38592ff3c4..2396b316aa3a2 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
@@ -48,7 +48,6 @@ const GRACEFUL_TIMEOUT = 30000;
export type SomeCliArgs = Pick<
CliArgs,
- | 'quiet'
| 'silent'
| 'verbose'
| 'disableOptimizer'
@@ -108,7 +107,7 @@ export class CliDevMode {
private subscription?: Rx.Subscription;
constructor({ cliArgs, config, log }: { cliArgs: SomeCliArgs; config: CliDevConfig; log?: Log }) {
- this.log = log || new CliLog(!!cliArgs.quiet, !!cliArgs.silent);
+ this.log = log || new CliLog(!!cliArgs.silent);
if (cliArgs.basePath) {
this.basePathProxy = new BasePathProxyServer(this.log, config.http, config.dev);
@@ -163,7 +162,7 @@ export class CliDevMode {
runExamples: cliArgs.runExamples,
cache: cliArgs.cache,
dist: cliArgs.dist,
- quiet: !!cliArgs.quiet,
+ quiet: false,
silent: !!cliArgs.silent,
verbose: !!cliArgs.verbose,
watch: cliArgs.watch,
diff --git a/packages/kbn-cli-dev-mode/src/dev_server.test.ts b/packages/kbn-cli-dev-mode/src/dev_server.test.ts
index 9962a9a285a42..92dbe484eb005 100644
--- a/packages/kbn-cli-dev-mode/src/dev_server.test.ts
+++ b/packages/kbn-cli-dev-mode/src/dev_server.test.ts
@@ -130,7 +130,6 @@ describe('#run$', () => {
Array [
"foo",
"bar",
- "--logging.json=false",
],
Object {
"env": Object {
diff --git a/packages/kbn-cli-dev-mode/src/log.ts b/packages/kbn-cli-dev-mode/src/log.ts
index 86956abec202a..2cbd02b94a844 100644
--- a/packages/kbn-cli-dev-mode/src/log.ts
+++ b/packages/kbn-cli-dev-mode/src/log.ts
@@ -21,7 +21,7 @@ export interface Log {
export class CliLog implements Log {
public toolingLog = new ToolingLog({
- level: this.silent ? 'silent' : this.quiet ? 'error' : 'info',
+ level: this.silent ? 'silent' : 'info',
writeTo: {
write: (msg) => {
this.write(msg);
@@ -29,10 +29,10 @@ export class CliLog implements Log {
},
});
- constructor(private readonly quiet: boolean, private readonly silent: boolean) {}
+ constructor(private readonly silent: boolean) {}
good(label: string, ...args: any[]) {
- if (this.quiet || this.silent) {
+ if (this.silent) {
return;
}
@@ -41,7 +41,7 @@ export class CliLog implements Log {
}
warn(label: string, ...args: any[]) {
- if (this.quiet || this.silent) {
+ if (this.silent) {
return;
}
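
With `quiet` gone, `CliLog` only distinguishes silent from normal output. A minimal sketch of the new single-argument constructor (the relative import assumes code sitting next to `packages/kbn-cli-dev-mode/src/log.ts`; it is illustrative only):

```ts
import { CliLog } from './log'; // assumed relative import within kbn-cli-dev-mode/src

// Before this change: new CliLog(!!cliArgs.quiet, !!cliArgs.silent)
// Now only `silent` is honored; non-silent output logs at the 'info' level.
const log = new CliLog(false);

log.good('success', 'dev server started'); // suppressed only when silent === true
log.warn('warning', 'optimizer cache is cold');
```
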
diff --git a/packages/kbn-cli-dev-mode/src/using_server_process.ts b/packages/kbn-cli-dev-mode/src/using_server_process.ts
index 0d0227c63adc2..eb997295035d8 100644
--- a/packages/kbn-cli-dev-mode/src/using_server_process.ts
+++ b/packages/kbn-cli-dev-mode/src/using_server_process.ts
@@ -25,7 +25,7 @@ export function usingServerProcess(
) {
return Rx.using(
(): ProcResource => {
- const proc = execa.node(script, [...argv, '--logging.json=false'], {
+ const proc = execa.node(script, argv, {
stdio: 'pipe',
nodeOptions: [
...process.execArgv,
diff --git a/packages/kbn-config/src/__mocks__/env.ts b/packages/kbn-config/src/__mocks__/env.ts
index 6f05f8f1f5a45..124a798501a96 100644
--- a/packages/kbn-config/src/__mocks__/env.ts
+++ b/packages/kbn-config/src/__mocks__/env.ts
@@ -19,7 +19,6 @@ export function getEnvOptions(options: DeepPartial<EnvOptions> = {}): EnvOptions
configs: options.configs || [],
cliArgs: {
dev: true,
- quiet: false,
silent: false,
watch: false,
basePath: false,
diff --git a/packages/kbn-config/src/__snapshots__/env.test.ts.snap b/packages/kbn-config/src/__snapshots__/env.test.ts.snap
index 570ed948774cc..a8e2eb62dbedb 100644
--- a/packages/kbn-config/src/__snapshots__/env.test.ts.snap
+++ b/packages/kbn-config/src/__snapshots__/env.test.ts.snap
@@ -11,7 +11,6 @@ Env {
"dist": false,
"envName": "development",
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
@@ -54,7 +53,6 @@ Env {
"dist": false,
"envName": "production",
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
@@ -96,7 +94,6 @@ Env {
"disableOptimizer": true,
"dist": false,
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
@@ -138,7 +135,6 @@ Env {
"disableOptimizer": true,
"dist": false,
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
@@ -180,7 +176,6 @@ Env {
"disableOptimizer": true,
"dist": false,
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
@@ -222,7 +217,6 @@ Env {
"disableOptimizer": true,
"dist": false,
"oss": false,
- "quiet": false,
"runExamples": false,
"silent": false,
"watch": false,
diff --git a/packages/kbn-config/src/config_service.test.ts b/packages/kbn-config/src/config_service.test.ts
index 754de1c0a99f5..e8fd7ab187596 100644
--- a/packages/kbn-config/src/config_service.test.ts
+++ b/packages/kbn-config/src/config_service.test.ts
@@ -15,6 +15,7 @@ import { rawConfigServiceMock } from './raw/raw_config_service.mock';
import { schema } from '@kbn/config-schema';
import { MockedLogger, loggerMock } from '@kbn/logging/mocks';
+import type { ConfigDeprecationContext } from './deprecation';
import { ConfigService, Env, RawPackageInfo } from '.';
import { getEnvOptions } from './__mocks__/env';
@@ -475,6 +476,43 @@ test('logs deprecation warning during validation', async () => {
`);
});
+test('calls `applyDeprecations` with the correct parameters', async () => {
+ const cfg = { foo: { bar: 1 } };
+ const rawConfig = getRawConfigProvider(cfg);
+ const configService = new ConfigService(rawConfig, defaultEnv, logger);
+
+ const context: ConfigDeprecationContext = {
+ branch: defaultEnv.packageInfo.branch,
+ version: defaultEnv.packageInfo.version,
+ };
+
+ const deprecationA = jest.fn();
+ const deprecationB = jest.fn();
+
+ configService.addDeprecationProvider('foo', () => [deprecationA]);
+ configService.addDeprecationProvider('bar', () => [deprecationB]);
+
+ await configService.validate();
+
+ expect(mockApplyDeprecations).toHaveBeenCalledTimes(1);
+ expect(mockApplyDeprecations).toHaveBeenCalledWith(
+ cfg,
+ [
+ {
+ deprecation: deprecationA,
+ path: 'foo',
+ context,
+ },
+ {
+ deprecation: deprecationB,
+ path: 'bar',
+ context,
+ },
+ ],
+ expect.any(Function)
+ );
+});
+
test('does not log warnings for silent deprecations during validation', async () => {
const rawConfig = getRawConfigProvider({});
const configService = new ConfigService(rawConfig, defaultEnv, logger);
diff --git a/packages/kbn-config/src/config_service.ts b/packages/kbn-config/src/config_service.ts
index 5883ce8ab513c..5103aa1a2d49d 100644
--- a/packages/kbn-config/src/config_service.ts
+++ b/packages/kbn-config/src/config_service.ts
@@ -19,12 +19,13 @@ import { RawConfigurationProvider } from './raw/raw_config_service';
import {
applyDeprecations,
ConfigDeprecationWithContext,
+ ConfigDeprecationContext,
ConfigDeprecationProvider,
configDeprecationFactory,
DeprecatedConfigDetails,
ChangedDeprecatedPaths,
} from './deprecation';
-import { LegacyObjectToConfigAdapter } from './legacy';
+import { ObjectToConfigAdapter } from './object_to_config_adapter';
/** @internal */
export type IConfigService = PublicMethodsOf<ConfigService>;
@@ -71,7 +72,7 @@ export class ConfigService {
map(([rawConfig, deprecations]) => {
const migrated = applyDeprecations(rawConfig, deprecations);
this.deprecatedConfigPaths.next(migrated.changedPaths);
- return new LegacyObjectToConfigAdapter(migrated.config);
+ return new ObjectToConfigAdapter(migrated.config);
}),
tap((config) => {
this.lastConfig = config;
@@ -103,6 +104,7 @@ export class ConfigService {
...provider(configDeprecationFactory).map((deprecation) => ({
deprecation,
path: flatPath,
+ context: createDeprecationContext(this.env),
})),
]);
}
@@ -298,3 +300,10 @@ const pathToString = (path: ConfigPath) => (Array.isArray(path) ? path.join('.')
*/
const isPathHandled = (path: string, handledPaths: string[]) =>
handledPaths.some((handledPath) => hasConfigPathIntersection(path, handledPath));
+
+const createDeprecationContext = (env: Env): ConfigDeprecationContext => {
+ return {
+ branch: env.packageInfo.branch,
+ version: env.packageInfo.version,
+ };
+};
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
index 8ad1491c19c9b..70945b2d96b32 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
@@ -7,18 +7,24 @@
*/
import { applyDeprecations } from './apply_deprecations';
-import { ConfigDeprecation, ConfigDeprecationWithContext } from './types';
+import { ConfigDeprecation, ConfigDeprecationContext, ConfigDeprecationWithContext } from './types';
import { configDeprecationFactory as deprecations } from './deprecation_factory';
-const wrapHandler = (
- handler: ConfigDeprecation,
- path: string = ''
-): ConfigDeprecationWithContext => ({
- deprecation: handler,
- path,
-});
-
describe('applyDeprecations', () => {
+ const context: ConfigDeprecationContext = {
+ version: '7.16.2',
+ branch: '7.16',
+ };
+
+ const wrapHandler = (
+ handler: ConfigDeprecation,
+ path: string = ''
+ ): ConfigDeprecationWithContext => ({
+ deprecation: handler,
+ path,
+ context,
+ });
+
it('calls all deprecations handlers once', () => {
const handlerA = jest.fn();
const handlerB = jest.fn();
@@ -32,6 +38,26 @@ describe('applyDeprecations', () => {
expect(handlerC).toHaveBeenCalledTimes(1);
});
+ it('calls deprecations handlers with the correct parameters', () => {
+ const config = { foo: 'bar' };
+ const addDeprecation = jest.fn();
+ const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
+
+ const handlerA = jest.fn();
+ const handlerB = jest.fn();
+ applyDeprecations(
+ config,
+ [wrapHandler(handlerA, 'pathA'), wrapHandler(handlerB, 'pathB')],
+ createAddDeprecation
+ );
+
+ expect(handlerA).toHaveBeenCalledTimes(1);
+ expect(handlerA).toHaveBeenCalledWith(config, 'pathA', addDeprecation, context);
+
+ expect(handlerB).toHaveBeenCalledTimes(1);
+ expect(handlerB).toHaveBeenCalledWith(config, 'pathB', addDeprecation, context);
+ });
+
it('passes path to addDeprecation factory', () => {
const addDeprecation = jest.fn();
const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
@@ -51,7 +77,7 @@ describe('applyDeprecations', () => {
expect(createAddDeprecation).toHaveBeenNthCalledWith(2, 'pathB');
});
- it('calls handlers with correct arguments', () => {
+ it('calls handlers with correct config argument', () => {
const addDeprecation = jest.fn();
const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.ts b/packages/kbn-config/src/deprecation/apply_deprecations.ts
index d38ae98835831..11b35840969d0 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.ts
@@ -15,6 +15,7 @@ import type {
} from './types';
const noopAddDeprecationFactory: () => AddConfigDeprecation = () => () => undefined;
+
/**
* Applies deprecations on given configuration and passes addDeprecation hook.
* This hook is used for logging any deprecation warning using provided logger.
@@ -32,8 +33,8 @@ export const applyDeprecations = (
set: [],
unset: [],
};
- deprecations.forEach(({ deprecation, path }) => {
- const commands = deprecation(result, path, createAddDeprecation(path));
+ deprecations.forEach(({ deprecation, path, context }) => {
+ const commands = deprecation(result, path, createAddDeprecation(path), context);
if (commands) {
if (commands.set) {
changedPaths.set.push(...commands.set.map((c) => c.path));
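
With the context threaded through, `applyDeprecations` now forwards it as the fourth argument to every handler. A small sketch, assuming `applyDeprecations` and `configDeprecationFactory` are exported from `@kbn/config` as the package index suggests (paths and values are illustrative):

```ts
import { applyDeprecations, configDeprecationFactory } from '@kbn/config';
import type { ConfigDeprecationContext } from '@kbn/config';

const context: ConfigDeprecationContext = { version: '7.16.0', branch: '7.16' };

const { config, changedPaths } = applyDeprecations(
  { myplugin: { old: true } },
  [
    {
      // rename `myplugin.old` to `myplugin.new`
      deprecation: configDeprecationFactory.rename('old', 'new'),
      path: 'myplugin',
      context,
    },
  ]
  // the third argument (createAddDeprecation) is optional and defaults to a no-op factory
);

// `config` now holds { myplugin: { new: true } }; `changedPaths` lists the set/unset paths.
```
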
diff --git a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
index dfd6b8fac681f..415c8fb9f0610 100644
--- a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
+++ b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
@@ -7,11 +7,13 @@
*/
import { DeprecatedConfigDetails } from './types';
+import { configDeprecationsMock } from './deprecations.mock';
import { configDeprecationFactory } from './deprecation_factory';
describe('DeprecationFactory', () => {
const { deprecate, deprecateFromRoot, rename, renameFromRoot, unused, unusedFromRoot } =
configDeprecationFactory;
+ const context = configDeprecationsMock.createContext();
const addDeprecation = jest.fn();
@@ -30,7 +32,12 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = deprecate('deprecated', '8.0.0')(rawConfig, 'myplugin', addDeprecation);
+ const commands = deprecate('deprecated', '8.0.0')(
+ rawConfig,
+ 'myplugin',
+ addDeprecation,
+ context
+ );
expect(commands).toBeUndefined();
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@@ -64,7 +71,8 @@ describe('DeprecationFactory', () => {
const commands = deprecate('section.deprecated', '8.0.0')(
rawConfig,
'myplugin',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toBeUndefined();
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
@@ -93,7 +101,12 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = deprecate('deprecated', '8.0.0')(rawConfig, 'myplugin', addDeprecation);
+ const commands = deprecate('deprecated', '8.0.0')(
+ rawConfig,
+ 'myplugin',
+ addDeprecation,
+ context
+ );
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
});
@@ -113,7 +126,8 @@ describe('DeprecationFactory', () => {
const commands = deprecateFromRoot('myplugin.deprecated', '8.0.0')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toBeUndefined();
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
@@ -145,7 +159,8 @@ describe('DeprecationFactory', () => {
const commands = deprecateFromRoot('myplugin.deprecated', '8.0.0')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
@@ -163,7 +178,12 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
+ const commands = rename('deprecated', 'renamed')(
+ rawConfig,
+ 'myplugin',
+ addDeprecation,
+ context
+ );
expect(commands).toEqual({
set: [
{
@@ -199,7 +219,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = rename('deprecated', 'new')(rawConfig, 'myplugin', addDeprecation);
+ const commands = rename('deprecated', 'new')(rawConfig, 'myplugin', addDeprecation, context);
expect(commands).toBeUndefined();
expect(addDeprecation).toHaveBeenCalledTimes(0);
});
@@ -218,7 +238,8 @@ describe('DeprecationFactory', () => {
const commands = rename('oldsection.deprecated', 'newsection.renamed')(
rawConfig,
'myplugin',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toEqual({
set: [
@@ -252,7 +273,12 @@ describe('DeprecationFactory', () => {
renamed: 'renamed',
},
};
- const commands = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
+ const commands = rename('deprecated', 'renamed')(
+ rawConfig,
+ 'myplugin',
+ addDeprecation,
+ context
+ );
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
@@ -289,7 +315,8 @@ describe('DeprecationFactory', () => {
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toEqual({
set: [
@@ -330,7 +357,8 @@ describe('DeprecationFactory', () => {
const commands = renameFromRoot('oldplugin.deprecated', 'newplugin.renamed')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toEqual({
set: [
@@ -371,7 +399,8 @@ describe('DeprecationFactory', () => {
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.new')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
@@ -387,7 +416,8 @@ describe('DeprecationFactory', () => {
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
@@ -423,7 +453,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
+ const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation, context);
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
@@ -456,7 +486,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = unused('section.deprecated')(rawConfig, 'myplugin', addDeprecation);
+ const commands = unused('section.deprecated')(rawConfig, 'myplugin', addDeprecation, context);
expect(commands).toEqual({
unset: [{ path: 'myplugin.section.deprecated' }],
});
@@ -486,7 +516,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
+ const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation, context);
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
});
@@ -506,7 +536,8 @@ describe('DeprecationFactory', () => {
const commands = unusedFromRoot('myplugin.deprecated')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
@@ -540,7 +571,8 @@ describe('DeprecationFactory', () => {
const commands = unusedFromRoot('myplugin.deprecated')(
rawConfig,
'does-not-matter',
- addDeprecation
+ addDeprecation,
+ context
);
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
diff --git a/packages/kbn-config/src/deprecation/deprecations.mock.ts b/packages/kbn-config/src/deprecation/deprecations.mock.ts
new file mode 100644
index 0000000000000..80b65c84b4879
--- /dev/null
+++ b/packages/kbn-config/src/deprecation/deprecations.mock.ts
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { ConfigDeprecationContext } from './types';
+
+const createMockedContext = (): ConfigDeprecationContext => {
+ return {
+ branch: 'master',
+ version: '8.0.0',
+ };
+};
+
+export const configDeprecationsMock = {
+ createContext: createMockedContext,
+};
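
The mock gives tests a ready-made `{ branch, version }` pair instead of hand-rolling one per test. A hedged sketch of how a deprecation test might use it (the `@kbn/config/mocks` entry point is assumed from the `src/mocks.ts` change later in this diff):

```ts
import { configDeprecationFactory } from '@kbn/config';
import { configDeprecationsMock } from '@kbn/config/mocks'; // assumed mocks entry point

const context = configDeprecationsMock.createContext(); // { branch: 'master', version: '8.0.0' }
const addDeprecation = jest.fn();

// Handlers now take the context as their fourth argument.
const commands = configDeprecationFactory.rename('deprecated', 'renamed')(
  { myplugin: { deprecated: 'value' } },
  'myplugin',
  addDeprecation,
  context
);
// `commands` describes the set/unset operations the config service will apply.
```
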
diff --git a/packages/kbn-config/src/deprecation/index.ts b/packages/kbn-config/src/deprecation/index.ts
index ce10bafd9c575..fd06ddb6aaa30 100644
--- a/packages/kbn-config/src/deprecation/index.ts
+++ b/packages/kbn-config/src/deprecation/index.ts
@@ -9,6 +9,7 @@
export type {
ConfigDeprecation,
ConfigDeprecationCommand,
+ ConfigDeprecationContext,
ConfigDeprecationWithContext,
ConfigDeprecationFactory,
AddConfigDeprecation,
diff --git a/packages/kbn-config/src/deprecation/types.ts b/packages/kbn-config/src/deprecation/types.ts
index 47a31b9e6725a..12b561aa2b1b9 100644
--- a/packages/kbn-config/src/deprecation/types.ts
+++ b/packages/kbn-config/src/deprecation/types.ts
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
import type { RecursiveReadonly } from '@kbn/utility-types';
+
/**
* Config deprecation hook used when invoking a {@link ConfigDeprecation}
*
@@ -19,9 +20,9 @@ export type AddConfigDeprecation = (details: DeprecatedConfigDetails) => void;
* @public
*/
export interface DeprecatedConfigDetails {
- /* The title to be displayed for the deprecation. */
+ /** The title to be displayed for the deprecation. */
title?: string;
- /* The message to be displayed for the deprecation. */
+ /** The message to be displayed for the deprecation. */
message: string;
/**
* levels:
@@ -29,11 +30,11 @@ export interface DeprecatedConfigDetails {
* - critical: needs to be addressed before upgrade.
*/
level?: 'warning' | 'critical';
- /* (optional) set false to prevent the config service from logging the deprecation message. */
+ /** (optional) set false to prevent the config service from logging the deprecation message. */
silent?: boolean;
- /* (optional) link to the documentation for more details on the deprecation. */
+ /** (optional) link to the documentation for more details on the deprecation. */
documentationUrl?: string;
- /* corrective action needed to fix this deprecation. */
+ /** corrective action needed to fix this deprecation. */
correctiveActions: {
/**
* Specify a list of manual steps our users need to follow
@@ -55,14 +56,27 @@ export interface DeprecatedConfigDetails {
* ```typescript
* const provider: ConfigDeprecation = (config, path) => ({ unset: [{ key: 'path.to.key' }] })
* ```
- * @internal
+ * @public
*/
export type ConfigDeprecation = (
config: RecursiveReadonly<Record<string, any>>,
fromPath: string,
- addDeprecation: AddConfigDeprecation
+ addDeprecation: AddConfigDeprecation,
+ context: ConfigDeprecationContext
) => void | ConfigDeprecationCommand;
+/**
+ * Deprecation context provided to {@link ConfigDeprecation | config deprecations}
+ *
+ * @public
+ */
+export interface ConfigDeprecationContext {
+ /** The current Kibana version, e.g `7.16.1`, `8.0.0` */
+ version: string;
+ /** The current Kibana branch, e.g `7.x`, `7.16`, `master` */
+ branch: string;
+}
+
/**
* List of config paths changed during deprecation.
*
@@ -137,6 +151,7 @@ export interface ConfigDeprecationFactory {
removeBy: string,
details?: Partial<DeprecatedConfigDetails>
): ConfigDeprecation;
+
/**
* Deprecate a configuration property from the root configuration.
* Will log a deprecation warning if the deprecatedKey was found.
@@ -157,6 +172,7 @@ export interface ConfigDeprecationFactory {
removeBy: string,
details?: Partial<DeprecatedConfigDetails>
): ConfigDeprecation;
+
/**
* Rename a configuration property from inside a plugin's configuration path.
* Will log a deprecation warning if the oldKey was found and deprecation applied.
@@ -174,6 +190,7 @@ export interface ConfigDeprecationFactory {
newKey: string,
details?: Partial<DeprecatedConfigDetails>
): ConfigDeprecation;
+
/**
* Rename a configuration property from the root configuration.
* Will log a deprecation warning if the oldKey was found and deprecation applied.
@@ -194,6 +211,7 @@ export interface ConfigDeprecationFactory {
newKey: string,
details?: Partial<DeprecatedConfigDetails>
): ConfigDeprecation;
+
/**
* Remove a configuration property from inside a plugin's configuration path.
* Will log a deprecation warning if the unused key was found and deprecation applied.
@@ -207,6 +225,7 @@ export interface ConfigDeprecationFactory {
* ```
*/
unused(unusedKey: string, details?: Partial<DeprecatedConfigDetails>): ConfigDeprecation;
+
/**
* Remove a configuration property from the root configuration.
* Will log a deprecation warning if the unused key was found and deprecation applied.
@@ -229,4 +248,5 @@ export interface ConfigDeprecationFactory {
export interface ConfigDeprecationWithContext {
deprecation: ConfigDeprecation;
path: string;
+ context: ConfigDeprecationContext;
}
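
For deprecation authors, the practical change is the extra `context` argument. An illustrative handler that uses `context.branch` to build a version-specific docs link (the setting name and URL are made up for the example):

```ts
import type { ConfigDeprecation } from '@kbn/config';

const exampleDeprecation: ConfigDeprecation = (config, fromPath, addDeprecation, context) => {
  // `legacySetting` is a hypothetical key used only for illustration.
  if (config[fromPath]?.legacySetting === undefined) {
    return;
  }

  addDeprecation({
    message: `"${fromPath}.legacySetting" is deprecated and will be removed in 8.0.`,
    // `context.branch` lets deprecations point at version-specific documentation.
    documentationUrl: `https://www.elastic.co/guide/en/kibana/${context.branch}/settings.html`,
    correctiveActions: {
      manualSteps: [`Remove "${fromPath}.legacySetting" from kibana.yml`],
    },
  });

  return { unset: [{ path: `${fromPath}.legacySetting` }] };
};
```
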
diff --git a/packages/kbn-config/src/env.ts b/packages/kbn-config/src/env.ts
index 053bb93ce158c..73f32606c463f 100644
--- a/packages/kbn-config/src/env.ts
+++ b/packages/kbn-config/src/env.ts
@@ -21,8 +21,6 @@ export interface EnvOptions {
export interface CliArgs {
dev: boolean;
envName?: string;
- /** @deprecated */
- quiet?: boolean;
silent?: boolean;
verbose?: boolean;
watch: boolean;
diff --git a/packages/kbn-config/src/index.ts b/packages/kbn-config/src/index.ts
index 08cf12343f459..0068fc87855b0 100644
--- a/packages/kbn-config/src/index.ts
+++ b/packages/kbn-config/src/index.ts
@@ -13,6 +13,7 @@ export type {
ConfigDeprecationWithContext,
ConfigDeprecation,
ConfigDeprecationCommand,
+ ConfigDeprecationContext,
ChangedDeprecatedPaths,
} from './deprecation';
@@ -30,5 +31,4 @@ export { Config, ConfigPath, isConfigPath, hasConfigPathIntersection } from './c
export { ObjectToConfigAdapter } from './object_to_config_adapter';
export { CliArgs, Env, RawPackageInfo } from './env';
export { EnvironmentMode, PackageInfo } from './types';
-export { LegacyObjectToConfigAdapter, LegacyLoggingConfig } from './legacy';
export { getPluginSearchPaths } from './plugins';
diff --git a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap b/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
deleted file mode 100644
index 17ac75e9f3d9e..0000000000000
--- a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
+++ /dev/null
@@ -1,95 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`#get correctly handles silent logging config. 1`] = `
-Object {
- "appenders": Object {
- "default": Object {
- "legacyLoggingConfig": Object {
- "silent": true,
- },
- "type": "legacy-appender",
- },
- },
- "loggers": undefined,
- "root": Object {
- "level": "off",
- },
- "silent": true,
-}
-`;
-
-exports[`#get correctly handles verbose file logging config with json format. 1`] = `
-Object {
- "appenders": Object {
- "default": Object {
- "legacyLoggingConfig": Object {
- "dest": "/some/path.log",
- "json": true,
- "verbose": true,
- },
- "type": "legacy-appender",
- },
- },
- "dest": "/some/path.log",
- "json": true,
- "loggers": undefined,
- "root": Object {
- "level": "all",
- },
- "verbose": true,
-}
-`;
-
-exports[`#getFlattenedPaths returns all paths of the underlying object. 1`] = `
-Array [
- "known",
- "knownContainer.sub1",
- "knownContainer.sub2",
- "legacy.known",
-]
-`;
-
-exports[`#set correctly sets values for existing paths. 1`] = `
-Object {
- "known": "value",
- "knownContainer": Object {
- "sub1": "sub-value-1",
- "sub2": "sub-value-2",
- },
-}
-`;
-
-exports[`#set correctly sets values for paths that do not exist. 1`] = `
-Object {
- "unknown": Object {
- "sub1": "sub-value-1",
- "sub2": "sub-value-2",
- },
-}
-`;
-
-exports[`#toRaw returns a deep copy of the underlying raw config object. 1`] = `
-Object {
- "known": "foo",
- "knownContainer": Object {
- "sub1": "bar",
- "sub2": "baz",
- },
- "legacy": Object {
- "known": "baz",
- },
-}
-`;
-
-exports[`#toRaw returns a deep copy of the underlying raw config object. 2`] = `
-Object {
- "known": "bar",
- "knownContainer": Object {
- "sub1": "baz",
- "sub2": "baz",
- },
- "legacy": Object {
- "known": "baz",
- },
-}
-`;
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
deleted file mode 100644
index 47151503e1634..0000000000000
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { LegacyObjectToConfigAdapter } from './legacy_object_to_config_adapter';
-
-describe('#get', () => {
- test('correctly handles paths that do not exist.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({});
-
- expect(configAdapter.get('one')).not.toBeDefined();
- expect(configAdapter.get(['one', 'two'])).not.toBeDefined();
- expect(configAdapter.get(['one.three'])).not.toBeDefined();
- });
-
- test('correctly handles paths that do not need to be transformed.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- one: 'value-one',
- two: {
- sub: 'value-two-sub',
- },
- container: {
- value: 'some',
- },
- });
-
- expect(configAdapter.get('one')).toEqual('value-one');
- expect(configAdapter.get(['two', 'sub'])).toEqual('value-two-sub');
- expect(configAdapter.get('two.sub')).toEqual('value-two-sub');
- expect(configAdapter.get('container')).toEqual({ value: 'some' });
- });
-
- test('correctly handles csp config.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- csp: {
- rules: ['strict'],
- },
- });
-
- expect(configAdapter.get('csp')).toMatchInlineSnapshot(`
- Object {
- "rules": Array [
- "strict",
- ],
- }
- `);
- });
-
- test('correctly handles silent logging config.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- logging: { silent: true },
- });
-
- expect(configAdapter.get('logging')).toMatchSnapshot();
- });
-
- test('correctly handles verbose file logging config with json format.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- logging: { verbose: true, json: true, dest: '/some/path.log' },
- });
-
- expect(configAdapter.get('logging')).toMatchSnapshot();
- });
-});
-
-describe('#set', () => {
- test('correctly sets values for paths that do not exist.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({});
-
- configAdapter.set('unknown', 'value');
- configAdapter.set(['unknown', 'sub1'], 'sub-value-1');
- configAdapter.set('unknown.sub2', 'sub-value-2');
-
- expect(configAdapter.toRaw()).toMatchSnapshot();
- });
-
- test('correctly sets values for existing paths.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- known: '',
- knownContainer: {
- sub1: 'sub-1',
- sub2: 'sub-2',
- },
- });
-
- configAdapter.set('known', 'value');
- configAdapter.set(['knownContainer', 'sub1'], 'sub-value-1');
- configAdapter.set('knownContainer.sub2', 'sub-value-2');
-
- expect(configAdapter.toRaw()).toMatchSnapshot();
- });
-});
-
-describe('#has', () => {
- test('returns false if config is not set', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({});
-
- expect(configAdapter.has('unknown')).toBe(false);
- expect(configAdapter.has(['unknown', 'sub1'])).toBe(false);
- expect(configAdapter.has('unknown.sub2')).toBe(false);
- });
-
- test('returns true if config is set.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- known: 'foo',
- knownContainer: {
- sub1: 'bar',
- sub2: 'baz',
- },
- });
-
- expect(configAdapter.has('known')).toBe(true);
- expect(configAdapter.has(['knownContainer', 'sub1'])).toBe(true);
- expect(configAdapter.has('knownContainer.sub2')).toBe(true);
- });
-});
-
-describe('#toRaw', () => {
- test('returns a deep copy of the underlying raw config object.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- known: 'foo',
- knownContainer: {
- sub1: 'bar',
- sub2: 'baz',
- },
- legacy: { known: 'baz' },
- });
-
- const firstRawCopy = configAdapter.toRaw();
-
- configAdapter.set('known', 'bar');
- configAdapter.set(['knownContainer', 'sub1'], 'baz');
-
- const secondRawCopy = configAdapter.toRaw();
-
- expect(firstRawCopy).not.toBe(secondRawCopy);
- expect(firstRawCopy.knownContainer).not.toBe(secondRawCopy.knownContainer);
-
- expect(firstRawCopy).toMatchSnapshot();
- expect(secondRawCopy).toMatchSnapshot();
- });
-});
-
-describe('#getFlattenedPaths', () => {
- test('returns all paths of the underlying object.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- known: 'foo',
- knownContainer: {
- sub1: 'bar',
- sub2: 'baz',
- },
- legacy: { known: 'baz' },
- });
-
- expect(configAdapter.getFlattenedPaths()).toMatchSnapshot();
- });
-});
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
deleted file mode 100644
index bc6fd49e2498a..0000000000000
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { ConfigPath } from '../config';
-import { ObjectToConfigAdapter } from '../object_to_config_adapter';
-
-/**
- * Represents logging config supported by the legacy platform.
- */
-export interface LegacyLoggingConfig {
- silent?: boolean;
- verbose?: boolean;
- quiet?: boolean;
- dest?: string;
- json?: boolean;
- events?: Record;
-}
-
-type MixedLoggingConfig = LegacyLoggingConfig & Record<string, any>;
-
-/**
- * Represents adapter between config provided by legacy platform and `Config`
- * supported by the current platform.
- * @internal
- */
-export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter {
- private static transformLogging(configValue: MixedLoggingConfig = {}) {
- const { appenders, root, loggers, ...legacyLoggingConfig } = configValue;
-
- const loggingConfig = {
- appenders: {
- ...appenders,
- default: { type: 'legacy-appender', legacyLoggingConfig },
- },
- root: { level: 'info', ...root },
- loggers,
- ...legacyLoggingConfig,
- };
-
- if (configValue.silent) {
- loggingConfig.root.level = 'off';
- } else if (configValue.quiet) {
- loggingConfig.root.level = 'error';
- } else if (configValue.verbose) {
- loggingConfig.root.level = 'all';
- }
-
- return loggingConfig;
- }
-
- public get(configPath: ConfigPath) {
- const configValue = super.get(configPath);
- switch (configPath) {
- case 'logging':
- return LegacyObjectToConfigAdapter.transformLogging(configValue as LegacyLoggingConfig);
- default:
- return configValue;
- }
- }
-}
diff --git a/packages/kbn-config/src/mocks.ts b/packages/kbn-config/src/mocks.ts
index 0306b0cc0663e..40df96eb41f08 100644
--- a/packages/kbn-config/src/mocks.ts
+++ b/packages/kbn-config/src/mocks.ts
@@ -14,4 +14,5 @@
export { configMock } from './config.mock';
export { configServiceMock } from './config_service.mock';
export { rawConfigServiceMock } from './raw/raw_config_service.mock';
+export { configDeprecationsMock } from './deprecation/deprecations.mock';
export { getEnvOptions } from './__mocks__/env';
diff --git a/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts b/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts
index 4d6ea646b2ab1..45d31c1eefad9 100644
--- a/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts
+++ b/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts
@@ -82,6 +82,7 @@ export class CiStatsReporter {
const upstreamBranch = options.upstreamBranch ?? this.getUpstreamBranch();
const kibanaUuid = options.kibanaUuid === undefined ? this.getKibanaUuid() : options.kibanaUuid;
let email;
+ let branch;
try {
const { stdout } = await execa('git', ['config', 'user.email']);
@@ -90,19 +91,33 @@ export class CiStatsReporter {
this.log.debug(e.message);
}
+ try {
+ const { stdout } = await execa('git', ['branch', '--show-current']);
+ branch = stdout;
+ } catch (e) {
+ this.log.debug(e.message);
+ }
+
+ const memUsage = process.memoryUsage();
const isElasticCommitter = email && email.endsWith('@elastic.co') ? true : false;
const defaultMetadata = {
+ kibanaUuid,
+ isElasticCommitter,
committerHash: email
? crypto.createHash('sha256').update(email).digest('hex').substring(0, 20)
: undefined,
+ email: isElasticCommitter ? email : undefined,
+ branch: isElasticCommitter ? branch : undefined,
cpuCount: Os.cpus()?.length,
cpuModel: Os.cpus()[0]?.model,
cpuSpeed: Os.cpus()[0]?.speed,
- email: isElasticCommitter ? email : undefined,
freeMem: Os.freemem(),
- isElasticCommitter,
- kibanaUuid,
+ memoryUsageRss: memUsage.rss,
+ memoryUsageHeapTotal: memUsage.heapTotal,
+ memoryUsageHeapUsed: memUsage.heapUsed,
+ memoryUsageExternal: memUsage.external,
+ memoryUsageArrayBuffers: memUsage.arrayBuffers,
nestedTiming: process.env.CI_STATS_NESTED_TIMING ? true : false,
osArch: Os.arch(),
osPlatform: Os.platform(),
diff --git a/packages/kbn-dev-utils/src/ci_stats_reporter/index.ts b/packages/kbn-dev-utils/src/ci_stats_reporter/index.ts
index d99217c38b410..9cb05608526eb 100644
--- a/packages/kbn-dev-utils/src/ci_stats_reporter/index.ts
+++ b/packages/kbn-dev-utils/src/ci_stats_reporter/index.ts
@@ -8,3 +8,4 @@
export * from './ci_stats_reporter';
export * from './ship_ci_stats_cli';
+export { getTimeReporter } from './report_time';
diff --git a/packages/kbn-dev-utils/src/ci_stats_reporter/report_time.ts b/packages/kbn-dev-utils/src/ci_stats_reporter/report_time.ts
new file mode 100644
index 0000000000000..d10250a03f091
--- /dev/null
+++ b/packages/kbn-dev-utils/src/ci_stats_reporter/report_time.ts
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { CiStatsReporter, ToolingLog } from '..';
+
+export const getTimeReporter = (log: ToolingLog, group: string) => {
+ const reporter = CiStatsReporter.fromEnv(log);
+  return async (startTime: number, id: string, meta: Record<string, any>) => {
+ await reporter.timings({
+ timings: [
+ {
+ group,
+ id,
+ ms: Date.now() - startTime,
+ meta,
+ },
+ ],
+ });
+ };
+};
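
`getTimeReporter` wraps `CiStatsReporter.fromEnv` so scripts can report elapsed time with one call. A short usage sketch mirroring the `kbn-es` change below (the group and id names are illustrative):

```ts
import { ToolingLog, getTimeReporter } from '@kbn/dev-utils';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
const reportTime = getTimeReporter(log, 'scripts/es snapshot');

async function timedInstall() {
  const start = Date.now();
  // ... do the work being timed, e.g. download and install the snapshot ...
  await reportTime(start, 'installed', { success: true });
}
```
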
diff --git a/packages/kbn-es/src/cli_commands/snapshot.js b/packages/kbn-es/src/cli_commands/snapshot.js
index 7f5653db72b49..e64dcb7c77318 100644
--- a/packages/kbn-es/src/cli_commands/snapshot.js
+++ b/packages/kbn-es/src/cli_commands/snapshot.js
@@ -8,6 +8,7 @@
const dedent = require('dedent');
const getopts = require('getopts');
+import { ToolingLog, getTimeReporter } from '@kbn/dev-utils';
const { Cluster } = require('../cluster');
exports.description = 'Downloads and run from a nightly snapshot';
@@ -36,6 +37,13 @@ exports.help = (defaults = {}) => {
};
exports.run = async (defaults = {}) => {
+ const runStartTime = Date.now();
+ const log = new ToolingLog({
+ level: 'info',
+ writeTo: process.stdout,
+ });
+ const reportTime = getTimeReporter(log, 'scripts/es snapshot');
+
const argv = process.argv.slice(2);
const options = getopts(argv, {
alias: {
@@ -56,12 +64,22 @@ exports.run = async (defaults = {}) => {
if (options['download-only']) {
await cluster.downloadSnapshot(options);
} else {
+ const installStartTime = Date.now();
const { installPath } = await cluster.installSnapshot(options);
if (options.dataArchive) {
await cluster.extractDataDirectory(installPath, options.dataArchive);
}
- await cluster.run(installPath, options);
+ reportTime(installStartTime, 'installed', {
+ success: true,
+ ...options,
+ });
+
+ await cluster.run(installPath, {
+ reportTime,
+ startTime: runStartTime,
+ ...options,
+ });
}
};
diff --git a/packages/kbn-es/src/cluster.js b/packages/kbn-es/src/cluster.js
index ac4380da88be0..0866b14f4ade8 100644
--- a/packages/kbn-es/src/cluster.js
+++ b/packages/kbn-es/src/cluster.js
@@ -240,7 +240,7 @@ exports.Cluster = class Cluster {
* @return {undefined}
*/
_exec(installPath, opts = {}) {
- const { skipNativeRealmSetup = false, ...options } = opts;
+ const { skipNativeRealmSetup = false, reportTime = () => {}, startTime, ...options } = opts;
if (this._process || this._outcome) {
throw new Error('ES has already been started');
@@ -321,10 +321,17 @@ exports.Cluster = class Cluster {
await nativeRealm.setPasswords(options);
});
+ let reportSent = false;
// parse and forward es stdout to the log
this._process.stdout.on('data', (data) => {
const lines = parseEsLog(data.toString());
lines.forEach((line) => {
+ if (!reportSent && line.message.includes('publish_address')) {
+ reportSent = true;
+ reportTime(startTime, 'ready', {
+ success: true,
+ });
+ }
this._log.info(line.formattedMessage);
});
});
@@ -341,7 +348,16 @@ exports.Cluster = class Cluster {
// JVM exits with 143 on SIGTERM and 130 on SIGINT, dont' treat them as errors
if (code > 0 && !(code === 143 || code === 130)) {
+ reportTime(startTime, 'abort', {
+ success: true,
+ error: code,
+ });
throw createCliError(`ES exited with code ${code}`);
+ } else {
+ reportTime(startTime, 'error', {
+ success: false,
+ error: `exited with ${code}`,
+ });
}
});
}
diff --git a/packages/kbn-legacy-logging/BUILD.bazel b/packages/kbn-legacy-logging/BUILD.bazel
deleted file mode 100644
index c4927fe076e15..0000000000000
--- a/packages/kbn-legacy-logging/BUILD.bazel
+++ /dev/null
@@ -1,107 +0,0 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
-
-PKG_BASE_NAME = "kbn-legacy-logging"
-PKG_REQUIRE_NAME = "@kbn/legacy-logging"
-
-SOURCE_FILES = glob(
- [
- "src/**/*.ts",
- ],
- exclude = ["**/*.test.*"],
-)
-
-SRCS = SOURCE_FILES
-
-filegroup(
- name = "srcs",
- srcs = SRCS,
-)
-
-NPM_MODULE_EXTRA_FILES = [
- "package.json",
- "README.md"
-]
-
-RUNTIME_DEPS = [
- "//packages/kbn-config-schema",
- "//packages/kbn-utils",
- "@npm//@elastic/numeral",
- "@npm//@hapi/hapi",
- "@npm//@hapi/podium",
- "@npm//chokidar",
- "@npm//lodash",
- "@npm//moment-timezone",
- "@npm//query-string",
- "@npm//rxjs",
- "@npm//tslib",
-]
-
-TYPES_DEPS = [
- "//packages/kbn-config-schema",
- "//packages/kbn-utils",
- "@npm//@elastic/numeral",
- "@npm//@hapi/podium",
- "@npm//chokidar",
- "@npm//query-string",
- "@npm//rxjs",
- "@npm//tslib",
- "@npm//@types/hapi__hapi",
- "@npm//@types/jest",
- "@npm//@types/lodash",
- "@npm//@types/moment-timezone",
- "@npm//@types/node",
-]
-
-jsts_transpiler(
- name = "target_node",
- srcs = SRCS,
- build_pkg_name = package_name(),
-)
-
-ts_config(
- name = "tsconfig",
- src = "tsconfig.json",
- deps = [
- "//:tsconfig.base.json",
- "//:tsconfig.bazel.json",
- ],
-)
-
-ts_project(
- name = "tsc_types",
- args = ['--pretty'],
- srcs = SRCS,
- deps = TYPES_DEPS,
- declaration = True,
- declaration_map = True,
- emit_declaration_only = True,
- out_dir = "target_types",
- source_map = True,
- root_dir = "src",
- tsconfig = ":tsconfig",
-)
-
-js_library(
- name = PKG_BASE_NAME,
- srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
- package_name = PKG_REQUIRE_NAME,
- visibility = ["//visibility:public"],
-)
-
-pkg_npm(
- name = "npm_module",
- deps = [
- ":%s" % PKG_BASE_NAME,
- ]
-)
-
-filegroup(
- name = "build",
- srcs = [
- ":npm_module",
- ],
- visibility = ["//visibility:public"],
-)
diff --git a/packages/kbn-legacy-logging/README.md b/packages/kbn-legacy-logging/README.md
deleted file mode 100644
index 4c5989fc892dc..0000000000000
--- a/packages/kbn-legacy-logging/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# @kbn/legacy-logging
-
-This package contains the implementation of the legacy logging
-system, based on `@hapi/good`
\ No newline at end of file
diff --git a/packages/kbn-legacy-logging/package.json b/packages/kbn-legacy-logging/package.json
deleted file mode 100644
index 6e846ffc5bfaf..0000000000000
--- a/packages/kbn-legacy-logging/package.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "name": "@kbn/legacy-logging",
- "version": "1.0.0",
- "private": true,
- "license": "SSPL-1.0 OR Elastic License 2.0",
- "main": "./target_node/index.js",
- "types": "./target_types/index.d.ts"
-}
diff --git a/packages/kbn-legacy-logging/src/get_logging_config.ts b/packages/kbn-legacy-logging/src/get_logging_config.ts
deleted file mode 100644
index f74bc5904e24b..0000000000000
--- a/packages/kbn-legacy-logging/src/get_logging_config.ts
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import _ from 'lodash';
-import { getLogReporter } from './log_reporter';
-import { LegacyLoggingConfig } from './schema';
-
-/**
- * Returns the `@hapi/good` plugin configuration to be used for the legacy logging
- * @param config
- */
-export function getLoggingConfiguration(config: LegacyLoggingConfig, opsInterval: number) {
- const events = config.events;
-
- if (config.silent) {
- _.defaults(events, {});
- } else if (config.quiet) {
- _.defaults(events, {
- log: ['listening', 'error', 'fatal'],
- request: ['error'],
- error: '*',
- });
- } else if (config.verbose) {
- _.defaults(events, {
- error: '*',
- log: '*',
- // To avoid duplicate logs, we explicitly disable these in verbose
- // mode as they are already provided by the new logging config under
- // the `http.server.response` and `metrics.ops` contexts.
- ops: '!',
- request: '!',
- response: '!',
- });
- } else {
- _.defaults(events, {
- log: ['info', 'warning', 'error', 'fatal'],
- request: ['info', 'warning', 'error', 'fatal'],
- error: '*',
- });
- }
-
- const loggerStream = getLogReporter({
- config: {
- json: config.json,
- dest: config.dest,
- timezone: config.timezone,
-
- // I'm adding the default here because if you add another filter
- // using the commandline it will remove authorization. I want users
- // to have to explicitly set --logging.filter.authorization=none or
- // --logging.filter.cookie=none to have it show up in the logs.
- filter: _.defaults(config.filter, {
- authorization: 'remove',
- cookie: 'remove',
- }),
- },
- events: _.transform(
- events,
- function (filtered: Record<string, any>, val: string, key: string) {
- // provide a string compatible way to remove events
- if (val !== '!') filtered[key] = val;
- },
- {}
- ),
- });
-
- const options = {
- ops: {
- interval: opsInterval,
- },
- includes: {
- request: ['headers', 'payload'],
- response: ['headers', 'payload'],
- },
- reporters: {
- logReporter: [loggerStream],
- },
- };
- return options;
-}
diff --git a/packages/kbn-legacy-logging/src/index.ts b/packages/kbn-legacy-logging/src/index.ts
deleted file mode 100644
index 670df4e95f337..0000000000000
--- a/packages/kbn-legacy-logging/src/index.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export { LegacyLoggingConfig, legacyLoggingConfigSchema } from './schema';
-export { attachMetaData } from './metadata';
-export { setupLoggingRotate } from './rotate';
-export { setupLogging, reconfigureLogging } from './setup_logging';
-export { getLoggingConfiguration } from './get_logging_config';
-export { LegacyLoggingServer } from './legacy_logging_server';
diff --git a/packages/kbn-legacy-logging/src/legacy_logging_server.test.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.test.ts
deleted file mode 100644
index 40019fc90ff42..0000000000000
--- a/packages/kbn-legacy-logging/src/legacy_logging_server.test.ts
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-jest.mock('./setup_logging');
-
-import { LegacyLoggingServer, LogRecord } from './legacy_logging_server';
-
-test('correctly forwards log records.', () => {
- const loggingServer = new LegacyLoggingServer({ events: {} });
- const onLogMock = jest.fn();
- loggingServer.events.on('log', onLogMock);
-
- const timestamp = 1554433221100;
- const firstLogRecord: LogRecord = {
- timestamp: new Date(timestamp),
- pid: 5355,
- level: {
- id: 'info',
- value: 5,
- },
- context: 'some-context',
- message: 'some-message',
- };
-
- const secondLogRecord: LogRecord = {
- timestamp: new Date(timestamp),
- pid: 5355,
- level: {
- id: 'error',
- value: 3,
- },
- context: 'some-context.sub-context',
- message: 'some-message',
- meta: { unknown: 2 },
- error: new Error('some-error'),
- };
-
- const thirdLogRecord: LogRecord = {
- timestamp: new Date(timestamp),
- pid: 5355,
- level: {
- id: 'trace',
- value: 7,
- },
- context: 'some-context.sub-context',
- message: 'some-message',
- meta: { tags: ['important', 'tags'], unknown: 2 },
- };
-
- loggingServer.log(firstLogRecord);
- loggingServer.log(secondLogRecord);
- loggingServer.log(thirdLogRecord);
-
- expect(onLogMock).toHaveBeenCalledTimes(3);
-
- const [[firstCall], [secondCall], [thirdCall]] = onLogMock.mock.calls;
- expect(firstCall).toMatchInlineSnapshot(`
-Object {
- "data": "some-message",
- "tags": Array [
- "info",
- "some-context",
- ],
- "timestamp": 1554433221100,
-}
-`);
-
- expect(secondCall).toMatchInlineSnapshot(`
-Object {
- "data": [Error: some-error],
- "tags": Array [
- "error",
- "some-context",
- "sub-context",
- ],
- "timestamp": 1554433221100,
-}
-`);
-
- expect(thirdCall).toMatchInlineSnapshot(`
-Object {
- "data": Object {
- Symbol(log message with metadata): Object {
- "message": "some-message",
- "metadata": Object {
- "unknown": 2,
- },
- },
- },
- "tags": Array [
- "debug",
- "some-context",
- "sub-context",
- "important",
- "tags",
- ],
- "timestamp": 1554433221100,
-}
-`);
-});
diff --git a/packages/kbn-legacy-logging/src/legacy_logging_server.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.ts
deleted file mode 100644
index f6c42dd1b161f..0000000000000
--- a/packages/kbn-legacy-logging/src/legacy_logging_server.ts
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { ServerExtType, Server } from '@hapi/hapi';
-import Podium from '@hapi/podium';
-import { setupLogging } from './setup_logging';
-import { attachMetaData } from './metadata';
-import { legacyLoggingConfigSchema } from './schema';
-
-// these LogXXX types are duplicated to avoid a cross dependency with the @kbn/logging package.
-// typescript will error if they diverge at some point.
-type LogLevelId = 'all' | 'fatal' | 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'off';
-
-interface LogLevel {
- id: LogLevelId;
- value: number;
-}
-
-export interface LogRecord {
- timestamp: Date;
- level: LogLevel;
- context: string;
- message: string;
- error?: Error;
- meta?: { [name: string]: any };
- pid: number;
-}
-
-const isEmptyObject = (obj: object) => Object.keys(obj).length === 0;
-
-function getDataToLog(error: Error | undefined, metadata: object, message: string) {
- if (error) {
- return error;
- }
- if (!isEmptyObject(metadata)) {
- return attachMetaData(message, metadata);
- }
- return message;
-}
-
-interface PluginRegisterParams {
- plugin: {
- register: (
- server: LegacyLoggingServer,
- options: PluginRegisterParams['options']
- ) => Promise<void>;
- };
- options: Record<string, any>;
-}
-
-/**
- * Converts core log level to a one that's known to the legacy platform.
- * @param level Log level from the core.
- */
-function getLegacyLogLevel(level: LogLevel) {
- const logLevel = level.id.toLowerCase();
- if (logLevel === 'warn') {
- return 'warning';
- }
-
- if (logLevel === 'trace') {
- return 'debug';
- }
-
- return logLevel;
-}
-
-/**
- * The "legacy" Kibana uses Hapi server + even-better plugin to log, so we should
- * use the same approach here to make log records generated by the core to look the
- * same as the rest of the records generated by the "legacy" Kibana. But to reduce
- * overhead of having full blown Hapi server instance we create our own "light" version.
- * @internal
- */
-export class LegacyLoggingServer {
- public connections = [];
- // Emulates Hapi's usage of the podium event bus.
- public events: Podium = new Podium(['log', 'request', 'response']);
-
- private onPostStopCallback?: () => void;
-
- constructor(legacyLoggingConfig: any) {
- // We set `ops.interval` to max allowed number and `ops` filter to value
- // that doesn't exist to avoid logging of ops at all, if turned on it will be
- // logged by the "legacy" Kibana.
- const loggingConfig = legacyLoggingConfigSchema.validate({
- ...legacyLoggingConfig,
- events: {
- ...legacyLoggingConfig.events,
- ops: '__no-ops__',
- },
- });
-
- setupLogging(this as unknown as Server, loggingConfig, 2147483647);
- }
-
- public register({ plugin: { register }, options }: PluginRegisterParams): Promise<void> {
- return register(this, options);
- }
-
- public log({ level, context, message, error, timestamp, meta = {} }: LogRecord) {
- const { tags = [], ...metadata } = meta;
-
- this.events
- .emit('log', {
- data: getDataToLog(error, metadata, message),
- tags: [getLegacyLogLevel(level), ...context.split('.'), ...tags],
- timestamp: timestamp.getTime(),
- })
- .catch((err) => {
- // eslint-disable-next-line no-console
- console.error('An unexpected error occurred while writing to the log:', err.stack);
- process.exit(1);
- });
- }
-
- public stop() {
- // Tell the plugin we're stopping.
- if (this.onPostStopCallback !== undefined) {
- this.onPostStopCallback();
- }
- }
-
- public ext(eventName: ServerExtType, callback: () => void) {
- // method is called by plugin that's being registered.
- if (eventName === 'onPostStop') {
- this.onPostStopCallback = callback;
- }
- // We don't care about any others the plugin registers
- }
-
- public expose() {
- // method is called by plugin that's being registered.
- }
-}
diff --git a/packages/kbn-legacy-logging/src/log_events.ts b/packages/kbn-legacy-logging/src/log_events.ts
deleted file mode 100644
index 193bfbea42ace..0000000000000
--- a/packages/kbn-legacy-logging/src/log_events.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import type { ResponseObject } from '@hapi/hapi';
-import { EventData, isEventData } from './metadata';
-
-export interface BaseEvent {
- event: string;
- timestamp: number;
- pid: number;
- tags?: string[];
-}
-
-export interface ResponseEvent extends BaseEvent {
- event: 'response';
- method: 'GET' | 'POST' | 'PUT' | 'DELETE';
- statusCode: number;
- path: string;
- headers: Record;
- responseHeaders: Record;
- responsePayload: ResponseObject['source'];
- responseTime: string;
- query: Record;
-}
-
-export interface OpsEvent extends BaseEvent {
- event: 'ops';
- os: {
- load: string[];
- };
- proc: Record;
- load: string;
-}
-
-export interface ErrorEvent extends BaseEvent {
- event: 'error';
- error: Error;
- url: string;
-}
-
-export interface UndeclaredErrorEvent extends BaseEvent {
- error: Error;
-}
-
-export interface LogEvent extends BaseEvent {
- data: EventData;
-}
-
-export interface UnkownEvent extends BaseEvent {
- data: string | Record;
-}
-
-export type AnyEvent =
- | ResponseEvent
- | OpsEvent
- | ErrorEvent
- | UndeclaredErrorEvent
- | LogEvent
- | UnkownEvent;
-
-export const isResponseEvent = (e: AnyEvent): e is ResponseEvent => e.event === 'response';
-export const isOpsEvent = (e: AnyEvent): e is OpsEvent => e.event === 'ops';
-export const isErrorEvent = (e: AnyEvent): e is ErrorEvent => e.event === 'error';
-export const isLogEvent = (e: AnyEvent): e is LogEvent => isEventData((e as LogEvent).data);
-export const isUndeclaredErrorEvent = (e: AnyEvent): e is UndeclaredErrorEvent =>
- (e as any).error instanceof Error;
diff --git a/packages/kbn-legacy-logging/src/log_format.ts b/packages/kbn-legacy-logging/src/log_format.ts
deleted file mode 100644
index a0eaf023dff19..0000000000000
--- a/packages/kbn-legacy-logging/src/log_format.ts
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import Stream from 'stream';
-import moment from 'moment-timezone';
-import _ from 'lodash';
-import queryString from 'query-string';
-import numeral from '@elastic/numeral';
-import chalk from 'chalk';
-import { inspect } from 'util';
-
-import { applyFiltersToKeys, getResponsePayloadBytes } from './utils';
-import { getLogEventData } from './metadata';
-import { LegacyLoggingConfig } from './schema';
-import {
- AnyEvent,
- ResponseEvent,
- isResponseEvent,
- isOpsEvent,
- isErrorEvent,
- isLogEvent,
- isUndeclaredErrorEvent,
-} from './log_events';
-
-export type LogFormatConfig = Pick;
-
-function serializeError(err: any = {}) {
- return {
- message: err.message,
- name: err.name,
- stack: err.stack,
- code: err.code,
- signal: err.signal,
- };
-}
-
-const levelColor = function (code: number) {
- if (code < 299) return chalk.green(String(code));
- if (code < 399) return chalk.yellow(String(code));
- if (code < 499) return chalk.magentaBright(String(code));
- return chalk.red(String(code));
-};
-
-export abstract class BaseLogFormat extends Stream.Transform {
- constructor(private readonly config: LogFormatConfig) {
- super({
- readableObjectMode: false,
- writableObjectMode: true,
- });
- }
-
- abstract format(data: Record<string, any>): string;
-
- filter(data: Record<string, any>) {
- if (!this.config.filter) {
- return data;
- }
- return applyFiltersToKeys(data, this.config.filter);
- }
-
- _transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) {
- const data = this.filter(this.readEvent(event));
- this.push(this.format(data) + '\n');
- next();
- }
-
- getContentLength({ responsePayload, responseHeaders }: ResponseEvent): number | undefined {
- try {
- return getResponsePayloadBytes(responsePayload, responseHeaders);
- } catch (e) {
- // We intentionally swallow any errors as this information is
- // only a nicety for logging purposes, and should not cause the
- // server to crash if it cannot be determined.
- this.push(
- this.format({
- type: 'log',
- tags: ['warning', 'logging'],
- message: `Failed to calculate response payload bytes. [${e}]`,
- }) + '\n'
- );
- }
- }
-
- extractAndFormatTimestamp(data: Record<string, any>, format?: string) {
- const { timezone } = this.config;
- const date = moment(data['@timestamp']);
- if (timezone) {
- date.tz(timezone);
- }
- return date.format(format);
- }
-
- readEvent(event: AnyEvent) {
- const data: Record<string, any> = {
- type: event.event,
- '@timestamp': event.timestamp,
- tags: [...(event.tags || [])],
- pid: event.pid,
- };
-
- if (isResponseEvent(event)) {
- _.defaults(data, _.pick(event, ['method', 'statusCode']));
-
- const source = _.get(event, 'source', {});
- data.req = {
- url: event.path,
- method: event.method || '',
- headers: event.headers,
- remoteAddress: source.remoteAddress,
- userAgent: source.userAgent,
- referer: source.referer,
- };
-
- data.res = {
- statusCode: event.statusCode,
- responseTime: event.responseTime,
- contentLength: this.getContentLength(event),
- };
-
- const query = queryString.stringify(event.query, { sort: false });
- if (query) {
- data.req.url += '?' + query;
- }
-
- data.message = data.req.method.toUpperCase() + ' ';
- data.message += data.req.url;
- data.message += ' ';
- data.message += levelColor(data.res.statusCode);
- data.message += ' ';
- data.message += chalk.gray(data.res.responseTime + 'ms');
- if (data.res.contentLength) {
- data.message += chalk.gray(' - ' + numeral(data.res.contentLength).format('0.0b'));
- }
- } else if (isOpsEvent(event)) {
- _.defaults(data, _.pick(event, ['pid', 'os', 'proc', 'load']));
- data.message = chalk.gray('memory: ');
- data.message += numeral(_.get(data, 'proc.mem.heapUsed')).format('0.0b');
- data.message += ' ';
- data.message += chalk.gray('uptime: ');
- data.message += numeral(_.get(data, 'proc.uptime')).format('00:00:00');
- data.message += ' ';
- data.message += chalk.gray('load: [');
- data.message += _.get(data, 'os.load', [])
- .map((val: number) => {
- return numeral(val).format('0.00');
- })
- .join(' ');
- data.message += chalk.gray(']');
- data.message += ' ';
- data.message += chalk.gray('delay: ');
- data.message += numeral(_.get(data, 'proc.delay')).format('0.000');
- } else if (isErrorEvent(event)) {
- data.level = 'error';
- data.error = serializeError(event.error);
- data.url = event.url;
- const message = _.get(event, 'error.message');
- data.message = message || 'Unknown error (no message)';
- } else if (isUndeclaredErrorEvent(event)) {
- data.type = 'error';
- data.level = _.includes(event.tags, 'fatal') ? 'fatal' : 'error';
- data.error = serializeError(event.error);
- const message = _.get(event, 'error.message');
- data.message = message || 'Unknown error object (no message)';
- } else if (isLogEvent(event)) {
- _.assign(data, getLogEventData(event.data));
- } else {
- data.message = _.isString(event.data) ? event.data : inspect(event.data);
- }
- return data;
- }
-}
diff --git a/packages/kbn-legacy-logging/src/log_format_json.test.ts b/packages/kbn-legacy-logging/src/log_format_json.test.ts
deleted file mode 100644
index 3255c5d17bb30..0000000000000
--- a/packages/kbn-legacy-logging/src/log_format_json.test.ts
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import moment from 'moment';
-
-import { attachMetaData } from './metadata';
-import { createListStream, createPromiseFromStreams } from '@kbn/utils';
-import { KbnLoggerJsonFormat } from './log_format_json';
-
-const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601);
-
-const makeEvent = (eventType: string) => ({
- event: eventType,
- timestamp: time,
-});
-
-describe('KbnLoggerJsonFormat', () => {
- const config: any = {};
-
- describe('event types and messages', () => {
- let format: KbnLoggerJsonFormat;
- beforeEach(() => {
- format = new KbnLoggerJsonFormat(config);
- });
-
- it('log', async () => {
- const result = await createPromiseFromStreams([
- createListStream([makeEvent('log')]),
- format,
- ]);
- const { type, message } = JSON.parse(result);
-
- expect(type).toBe('log');
- expect(message).toBe('undefined');
- });
-
- describe('response', () => {
- it('handles a response object', async () => {
- const event = {
- ...makeEvent('response'),
- statusCode: 200,
- contentLength: 800,
- responseTime: 12000,
- method: 'GET',
- path: '/path/to/resource',
- responsePayload: '1234567879890',
- source: {
- remoteAddress: '127.0.0.1',
- userAgent: 'Test Thing',
- referer: 'elastic.co',
- },
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { type, method, statusCode, message, req } = JSON.parse(result);
-
- expect(type).toBe('response');
- expect(method).toBe('GET');
- expect(statusCode).toBe(200);
- expect(message).toBe('GET /path/to/resource 200 12000ms - 13.0B');
- expect(req.remoteAddress).toBe('127.0.0.1');
- expect(req.userAgent).toBe('Test Thing');
- });
-
- it('leaves payload size empty if not available', async () => {
- const event = {
- ...makeEvent('response'),
- statusCode: 200,
- responseTime: 12000,
- method: 'GET',
- path: '/path/to/resource',
- responsePayload: null,
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- expect(JSON.parse(result).message).toBe('GET /path/to/resource 200 12000ms');
- });
- });
-
- it('ops', async () => {
- const event = {
- ...makeEvent('ops'),
- os: {
- load: [1, 1, 2],
- },
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { type, message } = JSON.parse(result);
-
- expect(type).toBe('ops');
- expect(message).toBe('memory: 0.0B uptime: 0:00:00 load: [1.00 1.00 2.00] delay: 0.000');
- });
-
- describe('with metadata', () => {
- it('logs an event with meta data', async () => {
- const event = {
- data: attachMetaData('message for event', {
- prop1: 'value1',
- prop2: 'value2',
- }),
- tags: ['tag1', 'tag2'],
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, prop1, prop2, tags } = JSON.parse(result);
-
- expect(level).toBe(undefined);
- expect(message).toBe('message for event');
- expect(prop1).toBe('value1');
- expect(prop2).toBe('value2');
- expect(tags).toEqual(['tag1', 'tag2']);
- });
-
- it('meta data rewrites event fields', async () => {
- const event = {
- data: attachMetaData('message for event', {
- tags: ['meta-data-tag'],
- prop1: 'value1',
- prop2: 'value2',
- }),
- tags: ['tag1', 'tag2'],
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, prop1, prop2, tags } = JSON.parse(result);
-
- expect(level).toBe(undefined);
- expect(message).toBe('message for event');
- expect(prop1).toBe('value1');
- expect(prop2).toBe('value2');
- expect(tags).toEqual(['meta-data-tag']);
- });
-
- it('logs an event with empty meta data', async () => {
- const event = {
- data: attachMetaData('message for event'),
- tags: ['tag1', 'tag2'],
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, prop1, prop2, tags } = JSON.parse(result);
-
- expect(level).toBe(undefined);
- expect(message).toBe('message for event');
- expect(prop1).toBe(undefined);
- expect(prop2).toBe(undefined);
- expect(tags).toEqual(['tag1', 'tag2']);
- });
-
- it('does not log meta data for an error event', async () => {
- const event = {
- error: new Error('reason'),
- data: attachMetaData('message for event', {
- prop1: 'value1',
- prop2: 'value2',
- }),
- tags: ['tag1', 'tag2'],
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, prop1, prop2, tags } = JSON.parse(result);
-
- expect(level).toBe('error');
- expect(message).toBe('reason');
- expect(prop1).toBe(undefined);
- expect(prop2).toBe(undefined);
- expect(tags).toEqual(['tag1', 'tag2']);
- });
- });
-
- describe('errors', () => {
- it('error type', async () => {
- const event = {
- ...makeEvent('error'),
- error: {
- message: 'test error 0',
- },
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, error } = JSON.parse(result);
-
- expect(level).toBe('error');
- expect(message).toBe('test error 0');
- expect(error).toEqual({ message: 'test error 0' });
- });
-
- it('with no message', async () => {
- const event = {
- event: 'error',
- error: {},
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, error } = JSON.parse(result);
-
- expect(level).toBe('error');
- expect(message).toBe('Unknown error (no message)');
- expect(error).toEqual({});
- });
-
- it('event error instanceof Error', async () => {
- const event = {
- error: new Error('test error 2') as any,
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, error } = JSON.parse(result);
-
- expect(level).toBe('error');
- expect(message).toBe('test error 2');
-
- expect(error.message).toBe(event.error.message);
- expect(error.name).toBe(event.error.name);
- expect(error.stack).toBe(event.error.stack);
- expect(error.code).toBe(event.error.code);
- expect(error.signal).toBe(event.error.signal);
- });
-
- it('event error instanceof Error - fatal', async () => {
- const event = {
- error: new Error('test error 2') as any,
- tags: ['fatal', 'tag2'],
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { tags, level, message, error } = JSON.parse(result);
-
- expect(tags).toEqual(['fatal', 'tag2']);
- expect(level).toBe('fatal');
- expect(message).toBe('test error 2');
-
- expect(error.message).toBe(event.error.message);
- expect(error.name).toBe(event.error.name);
- expect(error.stack).toBe(event.error.stack);
- expect(error.code).toBe(event.error.code);
- expect(error.signal).toBe(event.error.signal);
- });
-
- it('event error instanceof Error, no message', async () => {
- const event = {
- error: new Error('') as any,
- };
- const result = await createPromiseFromStreams([createListStream([event]), format]);
- const { level, message, error } = JSON.parse(result);
-
- expect(level).toBe('error');
- expect(message).toBe('Unknown error object (no message)');
-
- expect(error.message).toBe(event.error.message);
- expect(error.name).toBe(event.error.name);
- expect(error.stack).toBe(event.error.stack);
- expect(error.code).toBe(event.error.code);
- expect(error.signal).toBe(event.error.signal);
- });
- });
- });
-
- describe('timezone', () => {
- it('logs in UTC', async () => {
- const format = new KbnLoggerJsonFormat({
- timezone: 'UTC',
- } as any);
-
- const result = await createPromiseFromStreams([
- createListStream([makeEvent('log')]),
- format,
- ]);
-
- const { '@timestamp': timestamp } = JSON.parse(result);
- expect(timestamp).toBe(moment.utc(time).format());
- });
-
- it('logs in local timezone timezone is undefined', async () => {
- const format = new KbnLoggerJsonFormat({} as any);
-
- const result = await createPromiseFromStreams([
- createListStream([makeEvent('log')]),
- format,
- ]);
-
- const { '@timestamp': timestamp } = JSON.parse(result);
- expect(timestamp).toBe(moment(time).format());
- });
- });
-});
diff --git a/packages/kbn-legacy-logging/src/log_format_json.ts b/packages/kbn-legacy-logging/src/log_format_json.ts
deleted file mode 100644
index 427415d1715a6..0000000000000
--- a/packages/kbn-legacy-logging/src/log_format_json.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-// @ts-expect-error missing type def
-import stringify from 'json-stringify-safe';
-import { BaseLogFormat } from './log_format';
-
-const stripColors = function (string: string) {
- return string.replace(/\u001b[^m]+m/g, '');
-};
-
-export class KbnLoggerJsonFormat extends BaseLogFormat {
- format(data: Record<string, any>) {
- data.message = stripColors(data.message);
- data['@timestamp'] = this.extractAndFormatTimestamp(data);
- return stringify(data);
- }
-}
diff --git a/packages/kbn-legacy-logging/src/log_format_string.test.ts b/packages/kbn-legacy-logging/src/log_format_string.test.ts
deleted file mode 100644
index 3ea02c2cfb286..0000000000000
--- a/packages/kbn-legacy-logging/src/log_format_string.test.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import moment from 'moment';
-
-import { attachMetaData } from './metadata';
-import { createListStream, createPromiseFromStreams } from '@kbn/utils';
-import { KbnLoggerStringFormat } from './log_format_string';
-
-const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601);
-
-const makeEvent = () => ({
- event: 'log',
- timestamp: time,
- tags: ['tag'],
- pid: 1,
- data: 'my log message',
-});
-
-describe('KbnLoggerStringFormat', () => {
- it('logs in UTC', async () => {
- const format = new KbnLoggerStringFormat({
- timezone: 'UTC',
- } as any);
-
- const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]);
-
- expect(String(result)).toContain(moment.utc(time).format('HH:mm:ss.SSS'));
- });
-
- it('logs in local timezone when timezone is undefined', async () => {
- const format = new KbnLoggerStringFormat({} as any);
-
- const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]);
-
- expect(String(result)).toContain(moment(time).format('HH:mm:ss.SSS'));
- });
- describe('with metadata', () => {
- it('does not log meta data', async () => {
- const format = new KbnLoggerStringFormat({} as any);
- const event = {
- data: attachMetaData('message for event', {
- prop1: 'value1',
- }),
- tags: ['tag1', 'tag2'],
- };
-
- const result = await createPromiseFromStreams([createListStream([event]), format]);
-
- const resultString = String(result);
- expect(resultString).toContain('tag1');
- expect(resultString).toContain('tag2');
- expect(resultString).toContain('message for event');
-
- expect(resultString).not.toContain('value1');
- expect(resultString).not.toContain('prop1');
- });
- });
-});
diff --git a/packages/kbn-legacy-logging/src/log_format_string.ts b/packages/kbn-legacy-logging/src/log_format_string.ts
deleted file mode 100644
index da21e56e00340..0000000000000
--- a/packages/kbn-legacy-logging/src/log_format_string.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import _ from 'lodash';
-import chalk from 'chalk';
-
-import { BaseLogFormat } from './log_format';
-
-const statuses = ['err', 'info', 'error', 'warning', 'fatal', 'status', 'debug'];
-
-const typeColors: Record<string, string> = {
- log: 'white',
- req: 'green',
- res: 'green',
- ops: 'cyan',
- config: 'cyan',
- err: 'red',
- info: 'green',
- error: 'red',
- warning: 'red',
- fatal: 'magentaBright',
- status: 'yellowBright',
- debug: 'gray',
- server: 'gray',
- optmzr: 'white',
- manager: 'green',
- optimize: 'magentaBright',
- listening: 'magentaBright',
- scss: 'magentaBright',
-};
-
-const color = _.memoize((name: string): ((...text: string[]) => string) => {
- // @ts-expect-error couldn't even get rid of the error with an any cast
- return chalk[typeColors[name]] || _.identity;
-});
-
-const type = _.memoize((t: string) => {
- return color(t)(_.pad(t, 7).slice(0, 7));
-});
-
-const prefix = process.env.isDevCliChild ? `${type('server')} ` : '';
-
-export class KbnLoggerStringFormat extends BaseLogFormat {
- format(data: Record<string, any>) {
- const time = color('time')(this.extractAndFormatTimestamp(data, 'HH:mm:ss.SSS'));
- const msg = data.error ? color('error')(data.error.stack) : color('message')(data.message);
-
- const tags = _(data.tags)
- .sortBy(function (tag) {
- if (color(tag) === _.identity) return `2${tag}`;
- if (_.includes(statuses, tag)) return `0${tag}`;
- return `1${tag}`;
- })
- .reduce(function (s, t) {
- return s + `[${color(t)(t)}]`;
- }, '');
-
- return `${prefix}${type(data.type)} [${time}] ${tags} ${msg}`;
- }
-}
diff --git a/packages/kbn-legacy-logging/src/log_interceptor.test.ts b/packages/kbn-legacy-logging/src/log_interceptor.test.ts
deleted file mode 100644
index 53d622444ece8..0000000000000
--- a/packages/kbn-legacy-logging/src/log_interceptor.test.ts
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { ErrorEvent } from './log_events';
-import { LogInterceptor } from './log_interceptor';
-
-function stubClientErrorEvent(errorMeta: Record<string, any>): ErrorEvent {
- const error = new Error();
- Object.assign(error, errorMeta);
- return {
- event: 'error',
- url: '',
- pid: 1234,
- timestamp: Date.now(),
- tags: ['connection', 'client', 'error'],
- error,
- };
-}
-
-const stubEconnresetEvent = () => stubClientErrorEvent({ code: 'ECONNRESET' });
-const stubEpipeEvent = () => stubClientErrorEvent({ errno: 'EPIPE' });
-const stubEcanceledEvent = () => stubClientErrorEvent({ errno: 'ECANCELED' });
-
-function assertDowngraded(transformed: Record<string, any>) {
- expect(!!transformed).toBe(true);
- expect(transformed).toHaveProperty('event', 'log');
- expect(transformed).toHaveProperty('tags');
- expect(transformed.tags).not.toContain('error');
-}
-
-describe('server logging LogInterceptor', () => {
- describe('#downgradeIfEconnreset()', () => {
- it('transforms ECONNRESET events', () => {
- const interceptor = new LogInterceptor();
- const event = stubEconnresetEvent();
- assertDowngraded(interceptor.downgradeIfEconnreset(event)!);
- });
-
- it('does not match if the tags are not in order', () => {
- const interceptor = new LogInterceptor();
- const event = stubEconnresetEvent();
- event.tags = [...event.tags!.slice(1), event.tags![0]];
- expect(interceptor.downgradeIfEconnreset(event)).toBe(null);
- });
-
- it('ignores non ECONNRESET events', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ errno: 'not ECONNRESET' });
- expect(interceptor.downgradeIfEconnreset(event)).toBe(null);
- });
-
- it('ignores if tags are wrong', () => {
- const interceptor = new LogInterceptor();
- const event = stubEconnresetEvent();
- event.tags = ['different', 'tags'];
- expect(interceptor.downgradeIfEconnreset(event)).toBe(null);
- });
- });
-
- describe('#downgradeIfEpipe()', () => {
- it('transforms EPIPE events', () => {
- const interceptor = new LogInterceptor();
- const event = stubEpipeEvent();
- assertDowngraded(interceptor.downgradeIfEpipe(event)!);
- });
-
- it('does not match if the tags are not in order', () => {
- const interceptor = new LogInterceptor();
- const event = stubEpipeEvent();
- event.tags = [...event.tags!.slice(1), event.tags![0]];
- expect(interceptor.downgradeIfEpipe(event)).toBe(null);
- });
-
- it('ignores non EPIPE events', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ errno: 'not EPIPE' });
- expect(interceptor.downgradeIfEpipe(event)).toBe(null);
- });
-
- it('ignores if tags are wrong', () => {
- const interceptor = new LogInterceptor();
- const event = stubEpipeEvent();
- event.tags = ['different', 'tags'];
- expect(interceptor.downgradeIfEpipe(event)).toBe(null);
- });
- });
-
- describe('#downgradeIfEcanceled()', () => {
- it('transforms ECANCELED events', () => {
- const interceptor = new LogInterceptor();
- const event = stubEcanceledEvent();
- assertDowngraded(interceptor.downgradeIfEcanceled(event)!);
- });
-
- it('does not match if the tags are not in order', () => {
- const interceptor = new LogInterceptor();
- const event = stubEcanceledEvent();
- event.tags = [...event.tags!.slice(1), event.tags![0]];
- expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
- });
-
- it('ignores non ECANCELED events', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ errno: 'not ECANCELLED' });
- expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
- });
-
- it('ignores if tags are wrong', () => {
- const interceptor = new LogInterceptor();
- const event = stubEcanceledEvent();
- event.tags = ['different', 'tags'];
- expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
- });
- });
-
- describe('#downgradeIfHTTPSWhenHTTP', () => {
- it('transforms https requests when serving http errors', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ message: 'Parse Error', code: 'HPE_INVALID_METHOD' });
- assertDowngraded(interceptor.downgradeIfHTTPSWhenHTTP(event)!);
- });
-
- it('ignores non events', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({
- message: 'Parse Error',
- code: 'NOT_HPE_INVALID_METHOD',
- });
- expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
- });
- });
-
- describe('#downgradeIfHTTPWhenHTTPS', () => {
- it('transforms http requests when serving https errors', () => {
- const message =
- '4584650176:error:1408F09C:SSL routines:ssl3_get_record:http request:../deps/openssl/openssl/ssl/record/ssl3_record.c:322:\n';
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ message });
- assertDowngraded(interceptor.downgradeIfHTTPWhenHTTPS(event)!);
- });
-
- it('ignores non events', () => {
- const interceptor = new LogInterceptor();
- const event = stubClientErrorEvent({ message: 'Not error' });
- expect(interceptor.downgradeIfEcanceled(event)).toBe(null);
- });
- });
-});
diff --git a/packages/kbn-legacy-logging/src/log_interceptor.ts b/packages/kbn-legacy-logging/src/log_interceptor.ts
deleted file mode 100644
index 1085806135ca6..0000000000000
--- a/packages/kbn-legacy-logging/src/log_interceptor.ts
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import Stream from 'stream';
-import { get, isEqual } from 'lodash';
-import { AnyEvent } from './log_events';
-
-/**
- * Matches error messages when clients connect via HTTP instead of HTTPS; see unit test for full message. Warning: this can change when Node
- * and its bundled OpenSSL binary are upgraded.
- */
-const OPENSSL_GET_RECORD_REGEX = /ssl3_get_record:http/;
-
-function doTagsMatch(event: AnyEvent, tags: string[]) {
- return isEqual(event.tags, tags);
-}
-
-function doesMessageMatch(errorMessage: string, match: RegExp | string) {
- if (!errorMessage) {
- return false;
- }
- if (match instanceof RegExp) {
- return match.test(errorMessage);
- }
- return errorMessage === match;
-}
-
-// converts the given event into a debug log if it's an error of the given type
-function downgradeIfErrorType(errorType: string, event: AnyEvent) {
- const isClientError = doTagsMatch(event, ['connection', 'client', 'error']);
- if (!isClientError) {
- return null;
- }
-
- const matchesErrorType =
- get(event, 'error.code') === errorType || get(event, 'error.errno') === errorType;
- if (!matchesErrorType) {
- return null;
- }
-
- const errorTypeTag = errorType.toLowerCase();
-
- return {
- event: 'log',
- pid: event.pid,
- timestamp: event.timestamp,
- tags: ['debug', 'connection', errorTypeTag],
- data: `${errorType}: Socket was closed by the client (probably the browser) before it could be read completely`,
- };
-}
-
-function downgradeIfErrorMessage(match: RegExp | string, event: AnyEvent) {
- const isClientError = doTagsMatch(event, ['connection', 'client', 'error']);
- const errorMessage = get(event, 'error.message');
- const matchesErrorMessage = isClientError && doesMessageMatch(errorMessage, match);
-
- if (!matchesErrorMessage) {
- return null;
- }
-
- return {
- event: 'log',
- pid: event.pid,
- timestamp: event.timestamp,
- tags: ['debug', 'connection'],
- data: errorMessage,
- };
-}
-
-export class LogInterceptor extends Stream.Transform {
- constructor() {
- super({
- readableObjectMode: true,
- writableObjectMode: true,
- });
- }
-
- /**
- * Since the upgrade to hapi 14, any socket read
- * error is surfaced as a generic "client error"
- * but "ECONNRESET" specifically is not useful for the
- * logs unless you are trying to debug edge-case behaviors.
- *
- * For that reason, we downgrade this from error to debug level
- *
- * @param {object} - log event
- */
- downgradeIfEconnreset(event: AnyEvent) {
- return downgradeIfErrorType('ECONNRESET', event);
- }
-
- /**
- * Since the upgrade to hapi 14, any socket write
- * error is surfaced as a generic "client error"
- * but "EPIPE" specifically is not useful for the
- * logs unless you are trying to debug edge-case behaviors.
- *
- * For that reason, we downgrade this from error to debug level
- *
- * @param {object} - log event
- */
- downgradeIfEpipe(event: AnyEvent) {
- return downgradeIfErrorType('EPIPE', event);
- }
-
- /**
- * Since the upgrade to hapi 14, any socket write
- * error is surfaced as a generic "client error"
- * but "ECANCELED" specifically is not useful for the
- * logs unless you are trying to debug edge-case behaviors.
- *
- * For that reason, we downgrade this from error to debug level
- *
- * @param {object} - log event
- */
- downgradeIfEcanceled(event: AnyEvent) {
- return downgradeIfErrorType('ECANCELED', event);
- }
-
- downgradeIfHTTPSWhenHTTP(event: AnyEvent) {
- return downgradeIfErrorType('HPE_INVALID_METHOD', event);
- }
-
- downgradeIfHTTPWhenHTTPS(event: AnyEvent) {
- return downgradeIfErrorMessage(OPENSSL_GET_RECORD_REGEX, event);
- }
-
- _transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) {
- const downgraded =
- this.downgradeIfEconnreset(event) ||
- this.downgradeIfEpipe(event) ||
- this.downgradeIfEcanceled(event) ||
- this.downgradeIfHTTPSWhenHTTP(event) ||
- this.downgradeIfHTTPWhenHTTPS(event);
-
- this.push(downgraded || event);
- next();
- }
-}
diff --git a/packages/kbn-legacy-logging/src/log_reporter.test.ts b/packages/kbn-legacy-logging/src/log_reporter.test.ts
deleted file mode 100644
index a2ad8984ba244..0000000000000
--- a/packages/kbn-legacy-logging/src/log_reporter.test.ts
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import os from 'os';
-import path from 'path';
-import fs from 'fs';
-
-import stripAnsi from 'strip-ansi';
-
-import { getLogReporter } from './log_reporter';
-
-const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
-
-describe('getLogReporter', () => {
- it('should log to stdout (not json)', async () => {
- const lines: string[] = [];
- const origWrite = process.stdout.write;
- process.stdout.write = (buffer: string | Uint8Array): boolean => {
- lines.push(stripAnsi(buffer.toString()).trim());
- return true;
- };
-
- const loggerStream = getLogReporter({
- config: {
- json: false,
- dest: 'stdout',
- filter: {},
- },
- events: { log: '*' },
- });
-
- loggerStream.end({ event: 'log', tags: ['foo'], data: 'hello world' });
-
- await sleep(500);
-
- process.stdout.write = origWrite;
- expect(lines.length).toBe(1);
- expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] hello world$/);
- });
-
- it('should log to stdout (as json)', async () => {
- const lines: string[] = [];
- const origWrite = process.stdout.write;
- process.stdout.write = (buffer: string | Uint8Array): boolean => {
- lines.push(JSON.parse(buffer.toString().trim()));
- return true;
- };
-
- const loggerStream = getLogReporter({
- config: {
- json: true,
- dest: 'stdout',
- filter: {},
- },
- events: { log: '*' },
- });
-
- loggerStream.end({ event: 'log', tags: ['foo'], data: 'hello world' });
-
- await sleep(500);
-
- process.stdout.write = origWrite;
- expect(lines.length).toBe(1);
- expect(lines[0]).toMatchObject({
- type: 'log',
- tags: ['foo'],
- message: 'hello world',
- });
- });
-
- it('should log to custom file (not json)', async () => {
- const dir = os.tmpdir();
- const logfile = `dest-${Date.now()}.log`;
- const dest = path.join(dir, logfile);
-
- const loggerStream = getLogReporter({
- config: {
- json: false,
- dest,
- filter: {},
- },
- events: { log: '*' },
- });
-
- loggerStream.end({ event: 'log', tags: ['foo'], data: 'hello world' });
-
- await sleep(500);
-
- const lines = stripAnsi(fs.readFileSync(dest, { encoding: 'utf8' }))
- .trim()
- .split(os.EOL);
- expect(lines.length).toBe(1);
- expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] hello world$/);
- });
-
- it('should log to custom file (as json)', async () => {
- const dir = os.tmpdir();
- const logfile = `dest-${Date.now()}.log`;
- const dest = path.join(dir, logfile);
-
- const loggerStream = getLogReporter({
- config: {
- json: true,
- dest,
- filter: {},
- },
- events: { log: '*' },
- });
-
- loggerStream.end({ event: 'log', tags: ['foo'], data: 'hello world' });
-
- await sleep(500);
-
- const lines = fs
- .readFileSync(dest, { encoding: 'utf8' })
- .trim()
- .split(os.EOL)
- .map((data) => JSON.parse(data));
- expect(lines.length).toBe(1);
- expect(lines[0]).toMatchObject({
- type: 'log',
- tags: ['foo'],
- message: 'hello world',
- });
- });
-});
diff --git a/packages/kbn-legacy-logging/src/log_reporter.ts b/packages/kbn-legacy-logging/src/log_reporter.ts
deleted file mode 100644
index d42fb78f1647b..0000000000000
--- a/packages/kbn-legacy-logging/src/log_reporter.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { createWriteStream } from 'fs';
-import { pipeline } from 'stream';
-
-// @ts-expect-error missing type def
-import { Squeeze } from '@hapi/good-squeeze';
-
-import { KbnLoggerJsonFormat } from './log_format_json';
-import { KbnLoggerStringFormat } from './log_format_string';
-import { LogInterceptor } from './log_interceptor';
-import { LogFormatConfig } from './log_format';
-
-export function getLogReporter({ events, config }: { events: any; config: LogFormatConfig }) {
- const squeeze = new Squeeze(events);
- const format = config.json ? new KbnLoggerJsonFormat(config) : new KbnLoggerStringFormat(config);
- const logInterceptor = new LogInterceptor();
-
- if (config.dest === 'stdout') {
- pipeline(logInterceptor, squeeze, format, onFinished);
- // The `pipeline` function is used to properly close all streams in the
- // pipeline in case one of them ends or fails. Since stdout obviously
- // shouldn't be closed in case of a failure in one of the other streams,
- // we're not including that in the call to `pipeline`, but rely on the old
- // `pipe` function instead.
- format.pipe(process.stdout);
- } else {
- const dest = createWriteStream(config.dest, {
- flags: 'a',
- encoding: 'utf8',
- });
- pipeline(logInterceptor, squeeze, format, dest, onFinished);
- }
-
- return logInterceptor;
-}
-
-function onFinished(err: NodeJS.ErrnoException | null) {
- if (err) {
- // eslint-disable-next-line no-console
- console.error('An unexpected error occurred in the logging pipeline:', err.stack);
- }
-}
diff --git a/packages/kbn-legacy-logging/src/metadata.ts b/packages/kbn-legacy-logging/src/metadata.ts
deleted file mode 100644
index 0f41673ef6723..0000000000000
--- a/packages/kbn-legacy-logging/src/metadata.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { isPlainObject } from 'lodash';
-
-export const metadataSymbol = Symbol('log message with metadata');
-
-export interface EventData {
- [metadataSymbol]?: EventMetadata;
- [key: string]: any;
-}
-
-export interface EventMetadata {
- message: string;
- metadata: Record<string, any>;
-}
-
-export const isEventData = (eventData: EventData) => {
- return Boolean(isPlainObject(eventData) && eventData[metadataSymbol]);
-};
-
-export const getLogEventData = (eventData: EventData) => {
- const { message, metadata } = eventData[metadataSymbol]!;
- return {
- ...metadata,
- message,
- };
-};
-
-export const attachMetaData = (message: string, metadata: Record<string, any> = {}) => {
- return {
- [metadataSymbol]: {
- message,
- metadata,
- },
- };
-};
diff --git a/packages/kbn-legacy-logging/src/rotate/index.ts b/packages/kbn-legacy-logging/src/rotate/index.ts
deleted file mode 100644
index 39305dcccf788..0000000000000
--- a/packages/kbn-legacy-logging/src/rotate/index.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { Server } from '@hapi/hapi';
-import { LogRotator } from './log_rotator';
-import { LegacyLoggingConfig } from '../schema';
-
-let logRotator: LogRotator;
-
-export async function setupLoggingRotate(server: Server, config: LegacyLoggingConfig) {
- // If log rotate is not enabled we skip
- if (!config.rotate.enabled) {
- return;
- }
-
- // We don't want to run logging rotate server if
- // we are not logging to a file
- if (config.dest === 'stdout') {
- server.log(
- ['warning', 'logging:rotate'],
- 'Log rotation is enabled but logging.dest is configured for stdout. Set logging.dest to a file for this setting to take effect.'
- );
- return;
- }
-
- // Enable Logging Rotate Service
- // We need the master process and it can
- // try to setupLoggingRotate more than once,
- // so we'll need to assure it only loads once.
- if (!logRotator) {
- logRotator = new LogRotator(config, server);
- await logRotator.start();
- }
-
- return logRotator;
-}
diff --git a/packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts b/packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts
deleted file mode 100644
index ce9a24e63455f..0000000000000
--- a/packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import del from 'del';
-import fs, { existsSync, mkdirSync, statSync, writeFileSync } from 'fs';
-import { tmpdir } from 'os';
-import { dirname, join } from 'path';
-import { LogRotator } from './log_rotator';
-import { LegacyLoggingConfig } from '../schema';
-
-const mockOn = jest.fn();
-jest.mock('chokidar', () => ({
- watch: jest.fn(() => ({
- on: mockOn,
- close: jest.fn(),
- })),
-}));
-
-jest.mock('lodash', () => ({
- ...(jest.requireActual('lodash') as any),
- throttle: (fn: any) => fn,
-}));
-
-const tempDir = join(tmpdir(), 'kbn_log_rotator_test');
-const testFilePath = join(tempDir, 'log_rotator_test_log_file.log');
-
-const createLogRotatorConfig = (logFilePath: string): LegacyLoggingConfig => {
- return {
- dest: logFilePath,
- rotate: {
- enabled: true,
- keepFiles: 2,
- everyBytes: 2,
- usePolling: false,
- pollingInterval: 10000,
- pollingPolicyTestTimeout: 4000,
- },
- } as LegacyLoggingConfig;
-};
-
-const mockServer: any = {
- log: jest.fn(),
-};
-
-const writeBytesToFile = (filePath: string, numberOfBytes: number) => {
- writeFileSync(filePath, 'a'.repeat(numberOfBytes), { flag: 'a' });
-};
-
-describe('LogRotator', () => {
- beforeEach(() => {
- mkdirSync(tempDir, { recursive: true });
- writeFileSync(testFilePath, '');
- });
-
- afterEach(() => {
- del.sync(tempDir, { force: true });
- mockOn.mockClear();
- });
-
- it('rotates log file when bigger than set limit on start', async () => {
- writeBytesToFile(testFilePath, 3);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
-
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
-
- await logRotator.stop();
-
- expect(existsSync(join(tempDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- });
-
- it('rotates log file when equal than set limit over time', async () => {
- writeBytesToFile(testFilePath, 1);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
-
- const testLogFileDir = dirname(testFilePath);
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeFalsy();
-
- writeBytesToFile(testFilePath, 1);
-
- // ['change', [asyncFunction]]
- const onChangeCb = mockOn.mock.calls[0][1];
- await onChangeCb(testLogFileDir, { size: 2 });
-
- await logRotator.stop();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- });
-
- it('rotates log file when file size is bigger than limit', async () => {
- writeBytesToFile(testFilePath, 1);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
-
- const testLogFileDir = dirname(testFilePath);
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeFalsy();
-
- writeBytesToFile(testFilePath, 2);
-
- // ['change', [asyncFunction]]
- const onChangeCb = mockOn.mock.calls[0][1];
- await onChangeCb(testLogFileDir, { size: 3 });
-
- await logRotator.stop();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- });
-
- it('rotates log file service correctly keeps number of files', async () => {
- writeBytesToFile(testFilePath, 3);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
-
- const testLogFileDir = dirname(testFilePath);
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
-
- writeBytesToFile(testFilePath, 2);
-
- // ['change', [asyncFunction]]
- const onChangeCb = mockOn.mock.calls[0][1];
- await onChangeCb(testLogFileDir, { size: 2 });
-
- writeBytesToFile(testFilePath, 5);
- await onChangeCb(testLogFileDir, { size: 5 });
-
- await logRotator.stop();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.1'))).toBeTruthy();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.2'))).toBeFalsy();
- expect(statSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0')).size).toBe(5);
- });
-
- it('rotates log file service correctly keeps number of files even when number setting changes', async () => {
- writeBytesToFile(testFilePath, 3);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
-
- const testLogFileDir = dirname(testFilePath);
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
-
- writeBytesToFile(testFilePath, 2);
-
- // ['change', [asyncFunction]]
- const onChangeCb = mockOn.mock.calls[0][1];
- await onChangeCb(testLogFileDir, { size: 2 });
-
- writeBytesToFile(testFilePath, 5);
- await onChangeCb(testLogFileDir, { size: 5 });
-
- await logRotator.stop();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.1'))).toBeTruthy();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.2'))).toBeFalsy();
- expect(statSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0')).size).toBe(5);
-
- logRotator.keepFiles = 1;
- await logRotator.start();
-
- writeBytesToFile(testFilePath, 5);
- await onChangeCb(testLogFileDir, { size: 5 });
-
- await logRotator.stop();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy();
- expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.1'))).toBeFalsy();
- expect(statSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0')).size).toBe(5);
- });
-
- it('rotates log file service correctly detects usePolling when it should be false', async () => {
- writeBytesToFile(testFilePath, 1);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
- expect(logRotator.usePolling).toBe(false);
-
- const shouldUsePolling = await logRotator._shouldUsePolling();
- expect(shouldUsePolling).toBe(false);
-
- await logRotator.stop();
- });
-
- it('rotates log file service correctly detects usePolling when it should be true', async () => {
- writeBytesToFile(testFilePath, 1);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
-
- jest.spyOn(fs, 'watch').mockImplementation(
- () =>
- ({
- on: jest.fn((eventType, cb) => {
- if (eventType === 'error') {
- cb();
- }
- }),
- close: jest.fn(),
- } as any)
- );
-
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
- expect(logRotator.usePolling).toBe(false);
- expect(logRotator.shouldUsePolling).toBe(true);
-
- await logRotator.stop();
- });
-
- it('rotates log file service correctly fallback to usePolling true after defined timeout', async () => {
- jest.useFakeTimers();
- writeBytesToFile(testFilePath, 1);
-
- const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer);
- jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {});
- jest.spyOn(fs, 'watch').mockImplementation(
- () =>
- ({
- on: jest.fn((ev: string) => {
- if (ev === 'error') {
- jest.runTimersToTime(15000);
- }
- }),
- close: jest.fn(),
- } as any)
- );
-
- await logRotator.start();
-
- expect(logRotator.running).toBe(true);
- expect(logRotator.usePolling).toBe(false);
- expect(logRotator.shouldUsePolling).toBe(true);
-
- await logRotator.stop();
- jest.useRealTimers();
- });
-});
diff --git a/packages/kbn-legacy-logging/src/rotate/log_rotator.ts b/packages/kbn-legacy-logging/src/rotate/log_rotator.ts
deleted file mode 100644
index 4b1e34839030f..0000000000000
--- a/packages/kbn-legacy-logging/src/rotate/log_rotator.ts
+++ /dev/null
@@ -1,352 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import * as chokidar from 'chokidar';
-import fs from 'fs';
-import { Server } from '@hapi/hapi';
-import { throttle } from 'lodash';
-import { tmpdir } from 'os';
-import { basename, dirname, join, sep } from 'path';
-import { Observable } from 'rxjs';
-import { first } from 'rxjs/operators';
-import { promisify } from 'util';
-import { LegacyLoggingConfig } from '../schema';
-
-const mkdirAsync = promisify(fs.mkdir);
-const readdirAsync = promisify(fs.readdir);
-const renameAsync = promisify(fs.rename);
-const statAsync = promisify(fs.stat);
-const unlinkAsync = promisify(fs.unlink);
-const writeFileAsync = promisify(fs.writeFile);
-
-export class LogRotator {
- private readonly config: LegacyLoggingConfig;
- private readonly log: Server['log'];
- public logFilePath: string;
- public everyBytes: number;
- public keepFiles: number;
- public running: boolean;
- private logFileSize: number;
- public isRotating: boolean;
- public throttledRotate: () => void;
- public stalker: chokidar.FSWatcher | null;
- public usePolling: boolean;
- public pollingInterval: number;
- private stalkerUsePollingPolicyTestTimeout: NodeJS.Timeout | null;
- public shouldUsePolling: boolean;
-
- constructor(config: LegacyLoggingConfig, server: Server) {
- this.config = config;
- this.log = server.log.bind(server);
- this.logFilePath = config.dest;
- this.everyBytes = config.rotate.everyBytes;
- this.keepFiles = config.rotate.keepFiles;
- this.running = false;
- this.logFileSize = 0;
- this.isRotating = false;
- this.throttledRotate = throttle(async () => await this._rotate(), 5000);
- this.stalker = null;
- this.usePolling = config.rotate.usePolling;
- this.pollingInterval = config.rotate.pollingInterval;
- this.shouldUsePolling = false;
- this.stalkerUsePollingPolicyTestTimeout = null;
- }
-
- async start() {
- if (this.running) {
- return;
- }
-
- this.running = true;
-
- // create exit listener for cleanup purposes
- this._createExitListener();
-
- // call rotate on startup
- await this._callRotateOnStartup();
-
- // init log file size monitor
- await this._startLogFileSizeMonitor();
- }
-
- stop = () => {
- if (!this.running) {
- return;
- }
-
- // cleanup exit listener
- this._deleteExitListener();
-
- // stop log file size monitor
- this._stopLogFileSizeMonitor();
-
- this.running = false;
- };
-
- async _shouldUsePolling() {
- try {
- // Setup a test file in order to try the fs env
- // and understand if we need to usePolling or not
- const tempFileDir = tmpdir();
- const tempFile = join(tempFileDir, 'kbn_log_rotation_use_polling_test_file.log');
-
- await mkdirAsync(tempFileDir, { recursive: true });
- await writeFileAsync(tempFile, '');
-
- // setup fs.watch for the temp test file
- const testWatcher = fs.watch(tempFile, { persistent: false });
-
- // await writeFileAsync(tempFile, 'test');
-
- const usePollingTest$ = new Observable<boolean>((observer) => {
- // observable complete function
- const completeFn = (completeStatus: boolean) => {
- if (this.stalkerUsePollingPolicyTestTimeout) {
- clearTimeout(this.stalkerUsePollingPolicyTestTimeout);
- }
- testWatcher.close();
-
- observer.next(completeStatus);
- observer.complete();
- };
-
- // setup conditions that would fire the observable
- this.stalkerUsePollingPolicyTestTimeout = setTimeout(
- () => completeFn(true),
- this.config.rotate.pollingPolicyTestTimeout || 15000
- );
- testWatcher.on('change', () => completeFn(false));
- testWatcher.on('error', () => completeFn(true));
-
- // fire test watcher events
- setTimeout(() => {
- fs.writeFileSync(tempFile, 'test');
- }, 0);
- });
-
- // wait for the first observable result and consider it as the result
- // for our use polling test
- const usePollingTestResult = await usePollingTest$.pipe(first()).toPromise();
-
- // delete the temp file used for the test
- await unlinkAsync(tempFile);
-
- return usePollingTestResult;
- } catch {
- return true;
- }
- }
-
- async _startLogFileSizeMonitor() {
- this.usePolling = this.config.rotate.usePolling;
- this.shouldUsePolling = await this._shouldUsePolling();
-
- if (this.usePolling && !this.shouldUsePolling) {
- this.log(
- ['warning', 'logging:rotate'],
- 'Looks like your current environment support a faster algorithm than polling. You can try to disable `usePolling`'
- );
- }
-
- if (!this.usePolling && this.shouldUsePolling) {
- this.log(
- ['error', 'logging:rotate'],
- 'Looks like within your current environment you need to use polling in order to enable log rotator. Please enable `usePolling`'
- );
- }
-
- this.stalker = chokidar.watch(this.logFilePath, {
- ignoreInitial: true,
- awaitWriteFinish: false,
- useFsEvents: false,
- usePolling: this.usePolling,
- interval: this.pollingInterval,
- binaryInterval: this.pollingInterval,
- alwaysStat: true,
- atomic: false,
- });
- this.stalker.on('change', this._logFileSizeMonitorHandler);
- }
-
- _logFileSizeMonitorHandler = async (filename: string, stats: fs.Stats) => {
- if (!filename || !stats) {
- return;
- }
-
- this.logFileSize = stats.size || 0;
- await this.throttledRotate();
- };
-
- _stopLogFileSizeMonitor() {
- if (!this.stalker) {
- return;
- }
-
- this.stalker.close();
-
- if (this.stalkerUsePollingPolicyTestTimeout) {
- clearTimeout(this.stalkerUsePollingPolicyTestTimeout);
- }
- }
-
- _createExitListener() {
- process.on('exit', this.stop);
- }
-
- _deleteExitListener() {
- process.removeListener('exit', this.stop);
- }
-
- async _getLogFileSizeAndCreateIfNeeded() {
- try {
- const logFileStats = await statAsync(this.logFilePath);
- return logFileStats.size;
- } catch {
- // touch the file to make the watcher being able to register
- // change events
- await writeFileAsync(this.logFilePath, '');
- return 0;
- }
- }
-
- async _callRotateOnStartup() {
- this.logFileSize = await this._getLogFileSizeAndCreateIfNeeded();
- await this._rotate();
- }
-
- _shouldRotate() {
- // should rotate evaluation
- // 1. should rotate if current log size exceeds
- // the defined one on everyBytes
- // 2. should not rotate if is already rotating or if any
- // of the conditions on 1. do not apply
- if (this.isRotating) {
- return false;
- }
-
- return this.logFileSize >= this.everyBytes;
- }
-
- async _rotate() {
- if (!this._shouldRotate()) {
- return;
- }
-
- await this._rotateNow();
- }
-
- async _rotateNow() {
- // rotate process
- // 1. get rotated files metadata (list of log rotated files present on the log folder, numerical sorted)
- // 2. delete last file
- // 3. rename all files to the correct index +1
- // 4. rename + compress current log into 1
- // 5. send SIGHUP to reload log config
-
- // rotate process is starting
- this.isRotating = true;
-
- // get rotated files metadata
- const foundRotatedFiles = await this._readRotatedFilesMetadata();
-
- // delete number of rotated files exceeding the keepFiles limit setting
- const rotatedFiles: string[] = await this._deleteFoundRotatedFilesAboveKeepFilesLimit(
- foundRotatedFiles
- );
-
- // delete last file
- await this._deleteLastRotatedFile(rotatedFiles);
-
- // rename all files to correct index + 1
- // and normalize numbering if by some reason
- // (for example log file deletion) that numbering
- // was interrupted
- await this._renameRotatedFilesByOne(rotatedFiles);
-
- // rename current log into 0
- await this._rotateCurrentLogFile();
-
- // send SIGHUP to reload log configuration
- this._sendReloadLogConfigSignal();
-
- // Reset log file size
- this.logFileSize = 0;
-
- // rotate process is finished
- this.isRotating = false;
- }
-
- async _readRotatedFilesMetadata() {
- const logFileBaseName = basename(this.logFilePath);
- const logFilesFolder = dirname(this.logFilePath);
- const foundLogFiles: string[] = await readdirAsync(logFilesFolder);
-
- return (
- foundLogFiles
- .filter((file) => new RegExp(`${logFileBaseName}\\.\\d`).test(file))
- // we use .slice(-1) here in order to retrieve the last number match in the read filenames
- .sort((a, b) => Number(a.match(/(\d+)/g)!.slice(-1)) - Number(b.match(/(\d+)/g)!.slice(-1)))
- .map((filename) => `${logFilesFolder}${sep}${filename}`)
- );
- }
-
- async _deleteFoundRotatedFilesAboveKeepFilesLimit(foundRotatedFiles: string[]) {
- if (foundRotatedFiles.length <= this.keepFiles) {
- return foundRotatedFiles;
- }
-
- const finalRotatedFiles = foundRotatedFiles.slice(0, this.keepFiles);
- const rotatedFilesToDelete = foundRotatedFiles.slice(
- finalRotatedFiles.length,
- foundRotatedFiles.length
- );
-
- await Promise.all(
- rotatedFilesToDelete.map((rotatedFilePath: string) => unlinkAsync(rotatedFilePath))
- );
-
- return finalRotatedFiles;
- }
-
- async _deleteLastRotatedFile(rotatedFiles: string[]) {
- if (rotatedFiles.length < this.keepFiles) {
- return;
- }
-
- const lastFilePath: string = rotatedFiles.pop() as string;
- await unlinkAsync(lastFilePath);
- }
-
- async _renameRotatedFilesByOne(rotatedFiles: string[]) {
- const logFileBaseName = basename(this.logFilePath);
- const logFilesFolder = dirname(this.logFilePath);
-
- for (let i = rotatedFiles.length - 1; i >= 0; i--) {
- const oldFilePath = rotatedFiles[i];
- const newFilePath = `${logFilesFolder}${sep}${logFileBaseName}.${i + 1}`;
- await renameAsync(oldFilePath, newFilePath);
- }
- }
-
- async _rotateCurrentLogFile() {
- const newFilePath = `${this.logFilePath}.0`;
- await renameAsync(this.logFilePath, newFilePath);
- }
-
- _sendReloadLogConfigSignal() {
- if (!process.env.isDevCliChild || !process.send) {
- process.emit('SIGHUP', 'SIGHUP');
- return;
- }
-
- // Send a special message to the cluster manager
- // so it can forward it correctly
- // It will only run when we are under cluster mode (not under a production environment)
- process.send(['RELOAD_LOGGING_CONFIG_FROM_SERVER_WORKER']);
- }
-}
diff --git a/packages/kbn-legacy-logging/src/schema.ts b/packages/kbn-legacy-logging/src/schema.ts
deleted file mode 100644
index 0330708e746c0..0000000000000
--- a/packages/kbn-legacy-logging/src/schema.ts
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { schema } from '@kbn/config-schema';
-
-/**
- * @deprecated
- *
- * Legacy logging has been deprecated and will be removed in 8.0.
- * Set up logging from the platform logging instead
- */
-export interface LegacyLoggingConfig {
- silent: boolean;
- quiet: boolean;
- verbose: boolean;
- events: Record<string, any>;
- dest: string;
- filter: Record<string, any>;
- json: boolean;
- timezone?: string;
- rotate: {
- enabled: boolean;
- everyBytes: number;
- keepFiles: number;
- pollingInterval: number;
- usePolling: boolean;
- pollingPolicyTestTimeout?: number;
- };
-}
-
-export const legacyLoggingConfigSchema = schema.object({
- silent: schema.boolean({ defaultValue: false }),
- quiet: schema.conditional(
- schema.siblingRef('silent'),
- true,
- schema.boolean({
- defaultValue: true,
- validate: (quiet) => {
- if (!quiet) {
- return 'must be true when `silent` is true';
- }
- },
- }),
- schema.boolean({ defaultValue: false })
- ),
- verbose: schema.conditional(
- schema.siblingRef('quiet'),
- true,
- schema.boolean({
- defaultValue: false,
- validate: (verbose) => {
- if (verbose) {
- return 'must be false when `quiet` is true';
- }
- },
- }),
- schema.boolean({ defaultValue: false })
- ),
- events: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
- dest: schema.string({ defaultValue: 'stdout' }),
- filter: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
- json: schema.conditional(
- schema.siblingRef('dest'),
- 'stdout',
- schema.boolean({
- defaultValue: !process.stdout.isTTY,
- }),
- schema.boolean({
- defaultValue: true,
- })
- ),
- timezone: schema.maybe(schema.string()),
- rotate: schema.object({
- enabled: schema.boolean({ defaultValue: false }),
- everyBytes: schema.number({
- min: 1048576, // > 1MB
- max: 1073741825, // < 1GB
- defaultValue: 10485760, // 10MB
- }),
- keepFiles: schema.number({
- min: 2,
- max: 1024,
- defaultValue: 7,
- }),
- pollingInterval: schema.number({
- min: 5000,
- max: 3600000,
- defaultValue: 10000,
- }),
- usePolling: schema.boolean({ defaultValue: false }),
- }),
-});
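
Worth noting before it disappears: the removed schema couples the legacy flags, forcing `quiet: true` whenever `silent` is true and rejecting `verbose` when `quiet` is set. A reduced sketch of that conditional validation, keeping only the `silent`/`quiet` pair (illustrative, not part of this change):

```ts
import { schema } from '@kbn/config-schema';

// Cut-down version of the removed legacyLoggingConfigSchema, illustrating how
// schema.conditional switches the validation of `quiet` based on the sibling `silent`.
const legacyFlags = schema.object({
  silent: schema.boolean({ defaultValue: false }),
  quiet: schema.conditional(
    schema.siblingRef('silent'),
    true,
    schema.boolean({
      defaultValue: true,
      validate: (quiet) => (!quiet ? 'must be true when `silent` is true' : undefined),
    }),
    schema.boolean({ defaultValue: false })
  ),
});

legacyFlags.validate({ silent: true }); // -> { silent: true, quiet: true }
legacyFlags.validate({ silent: true, quiet: false }); // throws: must be true when `silent` is true
```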
diff --git a/packages/kbn-legacy-logging/src/setup_logging.test.ts b/packages/kbn-legacy-logging/src/setup_logging.test.ts
deleted file mode 100644
index 8e1d76477f64a..0000000000000
--- a/packages/kbn-legacy-logging/src/setup_logging.test.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { Server } from '@hapi/hapi';
-import { reconfigureLogging, setupLogging } from './setup_logging';
-import { LegacyLoggingConfig } from './schema';
-
-describe('reconfigureLogging', () => {
- test(`doesn't throw an error`, () => {
- const server = new Server();
- const config: LegacyLoggingConfig = {
- silent: false,
- quiet: false,
- verbose: true,
- events: {},
- dest: '/tmp/foo',
- filter: {},
- json: true,
- rotate: {
- enabled: false,
- everyBytes: 0,
- keepFiles: 0,
- pollingInterval: 0,
- usePolling: false,
- },
- };
- setupLogging(server, config, 10);
- reconfigureLogging(server, { ...config, dest: '/tmp/bar' }, 0);
- });
-});
diff --git a/packages/kbn-legacy-logging/src/setup_logging.ts b/packages/kbn-legacy-logging/src/setup_logging.ts
deleted file mode 100644
index a045469e81251..0000000000000
--- a/packages/kbn-legacy-logging/src/setup_logging.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-// @ts-expect-error missing typedef
-import { plugin as good } from '@elastic/good';
-import { Server } from '@hapi/hapi';
-import { LegacyLoggingConfig } from './schema';
-import { getLoggingConfiguration } from './get_logging_config';
-
-export async function setupLogging(
- server: Server,
- config: LegacyLoggingConfig,
- opsInterval: number
-) {
- // NOTE: legacy logger creates a new stream for each new access
- // In https://github.com/elastic/kibana/pull/55937 we reach the max listeners
- // default limit of 10 for process.stdout which starts a long warning/error
- // thrown every time we start the server.
- // In order to keep using the legacy logger until we remove it I'm just adding
- // a new hard limit here.
- process.stdout.setMaxListeners(60);
-
- return await server.register({
- plugin: good,
- options: getLoggingConfiguration(config, opsInterval),
- });
-}
-
-export function reconfigureLogging(
- server: Server,
- config: LegacyLoggingConfig,
- opsInterval: number
-) {
- const loggingOptions = getLoggingConfiguration(config, opsInterval);
- (server.plugins as any).good.reconfigure(loggingOptions);
-}
diff --git a/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts
deleted file mode 100644
index b662c88eba7b7..0000000000000
--- a/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { applyFiltersToKeys } from './apply_filters_to_keys';
-
-describe('applyFiltersToKeys(obj, actionsByKey)', function () {
- it('applies for each key+prop in actionsByKey', function () {
- const data = applyFiltersToKeys(
- {
- a: {
- b: {
- c: 1,
- },
- d: {
- e: 'foobar',
- },
- },
- req: {
- headers: {
- authorization: 'Basic dskd939k2i',
- },
- },
- },
- {
- b: 'remove',
- e: 'censor',
- authorization: '/([^\\s]+)$/',
- }
- );
-
- expect(data).toEqual({
- a: {
- d: {
- e: 'XXXXXX',
- },
- },
- req: {
- headers: {
- authorization: 'Basic XXXXXXXXXX',
- },
- },
- });
- });
-});
diff --git a/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts
deleted file mode 100644
index 578fa3a835129..0000000000000
--- a/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-function toPojo(obj: Record<string, unknown>) {
- return JSON.parse(JSON.stringify(obj));
-}
-
-function replacer(match: string, group: any[]) {
- return new Array(group.length + 1).join('X');
-}
-
-function apply(obj: Record<string, unknown>, key: string, action: string) {
- for (const k in obj) {
- if (obj.hasOwnProperty(k)) {
- let val = obj[k];
- if (k === key) {
- if (action === 'remove') {
- delete obj[k];
- } else if (action === 'censor' && typeof val === 'object') {
- delete obj[key];
- } else if (action === 'censor') {
- obj[k] = ('' + val).replace(/./g, 'X');
- } else if (/\/.+\//.test(action)) {
- const matches = action.match(/\/(.+)\//);
- if (matches) {
- const regex = new RegExp(matches[1]);
- obj[k] = ('' + val).replace(regex, replacer);
- }
- }
- } else if (typeof val === 'object') {
- val = apply(val as Record<string, unknown>, key, action);
- }
- }
- }
- return obj;
-}
-
-export function applyFiltersToKeys(
- obj: Record<string, unknown>,
- actionsByKey: Record<string, string>
-) {
- return Object.keys(actionsByKey).reduce((output, key) => {
- return apply(output, key, actionsByKey[key]);
- }, toPojo(obj));
-}
diff --git a/packages/kbn-legacy-logging/src/utils/get_payload_size.test.ts b/packages/kbn-legacy-logging/src/utils/get_payload_size.test.ts
deleted file mode 100644
index 01d2cf29758db..0000000000000
--- a/packages/kbn-legacy-logging/src/utils/get_payload_size.test.ts
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import mockFs from 'mock-fs';
-import { createReadStream } from 'fs';
-import { PassThrough } from 'stream';
-import { createGzip, createGunzip } from 'zlib';
-
-import { getResponsePayloadBytes } from './get_payload_size';
-
-describe('getPayloadSize', () => {
- describe('handles Buffers', () => {
- test('with ascii characters', () => {
- const payload = 'heya';
- const result = getResponsePayloadBytes(Buffer.from(payload));
- expect(result).toBe(4);
- });
-
- test('with special characters', () => {
- const payload = '¡hola!';
- const result = getResponsePayloadBytes(Buffer.from(payload));
- expect(result).toBe(7);
- });
- });
-
- describe('handles streams', () => {
- afterEach(() => mockFs.restore());
-
- test('ignores streams that are not fs or zlib streams', async () => {
- const result = getResponsePayloadBytes(new PassThrough());
- expect(result).toBe(undefined);
- });
-
- describe('fs streams', () => {
- test('with ascii characters', async () => {
- mockFs({ 'test.txt': 'heya' });
- const readStream = createReadStream('test.txt');
-
- let data = '';
- for await (const chunk of readStream) {
- data += chunk;
- }
-
- const result = getResponsePayloadBytes(readStream);
- expect(result).toBe(Buffer.byteLength(data));
- });
-
- test('with special characters', async () => {
- mockFs({ 'test.txt': '¡hola!' });
- const readStream = createReadStream('test.txt');
-
- let data = '';
- for await (const chunk of readStream) {
- data += chunk;
- }
-
- const result = getResponsePayloadBytes(readStream);
- expect(result).toBe(Buffer.byteLength(data));
- });
-
- describe('zlib streams', () => {
- test('with ascii characters', async () => {
- mockFs({ 'test.txt': 'heya' });
- const readStream = createReadStream('test.txt');
- const source = readStream.pipe(createGzip()).pipe(createGunzip());
-
- let data = '';
- for await (const chunk of source) {
- data += chunk;
- }
-
- const result = getResponsePayloadBytes(source);
-
- expect(data).toBe('heya');
- expect(result).toBe(source.bytesWritten);
- });
-
- test('with special characters', async () => {
- mockFs({ 'test.txt': '¡hola!' });
- const readStream = createReadStream('test.txt');
- const source = readStream.pipe(createGzip()).pipe(createGunzip());
-
- let data = '';
- for await (const chunk of source) {
- data += chunk;
- }
-
- const result = getResponsePayloadBytes(source);
-
- expect(data).toBe('¡hola!');
- expect(result).toBe(source.bytesWritten);
- });
- });
- });
- });
-
- describe('handles plain responses', () => {
- test('when source is text', () => {
- const result = getResponsePayloadBytes('heya');
- expect(result).toBe(4);
- });
-
- test('when source contains special characters', () => {
- const result = getResponsePayloadBytes('¡hola!');
- expect(result).toBe(7);
- });
-
- test('when source is object', () => {
- const payload = { message: 'heya' };
- const result = getResponsePayloadBytes(payload);
- expect(result).toBe(JSON.stringify(payload).length);
- });
-
- test('when source is array object', () => {
- const payload = [{ message: 'hey' }, { message: 'ya' }];
- const result = getResponsePayloadBytes(payload);
- expect(result).toBe(JSON.stringify(payload).length);
- });
-
- test('returns undefined when source is not plain object', () => {
- class TestClass {
- constructor() {}
- }
- const result = getResponsePayloadBytes(new TestClass());
- expect(result).toBe(undefined);
- });
- });
-
- describe('handles content-length header', () => {
- test('always provides content-length header if available', () => {
- const headers = { 'content-length': '123' };
- const result = getResponsePayloadBytes('heya', headers);
- expect(result).toBe(123);
- });
-
- test('uses first value when hapi header is an array', () => {
- const headers = { 'content-length': ['123', '456'] };
- const result = getResponsePayloadBytes(null, headers);
- expect(result).toBe(123);
- });
-
- test('returns undefined if length is NaN', () => {
- const headers = { 'content-length': 'oops' };
- const result = getResponsePayloadBytes(null, headers);
- expect(result).toBeUndefined();
- });
- });
-
- test('defaults to undefined', () => {
- const result = getResponsePayloadBytes(null);
- expect(result).toBeUndefined();
- });
-});
diff --git a/packages/kbn-legacy-logging/src/utils/get_payload_size.ts b/packages/kbn-legacy-logging/src/utils/get_payload_size.ts
deleted file mode 100644
index acc517c74c2d4..0000000000000
--- a/packages/kbn-legacy-logging/src/utils/get_payload_size.ts
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { isPlainObject } from 'lodash';
-import { ReadStream } from 'fs';
-import { Zlib } from 'zlib';
-import type { ResponseObject } from '@hapi/hapi';
-
-const isBuffer = (obj: unknown): obj is Buffer => Buffer.isBuffer(obj);
-const isFsReadStream = (obj: unknown): obj is ReadStream =>
- typeof obj === 'object' && obj !== null && 'bytesRead' in obj && obj instanceof ReadStream;
-const isZlibStream = (obj: unknown): obj is Zlib => {
- return typeof obj === 'object' && obj !== null && 'bytesWritten' in obj;
-};
-const isString = (obj: unknown): obj is string => typeof obj === 'string';
-
-/**
- * Attempts to determine the size (in bytes) of a hapi/good
- * responsePayload based on the payload type. Falls back to
- * `undefined` if the size cannot be determined.
- *
- * This is similar to the implementation in `core/server/http/logging`,
- * however it uses more duck typing as we do not have access to the
- * entire hapi request object like we do in the HttpServer.
- *
- * @param headers responseHeaders from hapi/good event
- * @param payload responsePayload from hapi/good event
- *
- * @internal
- */
-export function getResponsePayloadBytes(
- payload: ResponseObject['source'],
- headers: Record<string, string | string[]> = {}
-): number | undefined {
- const contentLength = headers['content-length'];
- if (contentLength) {
- const val = parseInt(
- // hapi response headers can be `string | string[]`, so we need to handle both cases
- Array.isArray(contentLength) ? String(contentLength) : contentLength,
- 10
- );
- return !isNaN(val) ? val : undefined;
- }
-
- if (isBuffer(payload)) {
- return payload.byteLength;
- }
-
- if (isFsReadStream(payload)) {
- return payload.bytesRead;
- }
-
- if (isZlibStream(payload)) {
- return payload.bytesWritten;
- }
-
- if (isString(payload)) {
- return Buffer.byteLength(payload);
- }
-
- if (isPlainObject(payload) || Array.isArray(payload)) {
- return Buffer.byteLength(JSON.stringify(payload));
- }
-
- return undefined;
-}
diff --git a/packages/kbn-legacy-logging/tsconfig.json b/packages/kbn-legacy-logging/tsconfig.json
deleted file mode 100644
index 55047dbcadc91..0000000000000
--- a/packages/kbn-legacy-logging/tsconfig.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "extends": "../../tsconfig.bazel.json",
- "compilerOptions": {
- "declaration": true,
- "declarationMap": true,
- "emitDeclarationOnly": true,
- "outDir": "target_types",
- "rootDir": "src",
- "sourceMap": true,
- "sourceRoot": "../../../../packages/kbn-legacy-logging/src",
- "stripInternal": false,
- "types": ["jest", "node"]
- },
- "include": ["src/**/*"]
-}
diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js
index c96a1eb28cfce..cab1f6d916f02 100644
--- a/packages/kbn-pm/dist/index.js
+++ b/packages/kbn-pm/dist/index.js
@@ -9014,6 +9014,7 @@ class CiStatsReporter {
const upstreamBranch = (_options$upstreamBran = options.upstreamBranch) !== null && _options$upstreamBran !== void 0 ? _options$upstreamBran : this.getUpstreamBranch();
const kibanaUuid = options.kibanaUuid === undefined ? this.getKibanaUuid() : options.kibanaUuid;
let email;
+ let branch;
try {
const {
@@ -9024,16 +9025,32 @@ class CiStatsReporter {
this.log.debug(e.message);
}
+ try {
+ const {
+ stdout
+ } = await (0, _execa.default)('git', ['branch', '--show-current']);
+ branch = stdout;
+ } catch (e) {
+ this.log.debug(e.message);
+ }
+
+ const memUsage = process.memoryUsage();
const isElasticCommitter = email && email.endsWith('@elastic.co') ? true : false;
const defaultMetadata = {
+ kibanaUuid,
+ isElasticCommitter,
committerHash: email ? _crypto.default.createHash('sha256').update(email).digest('hex').substring(0, 20) : undefined,
+ email: isElasticCommitter ? email : undefined,
+ branch: isElasticCommitter ? branch : undefined,
cpuCount: (_Os$cpus = _os.default.cpus()) === null || _Os$cpus === void 0 ? void 0 : _Os$cpus.length,
cpuModel: (_Os$cpus$ = _os.default.cpus()[0]) === null || _Os$cpus$ === void 0 ? void 0 : _Os$cpus$.model,
cpuSpeed: (_Os$cpus$2 = _os.default.cpus()[0]) === null || _Os$cpus$2 === void 0 ? void 0 : _Os$cpus$2.speed,
- email: isElasticCommitter ? email : undefined,
freeMem: _os.default.freemem(),
- isElasticCommitter,
- kibanaUuid,
+ memoryUsageRss: memUsage.rss,
+ memoryUsageHeapTotal: memUsage.heapTotal,
+ memoryUsageHeapUsed: memUsage.heapUsed,
+ memoryUsageExternal: memUsage.external,
+ memoryUsageArrayBuffers: memUsage.arrayBuffers,
nestedTiming: process.env.CI_STATS_NESTED_TIMING ? true : false,
osArch: _os.default.arch(),
osPlatform: _os.default.platform(),
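
The regenerated `kbn-pm` bundle now attaches the current git branch (only kept for @elastic.co committers) and a `process.memoryUsage()` snapshot to the CI stats default metadata. A standalone sketch of that collection step; `collectCiMetadata` is an illustrative name rather than the bundled implementation:

```ts
import execa from 'execa';

// Gather the new default metadata fields added in this bundle: the current git
// branch plus a snapshot of the Node.js process memory usage.
async function collectCiMetadata() {
  let branch: string | undefined;
  try {
    const { stdout } = await execa('git', ['branch', '--show-current']);
    branch = stdout;
  } catch {
    branch = undefined; // not a git checkout, or git is unavailable
  }

  const memUsage = process.memoryUsage();
  return {
    branch,
    memoryUsageRss: memUsage.rss,
    memoryUsageHeapTotal: memUsage.heapTotal,
    memoryUsageHeapUsed: memUsage.heapUsed,
    memoryUsageExternal: memUsage.external,
    memoryUsageArrayBuffers: memUsage.arrayBuffers,
  };
}
```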
diff --git a/packages/kbn-rule-data-utils/src/technical_field_names.ts b/packages/kbn-rule-data-utils/src/technical_field_names.ts
index 86a036bbb9fe2..6ac897bbafb08 100644
--- a/packages/kbn-rule-data-utils/src/technical_field_names.ts
+++ b/packages/kbn-rule-data-utils/src/technical_field_names.ts
@@ -52,6 +52,7 @@ const ALERT_RULE_LICENSE = `${ALERT_RULE_NAMESPACE}.license` as const;
const ALERT_RULE_CATEGORY = `${ALERT_RULE_NAMESPACE}.category` as const;
const ALERT_RULE_NAME = `${ALERT_RULE_NAMESPACE}.name` as const;
const ALERT_RULE_NOTE = `${ALERT_RULE_NAMESPACE}.note` as const;
+const ALERT_RULE_PARAMS = `${ALERT_RULE_NAMESPACE}.params` as const;
const ALERT_RULE_REFERENCES = `${ALERT_RULE_NAMESPACE}.references` as const;
const ALERT_RULE_RISK_SCORE = `${ALERT_RULE_NAMESPACE}.risk_score` as const;
const ALERT_RULE_RISK_SCORE_MAPPING = `${ALERT_RULE_NAMESPACE}.risk_score_mapping` as const;
@@ -109,6 +110,7 @@ const fields = {
ALERT_RULE_LICENSE,
ALERT_RULE_NAME,
ALERT_RULE_NOTE,
+ ALERT_RULE_PARAMS,
ALERT_RULE_REFERENCES,
ALERT_RULE_RISK_SCORE,
ALERT_RULE_RISK_SCORE_MAPPING,
@@ -164,6 +166,7 @@ export {
ALERT_RULE_LICENSE,
ALERT_RULE_NAME,
ALERT_RULE_NOTE,
+ ALERT_RULE_PARAMS,
ALERT_RULE_REFERENCES,
ALERT_RULE_RISK_SCORE,
ALERT_RULE_RISK_SCORE_MAPPING,
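
With `ALERT_RULE_PARAMS` exported alongside its siblings, consumers can address the serialized rule parameters on alert documents by constant instead of a hard-coded field path. A small sketch, assuming the package root re-exports the constant like the other technical field names:

```ts
import { ALERT_RULE_PARAMS } from '@kbn/rule-data-utils';

// Alert documents are keyed by technical field names, so the constant is used
// rather than spelling out the dotted field path by hand.
function getRuleParams(alertDoc: Record<string, unknown>): unknown {
  return alertDoc[ALERT_RULE_PARAMS];
}
```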
diff --git a/packages/kbn-test/src/functional_test_runner/cli.ts b/packages/kbn-test/src/functional_test_runner/cli.ts
index ccd578aa038f8..3ad365a028b65 100644
--- a/packages/kbn-test/src/functional_test_runner/cli.ts
+++ b/packages/kbn-test/src/functional_test_runner/cli.ts
@@ -9,7 +9,7 @@
import { resolve } from 'path';
import { inspect } from 'util';
-import { run, createFlagError, Flags } from '@kbn/dev-utils';
+import { run, createFlagError, Flags, ToolingLog, getTimeReporter } from '@kbn/dev-utils';
import exitHook from 'exit-hook';
import { FunctionalTestRunner } from './functional_test_runner';
@@ -27,6 +27,12 @@ const parseInstallDir = (flags: Flags) => {
};
export function runFtrCli() {
+ const runStartTime = Date.now();
+ const toolingLog = new ToolingLog({
+ level: 'info',
+ writeTo: process.stdout,
+ });
+ const reportTime = getTimeReporter(toolingLog, 'scripts/functional_test_runner');
run(
async ({ flags, log }) => {
const functionalTestRunner = new FunctionalTestRunner(
@@ -68,9 +74,19 @@ export function runFtrCli() {
teardownRun = true;
if (err) {
+ await reportTime(runStartTime, 'total', {
+ success: false,
+ err: err.message,
+ ...flags,
+ });
log.indent(-log.indent());
log.error(err);
process.exitCode = 1;
+ } else {
+ await reportTime(runStartTime, 'total', {
+ success: true,
+ ...flags,
+ });
}
try {
diff --git a/packages/kbn-test/src/functional_tests/cli/start_servers/cli.js b/packages/kbn-test/src/functional_tests/cli/start_servers/cli.js
index 824cf3e6ceec1..df7f8750b2ae3 100644
--- a/packages/kbn-test/src/functional_tests/cli/start_servers/cli.js
+++ b/packages/kbn-test/src/functional_tests/cli/start_servers/cli.js
@@ -18,6 +18,8 @@ import { processOptions, displayHelp } from './args';
export async function startServersCli(defaultConfigPath) {
await runCli(displayHelp, async (userOptions) => {
const options = processOptions(userOptions, defaultConfigPath);
- await startServers(options);
+ await startServers({
+ ...options,
+ });
});
}
diff --git a/packages/kbn-test/src/functional_tests/tasks.ts b/packages/kbn-test/src/functional_tests/tasks.ts
index d45f8656ed728..3bc697c143f40 100644
--- a/packages/kbn-test/src/functional_tests/tasks.ts
+++ b/packages/kbn-test/src/functional_tests/tasks.ts
@@ -9,7 +9,7 @@
import { relative } from 'path';
import * as Rx from 'rxjs';
import { startWith, switchMap, take } from 'rxjs/operators';
-import { withProcRunner, ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { withProcRunner, ToolingLog, REPO_ROOT, getTimeReporter } from '@kbn/dev-utils';
import dedent from 'dedent';
import {
@@ -147,7 +147,14 @@ interface StartServerOptions {
useDefaultConfig?: boolean;
}
-export async function startServers(options: StartServerOptions) {
+export async function startServers({ ...options }: StartServerOptions) {
+ const runStartTime = Date.now();
+ const toolingLog = new ToolingLog({
+ level: 'info',
+ writeTo: process.stdout,
+ });
+ const reportTime = getTimeReporter(toolingLog, 'scripts/functional_tests_server');
+
const log = options.createLogger();
const opts = {
...options,
@@ -170,6 +177,11 @@ export async function startServers(options: StartServerOptions) {
},
});
+ reportTime(runStartTime, 'ready', {
+ success: true,
+ ...options,
+ });
+
// wait for 5 seconds of silence before logging the
// success message so that it doesn't get buried
await silence(log, 5000);
diff --git a/packages/kbn-test/src/jest/run.ts b/packages/kbn-test/src/jest/run.ts
index 441104befde91..07610a3eb84c6 100644
--- a/packages/kbn-test/src/jest/run.ts
+++ b/packages/kbn-test/src/jest/run.ts
@@ -21,7 +21,8 @@ import { resolve, relative, sep as osSep } from 'path';
import { existsSync } from 'fs';
import { run } from 'jest';
import { buildArgv } from 'jest-cli/build/cli';
-import { ToolingLog } from '@kbn/dev-utils';
+import { ToolingLog, getTimeReporter } from '@kbn/dev-utils';
+import { map } from 'lodash';
// yarn test:jest src/core/server/saved_objects
// yarn test:jest src/core/public/core_system.test.ts
@@ -35,9 +36,14 @@ export function runJest(configName = 'jest.config.js') {
writeTo: process.stdout,
});
+ const runStartTime = Date.now();
+ const reportTime = getTimeReporter(log, 'scripts/jest');
+ let cwd: string;
+ let testFiles: string[];
+
if (!argv.config) {
- const cwd = process.env.INIT_CWD || process.cwd();
- const testFiles = argv._.splice(2).map((p) => resolve(cwd, p));
+ cwd = process.env.INIT_CWD || process.cwd();
+ testFiles = argv._.splice(2).map((p) => resolve(cwd, p));
const commonTestFiles = commonBasePath(testFiles);
const testFilesProvided = testFiles.length > 0;
@@ -73,7 +79,14 @@ export function runJest(configName = 'jest.config.js') {
process.env.NODE_ENV = 'test';
}
- run();
+ run().then(() => {
+ // Success means that tests finished, doesn't mean they passed.
+ reportTime(runStartTime, 'total', {
+ success: true,
+ isXpack: cwd.includes('x-pack'),
+ testFiles: map(testFiles, (testFile) => relative(cwd, testFile)),
+ });
+ });
}
/**
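
The same timing pattern is added to the FTR CLI, the functional tests server task, and the jest runner: build a `ToolingLog`, derive a reporter bound to a script id with `getTimeReporter`, then report elapsed time plus metadata when the run finishes. A condensed sketch of the pattern; the script id and `main` function are placeholders:

```ts
import { ToolingLog, getTimeReporter } from '@kbn/dev-utils';

const log = new ToolingLog({ level: 'info', writeTo: process.stdout });
const reportTime = getTimeReporter(log, 'scripts/my_script'); // illustrative script id

const runStartTime = Date.now();

async function main() {
  // ... the actual work of the script goes here ...
}

main()
  .then(() => reportTime(runStartTime, 'total', { success: true }))
  .catch((err) => reportTime(runStartTime, 'total', { success: false, err: err.message }));
```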
diff --git a/packages/kbn-typed-react-router-config/src/create_router.ts b/packages/kbn-typed-react-router-config/src/create_router.ts
index 13f09e7546de5..77c2bba14e85a 100644
--- a/packages/kbn-typed-react-router-config/src/create_router.ts
+++ b/packages/kbn-typed-react-router-config/src/create_router.ts
@@ -20,7 +20,7 @@ import type { deepExactRt as deepExactRtTyped, mergeRt as mergeRtTyped } from '@
import { deepExactRt as deepExactRtNonTyped } from '@kbn/io-ts-utils/target_node/deep_exact_rt';
// @ts-expect-error
import { mergeRt as mergeRtNonTyped } from '@kbn/io-ts-utils/target_node/merge_rt';
-import { Route, Router } from './types';
+import { FlattenRoutesOf, Route, Router } from './types';
const deepExactRt: typeof deepExactRtTyped = deepExactRtNonTyped;
const mergeRt: typeof mergeRtTyped = mergeRtNonTyped;
@@ -51,6 +51,20 @@ export function createRouter(routes: TRoutes): Router {
+ return routesByReactRouterConfig.get(match.route)!;
+ });
+
+ return matchedRoutes;
+ }
+
const matchRoutes = (...args: any[]) => {
let optional: boolean = false;
@@ -142,15 +156,7 @@ export function createRouter(routes: TRoutes): Router {
- return routesByReactRouterConfig.get(match.route)!;
- });
+ const matchedRoutes = getRoutesToMatch(path);
const validationType = mergeRt(
...(compact(
@@ -200,5 +206,8 @@ export function createRouter(routes: TRoutes): Router {
return reactRouterConfigsByRoute.get(route)!.path as string;
},
+ getRoutesToMatch: (path: string) => {
+ return getRoutesToMatch(path) as unknown as FlattenRoutesOf<TRoutes>;
+ },
};
}
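
The new `getRoutesToMatch` method exposes the flattened list of route definitions matching a path, without the io-ts parameter validation that `matchRoutes` performs. A rough usage sketch; the route tree and path below are hypothetical:

```ts
import { createRouter } from '@kbn/typed-react-router-config';

// A hypothetical route tree built elsewhere with the package's route helpers.
declare const routes: Parameters<typeof createRouter>[0];

const router = createRouter(routes);

// The chain of route definitions that match the path (ancestors included),
// without decoding or validating path/query parameters as matchRoutes does.
const matched = router.getRoutesToMatch('/some/nested/path');
matched.forEach((route) => console.log(route.path));
```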
diff --git a/packages/kbn-typed-react-router-config/src/outlet.tsx b/packages/kbn-typed-react-router-config/src/outlet.tsx
index 696085489abee..9af7b8bdd6422 100644
--- a/packages/kbn-typed-react-router-config/src/outlet.tsx
+++ b/packages/kbn-typed-react-router-config/src/outlet.tsx
@@ -5,9 +5,24 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
-import { useCurrentRoute } from './use_current_route';
+import React, { createContext, useContext } from 'react';
+
+const OutletContext = createContext<{ element?: React.ReactElement } | undefined>(undefined);
+
+export function OutletContextProvider({
+ element,
+ children,
+}: {
+ element: React.ReactElement;
+ children: React.ReactNode;
+}) {
+ return <OutletContext.Provider value={{ element }}>{children}</OutletContext.Provider>;
+}
export function Outlet() {
- const { element } = useCurrentRoute();
- return element;
+ const outletContext = useContext(OutletContext);
+ if (!outletContext) {
+ throw new Error('Outlet context not available');
+ }
+ return outletContext.element || null;
}
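
`Outlet` now renders whatever element the nearest `OutletContextProvider` supplies, instead of reaching into the current-route context directly. In this PR the provider is wired up by `CurrentRouteContextProvider`; wiring it manually would look roughly like this (component names and the import path are illustrative):

```tsx
import React from 'react';
import { Outlet, OutletContextProvider } from './outlet';

// A parent route's element can place its matched child wherever it renders <Outlet />.
function ParentLayout() {
  return (
    <div>
      <h1>Parent</h1>
      <Outlet />
    </div>
  );
}

// Hand-wired equivalent of what CurrentRouteContextProvider now does automatically.
export function renderWithChildElement(childElement: React.ReactElement) {
  return (
    <OutletContextProvider element={childElement}>
      <ParentLayout />
    </OutletContextProvider>
  );
}
```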
diff --git a/packages/kbn-typed-react-router-config/src/router_provider.tsx b/packages/kbn-typed-react-router-config/src/router_provider.tsx
index d2512ba8fe426..657df9e9fc592 100644
--- a/packages/kbn-typed-react-router-config/src/router_provider.tsx
+++ b/packages/kbn-typed-react-router-config/src/router_provider.tsx
@@ -18,7 +18,7 @@ export function RouterProvider({
}: {
router: Router;
history: History;
- children: React.ReactElement;
+ children: React.ReactNode;
}) {
return (
diff --git a/packages/kbn-typed-react-router-config/src/types/index.ts b/packages/kbn-typed-react-router-config/src/types/index.ts
index 9c19c8dca323b..c1ae5afd816ee 100644
--- a/packages/kbn-typed-react-router-config/src/types/index.ts
+++ b/packages/kbn-typed-react-router-config/src/types/index.ts
@@ -147,6 +147,7 @@ interface PlainRoute {
children?: PlainRoute[];
params?: t.Type;
defaults?: Record>;
+ pre?: ReactElement;
}
interface ReadonlyPlainRoute {
@@ -155,6 +156,7 @@ interface ReadonlyPlainRoute {
readonly children?: readonly ReadonlyPlainRoute[];
readonly params?: t.Type;
readonly defaults?: Record>;
+ pre?: ReactElement;
}
export type Route = PlainRoute | ReadonlyPlainRoute;
@@ -209,6 +211,10 @@ export type TypeAsArgs = keyof TObject extends never
? [TObject] | []
: [TObject];
+export type FlattenRoutesOf<TRoutes extends Route[]> = Array<
+ Omit<ValuesType<MapRoutes<TRoutes>>, 'parents'>
+>;
+
export interface Router<TRoutes extends Route[]> {
matchRoutes>(
path: TPath,
@@ -245,6 +251,7 @@ export interface Router {
...args: TypeAsArgs>
): string;
getRoutePath(route: Route): string;
+ getRoutesToMatch(path: string): FlattenRoutesOf<TRoutes>;
}
type AppendPath<
@@ -256,23 +263,21 @@ type MaybeUnion, U extends Record> =
[key in keyof U]: key extends keyof T ? T[key] | U[key] : U[key];
};
-type MapRoute = TRoute extends Route
- ? MaybeUnion<
- {
- [key in TRoute['path']]: TRoute & { parents: TParents };
- },
- TRoute extends { children: Route[] }
- ? MaybeUnion<
- MapRoutes,
- {
- [key in AppendPath]: ValuesType<
- MapRoutes
- >;
- }
- >
- : {}
- >
- : {};
+type MapRoute = MaybeUnion<
+ {
+ [key in TRoute['path']]: TRoute & { parents: TParents };
+ },
+ TRoute extends { children: Route[] }
+ ? MaybeUnion<
+ MapRoutes,
+ {
+ [key in AppendPath]: ValuesType<
+ MapRoutes
+ >;
+ }
+ >
+ : {}
+>;
type MapRoutes = TRoutes extends [Route]
? MapRoute
diff --git a/packages/kbn-typed-react-router-config/src/use_current_route.tsx b/packages/kbn-typed-react-router-config/src/use_current_route.tsx
index 9227b119107b3..a36e6f4ec9c8e 100644
--- a/packages/kbn-typed-react-router-config/src/use_current_route.tsx
+++ b/packages/kbn-typed-react-router-config/src/use_current_route.tsx
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
import React, { createContext, useContext } from 'react';
+import { OutletContextProvider } from './outlet';
import { RouteMatch } from './types';
const CurrentRouteContext = createContext<
@@ -23,7 +24,7 @@ export const CurrentRouteContextProvider = ({
}) => {
return (
- {children}
+ <OutletContextProvider element={element}>{children}</OutletContextProvider>
);
};
diff --git a/packages/kbn-typed-react-router-config/src/use_match_routes.ts b/packages/kbn-typed-react-router-config/src/use_match_routes.ts
index b818ff06e9ae6..12c5af1f4412d 100644
--- a/packages/kbn-typed-react-router-config/src/use_match_routes.ts
+++ b/packages/kbn-typed-react-router-config/src/use_match_routes.ts
@@ -5,7 +5,7 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
-
+import { useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import { RouteMatch } from './types';
import { useRouter } from './use_router';
@@ -14,7 +14,11 @@ export function useMatchRoutes(path?: string): RouteMatch[] {
const router = useRouter();
const location = useLocation();
- return typeof path === 'undefined'
- ? router.matchRoutes(location)
- : router.matchRoutes(path as never, location);
+ const routeMatches = useMemo(() => {
+ return typeof path === 'undefined'
+ ? router.matchRoutes(location)
+ : router.matchRoutes(path as never, location);
+ }, [path, router, location]);
+
+ return routeMatches;
}
diff --git a/packages/kbn-typed-react-router-config/src/use_router.tsx b/packages/kbn-typed-react-router-config/src/use_router.tsx
index b54530ed0fbdb..c78e85650f26d 100644
--- a/packages/kbn-typed-react-router-config/src/use_router.tsx
+++ b/packages/kbn-typed-react-router-config/src/use_router.tsx
@@ -16,7 +16,7 @@ export const RouterContextProvider = ({
children,
}: {
router: Router;
- children: React.ReactElement;
+ children: React.ReactNode;
}) => <RouterContext.Provider value={router}>{children}</RouterContext.Provider>;
export function useRouter(): Router {
diff --git a/packages/kbn-utils/src/path/index.ts b/packages/kbn-utils/src/path/index.ts
index 9ee699c22c30c..15d6a3eddf01e 100644
--- a/packages/kbn-utils/src/path/index.ts
+++ b/packages/kbn-utils/src/path/index.ts
@@ -15,14 +15,12 @@ const isString = (v: any): v is string => typeof v === 'string';
const CONFIG_PATHS = [
process.env.KBN_PATH_CONF && join(process.env.KBN_PATH_CONF, 'kibana.yml'),
- process.env.KIBANA_PATH_CONF && join(process.env.KIBANA_PATH_CONF, 'kibana.yml'), // deprecated
join(REPO_ROOT, 'config/kibana.yml'),
'/etc/kibana/kibana.yml',
].filter(isString);
const CONFIG_DIRECTORIES = [
process.env.KBN_PATH_CONF,
- process.env.KIBANA_PATH_CONF, // deprecated
join(REPO_ROOT, 'config'),
'/etc/kibana',
].filter(isString);
diff --git a/src/cli/serve/integration_tests/__fixtures__/invalid_config.yml b/src/cli/serve/integration_tests/__fixtures__/invalid_config.yml
index df9ea641cd3fe..d8e59ced89c80 100644
--- a/src/cli/serve/integration_tests/__fixtures__/invalid_config.yml
+++ b/src/cli/serve/integration_tests/__fixtures__/invalid_config.yml
@@ -1,3 +1,13 @@
+logging:
+ root:
+ level: fatal
+ appenders: [console-json]
+ appenders:
+ console-json:
+ type: console
+ layout:
+ type: json
+
unknown:
key: 1
diff --git a/src/cli/serve/integration_tests/__fixtures__/reload_logging_config/kibana.test.yml b/src/cli/serve/integration_tests/__fixtures__/reload_logging_config/kibana.test.yml
deleted file mode 100644
index 1761a7984e0e7..0000000000000
--- a/src/cli/serve/integration_tests/__fixtures__/reload_logging_config/kibana.test.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-server:
- autoListen: false
- port: 8274
-logging:
- json: true
-optimize:
- enabled: false
-plugins:
- initialize: false
-migrations:
- skip: true
-elasticsearch:
- skipStartupConnectionCheck: true
diff --git a/src/cli/serve/integration_tests/invalid_config.test.ts b/src/cli/serve/integration_tests/invalid_config.test.ts
index 724998699da85..2de902582a548 100644
--- a/src/cli/serve/integration_tests/invalid_config.test.ts
+++ b/src/cli/serve/integration_tests/invalid_config.test.ts
@@ -14,14 +14,15 @@ const INVALID_CONFIG_PATH = require.resolve('./__fixtures__/invalid_config.yml')
interface LogEntry {
message: string;
- tags?: string[];
- type: string;
+ log: {
+ level: string;
+ };
}
-describe('cli invalid config support', function () {
+describe('cli invalid config support', () => {
it(
- 'exits with statusCode 64 and logs a single line when config is invalid',
- function () {
+ 'exits with statusCode 64 and logs an error when config is invalid',
+ () => {
// Unused keys only throw once LegacyService starts, so disable migrations so that Core
// will finish the start lifecycle without a running Elasticsearch instance.
const { error, status, stdout, stderr } = spawnSync(
@@ -31,41 +32,27 @@ describe('cli invalid config support', function () {
cwd: REPO_ROOT,
}
);
+ expect(error).toBe(undefined);
- let fatalLogLine;
+ let fatalLogEntries;
try {
- [fatalLogLine] = stdout
+ fatalLogEntries = stdout
.toString('utf8')
.split('\n')
.filter(Boolean)
.map((line) => JSON.parse(line) as LogEntry)
- .filter((line) => line.tags?.includes('fatal'))
- .map((obj) => ({
- ...obj,
- pid: '## PID ##',
- '@timestamp': '## @timestamp ##',
- error: '## Error with stack trace ##',
- }));
+ .filter((line) => line.log.level === 'FATAL');
} catch (e) {
throw new Error(
`error parsing log output:\n\n${e.stack}\n\nstdout: \n${stdout}\n\nstderr:\n${stderr}`
);
}
- expect(error).toBe(undefined);
-
- if (!fatalLogLine) {
- throw new Error(
- `cli did not log the expected fatal error message:\n\nstdout: \n${stdout}\n\nstderr:\n${stderr}`
- );
- }
-
- expect(fatalLogLine.message).toContain(
- 'Error: Unknown configuration key(s): "unknown.key", "other.unknown.key", "other.third", "some.flat.key", ' +
+ expect(fatalLogEntries).toHaveLength(1);
+ expect(fatalLogEntries[0].message).toContain(
+ 'Unknown configuration key(s): "unknown.key", "other.unknown.key", "other.third", "some.flat.key", ' +
'"some.array". Check for spelling errors and ensure that expected plugins are installed.'
);
- expect(fatalLogLine.tags).toEqual(['fatal', 'root']);
- expect(fatalLogLine.type).toEqual('log');
expect(status).toBe(64);
},
diff --git a/src/cli/serve/integration_tests/reload_logging_config.test.ts b/src/cli/serve/integration_tests/reload_logging_config.test.ts
index 80ce52661565c..4cee7dfae4126 100644
--- a/src/cli/serve/integration_tests/reload_logging_config.test.ts
+++ b/src/cli/serve/integration_tests/reload_logging_config.test.ts
@@ -17,7 +17,6 @@ import { map, filter, take } from 'rxjs/operators';
import { safeDump } from 'js-yaml';
import { getConfigFromFiles } from '@kbn/config';
-const legacyConfig = follow('__fixtures__/reload_logging_config/kibana.test.yml');
const configFileLogConsole = follow(
'__fixtures__/reload_logging_config/kibana_log_console.test.yml'
);
@@ -96,81 +95,6 @@ describe.skip('Server logging configuration', function () {
return;
}
- describe('legacy logging', () => {
- it(
- 'should be reloadable via SIGHUP process signaling',
- async function () {
- const configFilePath = Path.resolve(tempDir, 'kibana.yml');
- Fs.copyFileSync(legacyConfig, configFilePath);
-
- child = Child.spawn(process.execPath, [
- kibanaPath,
- '--oss',
- '--config',
- configFilePath,
- '--verbose',
- ]);
-
- // TypeScript note: As long as the child stdio[1] is 'pipe', then stdout will not be null
- const message$ = Rx.fromEvent(child.stdout!, 'data').pipe(
- map((messages) => String(messages).split('\n').filter(Boolean))
- );
-
- await message$
- .pipe(
- // We know the sighup handler will be registered before this message logged
- filter((messages: string[]) => messages.some((m) => m.includes('setting up root'))),
- take(1)
- )
- .toPromise();
-
- const lastMessage = await message$.pipe(take(1)).toPromise();
- expect(containsJsonOnly(lastMessage)).toBe(true);
-
- createConfigManager(configFilePath).modify((oldConfig) => {
- oldConfig.logging.json = false;
- return oldConfig;
- });
-
- child.kill('SIGHUP');
-
- await message$
- .pipe(
- filter((messages) => !containsJsonOnly(messages)),
- take(1)
- )
- .toPromise();
- },
- minute
- );
-
- it(
- 'should recreate file handle on SIGHUP',
- async function () {
- const logPath = Path.resolve(tempDir, 'kibana.log');
- const logPathArchived = Path.resolve(tempDir, 'kibana_archive.log');
-
- child = Child.spawn(process.execPath, [
- kibanaPath,
- '--oss',
- '--config',
- legacyConfig,
- '--logging.dest',
- logPath,
- '--verbose',
- ]);
-
- await watchFileUntil(logPath, /setting up root/, 30 * second);
- // once the server is running, archive the log file and issue SIGHUP
- Fs.renameSync(logPath, logPathArchived);
- child.kill('SIGHUP');
-
- await watchFileUntil(logPath, /Reloaded logging configuration due to SIGHUP/, 30 * second);
- },
- minute
- );
- });
-
describe('platform logging', () => {
it(
'should be reloadable via SIGHUP process signaling',
diff --git a/src/cli/serve/serve.js b/src/cli/serve/serve.js
index 705acfe4fdf54..8b346d38cfea8 100644
--- a/src/cli/serve/serve.js
+++ b/src/cli/serve/serve.js
@@ -124,17 +124,12 @@ function applyConfigOverrides(rawConfig, opts, extraCliOptions) {
if (opts.elasticsearch) set('elasticsearch.hosts', opts.elasticsearch.split(','));
if (opts.port) set('server.port', opts.port);
if (opts.host) set('server.host', opts.host);
+
if (opts.silent) {
- set('logging.silent', true);
set('logging.root.level', 'off');
}
if (opts.verbose) {
- if (has('logging.root.appenders')) {
- set('logging.root.level', 'all');
- } else {
- // Only set logging.verbose to true for legacy logging when KP logging isn't configured.
- set('logging.verbose', true);
- }
+ set('logging.root.level', 'all');
}
set('plugins.paths', _.compact([].concat(get('plugins.paths'), opts.pluginPath)));
@@ -159,9 +154,8 @@ export default function (program) {
[getConfigPath()]
)
.option('-p, --port ', 'The port to bind to', parseInt)
- .option('-q, --quiet', 'Deprecated, set logging level in your configuration')
- .option('-Q, --silent', 'Prevent all logging')
- .option('--verbose', 'Turns on verbose logging')
+ .option('-Q, --silent', 'Set the root logger level to off')
+ .option('--verbose', 'Set the root logger level to all')
.option('-H, --host ', 'The host to bind to')
.option(
'-l, --log-file ',
@@ -217,8 +211,6 @@ export default function (program) {
const cliArgs = {
dev: !!opts.dev,
envName: unknownOptions.env ? unknownOptions.env.name : undefined,
- // no longer supported
- quiet: !!opts.quiet,
silent: !!opts.silent,
verbose: !!opts.verbose,
watch: !!opts.watch,
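
Condensed, the flag handling after this change maps both remaining logging flags straight onto the root logger level; the legacy `logging.silent`/`logging.verbose` switches are gone. A restatement as a small helper (the helper itself is illustrative, not part of serve.js):

```ts
// --silent (-Q) and --verbose now translate directly into logging.root.level.
function loggingOverridesFromCliFlags(opts: { silent?: boolean; verbose?: boolean }) {
  const overrides: Record<string, string> = {};
  if (opts.silent) overrides['logging.root.level'] = 'off';
  if (opts.verbose) overrides['logging.root.level'] = 'all';
  return overrides;
}
```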
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index 0fe1347d299f9..624d6d10992cd 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -470,6 +470,19 @@ export class DocLinksService {
ecs: {
guide: `${ELASTIC_WEBSITE_URL}guide/en/ecs/current/index.html`,
},
+ clients: {
+ /** Changes to these URLs must also be synched in src/plugins/custom_integrations/server/language_clients/index.ts */
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/index.html`,
+ goOverview: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/go-api/${DOC_LINK_VERSION}/overview.html`,
+ javaIndex: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/java-api-client/${DOC_LINK_VERSION}/index.html`,
+ jsIntro: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/javascript-api/${DOC_LINK_VERSION}/introduction.html`,
+ netGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/net-api/${DOC_LINK_VERSION}/index.html`,
+ perlGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/perl-api/${DOC_LINK_VERSION}/index.html`,
+ phpGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/php-api/${DOC_LINK_VERSION}/index.html`,
+ pythonGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/net-api/${DOC_LINK_VERSION}/index.html`,
+ rubyOverview: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/ruby-api/${DOC_LINK_VERSION}/ruby_client.html`,
+ rustGuide: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/client/rust-api/${DOC_LINK_VERSION}/index.html`,
+ },
},
});
}
@@ -706,5 +719,17 @@ export interface DocLinksStart {
readonly ecs: {
readonly guide: string;
};
+ readonly clients: {
+ readonly guide: string;
+ readonly goOverview: string;
+ readonly javaIndex: string;
+ readonly jsIntro: string;
+ readonly netGuide: string;
+ readonly perlGuide: string;
+ readonly phpGuide: string;
+ readonly pythonGuide: string;
+ readonly rubyOverview: string;
+ readonly rustGuide: string;
+ };
};
}
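
Plugins can surface the new Elasticsearch client docs through the core `docLinks` service once these entries exist. A minimal sketch, assuming access to `CoreStart` via the usual plugin start contract:

```ts
import type { CoreStart } from 'kibana/public';

// Point users at the JavaScript client introduction; any of the new `clients`
// entries (guide, goOverview, javaIndex, ...) can be read the same way.
export function getJsClientDocLink(core: CoreStart): string {
  return core.docLinks.links.clients.jsIntro;
}
```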
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index eace9c4011942..7871558574074 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -459,9 +459,13 @@ export const DEFAULT_APP_CATEGORIES: Record;
// @public
export interface DeprecationsServiceStart {
+ // Warning: (ae-incompatible-release-tags) The symbol "getAllDeprecations" is marked as @public, but its signature references "DomainDeprecationDetails" which is marked as @internal
getAllDeprecations: () => Promise<DomainDeprecationDetails[]>;
+ // Warning: (ae-incompatible-release-tags) The symbol "getDeprecations" is marked as @public, but its signature references "DomainDeprecationDetails" which is marked as @internal
getDeprecations: (domainId: string) => Promise<DomainDeprecationDetails[]>;
+ // Warning: (ae-incompatible-release-tags) The symbol "isDeprecationResolvable" is marked as @public, but its signature references "DomainDeprecationDetails" which is marked as @internal
isDeprecationResolvable: (details: DomainDeprecationDetails) => boolean;
+ // Warning: (ae-incompatible-release-tags) The symbol "resolveDeprecation" is marked as @public, but its signature references "DomainDeprecationDetails" which is marked as @internal
resolveDeprecation: (details: DomainDeprecationDetails) => Promise;
}
@@ -699,13 +703,24 @@ export interface DocLinksStart {
readonly ecs: {
readonly guide: string;
};
+ readonly clients: {
+ readonly guide: string;
+ readonly goOverview: string;
+ readonly javaIndex: string;
+ readonly jsIntro: string;
+ readonly netGuide: string;
+ readonly perlGuide: string;
+ readonly phpGuide: string;
+ readonly pythonGuide: string;
+ readonly rubyOverview: string;
+ readonly rustGuide: string;
+ };
};
}
// Warning: (ae-forgotten-export) The symbol "DeprecationsDetails" needs to be exported by the entry point index.d.ts
-// Warning: (ae-missing-release-tag) "DomainDeprecationDetails" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
//
-// @public (undocumented)
+// @internal (undocumented)
export interface DomainDeprecationDetails extends DeprecationsDetails {
// (undocumented)
domainId: string;
diff --git a/src/core/server/config/deprecation/core_deprecations.test.ts b/src/core/server/config/deprecation/core_deprecations.test.ts
index d3a4d7f997062..95e23561a9378 100644
--- a/src/core/server/config/deprecation/core_deprecations.test.ts
+++ b/src/core/server/config/deprecation/core_deprecations.test.ts
@@ -8,6 +8,7 @@
import { getDeprecationsForGlobalSettings } from '../test_utils';
import { coreDeprecationProvider } from './core_deprecations';
+
const initialEnv = { ...process.env };
const applyCoreDeprecations = (settings?: Record<string, any>) =>
@@ -18,38 +19,6 @@ describe('core deprecations', () => {
process.env = { ...initialEnv };
});
- describe('kibanaPathConf', () => {
- it('logs a warning if KIBANA_PATH_CONF environ variable is set', () => {
- process.env.KIBANA_PATH_CONF = 'somepath';
- const { messages } = applyCoreDeprecations();
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "Environment variable \\"KIBANA_PATH_CONF\\" is deprecated. It has been replaced with \\"KBN_PATH_CONF\\" pointing to a config folder",
- ]
- `);
- });
-
- it('does not log a warning if KIBANA_PATH_CONF environ variable is unset', () => {
- delete process.env.KIBANA_PATH_CONF;
- const { messages } = applyCoreDeprecations();
- expect(messages).toHaveLength(0);
- });
- });
-
- describe('xsrfDeprecation', () => {
- it('logs a warning if server.xsrf.whitelist is set', () => {
- const { migrated, messages } = applyCoreDeprecations({
- server: { xsrf: { whitelist: ['/path'] } },
- });
- expect(migrated.server.xsrf.allowlist).toEqual(['/path']);
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "Setting \\"server.xsrf.whitelist\\" has been replaced by \\"server.xsrf.allowlist\\"",
- ]
- `);
- });
- });
-
describe('server.cors', () => {
it('renames server.cors to server.cors.enabled', () => {
const { migrated } = applyCoreDeprecations({
@@ -57,8 +26,9 @@ describe('core deprecations', () => {
});
expect(migrated.server.cors).toEqual({ enabled: true });
});
+
it('logs a warning message about server.cors renaming', () => {
- const { messages } = applyCoreDeprecations({
+ const { messages, levels } = applyCoreDeprecations({
server: { cors: true },
});
expect(messages).toMatchInlineSnapshot(`
@@ -66,7 +36,13 @@ describe('core deprecations', () => {
"\\"server.cors\\" is deprecated and has been replaced by \\"server.cors.enabled\\"",
]
`);
+ expect(levels).toMatchInlineSnapshot(`
+ Array [
+ "warning",
+ ]
+ `);
});
+
it('does not log deprecation message when server.cors.enabled set', () => {
const { migrated, messages } = applyCoreDeprecations({
server: { cors: { enabled: true } },
@@ -203,230 +179,4 @@ describe('core deprecations', () => {
).toEqual([`worker-src blob:`]);
});
});
-
- describe('logging.events.ops', () => {
- it('warns when ops events are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { ops: '*' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.ops\\" has been deprecated and will be removed in 8.0. To access ops data moving forward, please enable debug logs for the \\"metrics.ops\\" context in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- });
-
- describe('logging.events.request and logging.events.response', () => {
- it('warns when request and response events are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { request: '*', response: '*' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.request\\" and \\"logging.events.response\\" have been deprecated and will be removed in 8.0. To access request and/or response data moving forward, please enable debug logs for the \\"http.server.response\\" context in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
-
- it('warns when only request event is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { request: '*' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.request\\" and \\"logging.events.response\\" have been deprecated and will be removed in 8.0. To access request and/or response data moving forward, please enable debug logs for the \\"http.server.response\\" context in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
-
- it('warns when only response event is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { response: '*' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.request\\" and \\"logging.events.response\\" have been deprecated and will be removed in 8.0. To access request and/or response data moving forward, please enable debug logs for the \\"http.server.response\\" context in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- });
-
- describe('logging.timezone', () => {
- it('warns when ops events are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { timezone: 'GMT' },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.timezone\\" has been deprecated and will be removed in 8.0. To set the timezone moving forward, please add a timezone date modifier to the log pattern in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- });
-
- describe('logging.dest', () => {
- it('warns when dest is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { dest: 'stdout' },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.dest\\" has been deprecated and will be removed in 8.0. To set the destination moving forward, you can use the \\"console\\" appender in your logging configuration or define a custom one. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- it('warns when dest path is given', () => {
- const { messages } = applyCoreDeprecations({
- logging: { dest: '/log-log.txt' },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.dest\\" has been deprecated and will be removed in 8.0. To set the destination moving forward, you can use the \\"console\\" appender in your logging configuration or define a custom one. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- });
-
- describe('logging.quiet, logging.silent and logging.verbose', () => {
- it('warns when quiet is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { quiet: true },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.quiet\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level:error\\" in your logging configuration. ",
- ]
- `);
- });
- it('warns when silent is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { silent: true },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.silent\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level:off\\" in your logging configuration. ",
- ]
- `);
- });
- it('warns when verbose is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { verbose: true },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.verbose\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level:all\\" in your logging configuration. ",
- ]
- `);
- });
- });
-
- describe('logging.json', () => {
- it('warns when json is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { json: true },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.json\\" has been deprecated and will be removed in 8.0. To specify log message format moving forward, you can configure the \\"appender.layout\\" property for every custom appender in your logging configuration. There is currently no default layout for custom appenders and each one must be declared explicitly. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx",
- ]
- `);
- });
- });
-
- describe('logging.rotate.enabled, logging.rotate.usePolling, logging.rotate.pollingInterval, logging.rotate.everyBytes and logging.rotate.keepFiles', () => {
- it('warns when logging.rotate configurations are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { rotate: { enabled: true } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.rotate\\" and sub-options have been deprecated and will be removed in 8.0. Moving forward, you can enable log rotation using the \\"rolling-file\\" appender for a logger in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender",
- ]
- `);
- });
-
- it('warns when logging.rotate polling configurations are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { rotate: { enabled: true, usePolling: true, pollingInterval: 5000 } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.rotate\\" and sub-options have been deprecated and will be removed in 8.0. Moving forward, you can enable log rotation using the \\"rolling-file\\" appender for a logger in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender",
- ]
- `);
- });
-
- it('warns when logging.rotate.everyBytes configurations are used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { rotate: { enabled: true, everyBytes: 1048576 } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.rotate\\" and sub-options have been deprecated and will be removed in 8.0. Moving forward, you can enable log rotation using the \\"rolling-file\\" appender for a logger in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender",
- ]
- `);
- });
-
- it('warns when logging.rotate.keepFiles is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { rotate: { enabled: true, keepFiles: 1024 } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.rotate\\" and sub-options have been deprecated and will be removed in 8.0. Moving forward, you can enable log rotation using the \\"rolling-file\\" appender for a logger in your logging configuration. For more details, see https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender",
- ]
- `);
- });
- });
-
- describe('logging.events.log', () => {
- it('warns when events.log is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { log: ['info'] } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.log\\" has been deprecated and will be removed in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration.",
- ]
- `);
- });
- });
-
- describe('logging.events.error', () => {
- it('warns when events.error is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { events: { error: ['some error'] } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.events.error\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level: error\\" in your logging configuration.",
- ]
- `);
- });
- });
-
- describe('logging.filter', () => {
- it('warns when filter.cookie is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { filter: { cookie: 'none' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.filter\\" has been deprecated and will be removed in 8.0.",
- ]
- `);
- });
-
- it('warns when filter.authorization is used', () => {
- const { messages } = applyCoreDeprecations({
- logging: { filter: { authorization: 'none' } },
- });
- expect(messages).toMatchInlineSnapshot(`
- Array [
- "\\"logging.filter\\" has been deprecated and will be removed in 8.0.",
- ]
- `);
- });
- });
});
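
The behavioral addition covered by the test above is the new `level` on reported deprecations (see the `rewriteCorsSettings` change below). A hedged sketch of a handler using it; the config key is made up for illustration:

```ts
import { ConfigDeprecation } from '@kbn/config';

// `my.legacySetting` is a hypothetical key; the shape of the addDeprecation call
// (message, level, correctiveActions.manualSteps) matches the handlers in this file.
const myDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
  if (settings.my?.legacySetting !== undefined) {
    addDeprecation({
      message: '"my.legacySetting" is deprecated and has been replaced by "my.newSetting"',
      level: 'warning',
      correctiveActions: {
        manualSteps: ['Replace "my.legacySetting" with "my.newSetting" in kibana.yml.'],
      },
    });
  }
};
```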
diff --git a/src/core/server/config/deprecation/core_deprecations.ts b/src/core/server/config/deprecation/core_deprecations.ts
index 6e7365d0d5cbf..4e5f711fe9f3a 100644
--- a/src/core/server/config/deprecation/core_deprecations.ts
+++ b/src/core/server/config/deprecation/core_deprecations.ts
@@ -8,19 +8,6 @@
import { ConfigDeprecationProvider, ConfigDeprecation } from '@kbn/config';
-const kibanaPathConf: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (process.env?.KIBANA_PATH_CONF) {
- addDeprecation({
- message: `Environment variable "KIBANA_PATH_CONF" is deprecated. It has been replaced with "KBN_PATH_CONF" pointing to a config folder`,
- correctiveActions: {
- manualSteps: [
- 'Use "KBN_PATH_CONF" instead of "KIBANA_PATH_CONF" to point to a config folder.',
- ],
- },
- });
- }
-};
-
const rewriteBasePathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (settings.server?.basePath && !settings.server?.rewriteBasePath) {
addDeprecation({
@@ -44,6 +31,7 @@ const rewriteCorsSettings: ConfigDeprecation = (settings, fromPath, addDeprecati
if (typeof corsSettings === 'boolean') {
addDeprecation({
message: '"server.cors" is deprecated and has been replaced by "server.cors.enabled"',
+ level: 'warning',
correctiveActions: {
manualSteps: [
`Replace "server.cors: ${corsSettings}" with "server.cors.enabled: ${corsSettings}"`,
@@ -113,263 +101,8 @@ const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati
}
};
-const opsLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.events?.ops) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
- message:
- '"logging.events.ops" has been deprecated and will be removed ' +
- 'in 8.0. To access ops data moving forward, please enable debug logs for the ' +
- '"metrics.ops" context in your logging configuration. For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.events.ops" from your kibana settings.`,
- `Enable debug logs for the "metrics.ops" context in your logging configuration`,
- ],
- },
- });
- }
-};
-
-const requestLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.events?.request || settings.logging?.events?.response) {
- const removeConfigsSteps = [];
-
- if (settings.logging?.events?.request) {
- removeConfigsSteps.push(`Remove "logging.events.request" from your kibana configs.`);
- }
-
- if (settings.logging?.events?.response) {
- removeConfigsSteps.push(`Remove "logging.events.response" from your kibana configs.`);
- }
-
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
- message:
- '"logging.events.request" and "logging.events.response" have been deprecated and will be removed ' +
- 'in 8.0. To access request and/or response data moving forward, please enable debug logs for the ' +
- '"http.server.response" context in your logging configuration. For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- correctiveActions: {
- manualSteps: [
- ...removeConfigsSteps,
- `enable debug logs for the "http.server.response" context in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const timezoneLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.timezone) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingtimezone',
- message:
- '"logging.timezone" has been deprecated and will be removed ' +
- 'in 8.0. To set the timezone moving forward, please add a timezone date modifier to the log pattern ' +
- 'in your logging configuration. For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.timezone" from your kibana configs.`,
- `To set the timezone add a timezone date modifier to the log pattern in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const destLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.dest) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingdest',
- message:
- '"logging.dest" has been deprecated and will be removed ' +
- 'in 8.0. To set the destination moving forward, you can use the "console" appender ' +
- 'in your logging configuration or define a custom one. For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.dest" from your kibana configs.`,
- `To set the destination use the "console" appender in your logging configuration or define a custom one.`,
- ],
- },
- });
- }
-};
-
-const quietLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.quiet) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingquiet',
- message:
- '"logging.quiet" has been deprecated and will be removed ' +
- 'in 8.0. Moving forward, you can use "logging.root.level:error" in your logging configuration. ',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.quiet" from your kibana configs.`,
- `Use "logging.root.level:error" in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const silentLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.silent) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingsilent',
- message:
- '"logging.silent" has been deprecated and will be removed ' +
- 'in 8.0. Moving forward, you can use "logging.root.level:off" in your logging configuration. ',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.silent" from your kibana configs.`,
- `Use "logging.root.level:off" in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const verboseLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.verbose) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingverbose',
- message:
- '"logging.verbose" has been deprecated and will be removed ' +
- 'in 8.0. Moving forward, you can use "logging.root.level:all" in your logging configuration. ',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.verbose" from your kibana configs.`,
- `Use "logging.root.level:all" in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const jsonLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- // We silence the deprecation warning when running in development mode because
- // the dev CLI code in src/dev/cli_dev_mode/using_server_process.ts manually
- // specifies `--logging.json=false`. Since it's executed in a child process, the
- // ` legacyLoggingConfigSchema` returns `true` for the TTY check on `process.stdout.isTTY`
- if (settings.logging?.json && settings.env !== 'development') {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- message:
- '"logging.json" has been deprecated and will be removed ' +
- 'in 8.0. To specify log message format moving forward, ' +
- 'you can configure the "appender.layout" property for every custom appender in your logging configuration. ' +
- 'There is currently no default layout for custom appenders and each one must be declared explicitly. ' +
- 'For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.json" from your kibana configs.`,
- `Configure the "appender.layout" property for every custom appender in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const logRotateDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.rotate) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender',
- message:
- '"logging.rotate" and sub-options have been deprecated and will be removed in 8.0. ' +
- 'Moving forward, you can enable log rotation using the "rolling-file" appender for a logger ' +
- 'in your logging configuration. For more details, see ' +
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.rotate" from your kibana configs.`,
- `Enable log rotation using the "rolling-file" appender for a logger in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const logEventsLogDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.events?.log) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
- message:
- '"logging.events.log" has been deprecated and will be removed ' +
- 'in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration.',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.events.log" from your kibana configs.`,
- `Customize log levels per-logger using the new logging configuration.`,
- ],
- },
- });
- }
-};
-
-const logEventsErrorDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.events?.error) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
- message:
- '"logging.events.error" has been deprecated and will be removed ' +
- 'in 8.0. Moving forward, you can use "logging.root.level: error" in your logging configuration.',
- correctiveActions: {
- manualSteps: [
- `Remove "logging.events.error" from your kibana configs.`,
- `Use "logging.root.level: error" in your logging configuration.`,
- ],
- },
- });
- }
-};
-
-const logFilterDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
- if (settings.logging?.filter) {
- addDeprecation({
- documentationUrl:
- 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingfilter',
- message: '"logging.filter" has been deprecated and will be removed in 8.0.',
- correctiveActions: {
- manualSteps: [`Remove "logging.filter" from your kibana configs.`],
- },
- });
- }
-};
-
export const coreDeprecationProvider: ConfigDeprecationProvider = ({ rename, unusedFromRoot }) => [
- rename('cpu.cgroup.path.override', 'ops.cGroupOverrides.cpuPath'),
- rename('cpuacct.cgroup.path.override', 'ops.cGroupOverrides.cpuAcctPath'),
- rename('server.xsrf.whitelist', 'server.xsrf.allowlist'),
rewriteCorsSettings,
- kibanaPathConf,
rewriteBasePathDeprecation,
cspRulesDeprecation,
- opsLoggingEventDeprecation,
- requestLoggingEventDeprecation,
- timezoneLoggingDeprecation,
- destLoggingDeprecation,
- quietLoggingDeprecation,
- silentLoggingDeprecation,
- verboseLoggingDeprecation,
- jsonLoggingDeprecation,
- logRotateDeprecation,
- logEventsLogDeprecation,
- logEventsErrorDeprecation,
- logFilterDeprecation,
];
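The handlers that remain in this file keep the same shape; below is a small reference sketch with a hypothetical setting name ("server.someLegacyFlag" is invented), showing the handler signature and the newly added optional `level` field.

```ts
// Hypothetical example only; "server.someLegacyFlag" is not a real setting.
import type { ConfigDeprecation } from '@kbn/config';

const someLegacyFlagDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
  if (settings.server?.someLegacyFlag !== undefined) {
    addDeprecation({
      message: '"server.someLegacyFlag" is deprecated.',
      level: 'warning',
      correctiveActions: {
        manualSteps: ['Remove "server.someLegacyFlag" from your kibana configs.'],
      },
    });
  }
};
```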
diff --git a/src/core/server/config/index.ts b/src/core/server/config/index.ts
index 686564c6d678a..62e8ad755795f 100644
--- a/src/core/server/config/index.ts
+++ b/src/core/server/config/index.ts
@@ -25,10 +25,10 @@ export type {
ConfigPath,
CliArgs,
ConfigDeprecation,
+ ConfigDeprecationContext,
AddConfigDeprecation,
ConfigDeprecationProvider,
ConfigDeprecationFactory,
EnvironmentMode,
PackageInfo,
- LegacyObjectToConfigAdapter,
} from '@kbn/config';
diff --git a/src/core/server/config/integration_tests/config_deprecation.test.ts b/src/core/server/config/integration_tests/config_deprecation.test.ts
index 0138c6e7ef154..5036fa4742b59 100644
--- a/src/core/server/config/integration_tests/config_deprecation.test.ts
+++ b/src/core/server/config/integration_tests/config_deprecation.test.ts
@@ -23,17 +23,13 @@ describe('configuration deprecations', () => {
}
});
- it('should not log deprecation warnings for default configuration that is not one of `logging.verbose`, `logging.quiet` or `logging.silent`', async () => {
+ it('should not log deprecation warnings for default configuration', async () => {
root = kbnTestServer.createRoot();
await root.preboot();
await root.setup();
const logs = loggingSystemMock.collect(mockLoggingSystem);
- expect(logs.warn.flat()).toMatchInlineSnapshot(`
- Array [
- "\\"logging.silent\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level:off\\" in your logging configuration. ",
- ]
- `);
+ expect(logs.warn.flat()).toHaveLength(0);
});
});
diff --git a/src/core/server/config/mocks.ts b/src/core/server/config/mocks.ts
index 46f6c3e95cefb..1ac4ea745aeec 100644
--- a/src/core/server/config/mocks.ts
+++ b/src/core/server/config/mocks.ts
@@ -11,6 +11,7 @@ import type {
rawConfigServiceMock as rawConfigServiceMockTyped,
configServiceMock as configServiceMockTyped,
configMock as configMockTyped,
+ configDeprecationsMock as configDeprecationsMockTyped,
} from '@kbn/config/target_types/mocks';
import {
@@ -18,6 +19,7 @@ import {
rawConfigServiceMock as rawConfigServiceMockNonTyped,
configServiceMock as configServiceMockNonTyped,
configMock as configMockNonTyped,
+ configDeprecationsMock as configDeprecationsMockNonTyped,
// @ts-expect-error
} from '@kbn/config/target_node/mocks';
@@ -25,5 +27,12 @@ const getEnvOptions: typeof getEnvOptionsTyped = getEnvOptionsNonTyped;
const rawConfigServiceMock: typeof rawConfigServiceMockTyped = rawConfigServiceMockNonTyped;
const configServiceMock: typeof configServiceMockTyped = configServiceMockNonTyped;
const configMock: typeof configMockTyped = configMockNonTyped;
+const configDeprecationsMock: typeof configDeprecationsMockTyped = configDeprecationsMockNonTyped;
-export { getEnvOptions, rawConfigServiceMock, configServiceMock, configMock };
+export {
+ getEnvOptions,
+ rawConfigServiceMock,
+ configServiceMock,
+ configMock,
+ configDeprecationsMock,
+};
diff --git a/src/core/server/config/test_utils.ts b/src/core/server/config/test_utils.ts
index e3f9ca7eb29f2..f4d452005fbe4 100644
--- a/src/core/server/config/test_utils.ts
+++ b/src/core/server/config/test_utils.ts
@@ -6,28 +6,37 @@
* Side Public License, v 1.
*/
import { set } from '@elastic/safer-lodash-set';
-import type { ConfigDeprecationProvider } from '@kbn/config';
+import type { ConfigDeprecationProvider, ConfigDeprecationContext } from '@kbn/config';
import { configDeprecationFactory, applyDeprecations } from '@kbn/config';
+import { configDeprecationsMock } from './mocks';
+
+const defaultContext = configDeprecationsMock.createContext();
function collectDeprecations(
provider: ConfigDeprecationProvider,
settings: Record<string, any>,
- path: string
+ path: string,
+ context: ConfigDeprecationContext = defaultContext
) {
const deprecations = provider(configDeprecationFactory);
const deprecationMessages: string[] = [];
+ const deprecationLevels: string[] = [];
const { config: migrated } = applyDeprecations(
settings,
deprecations.map((deprecation) => ({
deprecation,
path,
+ context,
})),
() =>
- ({ message }) =>
- deprecationMessages.push(message)
+ ({ message, level }) => {
+ deprecationMessages.push(message);
+ deprecationLevels.push(level ?? '');
+ }
);
return {
messages: deprecationMessages,
+ levels: deprecationLevels,
migrated,
};
}
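A minimal usage sketch of the helper above, exercised from within this module; the inline provider and setting name are hypothetical.

```ts
// Hypothetical provider; real callers pass providers such as coreDeprecationProvider.
const exampleProvider: ConfigDeprecationProvider = () => [
  (settings, fromPath, addDeprecation) => {
    if (settings.some?.legacyFlag !== undefined) {
      addDeprecation({
        message: '"some.legacyFlag" is deprecated.',
        level: 'warning',
        correctiveActions: { manualSteps: ['Remove "some.legacyFlag" from your kibana configs.'] },
      });
    }
  },
];

const { messages, levels, migrated } = collectDeprecations(
  exampleProvider,
  { some: { legacyFlag: true } },
  ''
);
// messages === ['"some.legacyFlag" is deprecated.'], levels === ['warning'], and
// `migrated` holds the settings after all deprecation handlers have run.
```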
diff --git a/src/core/server/deprecations/types.ts b/src/core/server/deprecations/types.ts
index c924cacd02e28..7e276514a64d3 100644
--- a/src/core/server/deprecations/types.ts
+++ b/src/core/server/deprecations/types.ts
@@ -11,10 +11,16 @@ import type { IScopedClusterClient } from '../elasticsearch';
type MaybePromise<T> = T | Promise<T>;
+/**
+ * @internal
+ */
export interface DomainDeprecationDetails extends DeprecationsDetails {
domainId: string;
}
+/**
+ * @public
+ */
export interface DeprecationsDetails {
/**
* The title of the deprecation.
@@ -43,11 +49,11 @@ export interface DeprecationsDetails {
* across kibana deprecations.
*/
deprecationType?: 'config' | 'feature';
- /* (optional) link to the documentation for more details on the deprecation. */
+ /** (optional) link to the documentation for more details on the deprecation. */
documentationUrl?: string;
- /* (optional) specify the fix for this deprecation requires a full kibana restart. */
+ /** (optional) specify the fix for this deprecation requires a full kibana restart. */
requireRestart?: boolean;
- /* corrective action needed to fix this deprecation. */
+ /** corrective action needed to fix this deprecation. */
correctiveActions: {
/**
* (optional) The api to be called to automatically fix the deprecation
@@ -55,11 +61,11 @@ export interface DeprecationsDetails {
* handle their deprecations.
*/
api?: {
- /* Kibana route path. Passing a query string is allowed */
+ /** Kibana route path. Passing a query string is allowed */
path: string;
- /* Kibana route method: 'POST' or 'PUT'. */
+ /** Kibana route method: 'POST' or 'PUT'. */
method: 'POST' | 'PUT';
- /* Additional details to be passed to the route. */
+ /** Additional details to be passed to the route. */
body?: {
[key: string]: any;
};
@@ -74,15 +80,24 @@ export interface DeprecationsDetails {
};
}
+/**
+ * @public
+ */
export interface RegisterDeprecationsConfig {
getDeprecations: (context: GetDeprecationsContext) => MaybePromise<DeprecationsDetails[]>;
}
+/**
+ * @public
+ */
export interface GetDeprecationsContext {
esClient: IScopedClusterClient;
savedObjectsClient: SavedObjectsClientContract;
}
+/**
+ * @public
+ */
export interface DeprecationsGetResponse {
deprecations: DomainDeprecationDetails[];
}
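For illustration, a registration object satisfying the interfaces above might look like the sketch below; the import is relative to this directory and the deprecation content is invented.

```ts
// Sketch only; not part of this diff.
import type { RegisterDeprecationsConfig, DeprecationsDetails } from './types';

export const exampleDeprecations: RegisterDeprecationsConfig = {
  getDeprecations: async (context): Promise<DeprecationsDetails[]> => {
    // context.esClient / context.savedObjectsClient are available for inspecting state.
    return [
      {
        title: 'Example setting is deprecated',
        message: 'The example setting is deprecated and will be removed in 8.0.',
        level: 'warning',
        deprecationType: 'config',
        correctiveActions: {
          manualSteps: ['Remove the example setting from kibana.yml.'],
        },
      },
    ];
  },
};
```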
diff --git a/src/core/server/elasticsearch/elasticsearch_config.ts b/src/core/server/elasticsearch/elasticsearch_config.ts
index 995b3ffbd947d..7470ff7081717 100644
--- a/src/core/server/elasticsearch/elasticsearch_config.ts
+++ b/src/core/server/elasticsearch/elasticsearch_config.ts
@@ -211,7 +211,7 @@ const deprecations: ConfigDeprecationProvider = () => [
});
} else if (es.logQueries === true) {
addDeprecation({
- message: `Setting [${fromPath}.logQueries] is deprecated and no longer used. You should set the log level to "debug" for the "elasticsearch.queries" context in "logging.loggers" or use "logging.verbose: true".`,
+ message: `Setting [${fromPath}.logQueries] is deprecated and no longer used. You should set the log level to "debug" for the "elasticsearch.queries" context in "logging.loggers".`,
correctiveActions: {
manualSteps: [
`Remove Setting [${fromPath}.logQueries] from your kibana configs`,
diff --git a/src/core/server/http/integration_tests/logging.test.ts b/src/core/server/http/integration_tests/logging.test.ts
index 12d555a240cde..20e0175d4b19d 100644
--- a/src/core/server/http/integration_tests/logging.test.ts
+++ b/src/core/server/http/integration_tests/logging.test.ts
@@ -51,7 +51,6 @@ describe('request logging', () => {
it('logs at the correct level and with the correct context', async () => {
const root = kbnTestServer.createRoot({
logging: {
- silent: true,
appenders: {
'test-console': {
type: 'console',
@@ -99,7 +98,6 @@ describe('request logging', () => {
let root: ReturnType<typeof kbnTestServer.createRoot>;
const config = {
logging: {
- silent: true,
appenders: {
'test-console': {
type: 'console',
@@ -300,7 +298,6 @@ describe('request logging', () => {
it('filters sensitive request headers when RewriteAppender is configured', async () => {
root = kbnTestServer.createRoot({
logging: {
- silent: true,
appenders: {
'test-console': {
type: 'console',
@@ -402,7 +399,6 @@ describe('request logging', () => {
it('filters sensitive response headers when RewriteAppender is configured', async () => {
root = kbnTestServer.createRoot({
logging: {
- silent: true,
appenders: {
'test-console': {
type: 'console',
diff --git a/src/core/server/index.ts b/src/core/server/index.ts
index 110ac4d5bd973..2e46e8f68570c 100644
--- a/src/core/server/index.ts
+++ b/src/core/server/index.ts
@@ -96,6 +96,7 @@ export type {
ConfigPath,
ConfigService,
ConfigDeprecation,
+ ConfigDeprecationContext,
ConfigDeprecationProvider,
ConfigDeprecationFactory,
AddConfigDeprecation,
diff --git a/src/core/server/legacy/integration_tests/logging.test.ts b/src/core/server/legacy/integration_tests/logging.test.ts
deleted file mode 100644
index a79e434ce4576..0000000000000
--- a/src/core/server/legacy/integration_tests/logging.test.ts
+++ /dev/null
@@ -1,234 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { LegacyLoggingConfig } from '@kbn/config';
-import * as kbnTestServer from '../../../test_helpers/kbn_server';
-
-import {
- getPlatformLogsFromMock,
- getLegacyPlatformLogsFromMock,
-} from '../../logging/integration_tests/utils';
-
-function createRoot(legacyLoggingConfig: LegacyLoggingConfig = {}) {
- return kbnTestServer.createRoot({
- migrations: { skip: true }, // otherwise stuck in polling ES
- plugins: { initialize: false },
- elasticsearch: { skipStartupConnectionCheck: true },
- logging: {
- // legacy platform config
- silent: false,
- json: false,
- ...legacyLoggingConfig,
- events: {
- log: ['test-file-legacy'],
- },
- // platform config
- appenders: {
- 'test-console': {
- type: 'console',
- layout: {
- highlight: false,
- type: 'pattern',
- },
- },
- },
- loggers: [
- {
- name: 'test-file',
- appenders: ['test-console'],
- level: 'info',
- },
- ],
- },
- });
-}
-
-describe('logging service', () => {
- let mockConsoleLog: jest.SpyInstance;
- let mockStdout: jest.SpyInstance;
-
- beforeAll(async () => {
- mockConsoleLog = jest.spyOn(global.console, 'log');
- mockStdout = jest.spyOn(global.process.stdout, 'write');
- });
-
- afterAll(async () => {
- mockConsoleLog.mockRestore();
- mockStdout.mockRestore();
- });
-
- describe('compatibility', () => {
- describe('uses configured loggers', () => {
- let root: ReturnType<typeof createRoot>;
- beforeAll(async () => {
- root = createRoot();
-
- await root.preboot();
- await root.setup();
- await root.start();
- }, 30000);
-
- afterAll(async () => {
- await root.shutdown();
- });
-
- beforeEach(() => {
- mockConsoleLog.mockClear();
- mockStdout.mockClear();
- });
-
- it('when context matches', async () => {
- root.logger.get('test-file').info('handled by NP');
-
- expect(mockConsoleLog).toHaveBeenCalledTimes(1);
- const loggedString = getPlatformLogsFromMock(mockConsoleLog);
- expect(loggedString).toMatchInlineSnapshot(`
- Array [
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][INFO ][test-file] handled by NP",
- ]
- `);
- });
-
- it('falls back to the root legacy logger otherwise', async () => {
- root.logger.get('test-file-legacy').info('handled by LP');
-
- expect(mockStdout).toHaveBeenCalledTimes(1);
-
- const loggedString = getLegacyPlatformLogsFromMock(mockStdout);
- expect(loggedString).toMatchInlineSnapshot(`
- Array [
- " log [xx:xx:xx.xxx] [info][test-file-legacy] handled by LP
- ",
- ]
- `);
- });
- });
-
- describe('logging config respects legacy logging settings', () => {
- let root: ReturnType<typeof createRoot>;
-
- afterEach(async () => {
- mockConsoleLog.mockClear();
- mockStdout.mockClear();
- await root.shutdown();
- });
-
- it('"silent": true', async () => {
- root = createRoot({ silent: true });
-
- await root.preboot();
- await root.setup();
- await root.start();
-
- const platformLogger = root.logger.get('test-file');
- platformLogger.info('info');
- platformLogger.warn('warn');
- platformLogger.error('error');
-
- expect(mockConsoleLog).toHaveBeenCalledTimes(3);
-
- expect(getPlatformLogsFromMock(mockConsoleLog)).toMatchInlineSnapshot(`
- Array [
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][INFO ][test-file] info",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][WARN ][test-file] warn",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][ERROR][test-file] error",
- ]
- `);
-
- mockStdout.mockClear();
-
- const legacyPlatformLogger = root.logger.get('test-file-legacy');
- legacyPlatformLogger.info('info');
- legacyPlatformLogger.warn('warn');
- legacyPlatformLogger.error('error');
-
- expect(mockStdout).toHaveBeenCalledTimes(0);
- });
-
- it('"quiet": true', async () => {
- root = createRoot({ quiet: true });
-
- await root.preboot();
- await root.setup();
- await root.start();
-
- const platformLogger = root.logger.get('test-file');
- platformLogger.info('info');
- platformLogger.warn('warn');
- platformLogger.error('error');
-
- expect(mockConsoleLog).toHaveBeenCalledTimes(3);
-
- expect(getPlatformLogsFromMock(mockConsoleLog)).toMatchInlineSnapshot(`
- Array [
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][INFO ][test-file] info",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][WARN ][test-file] warn",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][ERROR][test-file] error",
- ]
- `);
-
- mockStdout.mockClear();
-
- const legacyPlatformLogger = root.logger.get('test-file-legacy');
- legacyPlatformLogger.info('info');
- legacyPlatformLogger.warn('warn');
- legacyPlatformLogger.error('error');
-
- expect(mockStdout).toHaveBeenCalledTimes(1);
- expect(getLegacyPlatformLogsFromMock(mockStdout)).toMatchInlineSnapshot(`
- Array [
- " log [xx:xx:xx.xxx] [error][test-file-legacy] error
- ",
- ]
- `);
- });
-
- it('"verbose": true', async () => {
- root = createRoot({ verbose: true });
-
- await root.preboot();
- await root.setup();
- await root.start();
-
- const platformLogger = root.logger.get('test-file');
- platformLogger.info('info');
- platformLogger.warn('warn');
- platformLogger.error('error');
-
- expect(mockConsoleLog).toHaveBeenCalledTimes(3);
-
- expect(getPlatformLogsFromMock(mockConsoleLog)).toMatchInlineSnapshot(`
- Array [
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][INFO ][test-file] info",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][WARN ][test-file] warn",
- "[xxxx-xx-xxTxx:xx:xx.xxx-xx:xx][ERROR][test-file] error",
- ]
- `);
-
- mockStdout.mockClear();
-
- const legacyPlatformLogger = root.logger.get('test-file-legacy');
- legacyPlatformLogger.info('info');
- legacyPlatformLogger.warn('warn');
- legacyPlatformLogger.error('error');
-
- expect(mockStdout).toHaveBeenCalledTimes(3);
- expect(getLegacyPlatformLogsFromMock(mockStdout)).toMatchInlineSnapshot(`
- Array [
- " log [xx:xx:xx.xxx] [info][test-file-legacy] info
- ",
- " log [xx:xx:xx.xxx] [warning][test-file-legacy] warn
- ",
- " log [xx:xx:xx.xxx] [error][test-file-legacy] error
- ",
- ]
- `);
- });
- });
- });
-});
diff --git a/src/core/server/legacy/legacy_service.mock.ts b/src/core/server/legacy/legacy_service.mock.ts
deleted file mode 100644
index 0d72318a630e0..0000000000000
--- a/src/core/server/legacy/legacy_service.mock.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import type { PublicMethodsOf } from '@kbn/utility-types';
-import { LegacyService } from './legacy_service';
-
-type LegacyServiceMock = jest.Mocked<PublicMethodsOf<LegacyService>>;
-
-const createLegacyServiceMock = (): LegacyServiceMock => ({
- setup: jest.fn(),
- stop: jest.fn(),
-});
-
-export const legacyServiceMock = {
- create: createLegacyServiceMock,
-};
diff --git a/src/core/server/legacy/legacy_service.test.mocks.ts b/src/core/server/legacy/legacy_service.test.mocks.ts
deleted file mode 100644
index 506f0fd6f96d3..0000000000000
--- a/src/core/server/legacy/legacy_service.test.mocks.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export const reconfigureLoggingMock = jest.fn();
-export const setupLoggingMock = jest.fn();
-export const setupLoggingRotateMock = jest.fn();
-
-jest.doMock('@kbn/legacy-logging', () => ({
- ...(jest.requireActual('@kbn/legacy-logging') as any),
- reconfigureLogging: reconfigureLoggingMock,
- setupLogging: setupLoggingMock,
- setupLoggingRotate: setupLoggingRotateMock,
-}));
diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts
deleted file mode 100644
index 6b20bd7434baf..0000000000000
--- a/src/core/server/legacy/legacy_service.test.ts
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import {
- setupLoggingMock,
- setupLoggingRotateMock,
- reconfigureLoggingMock,
-} from './legacy_service.test.mocks';
-
-import { BehaviorSubject } from 'rxjs';
-import moment from 'moment';
-import { REPO_ROOT } from '@kbn/dev-utils';
-
-import { Config, Env, ObjectToConfigAdapter } from '../config';
-
-import { getEnvOptions, configServiceMock } from '../config/mocks';
-import { loggingSystemMock } from '../logging/logging_system.mock';
-import { httpServiceMock } from '../http/http_service.mock';
-import { LegacyService, LegacyServiceSetupDeps } from './legacy_service';
-
-let coreId: symbol;
-let env: Env;
-let config$: BehaviorSubject<Config>;
-
-let setupDeps: LegacyServiceSetupDeps;
-
-const logger = loggingSystemMock.create();
-let configService: ReturnType<typeof configServiceMock.create>;
-
-beforeEach(() => {
- coreId = Symbol();
- env = Env.createDefault(REPO_ROOT, getEnvOptions());
- configService = configServiceMock.create();
-
- setupDeps = {
- http: httpServiceMock.createInternalSetupContract(),
- };
-
- config$ = new BehaviorSubject(
- new ObjectToConfigAdapter({
- elasticsearch: { hosts: ['http://127.0.0.1'] },
- server: { autoListen: true },
- })
- );
-
- configService.getConfig$.mockReturnValue(config$);
-});
-
-afterEach(() => {
- jest.clearAllMocks();
- setupLoggingMock.mockReset();
- setupLoggingRotateMock.mockReset();
- reconfigureLoggingMock.mockReset();
-});
-
-describe('#setup', () => {
- it('initializes legacy logging', async () => {
- const opsConfig = {
- interval: moment.duration(5, 'second'),
- };
- const opsConfig$ = new BehaviorSubject(opsConfig);
-
- const loggingConfig = {
- foo: 'bar',
- };
- const loggingConfig$ = new BehaviorSubject(loggingConfig);
-
- configService.atPath.mockImplementation((path) => {
- if (path === 'ops') {
- return opsConfig$;
- }
- if (path === 'logging') {
- return loggingConfig$;
- }
- return new BehaviorSubject({});
- });
-
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
- });
-
- await legacyService.setup(setupDeps);
-
- expect(setupLoggingMock).toHaveBeenCalledTimes(1);
- expect(setupLoggingMock).toHaveBeenCalledWith(
- setupDeps.http.server,
- loggingConfig,
- opsConfig.interval.asMilliseconds()
- );
-
- expect(setupLoggingRotateMock).toHaveBeenCalledTimes(1);
- expect(setupLoggingRotateMock).toHaveBeenCalledWith(setupDeps.http.server, loggingConfig);
- });
-
- it('reloads the logging config when the config changes', async () => {
- const opsConfig = {
- interval: moment.duration(5, 'second'),
- };
- const opsConfig$ = new BehaviorSubject(opsConfig);
-
- const loggingConfig = {
- foo: 'bar',
- };
- const loggingConfig$ = new BehaviorSubject(loggingConfig);
-
- configService.atPath.mockImplementation((path) => {
- if (path === 'ops') {
- return opsConfig$;
- }
- if (path === 'logging') {
- return loggingConfig$;
- }
- return new BehaviorSubject({});
- });
-
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
- });
-
- await legacyService.setup(setupDeps);
-
- expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
- expect(reconfigureLoggingMock).toHaveBeenCalledWith(
- setupDeps.http.server,
- loggingConfig,
- opsConfig.interval.asMilliseconds()
- );
-
- loggingConfig$.next({
- foo: 'changed',
- });
-
- expect(reconfigureLoggingMock).toHaveBeenCalledTimes(2);
- expect(reconfigureLoggingMock).toHaveBeenCalledWith(
- setupDeps.http.server,
- { foo: 'changed' },
- opsConfig.interval.asMilliseconds()
- );
- });
-
- it('stops reloading logging config once the service is stopped', async () => {
- const opsConfig = {
- interval: moment.duration(5, 'second'),
- };
- const opsConfig$ = new BehaviorSubject(opsConfig);
-
- const loggingConfig = {
- foo: 'bar',
- };
- const loggingConfig$ = new BehaviorSubject(loggingConfig);
-
- configService.atPath.mockImplementation((path) => {
- if (path === 'ops') {
- return opsConfig$;
- }
- if (path === 'logging') {
- return loggingConfig$;
- }
- return new BehaviorSubject({});
- });
-
- const legacyService = new LegacyService({
- coreId,
- env,
- logger,
- configService: configService as any,
- });
-
- await legacyService.setup(setupDeps);
-
- expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
- expect(reconfigureLoggingMock).toHaveBeenCalledWith(
- setupDeps.http.server,
- loggingConfig,
- opsConfig.interval.asMilliseconds()
- );
-
- await legacyService.stop();
-
- loggingConfig$.next({
- foo: 'changed',
- });
-
- expect(reconfigureLoggingMock).toHaveBeenCalledTimes(1);
- });
-});
diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts
deleted file mode 100644
index 1d5343ff5311d..0000000000000
--- a/src/core/server/legacy/legacy_service.ts
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { combineLatest, Observable, Subscription } from 'rxjs';
-import { first } from 'rxjs/operators';
-import { Server } from '@hapi/hapi';
-import type { PublicMethodsOf } from '@kbn/utility-types';
-import {
- reconfigureLogging,
- setupLogging,
- setupLoggingRotate,
- LegacyLoggingConfig,
-} from '@kbn/legacy-logging';
-
-import { CoreContext } from '../core_context';
-import { config as loggingConfig } from '../logging';
-import { opsConfig, OpsConfigType } from '../metrics';
-import { Logger } from '../logging';
-import { InternalHttpServiceSetup } from '../http';
-
-export interface LegacyServiceSetupDeps {
- http: InternalHttpServiceSetup;
-}
-
-/** @internal */
-export type ILegacyService = PublicMethodsOf<LegacyService>;
-
-/** @internal */
-export class LegacyService {
- private readonly log: Logger;
- private readonly opsConfig$: Observable<OpsConfigType>;
- private readonly legacyLoggingConfig$: Observable<LegacyLoggingConfig>;
- private configSubscription?: Subscription;
-
- constructor(coreContext: CoreContext) {
- const { logger, configService } = coreContext;
-
- this.log = logger.get('legacy-service');
- this.legacyLoggingConfig$ = configService.atPath(loggingConfig.path);
- this.opsConfig$ = configService.atPath(opsConfig.path);
- }
-
- public async setup(setupDeps: LegacyServiceSetupDeps) {
- this.log.debug('setting up legacy service');
- await this.setupLegacyLogging(setupDeps.http.server);
- }
-
- private async setupLegacyLogging(server: Server) {
- const legacyLoggingConfig = await this.legacyLoggingConfig$.pipe(first()).toPromise();
- const currentOpsConfig = await this.opsConfig$.pipe(first()).toPromise();
-
- await setupLogging(server, legacyLoggingConfig, currentOpsConfig.interval.asMilliseconds());
- await setupLoggingRotate(server, legacyLoggingConfig);
-
- this.configSubscription = combineLatest([this.legacyLoggingConfig$, this.opsConfig$]).subscribe(
- ([newLoggingConfig, newOpsConfig]) => {
- reconfigureLogging(server, newLoggingConfig, newOpsConfig.interval.asMilliseconds());
- }
- );
- }
-
- public async stop() {
- this.log.debug('stopping legacy service');
-
- if (this.configSubscription !== undefined) {
- this.configSubscription.unsubscribe();
- this.configSubscription = undefined;
- }
- }
-}
diff --git a/src/core/server/legacy/logging/appenders/__snapshots__/legacy_appender.test.ts.snap b/src/core/server/legacy/logging/appenders/__snapshots__/legacy_appender.test.ts.snap
deleted file mode 100644
index 3c40362e8211e..0000000000000
--- a/src/core/server/legacy/logging/appenders/__snapshots__/legacy_appender.test.ts.snap
+++ /dev/null
@@ -1,142 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`\`append()\` correctly pushes records to legacy platform. 1`] = `
-Object {
- "context": "context-1",
- "level": LogLevel {
- "id": "trace",
- "value": 7,
- },
- "message": "message-1",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 2`] = `
-Object {
- "context": "context-2",
- "level": LogLevel {
- "id": "debug",
- "value": 6,
- },
- "message": "message-2",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 3`] = `
-Object {
- "context": "context-3.sub-context-3",
- "level": LogLevel {
- "id": "info",
- "value": 5,
- },
- "message": "message-3",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 4`] = `
-Object {
- "context": "context-4.sub-context-4",
- "level": LogLevel {
- "id": "warn",
- "value": 4,
- },
- "message": "message-4",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 5`] = `
-Object {
- "context": "context-5",
- "error": [Error: Some Error],
- "level": LogLevel {
- "id": "error",
- "value": 3,
- },
- "message": "message-5-with-error",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 6`] = `
-Object {
- "context": "context-6",
- "level": LogLevel {
- "id": "error",
- "value": 3,
- },
- "message": "message-6-with-message",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 7`] = `
-Object {
- "context": "context-7.sub-context-7.sub-sub-context-7",
- "error": [Error: Some Fatal Error],
- "level": LogLevel {
- "id": "fatal",
- "value": 2,
- },
- "message": "message-7-with-error",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 8`] = `
-Object {
- "context": "context-8.sub-context-8.sub-sub-context-8",
- "level": LogLevel {
- "id": "fatal",
- "value": 2,
- },
- "message": "message-8-with-message",
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 9`] = `
-Object {
- "context": "context-9.sub-context-9",
- "level": LogLevel {
- "id": "info",
- "value": 5,
- },
- "message": "message-9-with-message",
- "meta": Object {
- "someValue": 3,
- },
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
-
-exports[`\`append()\` correctly pushes records to legacy platform. 10`] = `
-Object {
- "context": "context-10.sub-context-10",
- "level": LogLevel {
- "id": "info",
- "value": 5,
- },
- "message": "message-10-with-message",
- "meta": Object {
- "tags": Array [
- "tag1",
- "tag2",
- ],
- },
- "pid": Any,
- "timestamp": 2012-02-01T11:22:33.044Z,
-}
-`;
diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.test.ts b/src/core/server/legacy/logging/appenders/legacy_appender.test.ts
deleted file mode 100644
index 9213403d72d07..0000000000000
--- a/src/core/server/legacy/logging/appenders/legacy_appender.test.ts
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-jest.mock('@kbn/legacy-logging');
-
-import { LogRecord, LogLevel } from '../../../logging';
-import { LegacyLoggingServer } from '@kbn/legacy-logging';
-import { LegacyAppender } from './legacy_appender';
-
-afterEach(() => (LegacyLoggingServer as any).mockClear());
-
-test('`configSchema` creates correct schema.', () => {
- const appenderSchema = LegacyAppender.configSchema;
- const validConfig = { type: 'legacy-appender', legacyLoggingConfig: { verbose: true } };
- expect(appenderSchema.validate(validConfig)).toEqual({
- type: 'legacy-appender',
- legacyLoggingConfig: { verbose: true },
- });
-
- const wrongConfig = { type: 'not-legacy-appender' };
- expect(() => appenderSchema.validate(wrongConfig)).toThrow();
-});
-
-test('`append()` correctly pushes records to legacy platform.', () => {
- const timestamp = new Date(Date.UTC(2012, 1, 1, 11, 22, 33, 44));
- const records: LogRecord[] = [
- {
- context: 'context-1',
- level: LogLevel.Trace,
- message: 'message-1',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-2',
- level: LogLevel.Debug,
- message: 'message-2',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-3.sub-context-3',
- level: LogLevel.Info,
- message: 'message-3',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-4.sub-context-4',
- level: LogLevel.Warn,
- message: 'message-4',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-5',
- error: new Error('Some Error'),
- level: LogLevel.Error,
- message: 'message-5-with-error',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-6',
- level: LogLevel.Error,
- message: 'message-6-with-message',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-7.sub-context-7.sub-sub-context-7',
- error: new Error('Some Fatal Error'),
- level: LogLevel.Fatal,
- message: 'message-7-with-error',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-8.sub-context-8.sub-sub-context-8',
- level: LogLevel.Fatal,
- message: 'message-8-with-message',
- timestamp,
- pid: 5355,
- },
- {
- context: 'context-9.sub-context-9',
- level: LogLevel.Info,
- message: 'message-9-with-message',
- timestamp,
- pid: 5355,
- meta: { someValue: 3 },
- },
- {
- context: 'context-10.sub-context-10',
- level: LogLevel.Info,
- message: 'message-10-with-message',
- timestamp,
- pid: 5355,
- meta: { tags: ['tag1', 'tag2'] },
- },
- ];
-
- const appender = new LegacyAppender({ verbose: true });
- for (const record of records) {
- appender.append(record);
- }
-
- const [mockLegacyLoggingServerInstance] = (LegacyLoggingServer as any).mock.instances;
- expect(mockLegacyLoggingServerInstance.log.mock.calls).toHaveLength(records.length);
- records.forEach((r, idx) => {
- expect(mockLegacyLoggingServerInstance.log.mock.calls[idx][0]).toMatchSnapshot({
- pid: expect.any(Number),
- });
- });
-});
-
-test('legacy logging server is correctly created and disposed.', async () => {
- const mockRawLegacyLoggingConfig = { verbose: true };
- const appender = new LegacyAppender(mockRawLegacyLoggingConfig);
-
- expect(LegacyLoggingServer).toHaveBeenCalledTimes(1);
- expect(LegacyLoggingServer).toHaveBeenCalledWith(mockRawLegacyLoggingConfig);
-
- const [mockLegacyLoggingServerInstance] = (LegacyLoggingServer as any).mock.instances;
- expect(mockLegacyLoggingServerInstance.stop).not.toHaveBeenCalled();
-
- await appender.dispose();
-
- expect(mockLegacyLoggingServerInstance.stop).toHaveBeenCalledTimes(1);
-});
diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.ts b/src/core/server/legacy/logging/appenders/legacy_appender.ts
deleted file mode 100644
index 7e02d00c7b234..0000000000000
--- a/src/core/server/legacy/logging/appenders/legacy_appender.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { schema } from '@kbn/config-schema';
-import { LegacyLoggingServer } from '@kbn/legacy-logging';
-import { DisposableAppender, LogRecord } from '@kbn/logging';
-
-export interface LegacyAppenderConfig {
- type: 'legacy-appender';
- legacyLoggingConfig?: Record<string, any>;
-}
-
-/**
- * Simple appender that just forwards `LogRecord` to the legacy KbnServer log.
- * @internal
- */
-export class LegacyAppender implements DisposableAppender {
- public static configSchema = schema.object({
- type: schema.literal('legacy-appender'),
- legacyLoggingConfig: schema.recordOf(schema.string(), schema.any()),
- });
-
- /**
- * Sets {@link Appender.receiveAllLevels} because legacy does its own filtering based on the legacy logging
- * configuration.
- */
- public readonly receiveAllLevels = true;
-
- private readonly loggingServer: LegacyLoggingServer;
-
- constructor(legacyLoggingConfig: any) {
- this.loggingServer = new LegacyLoggingServer(legacyLoggingConfig);
- }
-
- /**
- * Forwards `LogRecord` to the legacy platform that will layout and
- * write record to the configured destination.
- * @param record `LogRecord` instance to forward to.
- */
- public append(record: LogRecord) {
- this.loggingServer.log(record);
- }
-
- public dispose() {
- this.loggingServer.stop();
- }
-}
diff --git a/src/core/server/logging/README.mdx b/src/core/server/logging/README.mdx
index 08e4ed34204c0..11437d1e8df20 100644
--- a/src/core/server/logging/README.mdx
+++ b/src/core/server/logging/README.mdx
@@ -562,11 +562,6 @@ The log will be less verbose with `warn` level for the `server` context name:
```
### Logging config migration
-Compatibility with the legacy logging system is assured until the end of the `v7` version.
-All log messages handled by `root` context are forwarded to the legacy logging service using a `default` appender. If you re-write
-root appenders, make sure that it contains `default` appender to provide backward compatibility.
-**Note**: If you define an appender for a context name, the log messages for that specific context aren't handled by the
-`root` context anymore and not forwarded to the legacy logging service.
#### logging.dest
By default logs in *stdout*. With new Kibana logging you can use pre-existing `console` appender or
diff --git a/src/core/server/logging/appenders/appenders.test.ts b/src/core/server/logging/appenders/appenders.test.ts
index bd32e4061049b..759fcb9546f09 100644
--- a/src/core/server/logging/appenders/appenders.test.ts
+++ b/src/core/server/logging/appenders/appenders.test.ts
@@ -9,7 +9,6 @@
import { mockCreateLayout } from './appenders.test.mocks';
import { ByteSizeValue } from '@kbn/config-schema';
-import { LegacyAppender } from '../../legacy/logging/appenders/legacy_appender';
import { Appenders } from './appenders';
import { ConsoleAppender } from './console/console_appender';
import { FileAppender } from './file/file_appender';
@@ -68,13 +67,6 @@ test('`create()` creates correct appender.', () => {
});
expect(fileAppender).toBeInstanceOf(FileAppender);
- const legacyAppender = Appenders.create({
- type: 'legacy-appender',
- legacyLoggingConfig: { verbose: true },
- });
-
- expect(legacyAppender).toBeInstanceOf(LegacyAppender);
-
const rollingFileAppender = Appenders.create({
type: 'rolling-file',
fileName: 'path',
diff --git a/src/core/server/logging/appenders/appenders.ts b/src/core/server/logging/appenders/appenders.ts
index 88df355bd5ebe..3e867739aa1c7 100644
--- a/src/core/server/logging/appenders/appenders.ts
+++ b/src/core/server/logging/appenders/appenders.ts
@@ -10,10 +10,6 @@ import { schema } from '@kbn/config-schema';
import { assertNever } from '@kbn/std';
import { DisposableAppender } from '@kbn/logging';
-import {
- LegacyAppender,
- LegacyAppenderConfig,
-} from '../../legacy/logging/appenders/legacy_appender';
import { Layouts } from '../layouts/layouts';
import { ConsoleAppender, ConsoleAppenderConfig } from './console/console_appender';
import { FileAppender, FileAppenderConfig } from './file/file_appender';
@@ -32,7 +28,6 @@ import {
export const appendersSchema = schema.oneOf([
ConsoleAppender.configSchema,
FileAppender.configSchema,
- LegacyAppender.configSchema,
RewriteAppender.configSchema,
RollingFileAppender.configSchema,
]);
@@ -41,7 +36,6 @@ export const appendersSchema = schema.oneOf([
export type AppenderConfigType =
| ConsoleAppenderConfig
| FileAppenderConfig
- | LegacyAppenderConfig
| RewriteAppenderConfig
| RollingFileAppenderConfig;
@@ -64,8 +58,6 @@ export class Appenders {
return new RewriteAppender(config);
case 'rolling-file':
return new RollingFileAppender(config);
- case 'legacy-appender':
- return new LegacyAppender(config.legacyLoggingConfig);
default:
return assertNever(config);
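Call sites are unchanged apart from the removed config variant; a small usage sketch, mirroring the configs exercised in `appenders.test.ts`:

```ts
import { Appenders } from './appenders';

// Builds a ConsoleAppender from its validated config; a { type: 'legacy-appender' }
// config is no longer part of AppenderConfigType and now fails type-checking.
const consoleAppender = Appenders.create({
  type: 'console',
  layout: { type: 'pattern', highlight: false },
});
```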
diff --git a/src/core/server/logging/integration_tests/logging.test.ts b/src/core/server/logging/integration_tests/logging.test.ts
index ade10fc1c0257..ff681222c4f30 100644
--- a/src/core/server/logging/integration_tests/logging.test.ts
+++ b/src/core/server/logging/integration_tests/logging.test.ts
@@ -14,7 +14,6 @@ import { Subject } from 'rxjs';
function createRoot() {
return kbnTestServer.createRoot({
logging: {
- silent: true, // set "true" in kbnTestServer
appenders: {
'test-console': {
type: 'console',
diff --git a/src/core/server/logging/integration_tests/rolling_file_appender.test.ts b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts
index 83533e29ad12e..dc6a01b80e951 100644
--- a/src/core/server/logging/integration_tests/rolling_file_appender.test.ts
+++ b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts
@@ -19,7 +19,6 @@ const flush = async () => delay(flushDelay);
function createRoot(appenderConfig: any) {
return kbnTestServer.createRoot({
logging: {
- silent: true, // set "true" in kbnTestServer
appenders: {
'rolling-file': appenderConfig,
},
diff --git a/src/core/server/logging/logging_config.test.ts b/src/core/server/logging/logging_config.test.ts
index e0004ba992c17..41acd072b295d 100644
--- a/src/core/server/logging/logging_config.test.ts
+++ b/src/core/server/logging/logging_config.test.ts
@@ -9,35 +9,18 @@
import { LoggingConfig, config } from './logging_config';
test('`schema` creates correct schema with defaults.', () => {
- expect(config.schema.validate({})).toMatchInlineSnapshot(
- { json: expect.any(Boolean) }, // default value depends on TTY
- `
+ expect(config.schema.validate({})).toMatchInlineSnapshot(`
Object {
"appenders": Map {},
- "dest": "stdout",
- "events": Object {},
- "filter": Object {},
- "json": Any,
"loggers": Array [],
- "quiet": false,
"root": Object {
"appenders": Array [
"default",
],
"level": "info",
},
- "rotate": Object {
- "enabled": false,
- "everyBytes": 10485760,
- "keepFiles": 7,
- "pollingInterval": 10000,
- "usePolling": false,
- },
- "silent": false,
- "verbose": false,
}
- `
- );
+ `);
});
test('`schema` throws if `root` logger does not have appenders configured.', () => {
@@ -52,16 +35,14 @@ test('`schema` throws if `root` logger does not have appenders configured.', ()
);
});
-test('`schema` throws if `root` logger does not have "default" appender configured.', () => {
+test('`schema` does not throw if `root` logger does not have "default" appender configured.', () => {
expect(() =>
config.schema.validate({
root: {
appenders: ['console'],
},
})
- ).toThrowErrorMatchingInlineSnapshot(
- `"[root]: \\"default\\" appender required for migration period till the next major release"`
- );
+ ).not.toThrow();
});
test('`getParentLoggerContext()` returns correct parent context name.', () => {
diff --git a/src/core/server/logging/logging_config.ts b/src/core/server/logging/logging_config.ts
index f5b75d7bb739c..a04506ad9c0f6 100644
--- a/src/core/server/logging/logging_config.ts
+++ b/src/core/server/logging/logging_config.ts
@@ -7,7 +7,6 @@
*/
import { schema, TypeOf } from '@kbn/config-schema';
-import { legacyLoggingConfigSchema } from '@kbn/legacy-logging';
import { AppenderConfigType, Appenders } from './appenders/appenders';
// We need this helper for the types to be correct
@@ -58,31 +57,23 @@ export const loggerSchema = schema.object({
/** @public */
export type LoggerConfigType = TypeOf<typeof loggerSchema>;
+
export const config = {
path: 'logging',
- schema: legacyLoggingConfigSchema.extends({
+ schema: schema.object({
appenders: schema.mapOf(schema.string(), Appenders.configSchema, {
defaultValue: new Map(),
}),
loggers: schema.arrayOf(loggerSchema, {
defaultValue: [],
}),
- root: schema.object(
- {
- appenders: schema.arrayOf(schema.string(), {
- defaultValue: [DEFAULT_APPENDER_NAME],
- minSize: 1,
- }),
- level: levelSchema,
- },
- {
- validate(rawConfig) {
- if (!rawConfig.appenders.includes(DEFAULT_APPENDER_NAME)) {
- return `"${DEFAULT_APPENDER_NAME}" appender required for migration period till the next major release`;
- }
- },
- }
- ),
+ root: schema.object({
+ appenders: schema.arrayOf(schema.string(), {
+ defaultValue: [DEFAULT_APPENDER_NAME],
+ minSize: 1,
+ }),
+ level: levelSchema,
+ }),
}),
};
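A quick usage sketch of the simplified schema, matching the updated expectation in `logging_config.test.ts` that a root logger no longer needs the "default" appender:

```ts
import { config } from './logging_config';

// Validates cleanly now; previously this threw
// '"default" appender required for migration period till the next major release'.
const validated = config.schema.validate({
  appenders: {
    console: { type: 'console', layout: { type: 'pattern' } },
  },
  root: { appenders: ['console'], level: 'info' },
});
```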
diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts
index dd546d4e7eaca..ebe06326f499d 100644
--- a/src/core/server/logging/logging_system.test.ts
+++ b/src/core/server/logging/logging_system.test.ts
@@ -15,11 +15,6 @@ jest.mock('fs', () => ({
const dynamicProps = { process: { pid: expect.any(Number) } };
-jest.mock('@kbn/legacy-logging', () => ({
- ...(jest.requireActual('@kbn/legacy-logging') as any),
- setupLoggingRotate: jest.fn().mockImplementation(() => Promise.resolve({})),
-}));
-
const timestamp = new Date(Date.UTC(2012, 1, 1, 14, 33, 22, 11));
let mockConsoleLog: jest.SpyInstance;
diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts
index f8b56e81ab188..8b4dee45a8e72 100644
--- a/src/core/server/mocks.ts
+++ b/src/core/server/mocks.ts
@@ -39,7 +39,7 @@ import { deprecationsServiceMock } from './deprecations/deprecations_service.moc
import { executionContextServiceMock } from './execution_context/execution_context_service.mock';
import { prebootServiceMock } from './preboot/preboot_service.mock';
-export { configServiceMock } from './config/mocks';
+export { configServiceMock, configDeprecationsMock } from './config/mocks';
export { httpServerMock } from './http/http_server.mocks';
export { httpResourcesMock } from './http_resources/http_resources_service.mock';
export { sessionStorageMock } from './http/cookie_session_storage.mocks';
diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md
index 1ef845730e1f3..c92f767ce891d 100644
--- a/src/core/server/server.api.md
+++ b/src/core/server/server.api.md
@@ -11,6 +11,7 @@ import { ByteSizeValue } from '@kbn/config-schema';
import { CliArgs } from '@kbn/config';
import { ClientOptions } from '@elastic/elasticsearch';
import { ConfigDeprecation } from '@kbn/config';
+import { ConfigDeprecationContext } from '@kbn/config';
import { ConfigDeprecationFactory } from '@kbn/config';
import { ConfigDeprecationProvider } from '@kbn/config';
import { ConfigPath } from '@kbn/config';
@@ -71,12 +72,11 @@ export interface AppCategory {
// Warning: (ae-forgotten-export) The symbol "ConsoleAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "FileAppenderConfig" needs to be exported by the entry point index.d.ts
-// Warning: (ae-forgotten-export) The symbol "LegacyAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "RewriteAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "RollingFileAppenderConfig" needs to be exported by the entry point index.d.ts
//
// @public (undocumented)
-export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;
+export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;
// @public @deprecated
export interface AsyncPlugin {
@@ -247,6 +247,8 @@ export const config: {
export { ConfigDeprecation }
+export { ConfigDeprecationContext }
+
export { ConfigDeprecationFactory }
export { ConfigDeprecationProvider }
@@ -801,11 +803,8 @@ export interface DeprecationsClient {
getAllDeprecations: () => Promise<DomainDeprecationDetails[]>;
}
-// Warning: (ae-missing-release-tag) "DeprecationsDetails" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
-//
// @public (undocumented)
export interface DeprecationsDetails {
- // (undocumented)
correctiveActions: {
api?: {
path: string;
@@ -817,11 +816,9 @@ export interface DeprecationsDetails {
manualSteps: string[];
};
deprecationType?: 'config' | 'feature';
- // (undocumented)
documentationUrl?: string;
level: 'warning' | 'critical' | 'fetch_error';
message: string;
- // (undocumented)
requireRestart?: boolean;
title: string;
}
@@ -984,8 +981,6 @@ export type GetAuthState = (request: KibanaRequest) => {
state: T;
};
-// Warning: (ae-missing-release-tag) "GetDeprecationsContext" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
-//
// @public (undocumented)
export interface GetDeprecationsContext {
// (undocumented)
@@ -1700,8 +1695,6 @@ export type RedirectResponseOptions = HttpResponseOptions & {
};
};
-// Warning: (ae-missing-release-tag) "RegisterDeprecationsConfig" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
-//
// @public (undocumented)
export interface RegisterDeprecationsConfig {
// Warning: (ae-forgotten-export) The symbol "MaybePromise" needs to be exported by the entry point index.d.ts
diff --git a/src/core/server/server.test.mocks.ts b/src/core/server/server.test.mocks.ts
index 47899043dc5a5..c4f420f75b5d1 100644
--- a/src/core/server/server.test.mocks.ts
+++ b/src/core/server/server.test.mocks.ts
@@ -7,32 +7,30 @@
*/
import { httpServiceMock } from './http/http_service.mock';
+
export const mockHttpService = httpServiceMock.create();
jest.doMock('./http/http_service', () => ({
HttpService: jest.fn(() => mockHttpService),
}));
import { pluginServiceMock } from './plugins/plugins_service.mock';
+
export const mockPluginsService = pluginServiceMock.create();
jest.doMock('./plugins/plugins_service', () => ({
PluginsService: jest.fn(() => mockPluginsService),
}));
import { elasticsearchServiceMock } from './elasticsearch/elasticsearch_service.mock';
+
export const mockElasticsearchService = elasticsearchServiceMock.create();
jest.doMock('./elasticsearch/elasticsearch_service', () => ({
ElasticsearchService: jest.fn(() => mockElasticsearchService),
}));
-import { legacyServiceMock } from './legacy/legacy_service.mock';
-export const mockLegacyService = legacyServiceMock.create();
-jest.mock('./legacy/legacy_service', () => ({
- LegacyService: jest.fn(() => mockLegacyService),
-}));
-
const realKbnConfig = jest.requireActual('@kbn/config');
import { configServiceMock } from './config/mocks';
+
export const mockConfigService = configServiceMock.create();
jest.doMock('@kbn/config', () => ({
...realKbnConfig,
@@ -40,18 +38,21 @@ jest.doMock('@kbn/config', () => ({
}));
import { savedObjectsServiceMock } from './saved_objects/saved_objects_service.mock';
+
export const mockSavedObjectsService = savedObjectsServiceMock.create();
jest.doMock('./saved_objects/saved_objects_service', () => ({
SavedObjectsService: jest.fn(() => mockSavedObjectsService),
}));
import { contextServiceMock } from './context/context_service.mock';
+
export const mockContextService = contextServiceMock.create();
jest.doMock('./context/context_service', () => ({
ContextService: jest.fn(() => mockContextService),
}));
import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
+
export const mockUiSettingsService = uiSettingsServiceMock.create();
jest.doMock('./ui_settings/ui_settings_service', () => ({
UiSettingsService: jest.fn(() => mockUiSettingsService),
@@ -63,46 +64,54 @@ jest.doMock('./config/ensure_valid_configuration', () => ({
}));
import { RenderingService, mockRenderingService } from './rendering/__mocks__/rendering_service';
+
export { mockRenderingService };
jest.doMock('./rendering/rendering_service', () => ({ RenderingService }));
import { environmentServiceMock } from './environment/environment_service.mock';
+
export const mockEnvironmentService = environmentServiceMock.create();
jest.doMock('./environment/environment_service', () => ({
EnvironmentService: jest.fn(() => mockEnvironmentService),
}));
import { metricsServiceMock } from './metrics/metrics_service.mock';
+
export const mockMetricsService = metricsServiceMock.create();
jest.doMock('./metrics/metrics_service', () => ({
MetricsService: jest.fn(() => mockMetricsService),
}));
import { statusServiceMock } from './status/status_service.mock';
+
export const mockStatusService = statusServiceMock.create();
jest.doMock('./status/status_service', () => ({
StatusService: jest.fn(() => mockStatusService),
}));
import { loggingServiceMock } from './logging/logging_service.mock';
+
export const mockLoggingService = loggingServiceMock.create();
jest.doMock('./logging/logging_service', () => ({
LoggingService: jest.fn(() => mockLoggingService),
}));
import { i18nServiceMock } from './i18n/i18n_service.mock';
+
export const mockI18nService = i18nServiceMock.create();
jest.doMock('./i18n/i18n_service', () => ({
I18nService: jest.fn(() => mockI18nService),
}));
import { prebootServiceMock } from './preboot/preboot_service.mock';
+
export const mockPrebootService = prebootServiceMock.create();
jest.doMock('./preboot/preboot_service', () => ({
PrebootService: jest.fn(() => mockPrebootService),
}));
import { deprecationsServiceMock } from './deprecations/deprecations_service.mock';
+
export const mockDeprecationService = deprecationsServiceMock.create();
jest.doMock('./deprecations/deprecations_service', () => ({
DeprecationsService: jest.fn(() => mockDeprecationService),
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index b27c8fa769c48..112693aae0279 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -9,7 +9,6 @@
import {
mockElasticsearchService,
mockHttpService,
- mockLegacyService,
mockPluginsService,
mockConfigService,
mockSavedObjectsService,
@@ -95,7 +94,6 @@ test('sets up services on "setup"', async () => {
expect(mockHttpService.setup).not.toHaveBeenCalled();
expect(mockElasticsearchService.setup).not.toHaveBeenCalled();
expect(mockPluginsService.setup).not.toHaveBeenCalled();
- expect(mockLegacyService.setup).not.toHaveBeenCalled();
expect(mockSavedObjectsService.setup).not.toHaveBeenCalled();
expect(mockUiSettingsService.setup).not.toHaveBeenCalled();
expect(mockRenderingService.setup).not.toHaveBeenCalled();
@@ -111,7 +109,6 @@ test('sets up services on "setup"', async () => {
expect(mockHttpService.setup).toHaveBeenCalledTimes(1);
expect(mockElasticsearchService.setup).toHaveBeenCalledTimes(1);
expect(mockPluginsService.setup).toHaveBeenCalledTimes(1);
- expect(mockLegacyService.setup).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.setup).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.setup).toHaveBeenCalledTimes(1);
expect(mockRenderingService.setup).toHaveBeenCalledTimes(1);
@@ -199,7 +196,6 @@ test('stops services on "stop"', async () => {
expect(mockHttpService.stop).not.toHaveBeenCalled();
expect(mockElasticsearchService.stop).not.toHaveBeenCalled();
expect(mockPluginsService.stop).not.toHaveBeenCalled();
- expect(mockLegacyService.stop).not.toHaveBeenCalled();
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
expect(mockUiSettingsService.stop).not.toHaveBeenCalled();
expect(mockMetricsService.stop).not.toHaveBeenCalled();
@@ -211,7 +207,6 @@ test('stops services on "stop"', async () => {
expect(mockHttpService.stop).toHaveBeenCalledTimes(1);
expect(mockElasticsearchService.stop).toHaveBeenCalledTimes(1);
expect(mockPluginsService.stop).toHaveBeenCalledTimes(1);
- expect(mockLegacyService.stop).toHaveBeenCalledTimes(1);
expect(mockSavedObjectsService.stop).toHaveBeenCalledTimes(1);
expect(mockUiSettingsService.stop).toHaveBeenCalledTimes(1);
expect(mockMetricsService.stop).toHaveBeenCalledTimes(1);
diff --git a/src/core/server/server.ts b/src/core/server/server.ts
index 867446484a230..8b0714e899139 100644
--- a/src/core/server/server.ts
+++ b/src/core/server/server.ts
@@ -21,7 +21,6 @@ import { ElasticsearchService } from './elasticsearch';
import { HttpService } from './http';
import { HttpResourcesService } from './http_resources';
import { RenderingService } from './rendering';
-import { LegacyService } from './legacy';
import { Logger, LoggerFactory, LoggingService, ILoggingSystem } from './logging';
import { UiSettingsService } from './ui_settings';
import { PluginsService, config as pluginsConfig } from './plugins';
@@ -69,7 +68,6 @@ export class Server {
private readonly elasticsearch: ElasticsearchService;
private readonly http: HttpService;
private readonly rendering: RenderingService;
- private readonly legacy: LegacyService;
private readonly log: Logger;
private readonly plugins: PluginsService;
private readonly savedObjects: SavedObjectsService;
@@ -108,7 +106,6 @@ export class Server {
this.http = new HttpService(core);
this.rendering = new RenderingService(core);
this.plugins = new PluginsService(core);
- this.legacy = new LegacyService(core);
this.elasticsearch = new ElasticsearchService(core);
this.savedObjects = new SavedObjectsService(core);
this.uiSettings = new UiSettingsService(core);
@@ -286,10 +283,6 @@ export class Server {
const pluginsSetup = await this.plugins.setup(coreSetup);
this.#pluginsInitialized = pluginsSetup.initialized;
- await this.legacy.setup({
- http: httpSetup,
- });
-
this.registerCoreContext(coreSetup);
this.coreApp.setup(coreSetup, uiPlugins);
@@ -348,7 +341,6 @@ export class Server {
public async stop() {
this.log.debug('stopping server');
- await this.legacy.stop();
await this.http.stop(); // HTTP server has to stop before savedObjects and ES clients are closed to be able to gracefully attempt to resolve any pending requests
await this.plugins.stop();
await this.savedObjects.stop();
diff --git a/src/core/server/status/routes/integration_tests/status.test.ts b/src/core/server/status/routes/integration_tests/status.test.ts
index 082be62f8dc09..df840f5d7c059 100644
--- a/src/core/server/status/routes/integration_tests/status.test.ts
+++ b/src/core/server/status/routes/integration_tests/status.test.ts
@@ -18,20 +18,30 @@ import { MetricsServiceSetup } from '../../../metrics';
import { HttpService, InternalHttpServiceSetup } from '../../../http';
import { registerStatusRoute } from '../status';
-import { ServiceStatus, ServiceStatusLevels } from '../../types';
+import { ServiceStatus, ServiceStatusLevels, ServiceStatusLevel } from '../../types';
import { statusServiceMock } from '../../status_service.mock';
import { executionContextServiceMock } from '../../../execution_context/execution_context_service.mock';
import { contextServiceMock } from '../../../context/context_service.mock';
const coreId = Symbol('core');
+const createServiceStatus = (
+ level: ServiceStatusLevel = ServiceStatusLevels.available
+): ServiceStatus => ({
+ level,
+ summary: 'status summary',
+});
+
describe('GET /api/status', () => {
let server: HttpService;
let httpSetup: InternalHttpServiceSetup;
let metrics: jest.Mocked<MetricsServiceSetup>;
let incrementUsageCounter: jest.Mock;
- const setupServer = async ({ allowAnonymous = true }: { allowAnonymous?: boolean } = {}) => {
+ const setupServer = async ({
+ allowAnonymous = true,
+ coreOverall,
+ }: { allowAnonymous?: boolean; coreOverall?: ServiceStatus } = {}) => {
const coreContext = createCoreContext({ coreId });
const contextService = new ContextService(coreContext);
@@ -43,7 +53,12 @@ describe('GET /api/status', () => {
});
metrics = metricsServiceMock.createSetupContract();
- const status = statusServiceMock.createSetupContract();
+
+ const status = statusServiceMock.createInternalSetupContract();
+ if (coreOverall) {
+ status.coreOverall$ = new BehaviorSubject(coreOverall);
+ }
+
const pluginsStatus$ = new BehaviorSubject<Record<string, ServiceStatus>>({
a: { level: ServiceStatusLevels.available, summary: 'a is available' },
b: { level: ServiceStatusLevels.degraded, summary: 'b is degraded' },
@@ -71,6 +86,7 @@ describe('GET /api/status', () => {
metrics,
status: {
overall$: status.overall$,
+ coreOverall$: status.coreOverall$,
core$: status.core$,
plugins$: pluginsStatus$,
},
@@ -318,4 +334,60 @@ describe('GET /api/status', () => {
expect(incrementUsageCounter).not.toHaveBeenCalled();
});
});
+
+ describe('status level and http response code', () => {
+ describe('using standard format', () => {
+ it('respond with a 200 when core.overall.status is available', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.available),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v8format=true').expect(200);
+ });
+ it('respond with a 200 when core.overall.status is degraded', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.degraded),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v8format=true').expect(200);
+ });
+ it('respond with a 503 when core.overall.status is unavailable', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.unavailable),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v8format=true').expect(503);
+ });
+ it('respond with a 503 when core.overall.status is critical', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.critical),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v8format=true').expect(503);
+ });
+ });
+
+ describe('using legacy format', () => {
+ it('respond with a 200 when core.overall.status is available', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.available),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v7format=true').expect(200);
+ });
+ it('respond with a 200 when core.overall.status is degraded', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.degraded),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v7format=true').expect(200);
+ });
+ it('respond with a 503 when core.overall.status is unavailable', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.unavailable),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v7format=true').expect(503);
+ });
+ it('respond with a 503 when core.overall.status is critical', async () => {
+ await setupServer({
+ coreOverall: createServiceStatus(ServiceStatusLevels.critical),
+ });
+ await supertest(httpSetup.server.listener).get('/api/status?v7format=true').expect(503);
+ });
+ });
+ });
});
diff --git a/src/core/server/status/routes/status.ts b/src/core/server/status/routes/status.ts
index cef5ee05ea2e5..7751980ae6ec3 100644
--- a/src/core/server/status/routes/status.ts
+++ b/src/core/server/status/routes/status.ts
@@ -31,6 +31,7 @@ interface Deps {
};
metrics: MetricsServiceSetup;
status: {
+ coreOverall$: Observable<ServiceStatus>;
overall$: Observable<ServiceStatus>;
core$: Observable<CoreStatus>;
plugins$: Observable<Record<string, ServiceStatus>>;
@@ -59,9 +60,11 @@ export const registerStatusRoute = ({
// Since the status.plugins$ observable is not subscribed to elsewhere, we need to subscribe it here to eagerly load
// the plugins status when Kibana starts up so this endpoint responds quickly on first boot.
const combinedStatus$ = new ReplaySubject<
- [ServiceStatus, CoreStatus, Record<string, ServiceStatus>]
+ [ServiceStatus, ServiceStatus, CoreStatus, Record<string, ServiceStatus>]
>(1);
- combineLatest([status.overall$, status.core$, status.plugins$]).subscribe(combinedStatus$);
+ combineLatest([status.overall$, status.coreOverall$, status.core$, status.plugins$]).subscribe(
+ combinedStatus$
+ );
router.get(
{
@@ -89,7 +92,7 @@ export const registerStatusRoute = ({
async (context, req, res) => {
const { version, buildSha, buildNum } = config.packageInfo;
const versionWithoutSnapshot = version.replace(SNAPSHOT_POSTFIX, '');
- const [overall, core, plugins] = await combinedStatus$.pipe(first()).toPromise();
+ const [overall, coreOverall, core, plugins] = await combinedStatus$.pipe(first()).toPromise();
const { v8format = true, v7format = false } = req.query ?? {};
@@ -137,7 +140,7 @@ export const registerStatusRoute = ({
},
};
- const statusCode = overall.level >= ServiceStatusLevels.unavailable ? 503 : 200;
+ const statusCode = coreOverall.level >= ServiceStatusLevels.unavailable ? 503 : 200;
return res.custom({ body, statusCode, bypassErrorFormat: true });
}
);
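
Put differently, the route's HTTP code is now derived from the core-only summary rather than the combined core + plugins status. A standalone sketch of that mapping (imports as used in the route above), relying on the ordered comparison of `ServiceStatusLevels`:

import { ServiceStatus, ServiceStatusLevels } from '../../types';

// 200 while core is available or degraded; 503 once core is unavailable or critical.
const statusCodeFor = (coreOverall: ServiceStatus): 200 | 503 =>
  coreOverall.level >= ServiceStatusLevels.unavailable ? 503 : 200;
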
diff --git a/src/core/server/status/status_service.mock.ts b/src/core/server/status/status_service.mock.ts
index 8ef34558ca7b2..7241bb2f0479e 100644
--- a/src/core/server/status/status_service.mock.ts
+++ b/src/core/server/status/status_service.mock.ts
@@ -42,6 +42,7 @@ const createSetupContractMock = () => {
const createInternalSetupContractMock = () => {
const setupContract: jest.Mocked<InternalStatusServiceSetup> = {
core$: new BehaviorSubject(availableCoreStatus),
+ coreOverall$: new BehaviorSubject(available),
overall$: new BehaviorSubject(available),
isStatusPageAnonymous: jest.fn().mockReturnValue(false),
plugins: {
diff --git a/src/core/server/status/status_service.test.ts b/src/core/server/status/status_service.test.ts
index 9148f69e079aa..255ed821bc2fe 100644
--- a/src/core/server/status/status_service.test.ts
+++ b/src/core/server/status/status_service.test.ts
@@ -30,6 +30,7 @@ describe('StatusService', () => {
});
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
+
const available: ServiceStatus = {
level: ServiceStatusLevels.available,
summary: 'Available',
@@ -38,6 +39,10 @@ describe('StatusService', () => {
level: ServiceStatusLevels.degraded,
summary: 'This is degraded!',
};
+ const critical: ServiceStatus = {
+ level: ServiceStatusLevels.critical,
+ summary: 'This is critical!',
+ };
type SetupDeps = Parameters<StatusService['setup']>[0];
const setupDeps = (overrides: Partial<SetupDeps>): SetupDeps => {
@@ -321,6 +326,177 @@ describe('StatusService', () => {
});
});
+ describe('coreOverall$', () => {
+ it('exposes an overall summary of core services', async () => {
+ const setup = await service.setup(
+ setupDeps({
+ elasticsearch: {
+ status$: of(degraded),
+ },
+ savedObjects: {
+ status$: of(degraded),
+ },
+ })
+ );
+ expect(await setup.coreOverall$.pipe(first()).toPromise()).toMatchObject({
+ level: ServiceStatusLevels.degraded,
+ summary: '[2] services are degraded',
+ });
+ });
+
+ it('computes the summary depending on the services status', async () => {
+ const setup = await service.setup(
+ setupDeps({
+ elasticsearch: {
+ status$: of(degraded),
+ },
+ savedObjects: {
+ status$: of(critical),
+ },
+ })
+ );
+ expect(await setup.coreOverall$.pipe(first()).toPromise()).toMatchObject({
+ level: ServiceStatusLevels.critical,
+ summary: '[savedObjects]: This is critical!',
+ });
+ });
+
+ it('replays last event', async () => {
+ const setup = await service.setup(
+ setupDeps({
+ elasticsearch: {
+ status$: of(degraded),
+ },
+ savedObjects: {
+ status$: of(degraded),
+ },
+ })
+ );
+
+ const subResult1 = await setup.coreOverall$.pipe(first()).toPromise();
+ const subResult2 = await setup.coreOverall$.pipe(first()).toPromise();
+ const subResult3 = await setup.coreOverall$.pipe(first()).toPromise();
+
+ expect(subResult1).toMatchObject({
+ level: ServiceStatusLevels.degraded,
+ summary: '[2] services are degraded',
+ });
+ expect(subResult2).toMatchObject({
+ level: ServiceStatusLevels.degraded,
+ summary: '[2] services are degraded',
+ });
+ expect(subResult3).toMatchObject({
+ level: ServiceStatusLevels.degraded,
+ summary: '[2] services are degraded',
+ });
+ });
+
+ it('does not emit duplicate events', async () => {
+ const elasticsearch$ = new BehaviorSubject(available);
+ const savedObjects$ = new BehaviorSubject(degraded);
+ const setup = await service.setup(
+ setupDeps({
+ elasticsearch: {
+ status$: elasticsearch$,
+ },
+ savedObjects: {
+ status$: savedObjects$,
+ },
+ })
+ );
+
+ const statusUpdates: ServiceStatus[] = [];
+ const subscription = setup.coreOverall$.subscribe((status) => statusUpdates.push(status));
+
+ // Wait for timers to ensure that duplicate events are still filtered out regardless of debouncing.
+ elasticsearch$.next(available);
+ await delay(500);
+ elasticsearch$.next(available);
+ await delay(500);
+ elasticsearch$.next({
+ level: ServiceStatusLevels.available,
+ summary: `Wow another summary`,
+ });
+ await delay(500);
+ savedObjects$.next(degraded);
+ await delay(500);
+ savedObjects$.next(available);
+ await delay(500);
+ savedObjects$.next(available);
+ await delay(500);
+ subscription.unsubscribe();
+
+ expect(statusUpdates).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "detail": "See the status page for more information",
+ "level": degraded,
+ "meta": Object {
+ "affectedServices": Array [
+ "savedObjects",
+ ],
+ },
+ "summary": "[savedObjects]: This is degraded!",
+ },
+ Object {
+ "level": available,
+ "summary": "All services are available",
+ },
+ ]
+ `);
+ });
+
+ it('debounces events in quick succession', async () => {
+ const savedObjects$ = new BehaviorSubject(available);
+ const setup = await service.setup(
+ setupDeps({
+ elasticsearch: {
+ status$: new BehaviorSubject(available),
+ },
+ savedObjects: {
+ status$: savedObjects$,
+ },
+ })
+ );
+
+ const statusUpdates: ServiceStatus[] = [];
+ const subscription = setup.coreOverall$.subscribe((status) => statusUpdates.push(status));
+
+ // All of these should debounced into a single `available` status
+ savedObjects$.next(degraded);
+ savedObjects$.next(available);
+ savedObjects$.next(degraded);
+ savedObjects$.next(available);
+ savedObjects$.next(degraded);
+ savedObjects$.next(available);
+ savedObjects$.next(degraded);
+ // Waiting for the debounce timeout should cut a new update
+ await delay(500);
+ savedObjects$.next(available);
+ await delay(500);
+ subscription.unsubscribe();
+
+ expect(statusUpdates).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "detail": "See the status page for more information",
+ "level": degraded,
+ "meta": Object {
+ "affectedServices": Array [
+ "savedObjects",
+ ],
+ },
+ "summary": "[savedObjects]: This is degraded!",
+ },
+ Object {
+ "level": available,
+ "summary": "All services are available",
+ },
+ ]
+ `);
+ });
+ });
+
describe('preboot status routes', () => {
let prebootRouterMock: RouterMock;
beforeEach(async () => {
diff --git a/src/core/server/status/status_service.ts b/src/core/server/status/status_service.ts
index 107074bdb98b1..a0ac5b392efe1 100644
--- a/src/core/server/status/status_service.ts
+++ b/src/core/server/status/status_service.ts
@@ -49,7 +49,7 @@ export class StatusService implements CoreService<InternalStatusServiceSetup> {
private overall$?: Observable<ServiceStatus>;
private pluginsStatus?: PluginsStatusService;
- private overallSubscription?: Subscription;
+ private subscriptions: Subscription[] = [];
constructor(private readonly coreContext: CoreContext) {
this.logger = coreContext.logger.get('status');
@@ -88,8 +88,24 @@ export class StatusService implements CoreService {
shareReplay(1)
);
- // Create an unused subscription to ensure all underlying lazy observables are started.
- this.overallSubscription = this.overall$.subscribe();
+ const coreOverall$ = core$.pipe(
+ // Prevent many emissions at once from dependency status resolution from making this too noisy
+ debounceTime(25),
+ map((coreStatus) => {
+ const coreOverall = getSummaryStatus([...Object.entries(coreStatus)]);
+ this.logger.debug(`Recalculated core overall status`, {
+ kibana: {
+ status: coreOverall,
+ },
+ });
+ return coreOverall;
+ }),
+ distinctUntilChanged(isDeepStrictEqual),
+ shareReplay(1)
+ );
+
+ // Create unused subscriptions to ensure all underlying lazy observables are started.
+ this.subscriptions.push(this.overall$.subscribe(), coreOverall$.subscribe());
const commonRouteDeps = {
config: {
@@ -103,6 +119,7 @@ export class StatusService implements CoreService {
overall$: this.overall$,
plugins$: this.pluginsStatus.getAll$(),
core$,
+ coreOverall$,
},
incrementUsageCounter: coreUsageData.incrementUsageCounter,
};
@@ -128,6 +145,7 @@ export class StatusService implements CoreService {
return {
core$,
+ coreOverall$,
overall$: this.overall$,
plugins: {
set: this.pluginsStatus.set.bind(this.pluginsStatus),
@@ -153,10 +171,10 @@ export class StatusService implements CoreService {
this.stop$.next();
this.stop$.complete();
- if (this.overallSubscription) {
- this.overallSubscription.unsubscribe();
- this.overallSubscription = undefined;
- }
+ this.subscriptions.forEach((subscription) => {
+ subscription.unsubscribe();
+ });
+ this.subscriptions = [];
}
private setupCoreStatus({
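
A condensed sketch of the derived-observable pattern added in `setup` above, with `getSummaryStatus` standing for the existing helper used in this file; the import paths mirror the surrounding code:

import type { Observable } from 'rxjs';
import { debounceTime, distinctUntilChanged, map, shareReplay } from 'rxjs/operators';
import { isDeepStrictEqual } from 'util';
import type { CoreStatus, ServiceStatus } from './types';

const deriveCoreOverall$ = (
  core$: Observable<CoreStatus>,
  getSummaryStatus: (statuses: Array<[string, ServiceStatus]>) => ServiceStatus
): Observable<ServiceStatus> =>
  core$.pipe(
    debounceTime(25), // coalesce bursts of emissions from dependent services
    map((coreStatus) => getSummaryStatus([...Object.entries(coreStatus)])),
    distinctUntilChanged(isDeepStrictEqual), // drop consecutive identical summaries
    shareReplay(1) // replay the latest summary to late subscribers such as the status route
  );
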
diff --git a/src/core/server/status/types.ts b/src/core/server/status/types.ts
index bfca4c74d9365..aab3bf302dfea 100644
--- a/src/core/server/status/types.ts
+++ b/src/core/server/status/types.ts
@@ -232,6 +232,11 @@ export interface StatusServiceSetup {
/** @internal */
export interface InternalStatusServiceSetup
extends Pick {
+ /**
+ * Overall status of core's service.
+ */
+ coreOverall$: Observable<ServiceStatus>;
+
// Namespaced under `plugins` key to improve clarity that these are APIs for plugins specifically.
plugins: {
set(plugin: PluginName, status$: Observable<ServiceStatus>): void;
diff --git a/src/core/test_helpers/kbn_server.ts b/src/core/test_helpers/kbn_server.ts
index 67bd6c7455d6d..58720be637e2f 100644
--- a/src/core/test_helpers/kbn_server.ts
+++ b/src/core/test_helpers/kbn_server.ts
@@ -32,7 +32,11 @@ const DEFAULTS_SETTINGS = {
port: 0,
xsrf: { disableProtection: true },
},
- logging: { silent: true },
+ logging: {
+ root: {
+ level: 'off',
+ },
+ },
plugins: {},
migrations: { skip: false },
};
@@ -45,7 +49,6 @@ export function createRootWithSettings(
configs: [],
cliArgs: {
dev: false,
- silent: false,
watch: false,
basePath: false,
runExamples: false,
diff --git a/src/core/types/elasticsearch/search.ts b/src/core/types/elasticsearch/search.ts
index 88d6cda3777dd..a54f5f3758ce3 100644
--- a/src/core/types/elasticsearch/search.ts
+++ b/src/core/types/elasticsearch/search.ts
@@ -48,7 +48,7 @@ type ValueTypeOfField = T extends Record
type MaybeArray<T> = T | T[];
-type Fields = Exclude<estypes.SearchRequest['body']['fields'], undefined>;
+type Fields = Required<estypes.SearchRequest['body']>['fields'];
type DocValueFields = MaybeArray;
export type SearchHit<
diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
index cee43fd85c90f..dd5b66af9ef21 100755
--- a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
+++ b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
@@ -26,8 +26,6 @@ kibana_vars=(
console.enabled
console.proxyConfig
console.proxyFilter
- cpu.cgroup.path.override
- cpuacct.cgroup.path.override
csp.rules
csp.strict
csp.warnLegacyBrowsers
@@ -82,24 +80,13 @@ kibana_vars=(
logging.appenders
logging.appenders.console
logging.appenders.file
- logging.dest
- logging.json
logging.loggers
logging.loggers.appenders
logging.loggers.level
logging.loggers.name
- logging.quiet
logging.root
logging.root.appenders
logging.root.level
- logging.rotate.enabled
- logging.rotate.everyBytes
- logging.rotate.keepFiles
- logging.rotate.pollingInterval
- logging.rotate.usePolling
- logging.silent
- logging.useUTC
- logging.verbose
map.includeElasticMapsService
map.proxyElasticMapsServiceInMaps
map.regionmap
@@ -186,7 +173,6 @@ kibana_vars=(
server.uuid
server.xsrf.allowlist
server.xsrf.disableProtection
- server.xsrf.whitelist
status.allowAnonymous
status.v6ApiFormat
telemetry.allowChangingOptInStatus
@@ -205,7 +191,6 @@ kibana_vars=(
vis_type_vega.enableExternalUrls
xpack.actions.allowedHosts
xpack.actions.customHostSettings
- xpack.actions.enabled
xpack.actions.enabledActionTypes
xpack.actions.maxResponseContentLength
xpack.actions.preconfigured
@@ -222,6 +207,7 @@ kibana_vars=(
xpack.alerting.healthCheck.interval
xpack.alerting.invalidateApiKeysTask.interval
xpack.alerting.invalidateApiKeysTask.removalDelay
+ xpack.alerting.defaultRuleTaskTimeout
xpack.alerts.healthCheck.interval
xpack.alerts.invalidateApiKeysTask.interval
xpack.alerts.invalidateApiKeysTask.removalDelay
@@ -262,7 +248,6 @@ kibana_vars=(
xpack.discoverEnhanced.actions.exploreDataInContextMenu.enabled
xpack.encryptedSavedObjects.encryptionKey
xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys
- xpack.event_log.enabled
xpack.event_log.indexEntries
xpack.event_log.logEntries
xpack.fleet.agentPolicies
@@ -329,7 +314,6 @@ kibana_vars=(
xpack.reporting.csv.useByteOrderMarkEncoding
xpack.reporting.enabled
xpack.reporting.encryptionKey
- xpack.reporting.index
xpack.reporting.kibanaApp
xpack.reporting.kibanaServer.hostname
xpack.reporting.kibanaServer.port
@@ -395,7 +379,6 @@ kibana_vars=(
xpack.securitySolution.prebuiltRulesFromSavedObjects
xpack.spaces.enabled
xpack.spaces.maxSpaces
- xpack.task_manager.enabled
xpack.task_manager.index
xpack.task_manager.max_attempts
xpack.task_manager.max_poll_inactivity_cycles
@@ -437,7 +420,7 @@ umask 0002
# paths. Therefore, Kibana provides a mechanism to override
# reading the cgroup path from /proc/self/cgroup and instead uses the
# cgroup path defined the configuration properties
-# cpu.cgroup.path.override and cpuacct.cgroup.path.override.
+# ops.cGroupOverrides.cpuPath and ops.cGroupOverrides.cpuAcctPath.
# Therefore, we set this value here so that cgroup statistics are
# available for the container this process will run in.
diff --git a/src/dev/run_check_published_api_changes.ts b/src/dev/run_check_published_api_changes.ts
index 7c8105bc40c51..452922ac56bcd 100644
--- a/src/dev/run_check_published_api_changes.ts
+++ b/src/dev/run_check_published_api_changes.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { ToolingLog } from '@kbn/dev-utils';
+import { ToolingLog, getTimeReporter } from '@kbn/dev-utils';
import {
Extractor,
IConfigFile,
@@ -27,6 +27,9 @@ const log = new ToolingLog({
writeTo: process.stdout,
});
+const runStartTime = Date.now();
+const reportTime = getTimeReporter(log, 'scripts/check_published_api_changes');
+
/*
* Step 1: execute build:types
* This users tsconfig.types.json to generate types in `target/types`
@@ -184,6 +187,7 @@ async function run(folder: string, { opts }: { opts: Options }): Promise<void> {
+ reportTime(runStartTime, 'error', {
+ success: false,
+ error: e.message,
+ });
log.error(e);
process.exitCode = 1;
});
diff --git a/src/dev/run_i18n_check.ts b/src/dev/run_i18n_check.ts
index 48ce2e013fc29..8aa93d33f60fd 100644
--- a/src/dev/run_i18n_check.ts
+++ b/src/dev/run_i18n_check.ts
@@ -9,7 +9,7 @@
import chalk from 'chalk';
import Listr from 'listr';
-import { createFailError, run } from '@kbn/dev-utils';
+import { createFailError, run, ToolingLog, getTimeReporter } from '@kbn/dev-utils';
import { ErrorReporter, I18nConfig } from './i18n';
import {
extractDefaultMessages,
@@ -19,6 +19,14 @@ import {
mergeConfigs,
} from './i18n/tasks';
+const toolingLog = new ToolingLog({
+ level: 'info',
+ writeTo: process.stdout,
+});
+
+const runStartTime = Date.now();
+const reportTime = getTimeReporter(toolingLog, 'scripts/i18n_check');
+
const skipOnNoTranslations = ({ config }: { config: I18nConfig }) =>
!config.translations.length && 'No translations found.';
@@ -116,13 +124,24 @@ run(
const reporter = new ErrorReporter();
const messages: Map = new Map();
await list.run({ messages, reporter });
- } catch (error) {
+
+ reportTime(runStartTime, 'total', {
+ success: true,
+ });
+ } catch (error: Error | ErrorReporter) {
process.exitCode = 1;
if (error instanceof ErrorReporter) {
error.errors.forEach((e: string | Error) => log.error(e));
+ reportTime(runStartTime, 'error', {
+ success: false,
+ });
} else {
log.error('Unhandled exception!');
log.error(error);
+ reportTime(runStartTime, 'error', {
+ success: false,
+ error: error.message,
+ });
}
}
},
diff --git a/src/dev/storybook/aliases.ts b/src/dev/storybook/aliases.ts
index a61a2618d6428..c04f0d4f9320f 100644
--- a/src/dev/storybook/aliases.ts
+++ b/src/dev/storybook/aliases.ts
@@ -12,6 +12,7 @@ export const storybookAliases = {
canvas: 'x-pack/plugins/canvas/storybook',
codeeditor: 'src/plugins/kibana_react/public/code_editor/.storybook',
ci_composite: '.ci/.storybook',
+ custom_integrations: 'src/plugins/custom_integrations/storybook',
url_template_editor: 'src/plugins/kibana_react/public/url_template_editor/.storybook',
dashboard: 'src/plugins/dashboard/.storybook',
dashboard_enhanced: 'x-pack/plugins/dashboard_enhanced/.storybook',
diff --git a/src/plugins/custom_integrations/common/index.ts b/src/plugins/custom_integrations/common/index.ts
index 73e15c91ce4bf..e2408d3124604 100755
--- a/src/plugins/custom_integrations/common/index.ts
+++ b/src/plugins/custom_integrations/common/index.ts
@@ -15,6 +15,7 @@ export interface IntegrationCategoryCount {
}
export const INTEGRATION_CATEGORY_DISPLAY = {
+ // Known EPR
aws: 'AWS',
azure: 'Azure',
cloud: 'Cloud',
@@ -39,8 +40,12 @@ export const INTEGRATION_CATEGORY_DISPLAY = {
ticketing: 'Ticketing',
version_control: 'Version control',
web: 'Web',
+
+ // Kibana added
upload_file: 'Upload a file',
+ language_client: 'Language client',
+ // Internal
updates_available: 'Updates available',
};
diff --git a/src/plugins/custom_integrations/kibana.json b/src/plugins/custom_integrations/kibana.json
index 3a78270d9ef09..cd58c1aec1ecb 100755
--- a/src/plugins/custom_integrations/kibana.json
+++ b/src/plugins/custom_integrations/kibana.json
@@ -12,5 +12,8 @@
"extraPublicDirs": [
"common"
],
+ "requiredPlugins": [
+ "presentationUtil"
+ ],
"optionalPlugins": []
}
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/dotnet.svg b/src/plugins/custom_integrations/public/assets/language_clients/dotnet.svg
new file mode 100755
index 0000000000000..92a7ad45d9f9c
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/dotnet.svg
@@ -0,0 +1,7 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/es.svg b/src/plugins/custom_integrations/public/assets/language_clients/es.svg
new file mode 100755
index 0000000000000..b1224e212e098
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/es.svg
@@ -0,0 +1,15 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/go.svg b/src/plugins/custom_integrations/public/assets/language_clients/go.svg
new file mode 100755
index 0000000000000..223a57194fd7c
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/go.svg
@@ -0,0 +1,7 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/java.svg b/src/plugins/custom_integrations/public/assets/language_clients/java.svg
new file mode 100644
index 0000000000000..d24d844695762
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/java.svg
@@ -0,0 +1,10 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/nodejs.svg b/src/plugins/custom_integrations/public/assets/language_clients/nodejs.svg
new file mode 100755
index 0000000000000..4dd358743bbff
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/nodejs.svg
@@ -0,0 +1,46 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/perl.svg b/src/plugins/custom_integrations/public/assets/language_clients/perl.svg
new file mode 100755
index 0000000000000..6ef322a3f58ae
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/perl.svg
@@ -0,0 +1,3 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/php.svg b/src/plugins/custom_integrations/public/assets/language_clients/php.svg
new file mode 100755
index 0000000000000..7a1c20116f466
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/php.svg
@@ -0,0 +1,18 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/python.svg b/src/plugins/custom_integrations/public/assets/language_clients/python.svg
new file mode 100755
index 0000000000000..b7234c439ced5
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/python.svg
@@ -0,0 +1,14 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/ruby.svg b/src/plugins/custom_integrations/public/assets/language_clients/ruby.svg
new file mode 100755
index 0000000000000..5e515bc0dd98e
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/ruby.svg
@@ -0,0 +1,120 @@
+
diff --git a/src/plugins/custom_integrations/public/assets/language_clients/rust.svg b/src/plugins/custom_integrations/public/assets/language_clients/rust.svg
new file mode 100755
index 0000000000000..82dcaf2ade93e
--- /dev/null
+++ b/src/plugins/custom_integrations/public/assets/language_clients/rust.svg
@@ -0,0 +1,4 @@
+
diff --git a/src/plugins/custom_integrations/public/components/index.tsx b/src/plugins/custom_integrations/public/components/index.tsx
new file mode 100644
index 0000000000000..cfbec7d6d5ae5
--- /dev/null
+++ b/src/plugins/custom_integrations/public/components/index.tsx
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React, { Suspense, ComponentType, ReactElement, Ref } from 'react';
+import { EuiLoadingSpinner, EuiErrorBoundary } from '@elastic/eui';
+
+/**
+ * A HOC which supplies React.Suspense with a fallback component, and a `EuiErrorBoundary` to contain errors.
+ * @param Component A component deferred by `React.lazy`
+ * @param fallback A fallback component to render while things load; default is `EuiLoadingSpinner`
+ */
+export const withSuspense = <P extends {}, R = {}>(
+ Component: ComponentType<P>,
+ fallback: ReactElement | null = <EuiLoadingSpinner />
+) =>
+ React.forwardRef((props: P, ref: Ref<R>) => {
+ return (
+ <EuiErrorBoundary>
+ <Suspense fallback={fallback}>
+ <Component {...props} ref={ref} />
+ </Suspense>
+ </EuiErrorBoundary>
+ );
+ });
+
+export const LazyReplacementCard = React.lazy(() => import('./replacement_card'));
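
A brief usage sketch of the two exports above; `MyReplacementCard` and the rendered props are illustrative:

import React from 'react';
import { withSuspense, LazyReplacementCard } from './components';

// Wrap the lazily-loaded card so consumers get a Suspense fallback and an error boundary for free.
const MyReplacementCard = withSuspense(LazyReplacementCard);

export const Example = () => <MyReplacementCard eprPackageName="nginx" />;
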
diff --git a/packages/kbn-legacy-logging/jest.config.js b/src/plugins/custom_integrations/public/components/replacement_card/index.ts
similarity index 58%
rename from packages/kbn-legacy-logging/jest.config.js
rename to src/plugins/custom_integrations/public/components/replacement_card/index.ts
index d00b1c56dae81..631dc1fcb2ba2 100644
--- a/packages/kbn-legacy-logging/jest.config.js
+++ b/src/plugins/custom_integrations/public/components/replacement_card/index.ts
@@ -6,8 +6,10 @@
* Side Public License, v 1.
*/
-module.exports = {
- preset: '@kbn/test',
- rootDir: '../..',
- roots: ['<rootDir>/packages/kbn-legacy-logging'],
-};
+import { ReplacementCard } from './replacement_card';
+
+export { ReplacementCard, Props } from './replacement_card';
+
+// required for dynamic import using React.lazy()
+// eslint-disable-next-line import/no-default-export
+export default ReplacementCard;
diff --git a/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.component.tsx b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.component.tsx
new file mode 100644
index 0000000000000..f66d13fb911b5
--- /dev/null
+++ b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.component.tsx
@@ -0,0 +1,116 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+/** @jsx jsx */
+
+import { css, jsx } from '@emotion/react';
+
+import {
+ htmlIdGenerator,
+ EuiButton,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiPanel,
+ EuiText,
+ EuiAccordion,
+ EuiLink,
+ useEuiTheme,
+} from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n/react';
+
+import { CustomIntegration } from '../../../common';
+import { usePlatformService } from '../../services';
+
+export interface Props {
+ replacements: Array<Pick<CustomIntegration, 'id' | 'title' | 'uiInternalPath'>>;
+}
+
+// TODO - clintandrewhall: should use doc-links service
+const URL_COMPARISON = 'https://ela.st/beats-agent-comparison';
+
+const idGenerator = htmlIdGenerator('replacementCard');
+const alsoAvailable = i18n.translate('customIntegrations.components.replacementAccordionLabel', {
+ defaultMessage: 'Also available in Beats',
+});
+
+const link = (
+
+
+
+);
+
+/**
+ * A pure component, an accordion panel which can display information about replacements for a given EPR module.
+ */
+export const ReplacementCard = ({ replacements }: Props) => {
+ const { euiTheme } = useEuiTheme();
+ const { getAbsolutePath } = usePlatformService();
+
+ if (replacements.length === 0) {
+ return null;
+ }
+
+ const buttons = replacements.map((replacement) => (
+
+
+
+ {replacement.title}
+
+
+
+ ));
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+ {buttons}
+
+
+
+
+
+
+ );
+};
diff --git a/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.stories.tsx b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.stories.tsx
new file mode 100644
index 0000000000000..8fa0674c9b467
--- /dev/null
+++ b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.stories.tsx
@@ -0,0 +1,68 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import { Meta } from '@storybook/react';
+
+import { ReplacementCard as ConnectedComponent } from './replacement_card';
+import { ReplacementCard as PureComponent } from './replacement_card.component';
+
+export default {
+ title: 'Replacement Card',
+ description:
+ 'An accordion panel which can display information about Beats alternatives to a given EPR module, (if available)',
+ decorators: [
+ (storyFn, { globals }) => (
+
+ {storyFn()}
+
+ ),
+ ],
+} as Meta;
+
+interface Args {
+ eprPackageName: string;
+}
+
+const args: Args = {
+ eprPackageName: 'nginx',
+};
+
+const argTypes = {
+ eprPackageName: {
+ control: {
+ type: 'radio',
+ options: ['nginx', 'okta', 'aws', 'apache'],
+ },
+ },
+};
+
+export function ReplacementCard({ eprPackageName }: Args) {
+ return <ConnectedComponent eprPackageName={eprPackageName} />;
+}
+
+ReplacementCard.args = args;
+ReplacementCard.argTypes = argTypes;
+
+export function Component() {
+ return (
+
+ );
+}
diff --git a/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.tsx b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.tsx
new file mode 100644
index 0000000000000..3e829270773a6
--- /dev/null
+++ b/src/plugins/custom_integrations/public/components/replacement_card/replacement_card.tsx
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import useAsync from 'react-use/lib/useAsync';
+import { useFindService } from '../../services';
+
+import { ReplacementCard as Component } from './replacement_card.component';
+
+export interface Props {
+ eprPackageName: string;
+}
+
+/**
+ * A data-connected component which can query about Beats-based replacement options for a given EPR module.
+ */
+export const ReplacementCard = ({ eprPackageName }: Props) => {
+ const { findReplacementIntegrations } = useFindService();
+ const integrations = useAsync(async () => {
+ return await findReplacementIntegrations({ shipper: 'beats', eprPackageName });
+ }, [eprPackageName]);
+
+ const { loading, value: replacements } = integrations;
+
+ if (loading || !replacements || replacements.length === 0) {
+ return null;
+ }
+
+ return <Component replacements={replacements} />;
+};
diff --git a/src/plugins/custom_integrations/public/index.ts b/src/plugins/custom_integrations/public/index.ts
index 9e979dd6692bc..91da75c634a44 100755
--- a/src/plugins/custom_integrations/public/index.ts
+++ b/src/plugins/custom_integrations/public/index.ts
@@ -13,4 +13,8 @@ import { CustomIntegrationsPlugin } from './plugin';
export function plugin() {
return new CustomIntegrationsPlugin();
}
+
export { CustomIntegrationsSetup, CustomIntegrationsStart } from './types';
+
+export { withSuspense, LazyReplacementCard } from './components';
+export { filterCustomIntegrations } from './services/find';
diff --git a/src/plugins/custom_integrations/public/mocks.ts b/src/plugins/custom_integrations/public/mocks.ts
index 2e6bc491c2c5c..a8fedbbb712b2 100644
--- a/src/plugins/custom_integrations/public/mocks.ts
+++ b/src/plugins/custom_integrations/public/mocks.ts
@@ -6,7 +6,11 @@
* Side Public License, v 1.
*/
-import { CustomIntegrationsSetup } from './types';
+import { pluginServices } from './services';
+import { PluginServiceRegistry } from '../../presentation_util/public';
+import { CustomIntegrationsSetup, CustomIntegrationsStart } from './types';
+import { CustomIntegrationsServices } from './services';
+import { providers } from './services/stub';
function createCustomIntegrationsSetup(): jest.Mocked<CustomIntegrationsSetup> {
const mock: jest.Mocked<CustomIntegrationsSetup> = {
@@ -16,6 +20,17 @@ function createCustomIntegrationsSetup(): jest.Mocked {
return mock;
}
+function createCustomIntegrationsStart(): jest.Mocked<CustomIntegrationsStart> {
+ const registry = new PluginServiceRegistry<CustomIntegrationsServices>(providers);
+ pluginServices.setRegistry(registry.start({}));
+ const ContextProvider = pluginServices.getContextProvider();
+
+ return {
+ ContextProvider: jest.fn(ContextProvider),
+ };
+}
+
export const customIntegrationsMock = {
createSetup: createCustomIntegrationsSetup,
+ createStart: createCustomIntegrationsStart,
};
diff --git a/src/plugins/custom_integrations/public/plugin.ts b/src/plugins/custom_integrations/public/plugin.ts
index 7ea7a829e8072..a3470fefba46c 100755
--- a/src/plugins/custom_integrations/public/plugin.ts
+++ b/src/plugins/custom_integrations/public/plugin.ts
@@ -7,13 +7,20 @@
*/
import { CoreSetup, CoreStart, Plugin } from 'src/core/public';
-import { CustomIntegrationsSetup, CustomIntegrationsStart } from './types';
+import {
+ CustomIntegrationsSetup,
+ CustomIntegrationsStart,
+ CustomIntegrationsStartDependencies,
+} from './types';
import {
CustomIntegration,
ROUTES_APPEND_CUSTOM_INTEGRATIONS,
ROUTES_REPLACEMENT_CUSTOM_INTEGRATIONS,
} from '../common';
+import { pluginServices } from './services';
+import { pluginServiceRegistry } from './services/kibana';
+
export class CustomIntegrationsPlugin
implements Plugin
{
@@ -30,8 +37,14 @@ export class CustomIntegrationsPlugin
};
}
- public start(core: CoreStart): CustomIntegrationsStart {
- return {};
+ public start(
+ coreStart: CoreStart,
+ startPlugins: CustomIntegrationsStartDependencies
+ ): CustomIntegrationsStart {
+ pluginServices.setRegistry(pluginServiceRegistry.start({ coreStart, startPlugins }));
+ return {
+ ContextProvider: pluginServices.getContextProvider(),
+ };
}
public stop() {}
diff --git a/src/plugins/custom_integrations/public/services/find.test.ts b/src/plugins/custom_integrations/public/services/find.test.ts
new file mode 100644
index 0000000000000..df52c22313b68
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/find.test.ts
@@ -0,0 +1,95 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { filterCustomIntegrations } from './find';
+import { CustomIntegration } from '../../common';
+
+describe('Custom Integrations Find Service', () => {
+ const integrations: CustomIntegration[] = [
+ {
+ id: 'foo',
+ title: 'Foo',
+ description: 'test integration',
+ type: 'ui_link',
+ uiInternalPath: '/path/to/foo',
+ isBeta: false,
+ icons: [],
+ categories: ['aws', 'cloud'],
+ shipper: 'tests',
+ },
+ {
+ id: 'bar',
+ title: 'Bar',
+ description: 'test integration',
+ type: 'ui_link',
+ uiInternalPath: '/path/to/bar',
+ isBeta: false,
+ icons: [],
+ categories: ['aws'],
+ shipper: 'other',
+ eprOverlap: 'eprValue',
+ },
+ {
+ id: 'bar',
+ title: 'Bar',
+ description: 'test integration',
+ type: 'ui_link',
+ uiInternalPath: '/path/to/bar',
+ isBeta: false,
+ icons: [],
+ categories: ['cloud'],
+ shipper: 'other',
+ eprOverlap: 'eprValue',
+ },
+ {
+ id: 'baz',
+ title: 'Baz',
+ description: 'test integration',
+ type: 'ui_link',
+ uiInternalPath: '/path/to/baz',
+ isBeta: false,
+ icons: [],
+ categories: ['cloud'],
+ shipper: 'tests',
+ eprOverlap: 'eprOtherValue',
+ },
+ ];
+
+ describe('filterCustomIntegrations', () => {
+ test('filters on shipper', () => {
+ let result = filterCustomIntegrations(integrations, { shipper: 'other' });
+ expect(result.length).toBe(2);
+ result = filterCustomIntegrations(integrations, { shipper: 'tests' });
+ expect(result.length).toBe(2);
+ result = filterCustomIntegrations(integrations, { shipper: 'foobar' });
+ expect(result.length).toBe(0);
+ });
+ test('filters on eprOverlap', () => {
+ let result = filterCustomIntegrations(integrations, { eprPackageName: 'eprValue' });
+ expect(result.length).toBe(2);
+ result = filterCustomIntegrations(integrations, { eprPackageName: 'eprOtherValue' });
+ expect(result.length).toBe(1);
+ result = filterCustomIntegrations(integrations, { eprPackageName: 'otherValue' });
+ expect(result.length).toBe(0);
+ });
+ test('filters on categories and shipper, eprOverlap', () => {
+ const result = filterCustomIntegrations(integrations, {
+ shipper: 'other',
+ eprPackageName: 'eprValue',
+ });
+ expect(result.length).toBe(2);
+ });
+ });
+});
diff --git a/src/plugins/custom_integrations/public/services/find.ts b/src/plugins/custom_integrations/public/services/find.ts
new file mode 100644
index 0000000000000..4e69327c351b4
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/find.ts
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { CustomIntegration } from '../../common';
+
+interface FindParams {
+ eprPackageName?: string;
+ shipper?: string;
+}
+
+/**
+ * A plugin service that finds and returns custom integrations.
+ */
+export interface CustomIntegrationsFindService {
+ findReplacementIntegrations(params?: FindParams): Promise<CustomIntegration[]>;
+ findAppendedIntegrations(params?: FindParams): Promise<CustomIntegration[]>;
+}
+
+/**
+ * Filter a set of integrations by eprPackageName, and/or shipper.
+ */
+export const filterCustomIntegrations = (
+ integrations: CustomIntegration[],
+ { eprPackageName, shipper }: FindParams = {}
+) => {
+ if (!eprPackageName && !shipper) {
+ return integrations;
+ }
+
+ let result = integrations;
+
+ if (eprPackageName) {
+ result = result.filter((integration) => integration.eprOverlap === eprPackageName);
+ }
+
+ if (shipper) {
+ result = result.filter((integration) => integration.shipper === shipper);
+ }
+
+ return result;
+};
diff --git a/src/plugins/custom_integrations/public/services/index.ts b/src/plugins/custom_integrations/public/services/index.ts
new file mode 100644
index 0000000000000..8a257ee1a2cd7
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/index.ts
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { PluginServices } from '../../../presentation_util/public';
+
+import { CustomIntegrationsFindService } from './find';
+import { CustomIntegrationsPlatformService } from './platform';
+
+/**
+ * Services used by the custom integrations plugin.
+ */
+export interface CustomIntegrationsServices {
+ find: CustomIntegrationsFindService;
+ platform: CustomIntegrationsPlatformService;
+}
+
+/**
+ * The `PluginServices` object for the custom integrations plugin.
+ * @see /src/plugins/presentation_util/public/services/create/index.ts
+ */
+export const pluginServices = new PluginServices<CustomIntegrationsServices>();
+
+/**
+ * A React hook that provides connections to the `CustomIntegrationsFindService`.
+ */
+export const useFindService = () => (() => pluginServices.getHooks().find.useService())();
+
+/**
+ * A React hook that provides connections to the `CustomIntegrationsPlatformService`.
+ */
+export const usePlatformService = () => (() => pluginServices.getHooks().platform.useService())();
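
A minimal sketch of a component consuming these hooks; the component, its markup, and the relative import path are illustrative:

import React from 'react';
import useAsync from 'react-use/lib/useAsync';
import { useFindService, usePlatformService } from '../services';

export const AppendedIntegrationsList = () => {
  const { findAppendedIntegrations } = useFindService();
  const { getAbsolutePath } = usePlatformService();

  // Load appended integrations once; `value` resolves to CustomIntegration[].
  const { value: integrations = [] } = useAsync(findAppendedIntegrations, []);

  return (
    <ul>
      {integrations.map(({ id, title, uiInternalPath }) => (
        <li key={id}>
          <a href={getAbsolutePath(uiInternalPath)}>{title}</a>
        </li>
      ))}
    </ul>
  );
};
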
diff --git a/src/plugins/custom_integrations/public/services/kibana/find.ts b/src/plugins/custom_integrations/public/services/kibana/find.ts
new file mode 100644
index 0000000000000..5fc7626baa1e1
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/kibana/find.ts
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import {
+ CustomIntegration,
+ ROUTES_APPEND_CUSTOM_INTEGRATIONS,
+ ROUTES_REPLACEMENT_CUSTOM_INTEGRATIONS,
+} from '../../../common';
+import { KibanaPluginServiceFactory } from '../../../../presentation_util/public';
+
+import { CustomIntegrationsStartDependencies } from '../../types';
+import { CustomIntegrationsFindService, filterCustomIntegrations } from '../find';
+
+/**
+ * A type definition for a factory to produce the `CustomIntegrationsFindService` for use in Kibana.
+ * @see /src/plugins/presentation_util/public/services/create/factory.ts
+ */
+export type CustomIntegrationsFindServiceFactory = KibanaPluginServiceFactory<
+ CustomIntegrationsFindService,
+ CustomIntegrationsStartDependencies
+>;
+
+/**
+ * A factory to produce the `CustomIntegrationsFindService` for use in Kibana.
+ */
+export const findServiceFactory: CustomIntegrationsFindServiceFactory = ({ coreStart }) => ({
+ findAppendedIntegrations: async (params) => {
+ const integrations: CustomIntegration[] = await coreStart.http.get(
+ ROUTES_APPEND_CUSTOM_INTEGRATIONS
+ );
+
+ return filterCustomIntegrations(integrations, params);
+ },
+ findReplacementIntegrations: async (params) => {
+ const replacements: CustomIntegration[] = await coreStart.http.get(
+ ROUTES_REPLACEMENT_CUSTOM_INTEGRATIONS
+ );
+
+ return filterCustomIntegrations(replacements, params);
+ },
+});
diff --git a/src/plugins/custom_integrations/public/services/kibana/index.ts b/src/plugins/custom_integrations/public/services/kibana/index.ts
new file mode 100644
index 0000000000000..d3cf27b9bc7c0
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/kibana/index.ts
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import {
+ PluginServiceProviders,
+ PluginServiceProvider,
+ PluginServiceRegistry,
+ KibanaPluginServiceParams,
+} from '../../../../presentation_util/public';
+
+import { CustomIntegrationsServices } from '..';
+import { CustomIntegrationsStartDependencies } from '../../types';
+
+import { findServiceFactory } from './find';
+import { platformServiceFactory } from './platform';
+
+export { findServiceFactory } from './find';
+export { platformServiceFactory } from './platform';
+
+/**
+ * A set of `PluginServiceProvider`s for use in Kibana.
+ * @see /src/plugins/presentation_util/public/services/create/provider.tsx
+ */
+export const pluginServiceProviders: PluginServiceProviders<
+ CustomIntegrationsServices,
+ KibanaPluginServiceParams
+> = {
+ find: new PluginServiceProvider(findServiceFactory),
+ platform: new PluginServiceProvider(platformServiceFactory),
+};
+
+/**
+ * A `PluginServiceRegistry` for use in Kibana.
+ * @see /src/plugins/presentation_util/public/services/create/registry.tsx
+ */
+export const pluginServiceRegistry = new PluginServiceRegistry<
+ CustomIntegrationsServices,
+ KibanaPluginServiceParams
+>(pluginServiceProviders);
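For context, this registry is typically handed to the shared `pluginServices` object in the plugin's `start` method, following the presentation_util pattern. The plugin-class wiring below is a sketch under that assumption; the actual plugin.ts changes are not part of this hunk.

import { CoreStart, Plugin } from 'kibana/public';
import { pluginServices } from './services';
import { pluginServiceRegistry } from './services/kibana';
import { CustomIntegrationsStart, CustomIntegrationsStartDependencies } from './types';

export class CustomIntegrationsPlugin
  implements Plugin<unknown, CustomIntegrationsStart, {}, CustomIntegrationsStartDependencies>
{
  public setup() {
    return {};
  }

  public start(coreStart: CoreStart, startPlugins: CustomIntegrationsStartDependencies) {
    // Point the shared PluginServices instance at the Kibana-backed providers.
    pluginServices.setRegistry(pluginServiceRegistry.start({ coreStart, startPlugins }));
    return { ContextProvider: pluginServices.getContextProvider() };
  }

  public stop() {}
}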
diff --git a/src/plugins/custom_integrations/public/services/kibana/platform.ts b/src/plugins/custom_integrations/public/services/kibana/platform.ts
new file mode 100644
index 0000000000000..e6fe89b68c975
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/kibana/platform.ts
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { KibanaPluginServiceFactory } from '../../../../presentation_util/public';
+
+import type { CustomIntegrationsPlatformService } from '../platform';
+import type { CustomIntegrationsStartDependencies } from '../../types';
+
+/**
+ * A type definition for a factory to produce the `CustomIntegrationsPlatformService` for use in Kibana.
+ * @see /src/plugins/presentation_util/public/services/create/factory.ts
+ */
+export type CustomIntegrationsPlatformServiceFactory = KibanaPluginServiceFactory<
+ CustomIntegrationsPlatformService,
+ CustomIntegrationsStartDependencies
+>;
+
+/**
+ * A factory to produce the `CustomIntegrationsPlatformService` for use in Kibana.
+ */
+export const platformServiceFactory: CustomIntegrationsPlatformServiceFactory = ({
+ coreStart,
+}) => ({
+ getBasePath: coreStart.http.basePath.get,
+ getAbsolutePath: (path: string): string => coreStart.http.basePath.prepend(`${path}`),
+});
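A small test-style sketch of what the Kibana-backed platform service yields; it assumes `coreMock.createStart` accepts a `basePath` option and wires a working `basePath.prepend`, as the core mocks currently do. The space path is hypothetical.

import { coreMock } from 'src/core/public/mocks';
import { platformServiceFactory } from './platform';

test('prepends the configured base path', () => {
  const coreStart = coreMock.createStart({ basePath: '/s/my-space' });
  // Only `coreStart` is read by the factory; the remaining params are stubbed.
  const platform = platformServiceFactory({ coreStart } as any);

  expect(platform.getAbsolutePath('/app/home')).toBe('/s/my-space/app/home');
});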
diff --git a/packages/kbn-legacy-logging/src/utils/index.ts b/src/plugins/custom_integrations/public/services/platform.ts
similarity index 73%
rename from packages/kbn-legacy-logging/src/utils/index.ts
rename to src/plugins/custom_integrations/public/services/platform.ts
index 3036671121fe0..0eb9c7d5c3c10 100644
--- a/packages/kbn-legacy-logging/src/utils/index.ts
+++ b/src/plugins/custom_integrations/public/services/platform.ts
@@ -6,5 +6,7 @@
* Side Public License, v 1.
*/
-export { applyFiltersToKeys } from './apply_filters_to_keys';
-export { getResponsePayloadBytes } from './get_payload_size';
+export interface CustomIntegrationsPlatformService {
+ getBasePath: () => string;
+ getAbsolutePath: (path: string) => string;
+}
diff --git a/src/plugins/custom_integrations/public/services/storybook/index.ts b/src/plugins/custom_integrations/public/services/storybook/index.ts
new file mode 100644
index 0000000000000..4dfed1b37e294
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/storybook/index.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import {
+ PluginServiceProviders,
+ PluginServiceProvider,
+ PluginServiceRegistry,
+} from '../../../../presentation_util/public';
+
+import { CustomIntegrationsServices } from '..';
+import { findServiceFactory } from '../stub/find';
+import { platformServiceFactory } from '../stub/platform';
+
+export { findServiceFactory } from '../stub/find';
+export { platformServiceFactory } from '../stub/platform';
+
+/**
+ * A set of `PluginServiceProvider`s for use in Storybook.
+ * @see /src/plugins/presentation_util/public/services/create/provider.tsx
+ */
+export const providers: PluginServiceProviders<CustomIntegrationsServices> = {
+ find: new PluginServiceProvider(findServiceFactory),
+ platform: new PluginServiceProvider(platformServiceFactory),
+};
+
+/**
+ * A `PluginServiceRegistry` for use in Storybook.
+ * @see /src/plugins/presentation_util/public/services/create/registry.tsx
+ */
+export const registry = new PluginServiceRegistry(providers);
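A hedged sketch of how this registry can back a Storybook decorator so stories render against the stub services. The decorator file location and its exact shape are assumptions; only `registry` and `pluginServices` come from this plugin.

import React from 'react';
import { pluginServices } from '../public/services';
import { registry } from '../public/services/storybook';

// Activate the stub-backed providers once for the Storybook runtime.
pluginServices.setRegistry(registry.start({}));
const ContextProvider = pluginServices.getContextProvider();

// Wrap every story so hooks like `useFindService` resolve to the stub services.
export const servicesDecorator = (storyFn: () => React.ReactNode) => (
  <ContextProvider>{storyFn()}</ContextProvider>
);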
diff --git a/src/plugins/custom_integrations/public/services/stub/find.ts b/src/plugins/custom_integrations/public/services/stub/find.ts
new file mode 100644
index 0000000000000..08def4e63471d
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/stub/find.ts
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { PluginServiceFactory } from '../../../../presentation_util/public';
+
+import { CustomIntegrationsFindService, filterCustomIntegrations } from '../find';
+
+/**
+ * A type definition for a factory to produce the `CustomIntegrationsFindService` with stubbed output.
+ * @see /src/plugins/presentation_util/public/services/create/factory.ts
+ */
+export type CustomIntegrationsFindServiceFactory =
+  PluginServiceFactory<CustomIntegrationsFindService>;
+
+/**
+ * A factory to produce the `CustomIntegrationsFindService` with stubbed output.
+ */
+export const findServiceFactory: CustomIntegrationsFindServiceFactory = () => ({
+ findAppendedIntegrations: async (params) => {
+ const { integrations } = await import('./fixtures/integrations');
+ return filterCustomIntegrations(integrations, params);
+ },
+ findReplacementIntegrations: async (params) => {
+ const { integrations } = await import('./fixtures/integrations');
+ return filterCustomIntegrations(integrations, params);
+ },
+});
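A short test-style sketch of the stub service; because the fixtures are loaded via dynamic import, the assertion stays loose. The `shipper` filter key is an assumption about what `filterCustomIntegrations` accepts.

import { findServiceFactory } from './find';

test('stub find service resolves fixture integrations', async () => {
  const find = findServiceFactory({});

  const results = await find.findReplacementIntegrations({ shipper: 'beats' });

  // The fixture list ships dozens of beats-shipped integrations, so a loose check is enough.
  expect(results.length).toBeGreaterThan(0);
});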
diff --git a/src/plugins/custom_integrations/public/services/stub/fixtures/integrations.ts b/src/plugins/custom_integrations/public/services/stub/fixtures/integrations.ts
new file mode 100644
index 0000000000000..7553deada9e26
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/stub/fixtures/integrations.ts
@@ -0,0 +1,1884 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { CustomIntegration } from '../../../../common';
+
+export const integrations: CustomIntegration[] = [
+ {
+ type: 'ui_link',
+ id: 'System logs',
+ title: 'System logs',
+ categories: ['os_system', 'security'],
+ uiInternalPath: '/app/home#/tutorial/systemLogs',
+ description: 'Collect system logs of common Unix/Linux based distributions.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'system',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'System metrics',
+ title: 'System metrics',
+ categories: ['os_system', 'security'],
+ uiInternalPath: '/app/home#/tutorial/systemMetrics',
+ description: 'Collect CPU, memory, network, and disk statistics from the host.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/system.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'system',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Apache logs',
+ title: 'Apache logs',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/apacheLogs',
+ description: 'Collect and parse access and error logs created by the Apache HTTP server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoApache',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'apache',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Apache metrics',
+ title: 'Apache metrics',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/apacheMetrics',
+ description: 'Fetch internal metrics from the Apache 2 HTTP server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoApache',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'apache',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Elasticsearch logs',
+ title: 'Elasticsearch logs',
+ categories: ['containers', 'os_system'],
+ uiInternalPath: '/app/home#/tutorial/elasticsearchLogs',
+ description: 'Collect and parse logs created by Elasticsearch.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoElasticsearch',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'elasticsearch',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'IIS logs',
+ title: 'IIS logs',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/iisLogs',
+ description: 'Collect and parse access and error logs created by the IIS HTTP server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/iis.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'iis',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Kafka logs',
+ title: 'Kafka logs',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/kafkaLogs',
+ description: 'Collect and parse logs created by Kafka.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoKafka',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'kafka',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Logstash logs',
+ title: 'Logstash logs',
+ categories: ['custom'],
+ uiInternalPath: '/app/home#/tutorial/logstashLogs',
+ description: 'Collect Logstash main and slow logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogstash',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'logstash',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Nginx logs',
+ title: 'Nginx logs',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/nginxLogs',
+ description: 'Collect and parse access and error logs created by the Nginx HTTP server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoNginx',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'nginx',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Nginx metrics',
+ title: 'Nginx metrics',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/nginxMetrics',
+ description: 'Fetch internal metrics from the Nginx HTTP server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoNginx',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'nginx',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MySQL logs',
+ title: 'MySQL logs',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mysqlLogs',
+ description: 'Collect and parse error and slow logs created by MySQL.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoMySQL',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mysql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MySQL metrics',
+ title: 'MySQL metrics',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mysqlMetrics',
+ description: 'Fetch internal metrics from MySQL.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoMySQL',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mysql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MongoDB metrics',
+ title: 'MongoDB metrics',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mongodbMetrics',
+ description: 'Fetch internal metrics from MongoDB.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoMongodb',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mongodb',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Osquery logs',
+ title: 'Osquery logs',
+ categories: ['security', 'os_system'],
+ uiInternalPath: '/app/home#/tutorial/osqueryLogs',
+ description: 'Collect osquery logs in JSON format.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/osquery.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'osquery',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'PHP-FPM metrics',
+ title: 'PHP-FPM metrics',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/phpfpmMetrics',
+ description: 'Fetch internal metrics from PHP-FPM.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoPhp',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'php_fpm',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'PostgreSQL metrics',
+ title: 'PostgreSQL metrics',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/postgresqlMetrics',
+ description: 'Fetch internal metrics from PostgreSQL.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoPostgres',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'postgresql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'PostgreSQL logs',
+ title: 'PostgreSQL logs',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/postgresqlLogs',
+ description: 'Collect and parse error and slow logs created by PostgreSQL.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoPostgres',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'postgresql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'RabbitMQ metrics',
+ title: 'RabbitMQ metrics',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/rabbitmqMetrics',
+ description: 'Fetch internal metrics from the RabbitMQ server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoRabbitmq',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'rabbitmq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Redis logs',
+ title: 'Redis logs',
+ categories: ['datastore', 'message_queue'],
+ uiInternalPath: '/app/home#/tutorial/redisLogs',
+ description: 'Collect and parse error and slow logs created by Redis.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoRedis',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'redis',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Redis metrics',
+ title: 'Redis metrics',
+ categories: ['datastore', 'message_queue'],
+ uiInternalPath: '/app/home#/tutorial/redisMetrics',
+ description: 'Fetch internal metrics from Redis.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoRedis',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'redis',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Suricata logs',
+ title: 'Suricata logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/suricataLogs',
+ description: 'Collect Suricata IDS/IPS/NSM logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/suricata.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'suricata',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Docker metrics',
+ title: 'Docker metrics',
+ categories: ['containers', 'os_system'],
+ uiInternalPath: '/app/home#/tutorial/dockerMetrics',
+ description: 'Fetch metrics about your Docker containers.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoDocker',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'docker',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Kubernetes metrics',
+ title: 'Kubernetes metrics',
+ categories: ['containers', 'kubernetes'],
+ uiInternalPath: '/app/home#/tutorial/kubernetesMetrics',
+ description: 'Fetch metrics from your Kubernetes installation.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoKubernetes',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'kubernetes',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'uWSGI metrics',
+ title: 'uWSGI metrics',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/uwsgiMetrics',
+ description: 'Fetch internal metrics from the uWSGI server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/uwsgi.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'uwsgi',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'NetFlow / IPFIX Collector',
+ title: 'NetFlow / IPFIX Collector',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/netflowLogs',
+ description: 'Collect NetFlow and IPFIX flow records.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoBeats',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'netflow',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Traefik logs',
+ title: 'Traefik logs',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/traefikLogs',
+ description: 'Collect Traefik access logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/traefik.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'traefik',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Ceph metrics',
+ title: 'Ceph metrics',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/cephMetrics',
+ description: 'Fetch internal metrics from the Ceph server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoCeph',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'ceph',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Aerospike metrics',
+ title: 'Aerospike metrics',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/aerospikeMetrics',
+ description: 'Fetch internal metrics from the Aerospike server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAerospike',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'aerospike',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Couchbase metrics',
+ title: 'Couchbase metrics',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/couchbaseMetrics',
+ description: 'Fetch internal metrics from Couchbase.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoCouchbase',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'couchbase',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Dropwizard metrics',
+ title: 'Dropwizard metrics',
+ categories: ['elastic_stack', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/dropwizardMetrics',
+ description: 'Fetch internal metrics from Dropwizard Java application.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoDropwizard',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'dropwizard',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Elasticsearch metrics',
+ title: 'Elasticsearch metrics',
+ categories: ['elastic_stack', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/elasticsearchMetrics',
+ description: 'Fetch internal metrics from Elasticsearch.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoElasticsearch',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'elasticsearch',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Etcd metrics',
+ title: 'Etcd metrics',
+ categories: ['elastic_stack', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/etcdMetrics',
+ description: 'Fetch internal metrics from the Etcd server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoEtcd',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'etcd',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'HAProxy metrics',
+ title: 'HAProxy metrics',
+ categories: ['network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/haproxyMetrics',
+ description: 'Fetch internal metrics from the HAProxy server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoHAproxy',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'haproxy',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Kafka metrics',
+ title: 'Kafka metrics',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/kafkaMetrics',
+ description: 'Fetch internal metrics from the Kafka server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoKafka',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'kafka',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Kibana metrics',
+ title: 'Kibana metrics',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/kibanaMetrics',
+ description: 'Fetch internal metrics from Kibana.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoKibana',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'kibana',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Memcached metrics',
+ title: 'Memcached metrics',
+ categories: ['custom'],
+ uiInternalPath: '/app/home#/tutorial/memcachedMetrics',
+ description: 'Fetch internal metrics from the Memcached server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoMemcached',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'memcached',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Munin metrics',
+ title: 'Munin metrics',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/muninMetrics',
+ description: 'Fetch internal metrics from the Munin server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/munin.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'munin',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'vSphere metrics',
+ title: 'vSphere metrics',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/vsphereMetrics',
+ description: 'Fetch internal metrics from vSphere.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/vsphere.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'vsphere',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Windows metrics',
+ title: 'Windows metrics',
+ categories: ['os_system', 'security'],
+ uiInternalPath: '/app/home#/tutorial/windowsMetrics',
+ description: 'Fetch internal metrics from Windows.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoWindows',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'windows',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Windows Event Log',
+ title: 'Windows Event Log',
+ categories: ['os_system', 'security'],
+ uiInternalPath: '/app/home#/tutorial/windowsEventLogs',
+ description: 'Fetch logs from the Windows Event Log.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoWindows',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'windows',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Golang metrics',
+ title: 'Golang metrics',
+ categories: ['google_cloud', 'cloud', 'network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/golangMetrics',
+ description: 'Fetch internal metrics from a Golang app.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoGolang',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'golang',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Logstash metrics',
+ title: 'Logstash metrics',
+ categories: ['custom'],
+ uiInternalPath: '/app/home#/tutorial/logstashMetrics',
+ description: 'Fetch internal metrics from a Logstash server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogstash',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'logstash',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Prometheus metrics',
+ title: 'Prometheus metrics',
+ categories: ['monitoring', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/prometheusMetrics',
+ description: 'Fetch metrics from a Prometheus exporter.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoPrometheus',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'prometheus',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Zookeeper metrics',
+ title: 'Zookeeper metrics',
+ categories: ['datastore', 'config_management'],
+ uiInternalPath: '/app/home#/tutorial/zookeeperMetrics',
+ description: 'Fetch internal metrics from a Zookeeper server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/zookeeper.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'zookeeper',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Uptime Monitors',
+ title: 'Uptime Monitors',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/uptimeMonitors',
+ description: 'Monitor services for their availability',
+ icons: [
+ {
+ type: 'eui',
+ src: 'uptimeApp',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'uptime',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'AWS Cloudwatch logs',
+ title: 'AWS Cloudwatch logs',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/cloudwatchLogs',
+ description: 'Collect Cloudwatch logs with Functionbeat.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAWS',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'aws',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'AWS metrics',
+ title: 'AWS metrics',
+ categories: ['aws', 'cloud', 'datastore', 'security', 'network'],
+ uiInternalPath: '/app/home#/tutorial/awsMetrics',
+ description: 'Fetch monitoring metrics for EC2 instances from the AWS APIs and Cloudwatch.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAWS',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'aws',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Microsoft SQL Server Metrics',
+ title: 'Microsoft SQL Server Metrics',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mssqlMetrics',
+ description: 'Fetch monitoring metrics from a Microsoft SQL Server instance',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/mssql.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mssql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'NATS metrics',
+ title: 'NATS metrics',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/natsMetrics',
+ description: 'Fetch monitoring metrics from the Nats server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/nats.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'nats',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'NATS logs',
+ title: 'NATS logs',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/natsLogs',
+ description: 'Collect and parse logs created by Nats.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/nats.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'nats',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Zeek logs',
+ title: 'Zeek logs',
+ categories: ['network', 'monitoring', 'security'],
+ uiInternalPath: '/app/home#/tutorial/zeekLogs',
+ description: 'Collect Zeek network security monitoring logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/zeek.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'zeek',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CoreDNS metrics',
+ title: 'CoreDNS metrics',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/corednsMetrics',
+ description: 'Fetch monitoring metrics from the CoreDNS server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/coredns.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'coredns',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CoreDNS logs',
+ title: 'CoreDNS logs',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/corednsLogs',
+ description: 'Collect CoreDNS logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/coredns.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'coredns',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Auditbeat',
+ title: 'Auditbeat',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/auditbeat',
+ description: 'Collect audit data from your hosts.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'securityAnalyticsApp',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'auditbeat',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Iptables logs',
+ title: 'Iptables logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/iptablesLogs',
+ description: 'Collect iptables and ip6tables logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/linux.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'iptables',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Cisco logs',
+ title: 'Cisco logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/ciscoLogs',
+ description: 'Collect Cisco network device logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/cisco.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'cisco',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Envoy Proxy logs',
+ title: 'Envoy Proxy logs',
+ categories: ['elastic_stack', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/envoyproxyLogs',
+ description: 'Collect Envoy Proxy logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/envoyproxy.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'envoyproxy',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CouchDB metrics',
+ title: 'CouchDB metrics',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/couchdbMetrics',
+    description: 'Fetch monitoring metrics from the CouchDB server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/couchdb.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'couchdb',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Consul metrics',
+ title: 'Consul metrics',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/consulMetrics',
+ description: 'Fetch monitoring metrics from the Consul server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/consul.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'consul',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CockroachDB metrics',
+ title: 'CockroachDB metrics',
+ categories: ['security', 'network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/cockroachdbMetrics',
+ description: 'Fetch monitoring metrics from the CockroachDB server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/cockroachdb.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'cockroachdb',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Traefik metrics',
+ title: 'Traefik metrics',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/traefikMetrics',
+ description: 'Fetch monitoring metrics from Traefik.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/traefik.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'traefik',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'AWS S3 based logs',
+ title: 'AWS S3 based logs',
+ categories: ['aws', 'cloud', 'datastore', 'security', 'network'],
+ uiInternalPath: '/app/home#/tutorial/awsLogs',
+ description: 'Collect AWS logs from S3 bucket with Filebeat.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAWS',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'aws',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'ActiveMQ logs',
+ title: 'ActiveMQ logs',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/activemqLogs',
+ description: 'Collect ActiveMQ logs with Filebeat.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/activemq.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'activemq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'ActiveMQ metrics',
+ title: 'ActiveMQ metrics',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/activemqMetrics',
+ description: 'Fetch monitoring metrics from ActiveMQ instances.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/activemq.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'activemq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Azure metrics',
+ title: 'Azure metrics',
+ categories: ['azure', 'cloud', 'network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/azureMetrics',
+ description: 'Fetch Azure Monitor metrics.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAzure',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'azure',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'IBM MQ logs',
+ title: 'IBM MQ logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/ibmmqLogs',
+ description: 'Collect IBM MQ logs with Filebeat.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/ibmmq.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'ibmmq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'IBM MQ metrics',
+ title: 'IBM MQ metrics',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/ibmmqMetrics',
+ description: 'Fetch monitoring metrics from IBM MQ instances.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/ibmmq.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'ibmmq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'STAN metrics',
+ title: 'STAN metrics',
+ categories: ['message_queue', 'kubernetes'],
+ uiInternalPath: '/app/home#/tutorial/stanMetrics',
+ description: 'Fetch monitoring metrics from the STAN server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/stan.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'stan',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Envoy Proxy metrics',
+ title: 'Envoy Proxy metrics',
+ categories: ['elastic_stack', 'datastore'],
+ uiInternalPath: '/app/home#/tutorial/envoyproxyMetrics',
+ description: 'Fetch monitoring metrics from Envoy Proxy.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/envoyproxy.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'envoyproxy',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Statsd metrics',
+ title: 'Statsd metrics',
+ categories: ['message_queue', 'kubernetes'],
+ uiInternalPath: '/app/home#/tutorial/statsdMetrics',
+ description: 'Fetch monitoring metrics from statsd.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/statsd.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'statsd',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Redis Enterprise metrics',
+ title: 'Redis Enterprise metrics',
+ categories: ['datastore', 'message_queue'],
+ uiInternalPath: '/app/home#/tutorial/redisenterpriseMetrics',
+ description: 'Fetch monitoring metrics from Redis Enterprise Server.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoRedis',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'redisenterprise',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'OpenMetrics metrics',
+ title: 'OpenMetrics metrics',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/openmetricsMetrics',
+ description: 'Fetch metrics from an endpoint that serves metrics in OpenMetrics format.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/openmetrics.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'openmetrics',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'oracle metrics',
+ title: 'oracle metrics',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/oracleMetrics',
+    description: 'Fetch internal metrics from an Oracle server.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/oracle.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'oracle',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'IIS Metrics',
+ title: 'IIS Metrics',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/iisMetrics',
+ description: 'Collect IIS server related metrics.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/iis.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'iis',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Azure logs',
+ title: 'Azure logs',
+ categories: ['azure', 'cloud', 'network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/azureLogs',
+ description: 'Collects Azure activity and audit related logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoAzure',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'azure',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Google Cloud metrics',
+ title: 'Google Cloud metrics',
+ categories: ['google_cloud', 'cloud', 'network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/gcpMetrics',
+ description:
+ 'Fetch monitoring metrics from Google Cloud Platform using Stackdriver Monitoring API.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoGCP',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'gcp',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Auditd logs',
+ title: 'Auditd logs',
+ categories: ['os_system'],
+ uiInternalPath: '/app/home#/tutorial/auditdLogs',
+ description: 'Collect logs from the Linux auditd daemon.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/linux.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'auditd',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Barracuda logs',
+ title: 'Barracuda logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/barracudaLogs',
+ description: 'Collect Barracuda Web Application Firewall logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/barracuda.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'barracuda',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Bluecoat logs',
+ title: 'Bluecoat logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/bluecoatLogs',
+ description: 'Collect Blue Coat Director logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'bluecoat',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CEF logs',
+ title: 'CEF logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/cefLogs',
+ description: 'Collect Common Event Format (CEF) log data over syslog.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'cef',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Check Point logs',
+ title: 'Check Point logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/checkpointLogs',
+ description: 'Collect Check Point firewall logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/checkpoint.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'checkpoint',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CrowdStrike logs',
+ title: 'CrowdStrike logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/crowdstrikeLogs',
+ description: 'Collect CrowdStrike Falcon logs using the Falcon SIEM Connector.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/crowdstrike.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'crowdstrike',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'CylancePROTECT logs',
+ title: 'CylancePROTECT logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/cylanceLogs',
+ description: 'Collect CylancePROTECT logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/cylance.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'cylance',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'F5 logs',
+ title: 'F5 logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/f5Logs',
+ description: 'Collect F5 Big-IP Access Policy Manager logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/f5.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'f5',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Fortinet logs',
+ title: 'Fortinet logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/fortinetLogs',
+ description: 'Collect Fortinet FortiOS logs over syslog.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/fortinet.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'fortinet',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Google Cloud logs',
+ title: 'Google Cloud logs',
+ categories: ['google_cloud', 'cloud', 'network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/gcpLogs',
+ description: 'Collect Google Cloud audit, firewall, and VPC flow logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoGoogleG',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'gcp',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'GSuite logs',
+ title: 'GSuite logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/gsuiteLogs',
+ description: 'Collect GSuite activity reports.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoGoogleG',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'gsuite',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'HAProxy logs',
+ title: 'HAProxy logs',
+ categories: ['network', 'web'],
+ uiInternalPath: '/app/home#/tutorial/haproxyLogs',
+ description: 'Collect HAProxy logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoHAproxy',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'haproxy',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Icinga logs',
+ title: 'Icinga logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/icingaLogs',
+ description: 'Collect Icinga main, debug, and startup logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/icinga.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'icinga',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Imperva logs',
+ title: 'Imperva logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/impervaLogs',
+ description: 'Collect Imperva SecureSphere logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'imperva',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Infoblox logs',
+ title: 'Infoblox logs',
+ categories: ['network'],
+ uiInternalPath: '/app/home#/tutorial/infobloxLogs',
+ description: 'Collect Infoblox NIOS logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/infoblox.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'infoblox',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Juniper Logs',
+ title: 'Juniper Logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/juniperLogs',
+ description: 'Collect Juniper JUNOS logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/juniper.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'juniper',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Kibana Logs',
+ title: 'Kibana Logs',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/kibanaLogs',
+ description: 'Collect Kibana logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoKibana',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'kibana',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Microsoft Defender ATP logs',
+ title: 'Microsoft Defender ATP logs',
+ categories: ['network', 'security', 'azure'],
+ uiInternalPath: '/app/home#/tutorial/microsoftLogs',
+ description: 'Collect Microsoft Defender ATP alerts.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/microsoft.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'microsoft',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MISP threat intel logs',
+ title: 'MISP threat intel logs',
+ categories: ['network', 'security', 'azure'],
+ uiInternalPath: '/app/home#/tutorial/mispLogs',
+ description: 'Collect MISP threat intelligence data with Filebeat.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/misp.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'misp',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MongoDB logs',
+ title: 'MongoDB logs',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mongodbLogs',
+ description: 'Collect MongoDB logs.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoMongodb',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mongodb',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'MSSQL logs',
+ title: 'MSSQL logs',
+ categories: ['datastore'],
+ uiInternalPath: '/app/home#/tutorial/mssqlLogs',
+ description: 'Collect MSSQL logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/microsoft.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'mssql',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Arbor Peakflow logs',
+ title: 'Arbor Peakflow logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/netscoutLogs',
+ description: 'Collect Netscout Arbor Peakflow SP logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/netscout.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'netscout',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Office 365 logs',
+ title: 'Office 365 logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/o365Logs',
+ description: 'Collect Office 365 activity logs via the Office 365 API.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/o365.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'o365',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Okta logs',
+ title: 'Okta logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/oktaLogs',
+ description: 'Collect the Okta system log via the Okta API.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/okta.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'okta',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Palo Alto Networks PAN-OS logs',
+ title: 'Palo Alto Networks PAN-OS logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/panwLogs',
+ description:
+ 'Collect Palo Alto Networks PAN-OS threat and traffic logs over syslog or from a log file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/paloalto.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'panw',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'RabbitMQ logs',
+ title: 'RabbitMQ logs',
+ categories: ['message_queue'],
+ uiInternalPath: '/app/home#/tutorial/rabbitmqLogs',
+ description: 'Collect RabbitMQ logs.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/rabbitmq.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'rabbitmq',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Radware DefensePro logs',
+ title: 'Radware DefensePro logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/radwareLogs',
+ description: 'Collect Radware DefensePro logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/radware.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'radware',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Google Santa logs',
+ title: 'Google Santa logs',
+ categories: ['security', 'os_system'],
+ uiInternalPath: '/app/home#/tutorial/santaLogs',
+ description: 'Collect Google Santa logs about process executions on MacOS.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'santa',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Sonicwall FW logs',
+ title: 'Sonicwall FW logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/sonicwallLogs',
+ description: 'Collect Sonicwall-FW logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/sonicwall.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'sonicwall',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Sophos logs',
+ title: 'Sophos logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/sophosLogs',
+ description: 'Collect Sophos XG SFOS logs over syslog.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/sophos.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'sophos',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Squid logs',
+ title: 'Squid logs',
+ categories: ['security'],
+ uiInternalPath: '/app/home#/tutorial/squidLogs',
+ description: 'Collect Squid logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'logoLogging',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'squid',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Tomcat logs',
+ title: 'Tomcat logs',
+ categories: ['web', 'security'],
+ uiInternalPath: '/app/home#/tutorial/tomcatLogs',
+ description: 'Collect Apache Tomcat logs over syslog or from a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/tomcat.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'tomcat',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'Zscaler Logs',
+ title: 'Zscaler Logs',
+ categories: ['network', 'security'],
+ uiInternalPath: '/app/home#/tutorial/zscalerLogs',
+ description: 'This is a module for receiving Zscaler NSS logs over Syslog or a file.',
+ icons: [
+ {
+ type: 'svg',
+ src: '/dqo/plugins/home/assets/logos/zscaler.svg',
+ },
+ ],
+ shipper: 'beats',
+ eprOverlap: 'zscaler',
+ isBeta: false,
+ },
+ {
+ type: 'ui_link',
+ id: 'apm',
+ title: 'APM',
+ categories: ['web'],
+ uiInternalPath: '/app/home#/tutorial/apm',
+ description: 'Collect in-depth performance metrics and errors from inside your applications.',
+ icons: [
+ {
+ type: 'eui',
+ src: 'apmApp',
+ },
+ ],
+ shipper: 'tutorial',
+ isBeta: false,
+ eprOverlap: 'apm',
+ },
+];
diff --git a/src/plugins/custom_integrations/public/services/stub/index.ts b/src/plugins/custom_integrations/public/services/stub/index.ts
new file mode 100644
index 0000000000000..fe7465949d565
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/stub/index.ts
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import {
+ PluginServiceProviders,
+ PluginServiceProvider,
+ PluginServiceRegistry,
+} from '../../../../presentation_util/public';
+
+import { CustomIntegrationsServices } from '..';
+import { findServiceFactory } from './find';
+import { platformServiceFactory } from './platform';
+
+export { findServiceFactory } from './find';
+export { platformServiceFactory } from './platform';
+
+export const providers: PluginServiceProviders<CustomIntegrationsServices> = {
+ find: new PluginServiceProvider(findServiceFactory),
+ platform: new PluginServiceProvider(platformServiceFactory),
+};
+
+export const registry = new PluginServiceRegistry(providers);
diff --git a/src/plugins/custom_integrations/public/services/stub/platform.ts b/src/plugins/custom_integrations/public/services/stub/platform.ts
new file mode 100644
index 0000000000000..60480f9905cb9
--- /dev/null
+++ b/src/plugins/custom_integrations/public/services/stub/platform.ts
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { PluginServiceFactory } from '../../../../presentation_util/public';
+
+import type { CustomIntegrationsPlatformService } from '../platform';
+
+/**
+ * A type definition for a factory to produce the `CustomIntegrationsPlatformService` with stubbed output.
+ * @see /src/plugins/presentation_util/public/services/create/factory.ts
+ */
+export type CustomIntegrationsPlatformServiceFactory =
+  PluginServiceFactory<CustomIntegrationsPlatformService>;
+
+/**
+ * A factory to produce the `CustomIntegrationsPlatformService` with stubbed output.
+ */
+export const platformServiceFactory: CustomIntegrationsPlatformServiceFactory = () => ({
+ getBasePath: () => '/basePath',
+ getAbsolutePath: (path: string): string => `/basePath${path}`,
+});
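A tiny sanity-check sketch of the stub platform service, derived directly from the values above; the test scaffolding itself is illustrative.

import { platformServiceFactory } from './platform';

test('stub platform service prefixes paths with its fixed base path', () => {
  const platform = platformServiceFactory({});

  expect(platform.getBasePath()).toBe('/basePath');
  expect(platform.getAbsolutePath('/app/home')).toBe('/basePath/app/home');
});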
diff --git a/src/plugins/custom_integrations/public/types.ts b/src/plugins/custom_integrations/public/types.ts
index 9a12af767ecbc..946115329e2b5 100755
--- a/src/plugins/custom_integrations/public/types.ts
+++ b/src/plugins/custom_integrations/public/types.ts
@@ -6,14 +6,19 @@
* Side Public License, v 1.
*/
+import type { PresentationUtilPluginStart } from '../../presentation_util/public';
+
import { CustomIntegration } from '../common';
export interface CustomIntegrationsSetup {
   getAppendCustomIntegrations: () => Promise<CustomIntegration[]>;
   getReplacementCustomIntegrations: () => Promise<CustomIntegration[]>;
}
-// eslint-disable-next-line @typescript-eslint/no-empty-interface
-export interface CustomIntegrationsStart {}
-// eslint-disable-next-line @typescript-eslint/no-empty-interface
-export interface AppPluginStartDependencies {}
+export interface CustomIntegrationsStart {
+ ContextProvider: React.FC;
+}
+
+export interface CustomIntegrationsStartDependencies {
+ presentationUtil: PresentationUtilPluginStart;
+}
diff --git a/src/plugins/custom_integrations/server/language_clients/index.ts b/src/plugins/custom_integrations/server/language_clients/index.ts
new file mode 100644
index 0000000000000..da61f804b4242
--- /dev/null
+++ b/src/plugins/custom_integrations/server/language_clients/index.ts
@@ -0,0 +1,181 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { CoreSetup } from 'kibana/server';
+import { CustomIntegrationRegistry } from '../custom_integration_registry';
+import { CustomIntegrationIcon, PLUGIN_ID } from '../../common';
+
+interface LanguageIntegration {
+ id: string;
+ title: string;
+ icon?: string;
+ euiIconName?: string;
+ description: string;
+ docUrlTemplate: string;
+}
+
+const ELASTIC_WEBSITE_URL = 'https://www.elastic.co';
+const ELASTICSEARCH_CLIENT_URL = `${ELASTIC_WEBSITE_URL}/guide/en/elasticsearch/client`;
+export const integrations: LanguageIntegration[] = [
+ {
+ id: 'all',
+ title: i18n.translate('customIntegrations.languageclients.AllTitle', {
+ defaultMessage: 'Elasticsearch Clients',
+ }),
+ euiIconName: 'logoElasticsearch',
+ description: i18n.translate('customIntegrations.languageclients.AllDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official language clients.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/index.html`,
+ },
+ {
+ id: 'javascript',
+ title: i18n.translate('customIntegrations.languageclients.JavascriptTitle', {
+ defaultMessage: 'Elasticsearch JavaScript Client',
+ }),
+ icon: 'nodejs.svg',
+ description: i18n.translate('customIntegrations.languageclients.JavascriptDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Node.js client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/javascript-api/{branch}/introduction.html`,
+ },
+ {
+ id: 'ruby',
+ title: i18n.translate('customIntegrations.languageclients.RubyTitle', {
+ defaultMessage: 'Elasticsearch Ruby Client',
+ }),
+ icon: 'ruby.svg',
+ description: i18n.translate('customIntegrations.languageclients.RubyDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Ruby client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/ruby-api/{branch}/ruby_client.html`,
+ },
+ {
+ id: 'go',
+ title: i18n.translate('customIntegrations.languageclients.GoTitle', {
+ defaultMessage: 'Elasticsearch Go Client',
+ }),
+ icon: 'go.svg',
+ description: i18n.translate('customIntegrations.languageclients.GoDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Go client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/go-api/{branch}/overview.html`,
+ },
+ {
+ id: 'dotnet',
+ title: i18n.translate('customIntegrations.languageclients.DotNetTitle', {
+ defaultMessage: 'Elasticsearch .NET Client',
+ }),
+ icon: 'dotnet.svg',
+ description: i18n.translate('customIntegrations.languageclients.DotNetDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official .NET client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/net-api/{branch}/index.html`,
+ },
+ {
+ id: 'php',
+ title: i18n.translate('customIntegrations.languageclients.PhpTitle', {
+ defaultMessage: 'Elasticsearch PHP Client',
+ }),
+ icon: 'php.svg',
+ description: i18n.translate('customIntegrations.languageclients.PhpDescription', {
+ defaultMessage:
+        'Start building your custom application on top of Elasticsearch with the official PHP client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/php-api/{branch}/index.html`,
+ },
+ {
+ id: 'perl',
+ title: i18n.translate('customIntegrations.languageclients.PerlTitle', {
+ defaultMessage: 'Elasticsearch Perl Client',
+ }),
+ icon: 'perl.svg',
+ description: i18n.translate('customIntegrations.languageclients.PerlDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Perl client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/perl-api/{branch}/index.html`,
+ },
+ {
+ id: 'python',
+ title: i18n.translate('customIntegrations.languageclients.PythonTitle', {
+ defaultMessage: 'Elasticsearch Python Client',
+ }),
+ icon: 'python.svg',
+ description: i18n.translate('customIntegrations.languageclients.PythonDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Python client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/python-api/{branch}/index.html`,
+ },
+ {
+ id: 'rust',
+ title: i18n.translate('customIntegrations.languageclients.RustTitle', {
+ defaultMessage: 'Elasticsearch Rust Client',
+ }),
+ icon: 'rust.svg',
+ description: i18n.translate('customIntegrations.languageclients.RustDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Rust client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/rust-api/{branch}/index.html`,
+ },
+ {
+ id: 'java',
+ title: i18n.translate('customIntegrations.languageclients.JavaTitle', {
+ defaultMessage: 'Elasticsearch Java Client',
+ }),
+ icon: 'java.svg',
+ description: i18n.translate('customIntegrations.languageclients.JavaDescription', {
+ defaultMessage:
+ 'Start building your custom application on top of Elasticsearch with the official Java client.',
+ }),
+ docUrlTemplate: `${ELASTICSEARCH_CLIENT_URL}/java-api-client/{branch}/index.html`,
+ },
+];
+
+export function registerLanguageClients(
+ core: CoreSetup,
+ registry: CustomIntegrationRegistry,
+ branch: string
+) {
+ integrations.forEach((integration: LanguageIntegration) => {
+ const icons: CustomIntegrationIcon[] = [];
+ if (integration.euiIconName) {
+ icons.push({
+ type: 'eui',
+ src: integration.euiIconName,
+ });
+ } else if (integration.icon) {
+ icons.push({
+ type: 'svg',
+ src: core.http.basePath.prepend(
+ `/plugins/${PLUGIN_ID}/assets/language_clients/${integration.icon}`
+ ),
+ });
+ }
+
+ registry.registerCustomIntegration({
+ id: `language_client.${integration.id}`,
+ title: integration.title,
+ description: integration.description,
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath: integration.docUrlTemplate.replace('{branch}', branch),
+ isBeta: false,
+ icons,
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ });
+ });
+}
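
For reference (not part of the change set), a minimal sketch of how the `{branch}` token in each `docUrlTemplate` is resolved at registration time; the branch value shown is an assumption:

```ts
// Illustrative only. `branch` stands in for initializerContext.env.packageInfo.branch,
// which the plugin passes through to registerLanguageClients.
const docUrlTemplate =
  'https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/{branch}/introduction.html';
const branch = '7.x'; // assumed value for the example
const uiInternalPath = docUrlTemplate.replace('{branch}', branch);
// => 'https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/7.x/introduction.html'
```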
diff --git a/src/plugins/custom_integrations/server/plugin.test.ts b/src/plugins/custom_integrations/server/plugin.test.ts
index 424eedf0603cd..8dee81ba6cba3 100644
--- a/src/plugins/custom_integrations/server/plugin.test.ts
+++ b/src/plugins/custom_integrations/server/plugin.test.ts
@@ -22,9 +22,145 @@ describe('CustomIntegrationsPlugin', () => {
initContext = coreMock.createPluginInitializerContext();
});
- test('wires up tutorials provider service and returns registerTutorial and addScopedTutorialContextFactory', () => {
+ test('should return setup contract', () => {
const setup = new CustomIntegrationsPlugin(initContext).setup(mockCoreSetup);
expect(setup).toHaveProperty('registerCustomIntegration');
+ expect(setup).toHaveProperty('getAppendCustomIntegrations');
+ });
+
+ test('should register language clients', () => {
+ const setup = new CustomIntegrationsPlugin(initContext).setup(mockCoreSetup);
+ expect(setup.getAppendCustomIntegrations()).toEqual([
+ {
+ id: 'language_client.all',
+ title: 'Elasticsearch Clients',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official language clients.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath: 'https://www.elastic.co/guide/en/elasticsearch/client/index.html',
+ isBeta: false,
+ icons: [{ type: 'eui', src: 'logoElasticsearch' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.javascript',
+ title: 'Elasticsearch JavaScript Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Node.js client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/branch/introduction.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.ruby',
+ title: 'Elasticsearch Ruby Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Ruby client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/branch/ruby_client.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.go',
+ title: 'Elasticsearch Go Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Go client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/go-api/branch/overview.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.dotnet',
+ title: 'Elasticsearch .NET Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official .NET client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/net-api/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.php',
+ title: 'Elasticsearch PHP Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official PHP client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/php-api/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.perl',
+ title: 'Elasticsearch Perl Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Perl client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/perl-api/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.python',
+ title: 'Elasticsearch Python Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Python client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/python-api/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.rust',
+ title: 'Elasticsearch Rust Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Rust client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/rust-api/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ {
+ id: 'language_client.java',
+ title: 'Elasticsearch Java Client',
+ description:
+ 'Start building your custom application on top of Elasticsearch with the official Java client.',
+ type: 'ui_link',
+ shipper: 'language_clients',
+ uiInternalPath:
+ 'https://www.elastic.co/guide/en/elasticsearch/client/java-api-client/branch/index.html',
+ isBeta: false,
+ icons: [{ type: 'svg' }],
+ categories: ['elastic_stack', 'custom', 'language_client'],
+ },
+ ]);
});
});
});
diff --git a/src/plugins/custom_integrations/server/plugin.ts b/src/plugins/custom_integrations/server/plugin.ts
index 099650ee15a05..330a1288d05a2 100755
--- a/src/plugins/custom_integrations/server/plugin.ts
+++ b/src/plugins/custom_integrations/server/plugin.ts
@@ -12,12 +12,14 @@ import { CustomIntegrationsPluginSetup, CustomIntegrationsPluginStart } from './
import { CustomIntegration } from '../common';
import { CustomIntegrationRegistry } from './custom_integration_registry';
import { defineRoutes } from './routes/define_routes';
+import { registerLanguageClients } from './language_clients';
export class CustomIntegrationsPlugin
implements Plugin<CustomIntegrationsPluginSetup, CustomIntegrationsPluginStart>
{
private readonly logger: Logger;
private readonly customIngegrationRegistry: CustomIntegrationRegistry;
+ private readonly branch: string;
constructor(initializerContext: PluginInitializerContext) {
this.logger = initializerContext.logger.get();
@@ -25,6 +27,7 @@ export class CustomIntegrationsPlugin
this.logger,
initializerContext.env.mode.dev
);
+ this.branch = initializerContext.env.packageInfo.branch;
}
public setup(core: CoreSetup) {
@@ -33,6 +36,8 @@ export class CustomIntegrationsPlugin
const router = core.http.createRouter();
defineRoutes(router, this.customIngegrationRegistry);
+ registerLanguageClients(core, this.customIngegrationRegistry, this.branch);
+
return {
registerCustomIntegration: (integration: Omit<CustomIntegration, 'type'>) => {
this.customIngegrationRegistry.registerCustomIntegration({
@@ -40,6 +45,9 @@ export class CustomIntegrationsPlugin
...integration,
});
},
+ getAppendCustomIntegrations: () => {
+ return this.customIngegrationRegistry.getAppendCustomIntegrations();
+ },
} as CustomIntegrationsPluginSetup;
}
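
For context, a hedged sketch of how a dependent plugin might consume the setup contract above. The import path, the field set, and the helper itself are assumptions for illustration; only `registerCustomIntegration` and `getAppendCustomIntegrations` come from this change:

```ts
import type { CustomIntegrationsPluginSetup } from 'src/plugins/custom_integrations/server';

export function registerExampleIntegration(customIntegrations: CustomIntegrationsPluginSetup) {
  // The setup contract types `integration` as Omit<CustomIntegration, 'type'>,
  // so callers do not pass `type` themselves.
  customIntegrations.registerCustomIntegration({
    id: 'example_ui_link',
    title: 'Example integration',
    description: 'Links out to an example page.',
    shipper: 'example_plugin',
    uiInternalPath: '/app/example',
    isBeta: false,
    icons: [],
    categories: ['custom'],
  });

  // Everything appended so far, including the language clients registered above.
  return customIntegrations.getAppendCustomIntegrations();
}
```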
diff --git a/src/plugins/custom_integrations/storybook/decorator.tsx b/src/plugins/custom_integrations/storybook/decorator.tsx
new file mode 100644
index 0000000000000..c5fea9615ee47
--- /dev/null
+++ b/src/plugins/custom_integrations/storybook/decorator.tsx
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+
+import { DecoratorFn } from '@storybook/react';
+import { I18nProvider } from '@kbn/i18n/react';
+
+import { PluginServiceRegistry } from '../../presentation_util/public';
+
+import { pluginServices } from '../public/services';
+import { CustomIntegrationsServices } from '../public/services';
+import { providers } from '../public/services/storybook';
+import { EuiThemeProvider } from '../../kibana_react/common/eui_styled_components';
+
+/**
+ * Returns a Storybook Decorator that provides both the `I18nProvider` and access to `PluginServices`
+ * for components rendered in Storybook.
+ */
+export const getCustomIntegrationsContextDecorator =
+ (): DecoratorFn =>
+ (story, { globals }) => {
+ const ContextProvider = getCustomIntegrationsContextProvider();
+ const darkMode = globals.euiTheme === 'v8.dark' || globals.euiTheme === 'v7.dark';
+
+    return (
+      <I18nProvider>
+        <EuiThemeProvider darkMode={darkMode}>
+          <ContextProvider>{story()}</ContextProvider>
+        </EuiThemeProvider>
+      </I18nProvider>
+    );
+ };
+
+/**
+ * Prepares `PluginServices` for use in Storybook and returns a React `Context.Provider` element
+ * so components that access `PluginServices` can be rendered.
+ */
+export const getCustomIntegrationsContextProvider = () => {
+ const registry = new PluginServiceRegistry<CustomIntegrationsServices>(providers);
+ pluginServices.setRegistry(registry.start({}));
+ return pluginServices.getContextProvider();
+};
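
As a usage note (illustrative only), the decorator can also be applied per story file instead of globally via `preview.tsx` further down; the story below is hypothetical:

```tsx
import React from 'react';
import { getCustomIntegrationsContextDecorator } from './decorator';

export default {
  title: 'Custom Integrations/Example',
  decorators: [getCustomIntegrationsContextDecorator()],
};

// Renders with the I18nProvider and PluginServices context supplied by the decorator.
export const Example = () => <div>Hello from Storybook</div>;
```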
diff --git a/src/core/server/legacy/index.ts b/src/plugins/custom_integrations/storybook/index.ts
similarity index 65%
rename from src/core/server/legacy/index.ts
rename to src/plugins/custom_integrations/storybook/index.ts
index 39ffef501a9ec..a9e34e1aeeb7e 100644
--- a/src/core/server/legacy/index.ts
+++ b/src/plugins/custom_integrations/storybook/index.ts
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
-/** @internal */
-export type { ILegacyService } from './legacy_service';
-export { LegacyService } from './legacy_service';
+export {
+ getCustomIntegrationsContextDecorator as getStorybookContextDecorator,
+ getCustomIntegrationsContextProvider as getStorybookContextProvider,
+} from '../storybook/decorator';
diff --git a/packages/kbn-config/src/legacy/index.ts b/src/plugins/custom_integrations/storybook/main.ts
similarity index 76%
rename from packages/kbn-config/src/legacy/index.ts
rename to src/plugins/custom_integrations/storybook/main.ts
index f6906f81d1821..1261fe5a06f69 100644
--- a/packages/kbn-config/src/legacy/index.ts
+++ b/src/plugins/custom_integrations/storybook/main.ts
@@ -6,7 +6,6 @@
* Side Public License, v 1.
*/
-export {
- LegacyObjectToConfigAdapter,
- LegacyLoggingConfig,
-} from './legacy_object_to_config_adapter';
+import { defaultConfig } from '@kbn/storybook';
+
+module.exports = defaultConfig;
diff --git a/src/plugins/custom_integrations/storybook/manager.ts b/src/plugins/custom_integrations/storybook/manager.ts
new file mode 100644
index 0000000000000..99c01efdddfdc
--- /dev/null
+++ b/src/plugins/custom_integrations/storybook/manager.ts
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { addons } from '@storybook/addons';
+import { create } from '@storybook/theming';
+import { PANEL_ID } from '@storybook/addon-actions';
+
+addons.setConfig({
+ theme: create({
+ base: 'light',
+ brandTitle: 'Kibana Custom Integrations Storybook',
+ brandUrl: 'https://github.com/elastic/kibana/tree/master/src/plugins/custom_integrations',
+ }),
+ showPanel: true,
+ selectedPanel: PANEL_ID,
+});
diff --git a/src/plugins/custom_integrations/storybook/preview.tsx b/src/plugins/custom_integrations/storybook/preview.tsx
new file mode 100644
index 0000000000000..c27390261c920
--- /dev/null
+++ b/src/plugins/custom_integrations/storybook/preview.tsx
@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import { Title, Subtitle, Description, Primary, Stories } from '@storybook/addon-docs/blocks';
+
+import { getCustomIntegrationsContextDecorator } from './decorator';
+
+export const decorators = [getCustomIntegrationsContextDecorator()];
+
+export const parameters = {
+ docs: {
+ page: () => (
+      <>
+        <Title />
+        <Subtitle />
+        <Description />
+        <Primary />
+        <Stories />
+      </>
+ ),
+ },
+};
diff --git a/src/plugins/custom_integrations/tsconfig.json b/src/plugins/custom_integrations/tsconfig.json
index 2ce7bf9c8112c..ccb75c358611b 100644
--- a/src/plugins/custom_integrations/tsconfig.json
+++ b/src/plugins/custom_integrations/tsconfig.json
@@ -6,8 +6,15 @@
"declaration": true,
"declarationMap": true
},
- "include": ["common/**/*", "public/**/*", "server/**/*"],
+ "include": [
+ "../../../typings/**/*",
+ "common/**/*",
+ "public/**/*",
+ "server/**/*",
+ "storybook/**/*"
+ ],
"references": [
- { "path": "../../core/tsconfig.json" }
+ { "path": "../../core/tsconfig.json" },
+ { "path": "../presentation_util/tsconfig.json" }
]
}
diff --git a/src/plugins/dashboard/public/application/top_nav/editor_menu.tsx b/src/plugins/dashboard/public/application/top_nav/editor_menu.tsx
index 0ddd0902b719f..46ae4d9456d92 100644
--- a/src/plugins/dashboard/public/application/top_nav/editor_menu.tsx
+++ b/src/plugins/dashboard/public/application/top_nav/editor_menu.tsx
@@ -128,7 +128,10 @@ export const EditorMenu = ({ dashboardContainer, createNewVisType }: Props) => {
name: titleInWizard || title,
icon: icon as string,
onClick:
- group === VisGroups.AGGBASED ? createNewAggsBasedVis(visType) : createNewVisType(visType),
+ // not all the agg-based visualizations need to be created via the wizard
+ group === VisGroups.AGGBASED && visType.options.showIndexSelection
+ ? createNewAggsBasedVis(visType)
+ : createNewVisType(visType),
'data-test-subj': `visType-${name}`,
toolTipContent: description,
};
diff --git a/src/plugins/data/server/config_deprecations.test.ts b/src/plugins/data/server/config_deprecations.test.ts
index 365c3b749f6c7..6c09b060aa763 100644
--- a/src/plugins/data/server/config_deprecations.test.ts
+++ b/src/plugins/data/server/config_deprecations.test.ts
@@ -9,9 +9,12 @@
import { cloneDeep } from 'lodash';
import { applyDeprecations, configDeprecationFactory } from '@kbn/config';
+import { configDeprecationsMock } from '../../../core/server/mocks';
import { autocompleteConfigDeprecationProvider } from './config_deprecations';
+const deprecationContext = configDeprecationsMock.createContext();
+
const applyConfigDeprecations = (settings: Record = {}) => {
const deprecations = autocompleteConfigDeprecationProvider(configDeprecationFactory);
const deprecationMessages: string[] = [];
@@ -20,6 +23,7 @@ const applyConfigDeprecations = (settings: Record = {}) => {
deprecations.map((deprecation) => ({
deprecation,
path: '',
+ context: deprecationContext,
})),
() =>
({ message }) =>
diff --git a/src/plugins/discover/public/application/apps/main/components/uninitialized/uninitialized.tsx b/src/plugins/discover/public/application/apps/main/components/uninitialized/uninitialized.tsx
index c9e0c43900ba1..6c1b1bfc87d20 100644
--- a/src/plugins/discover/public/application/apps/main/components/uninitialized/uninitialized.tsx
+++ b/src/plugins/discover/public/application/apps/main/components/uninitialized/uninitialized.tsx
@@ -32,7 +32,7 @@ export const DiscoverUninitialized = ({ onRefresh }: Props) => {
}
actions={
-
+ {
@@ -46,7 +46,6 @@ export function DiscoverMainApp(props: DiscoverMainProps) {
},
[history]
);
- const savedSearch = props.savedSearch;
/**
* State related logic
diff --git a/src/plugins/discover/public/application/apps/main/discover_main_route.tsx b/src/plugins/discover/public/application/apps/main/discover_main_route.tsx
index 5141908e44ade..a95668642558c 100644
--- a/src/plugins/discover/public/application/apps/main/discover_main_route.tsx
+++ b/src/plugins/discover/public/application/apps/main/discover_main_route.tsx
@@ -75,8 +75,6 @@ export function DiscoverMainRoute({ services, history }: DiscoverMainProps) {
async function loadSavedSearch() {
try {
- // force a refresh if a given saved search without id was saved
- setSavedSearch(undefined);
const loadedSavedSearch = await services.getSavedSearchById(savedSearchId);
const loadedIndexPattern = await loadDefaultOrCurrentIndexPattern(loadedSavedSearch);
if (loadedSavedSearch && !loadedSavedSearch?.searchSource.getField('index')) {
diff --git a/src/plugins/discover/public/application/apps/main/services/use_discover_state.ts b/src/plugins/discover/public/application/apps/main/services/use_discover_state.ts
index e11a9937111a1..223d896b16cd1 100644
--- a/src/plugins/discover/public/application/apps/main/services/use_discover_state.ts
+++ b/src/plugins/discover/public/application/apps/main/services/use_discover_state.ts
@@ -96,6 +96,7 @@ export function useDiscoverState({
useEffect(() => {
const stopSync = stateContainer.initializeAndSync(indexPattern, filterManager, data);
+
return () => stopSync();
}, [stateContainer, filterManager, data, indexPattern]);
@@ -209,16 +210,13 @@ export function useDiscoverState({
}, [config, data, savedSearch, reset, stateContainer]);
/**
- * Initial data fetching, also triggered when index pattern changes
+ * Trigger data fetching on indexPattern or savedSearch changes
*/
useEffect(() => {
- if (!indexPattern) {
- return;
- }
- if (initialFetchStatus === FetchStatus.LOADING) {
+ if (indexPattern) {
refetch$.next();
}
- }, [initialFetchStatus, refetch$, indexPattern]);
+ }, [initialFetchStatus, refetch$, indexPattern, savedSearch.id]);
return {
data$,
diff --git a/src/plugins/discover/public/application/apps/main/services/use_saved_search.ts b/src/plugins/discover/public/application/apps/main/services/use_saved_search.ts
index 26f95afba5a93..d11c76283fedd 100644
--- a/src/plugins/discover/public/application/apps/main/services/use_saved_search.ts
+++ b/src/plugins/discover/public/application/apps/main/services/use_saved_search.ts
@@ -156,6 +156,7 @@ export const useSavedSearch = ({
refetch$,
searchSessionManager,
searchSource,
+ initialFetchStatus,
});
const subscription = fetch$.subscribe((val) => {
diff --git a/src/plugins/discover/public/application/apps/main/utils/get_fetch_observable.ts b/src/plugins/discover/public/application/apps/main/utils/get_fetch_observable.ts
index aac6196e64f6f..528f0e74d3ed6 100644
--- a/src/plugins/discover/public/application/apps/main/utils/get_fetch_observable.ts
+++ b/src/plugins/discover/public/application/apps/main/utils/get_fetch_observable.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
import { merge } from 'rxjs';
-import { debounceTime, filter, tap } from 'rxjs/operators';
+import { debounceTime, filter, skip, tap } from 'rxjs/operators';
import { FetchStatus } from '../../../types';
import type {
@@ -26,6 +26,7 @@ export function getFetch$({
main$,
refetch$,
searchSessionManager,
+ initialFetchStatus,
}: {
setAutoRefreshDone: (val: AutoRefreshDoneFn | undefined) => void;
data: DataPublicPluginStart;
@@ -33,10 +34,11 @@ export function getFetch$({
refetch$: DataRefetch$;
searchSessionManager: DiscoverSearchSessionManager;
searchSource: SearchSource;
+ initialFetchStatus: FetchStatus;
}) {
const { timefilter } = data.query.timefilter;
const { filterManager } = data.query;
- return merge(
+ let fetch$ = merge(
refetch$,
filterManager.getFetches$(),
timefilter.getFetch$(),
@@ -58,4 +60,13 @@ export function getFetch$({
data.query.queryString.getUpdates$(),
searchSessionManager.newSearchSessionIdFromURL$.pipe(filter((sessionId) => !!sessionId))
).pipe(debounceTime(100));
+
+ /**
+ * Skip initial fetch when discover:searchOnPageLoad is disabled.
+ */
+ if (initialFetchStatus === FetchStatus.UNINITIALIZED) {
+ fetch$ = fetch$.pipe(skip(1));
+ }
+
+ return fetch$;
}
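
A standalone sketch (plain RxJS, not Kibana code) of why `skip(1)` suppresses the initial fetch when `initialFetchStatus` is `UNINITIALIZED`:

```ts
import { Subject } from 'rxjs';
import { skip } from 'rxjs/operators';

const refetch$ = new Subject<void>();
const fetch$ = refetch$.pipe(skip(1));

fetch$.subscribe(() => console.log('fetch'));

refetch$.next(); // dropped – stands in for the automatic fetch on page load
refetch$.next(); // logs 'fetch' – stands in for an explicit user-triggered refetch
```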
diff --git a/src/plugins/discover/public/application/apps/main/utils/get_fetch_observeable.test.ts b/src/plugins/discover/public/application/apps/main/utils/get_fetch_observeable.test.ts
index 5f728b115b2e9..39873ff609d64 100644
--- a/src/plugins/discover/public/application/apps/main/utils/get_fetch_observeable.test.ts
+++ b/src/plugins/discover/public/application/apps/main/utils/get_fetch_observeable.test.ts
@@ -58,6 +58,7 @@ describe('getFetchObservable', () => {
data: createDataMock(new Subject(), new Subject(), new Subject(), new Subject()),
searchSessionManager: searchSessionManagerMock.searchSessionManager,
searchSource: savedSearchMock.searchSource,
+ initialFetchStatus: FetchStatus.LOADING,
});
fetch$.subscribe(() => {
@@ -81,6 +82,7 @@ describe('getFetchObservable', () => {
data: dataMock,
searchSessionManager: searchSessionManagerMock.searchSessionManager,
searchSource: savedSearchMockWithTimeField.searchSource,
+ initialFetchStatus: FetchStatus.LOADING,
});
const fetchfnMock = jest.fn();
diff --git a/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.test.ts b/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.test.ts
index 554aca6ddb8f1..04ee5f414e7f4 100644
--- a/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.test.ts
+++ b/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.test.ts
@@ -31,6 +31,7 @@ describe('getStateDefaults', () => {
"index": "index-pattern-with-timefield-id",
"interval": "auto",
"query": undefined,
+ "savedQuery": undefined,
"sort": Array [
Array [
"timestamp",
@@ -59,6 +60,7 @@ describe('getStateDefaults', () => {
"index": "the-index-pattern-id",
"interval": "auto",
"query": undefined,
+ "savedQuery": undefined,
"sort": Array [],
}
`);
diff --git a/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.ts b/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.ts
index 4061d9a61f0a3..cd23d52022374 100644
--- a/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.ts
+++ b/src/plugins/discover/public/application/apps/main/utils/get_state_defaults.ts
@@ -47,6 +47,7 @@ export function getStateDefaults({
interval: 'auto',
filters: cloneDeep(searchSource.getOwnField('filter')),
hideChart: undefined,
+ savedQuery: undefined,
} as AppState;
if (savedSearch.grid) {
defaultState.grid = savedSearch.grid;
diff --git a/src/plugins/discover/public/application/apps/not_found/not_found_route.tsx b/src/plugins/discover/public/application/apps/not_found/not_found_route.tsx
index cd16a820cc8f7..6b6ef584d07f1 100644
--- a/src/plugins/discover/public/application/apps/not_found/not_found_route.tsx
+++ b/src/plugins/discover/public/application/apps/not_found/not_found_route.tsx
@@ -9,6 +9,7 @@ import React, { useEffect } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiCallOut } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
+import { Redirect } from 'react-router-dom';
import { toMountPoint } from '../../../../../kibana_react/public';
import { DiscoverServices } from '../../../build_services';
import { getUrlTracker } from '../../../kibana_services';
@@ -23,7 +24,8 @@ let bannerId: string | undefined;
export function NotFoundRoute(props: NotFoundRouteProps) {
const { services } = props;
- const { urlForwarding } = services;
+ const { urlForwarding, core, history } = services;
+ const currentLocation = history().location.pathname;
useEffect(() => {
const path = window.location.hash.substr(1);
@@ -34,14 +36,17 @@ export function NotFoundRoute(props: NotFoundRouteProps) {
defaultMessage: 'Page not found',
});
- bannerId = services.core.overlays.banners.replace(
+ bannerId = core.overlays.banners.replace(
bannerId,
toMountPoint(
-
+
@@ -51,10 +56,10 @@ export function NotFoundRoute(props: NotFoundRouteProps) {
// hide the message after the user has had a chance to acknowledge it -- so it doesn't permanently stick around
setTimeout(() => {
if (bannerId) {
- services.core.overlays.banners.remove(bannerId);
+ core.overlays.banners.remove(bannerId);
}
}, 15000);
- }, [services.core.overlays.banners, services.history, urlForwarding]);
+ }, [core.overlays.banners, history, urlForwarding]);
- return null;
+ return ;
}
diff --git a/src/plugins/embeddable/public/lib/containers/container.ts b/src/plugins/embeddable/public/lib/containers/container.ts
index a1d4b5b68d20d..06133fb2160c0 100644
--- a/src/plugins/embeddable/public/lib/containers/container.ts
+++ b/src/plugins/embeddable/public/lib/containers/container.ts
@@ -46,6 +46,7 @@ export abstract class Container<
parent?: Container
) {
super(input, output, parent);
+ this.getFactory = getFactory; // Currently required for using in storybook due to https://github.com/storybookjs/storybook/issues/13834
this.subscription = this.getInput$()
// At each update event, get both the previous and current state
.pipe(startWith(input), pairwise())
diff --git a/src/plugins/es_ui_shared/public/request/use_request.test.ts b/src/plugins/es_ui_shared/public/request/use_request.test.ts
index 68edde1336728..a6c22073dbc90 100644
--- a/src/plugins/es_ui_shared/public/request/use_request.test.ts
+++ b/src/plugins/es_ui_shared/public/request/use_request.test.ts
@@ -308,6 +308,24 @@ describe('useRequest hook', () => {
expect(getSendRequestSpy().callCount).toBe(2);
});
+ it(`changing pollIntervalMs to undefined cancels the poll`, async () => {
+ const { setupErrorRequest, setErrorResponse, completeRequest, getSendRequestSpy } = helpers;
+ // Send initial request.
+ setupErrorRequest({ pollIntervalMs: REQUEST_TIME });
+
+ // Setting the poll to undefined will cancel subsequent requests.
+ setErrorResponse({ pollIntervalMs: undefined });
+
+ // Complete initial request.
+ await completeRequest();
+
+ // If there were another scheduled poll request, this would complete it.
+ await completeRequest();
+
+ // But because we canceled the poll, we only see 1 request instead of 2.
+ expect(getSendRequestSpy().callCount).toBe(1);
+ });
+
it('when the path changes after a request is scheduled, the scheduled request is sent with that path', async () => {
const {
setupSuccessRequest,
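
A hedged sketch of the behavior the new test covers, seen from a consumer of the hook: polling continues while `pollIntervalMs` is a number and stops once it becomes `undefined`. The component, endpoint, and config fields other than `path`, `method`, and `pollIntervalMs` are assumptions:

```tsx
import React from 'react';
import { HttpSetup } from 'src/core/public';
import { useRequest } from 'src/plugins/es_ui_shared/public';

export const ClusterStatus = ({ http, live }: { http: HttpSetup; live: boolean }) => {
  const { data, isLoading, error } = useRequest(http, {
    path: '/api/example/status', // hypothetical endpoint
    method: 'get',
    // Switching this to undefined cancels any scheduled poll request.
    pollIntervalMs: live ? 5000 : undefined,
  });

  if (isLoading) return <span>Loading…</span>;
  if (error) return <span>Something went wrong</span>;
  return <span>{JSON.stringify(data)}</span>;
};
```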
diff --git a/src/plugins/home/server/index.ts b/src/plugins/home/server/index.ts
index 9523766596fed..c75ce4e83921c 100644
--- a/src/plugins/home/server/index.ts
+++ b/src/plugins/home/server/index.ts
@@ -18,9 +18,6 @@ export const config: PluginConfigDescriptor = {
disableWelcomeScreen: true,
},
schema: configSchema,
- deprecations: ({ renameFromRoot }) => [
- renameFromRoot('kibana.disableWelcomeScreen', 'home.disableWelcomeScreen'),
- ],
};
export const plugin = (initContext: PluginInitializerContext) => new HomeServerPlugin(initContext);
diff --git a/src/plugins/kibana_react/public/page_template/page_template.tsx b/src/plugins/kibana_react/public/page_template/page_template.tsx
index 1132d1dc6b4ed..cf2b27c3b00da 100644
--- a/src/plugins/kibana_react/public/page_template/page_template.tsx
+++ b/src/plugins/kibana_react/public/page_template/page_template.tsx
@@ -133,6 +133,7 @@ export const KibanaPageTemplate: FunctionComponent = ({
if (noDataConfig) {
return (
= {
mainInterval: true,
fetchInterval: true,
},
- deprecations: ({ unused }) => [unused('defaultLanguage')],
};
export function plugin() {
diff --git a/src/plugins/presentation_util/public/components/controls/__stories__/controls_service_stub.ts b/src/plugins/presentation_util/public/components/controls/__stories__/controls_service_stub.ts
new file mode 100644
index 0000000000000..59e7a44a83a17
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/__stories__/controls_service_stub.ts
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { InputControlFactory } from '../types';
+import { ControlsService } from '../controls_service';
+import { flightFields, getEuiSelectableOptions } from './flights';
+import { OptionsListEmbeddableFactory } from '../control_types/options_list';
+
+export const getControlsServiceStub = () => {
+ const controlsServiceStub = new ControlsService();
+
+ const optionsListFactoryStub = new OptionsListEmbeddableFactory(
+ ({ field, search }) =>
+ new Promise((r) => setTimeout(() => r(getEuiSelectableOptions(field, search)), 500)),
+ () => Promise.resolve(['demo data flights']),
+ () => Promise.resolve(flightFields)
+ );
+
+ // cast to unknown because the stub cannot use the embeddable start contract to transform the EmbeddableFactoryDefinition into an EmbeddableFactory
+ const optionsListControlFactory = optionsListFactoryStub as unknown as InputControlFactory;
+ optionsListControlFactory.getDefaultInput = () => ({});
+ controlsServiceStub.registerInputControlType(optionsListControlFactory);
+ return controlsServiceStub;
+};
diff --git a/src/plugins/presentation_util/public/components/input_controls/__stories__/decorators.tsx b/src/plugins/presentation_util/public/components/controls/__stories__/decorators.tsx
similarity index 95%
rename from src/plugins/presentation_util/public/components/input_controls/__stories__/decorators.tsx
rename to src/plugins/presentation_util/public/components/controls/__stories__/decorators.tsx
index 0aaa0e7a8a533..c5d3cf2c815be 100644
--- a/src/plugins/presentation_util/public/components/input_controls/__stories__/decorators.tsx
+++ b/src/plugins/presentation_util/public/components/controls/__stories__/decorators.tsx
@@ -23,7 +23,7 @@ const panelStyle = {
const kqlBarStyle = { background: bar, padding: 16, minHeight, fontStyle: 'italic' };
-const inputBarStyle = { background: '#fff', padding: 4, minHeight };
+const inputBarStyle = { background: '#fff', padding: 4 };
const layout = (OptionStory: Story) => (
diff --git a/src/plugins/presentation_util/public/components/input_controls/__stories__/flights.ts b/src/plugins/presentation_util/public/components/controls/__stories__/flights.ts
similarity index 100%
rename from src/plugins/presentation_util/public/components/input_controls/__stories__/flights.ts
rename to src/plugins/presentation_util/public/components/controls/__stories__/flights.ts
diff --git a/src/plugins/presentation_util/public/components/controls/__stories__/input_controls.stories.tsx b/src/plugins/presentation_util/public/components/controls/__stories__/input_controls.stories.tsx
new file mode 100644
index 0000000000000..2a463fece18da
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/__stories__/input_controls.stories.tsx
@@ -0,0 +1,53 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React, { useEffect, useMemo } from 'react';
+import uuid from 'uuid';
+
+import { decorators } from './decorators';
+import { providers } from '../../../services/storybook';
+import { getControlsServiceStub } from './controls_service_stub';
+import { ControlGroupContainerFactory } from '../control_group/control_group_container_factory';
+
+export default {
+ title: 'Controls',
+ description: '',
+ decorators,
+};
+
+const ControlGroupStoryComponent = () => {
+ const embeddableRoot: React.RefObject<HTMLDivElement> = useMemo(() => React.createRef(), []);
+
+ providers.overlays.start({});
+ const overlays = providers.overlays.getService();
+
+ const controlsServiceStub = getControlsServiceStub();
+
+ useEffect(() => {
+ (async () => {
+ const factory = new ControlGroupContainerFactory(controlsServiceStub, overlays);
+ const controlGroupContainerEmbeddable = await factory.create({
+ inheritParentState: {
+ useQuery: false,
+ useFilters: false,
+ useTimerange: false,
+ },
+ controlStyle: 'oneLine',
+ id: uuid.v4(),
+ panels: {},
+ });
+ if (controlGroupContainerEmbeddable && embeddableRoot.current) {
+ controlGroupContainerEmbeddable.render(embeddableRoot.current);
+ }
+ })();
+ }, [embeddableRoot, controlsServiceStub, overlays]);
+
+ return <div ref={embeddableRoot} />;
+};
+
+export const ControlGroupStory = () => <ControlGroupStoryComponent />;
diff --git a/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_component.tsx b/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_component.tsx
new file mode 100644
index 0000000000000..240beea13b0e2
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_component.tsx
@@ -0,0 +1,117 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React, { useEffect, useMemo, useState } from 'react';
+import classNames from 'classnames';
+import {
+ EuiButtonIcon,
+ EuiFormControlLayout,
+ EuiFormLabel,
+ EuiFormRow,
+ EuiToolTip,
+} from '@elastic/eui';
+import { ControlGroupContainer } from '../control_group/control_group_container';
+import { useChildEmbeddable } from '../hooks/use_child_embeddable';
+import { ControlStyle } from '../types';
+import { ControlFrameStrings } from './control_frame_strings';
+
+export interface ControlFrameProps {
+ container: ControlGroupContainer;
+ customPrepend?: JSX.Element;
+ controlStyle: ControlStyle;
+ enableActions?: boolean;
+ onRemove?: () => void;
+ embeddableId: string;
+ onEdit?: () => void;
+}
+
+export const ControlFrame = ({
+ customPrepend,
+ enableActions,
+ embeddableId,
+ controlStyle,
+ container,
+ onRemove,
+ onEdit,
+}: ControlFrameProps) => {
+ const embeddableRoot: React.RefObject<HTMLDivElement> = useMemo(() => React.createRef(), []);
+ const embeddable = useChildEmbeddable({ container, embeddableId });
+
+ const [title, setTitle] = useState<string>();
+
+ const usingTwoLineLayout = controlStyle === 'twoLine';
+
+ useEffect(() => {
+ if (embeddableRoot.current && embeddable) {
+ embeddable.render(embeddableRoot.current);
+ }
+ const subscription = embeddable?.getInput$().subscribe((newInput) => setTitle(newInput.title));
+ return () => subscription?.unsubscribe();
+ }, [embeddable, embeddableRoot]);
+
+ const floatingActions = (
+
+
+
+
+
+
+
+
+ );
+
+ const form = (
+
+ {customPrepend ?? null}
+ {usingTwoLineLayout ? undefined : (
+
+ {title}
+
+ )}
+ >
+ }
+ >
+
+
+ );
+
+ return (
+ <>
+ {enableActions && floatingActions}
+
+ {form}
+
+ >
+ );
+};
diff --git a/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_strings.ts b/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_strings.ts
new file mode 100644
index 0000000000000..5f9e89aa797cb
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_frame/control_frame_strings.ts
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const ControlFrameStrings = {
+ floatingActions: {
+ getEditButtonTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.floatingActions.editTitle', {
+ defaultMessage: 'Manage control',
+ }),
+ getRemoveButtonTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.floatingActions.removeTitle', {
+ defaultMessage: 'Remove control',
+ }),
+ },
+};
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_component.tsx b/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_component.tsx
new file mode 100644
index 0000000000000..d683c0749d98d
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_component.tsx
@@ -0,0 +1,163 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import '../control_group.scss';
+
+import { EuiButtonIcon, EuiFlexGroup, EuiFlexItem, EuiToolTip } from '@elastic/eui';
+import React, { useEffect, useMemo, useState } from 'react';
+import classNames from 'classnames';
+import {
+ arrayMove,
+ SortableContext,
+ rectSortingStrategy,
+ sortableKeyboardCoordinates,
+} from '@dnd-kit/sortable';
+import {
+ closestCenter,
+ DndContext,
+ DragEndEvent,
+ DragOverlay,
+ KeyboardSensor,
+ PointerSensor,
+ useSensor,
+ useSensors,
+ LayoutMeasuringStrategy,
+} from '@dnd-kit/core';
+
+import { ControlGroupStrings } from '../control_group_strings';
+import { ControlGroupContainer } from '../control_group_container';
+import { ControlClone, SortableControl } from './control_group_sortable_item';
+import { OPTIONS_LIST_CONTROL } from '../../control_types/options_list/options_list_embeddable';
+
+interface ControlGroupProps {
+ controlGroupContainer: ControlGroupContainer;
+}
+
+export const ControlGroup = ({ controlGroupContainer }: ControlGroupProps) => {
+ const [controlIds, setControlIds] = useState<string[]>([]);
+
+ // sync controlIds every time input panels change
+ useEffect(() => {
+ const subscription = controlGroupContainer.getInput$().subscribe(() => {
+ setControlIds((currentIds) => {
+ // sync control Ids with panels from container input.
+ const { panels } = controlGroupContainer.getInput();
+ const newIds: string[] = [];
+ const allIds = [...currentIds, ...Object.keys(panels)];
+ allIds.forEach((id) => {
+ const currentIndex = currentIds.indexOf(id);
+ if (!panels[id] && currentIndex !== -1) {
+ currentIds.splice(currentIndex, 1);
+ }
+ if (currentIndex === -1 && Boolean(panels[id])) {
+ newIds.push(id);
+ }
+ });
+ return [...currentIds, ...newIds];
+ });
+ });
+ return () => subscription.unsubscribe();
+ }, [controlGroupContainer]);
+
+ const [draggingId, setDraggingId] = useState<string | null>(null);
+
+ const draggingIndex = useMemo(
+ () => (draggingId ? controlIds.indexOf(draggingId) : -1),
+ [controlIds, draggingId]
+ );
+
+ const sensors = useSensors(
+ useSensor(PointerSensor),
+ useSensor(KeyboardSensor, { coordinateGetter: sortableKeyboardCoordinates })
+ );
+
+ const onDragEnd = ({ over }: DragEndEvent) => {
+ if (over) {
+ const overIndex = controlIds.indexOf(over.id);
+ if (draggingIndex !== overIndex) {
+ const newIndex = overIndex;
+ setControlIds((currentControlIds) => arrayMove(currentControlIds, draggingIndex, newIndex));
+ }
+ }
+ setDraggingId(null);
+ };
+
+ return (
+
+
+ setDraggingId(active.id)}
+ onDragEnd={onDragEnd}
+ onDragCancel={() => setDraggingId(null)}
+ sensors={sensors}
+ collisionDetection={closestCenter}
+ layoutMeasuring={{
+ strategy: LayoutMeasuringStrategy.Always,
+ }}
+ >
+
+
+ {controlIds.map((controlId, index) => (
+ controlGroupContainer.editControl(controlId)}
+ onRemove={() => controlGroupContainer.removeEmbeddable(controlId)}
+ dragInfo={{ index, draggingIndex }}
+ container={controlGroupContainer}
+ controlStyle={controlGroupContainer.getInput().controlStyle}
+ embeddableId={controlId}
+ width={controlGroupContainer.getInput().panels[controlId].width}
+ key={controlId}
+ />
+ ))}
+
+
+
+ {draggingId ? (
+
+ ) : null}
+
+
+
+
+
+
+
+
+
+
+
+
+ controlGroupContainer.createNewControl(OPTIONS_LIST_CONTROL)} // use popover when there are multiple types of control
+ />
+
+
+
+
+
+ );
+};
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_sortable_item.tsx b/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_sortable_item.tsx
new file mode 100644
index 0000000000000..3ae171a588da4
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/component/control_group_sortable_item.tsx
@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { EuiFlexItem, EuiFormLabel, EuiIcon, EuiFlexGroup } from '@elastic/eui';
+import React, { forwardRef, HTMLAttributes } from 'react';
+import { useSortable } from '@dnd-kit/sortable';
+import { CSS } from '@dnd-kit/utilities';
+import classNames from 'classnames';
+
+import { ControlWidth } from '../../types';
+import { ControlGroupContainer } from '../control_group_container';
+import { useChildEmbeddable } from '../../hooks/use_child_embeddable';
+import { ControlFrame, ControlFrameProps } from '../../control_frame/control_frame_component';
+
+interface DragInfo {
+ isOver?: boolean;
+ isDragging?: boolean;
+ draggingIndex?: number;
+ index?: number;
+}
+
+export type SortableControlProps = ControlFrameProps & {
+ dragInfo: DragInfo;
+ width: ControlWidth;
+};
+
+/**
+ * A sortable wrapper around the generic control frame.
+ */
+export const SortableControl = (frameProps: SortableControlProps) => {
+ const { embeddableId } = frameProps;
+ const { over, listeners, isSorting, transform, transition, attributes, isDragging, setNodeRef } =
+ useSortable({
+ id: embeddableId,
+ animateLayoutChanges: () => true,
+ });
+
+ frameProps.dragInfo = { ...frameProps.dragInfo, isOver: over?.id === embeddableId, isDragging };
+
+ return (
+
+ );
+};
+
+const SortableControlInner = forwardRef<
+ HTMLButtonElement,
+ SortableControlProps & { style: HTMLAttributes['style'] }
+>(
+ (
+ {
+ embeddableId,
+ controlStyle,
+ container,
+ dragInfo,
+ onRemove,
+ onEdit,
+ style,
+ width,
+ ...dragHandleProps
+ },
+ dragHandleRef
+ ) => {
+ const { isOver, isDragging, draggingIndex, index } = dragInfo;
+
+ const dragHandle = (
+
+ );
+
+ return (
+ (draggingIndex ?? -1),
+ })}
+ style={style}
+ >
+
+
+ );
+ }
+);
+
+/**
+ * A simplified clone version of the control which is dragged. This version only shows
+ * the title, because individual controls can be any size, and dragging a wide item
+ * can be quite cumbersome.
+ */
+export const ControlClone = ({
+ embeddableId,
+ container,
+ width,
+}: {
+ embeddableId: string;
+ container: ControlGroupContainer;
+ width: ControlWidth;
+}) => {
+ const embeddable = useChildEmbeddable({ embeddableId, container });
+ const layout = container.getInput().controlStyle;
+ return (
+
+ {layout === 'twoLine' ? (
+ {embeddable?.getInput().title}
+ ) : undefined}
+
+
+
+
+ {container.getInput().controlStyle === 'oneLine' ? (
+ {embeddable?.getInput().title}
+ ) : undefined}
+
+
+ );
+};
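
The sortable wrapper above leans on `@dnd-kit`, and its JSX is abbreviated here, so the following self-contained sketch shows the same primitives (`DndContext`, `SortableContext`, `useSortable`, `arrayMove`) on a plain list of strings. It is a reference for the pattern, not the component itself:

```tsx
import React, { useState } from 'react';
import { DndContext, closestCenter, PointerSensor, useSensor, useSensors } from '@dnd-kit/core';
import { SortableContext, arrayMove, rectSortingStrategy, useSortable } from '@dnd-kit/sortable';
import { CSS } from '@dnd-kit/utilities';

const SortableItem = ({ id }: { id: string }) => {
  const { attributes, listeners, setNodeRef, transform, transition } = useSortable({ id });
  return (
    <li
      ref={setNodeRef}
      style={{ transform: CSS.Transform.toString(transform), transition, listStyle: 'none' }}
      {...attributes}
      {...listeners}
    >
      {id}
    </li>
  );
};

export const SortableList = () => {
  const [ids, setIds] = useState(['alpha', 'beta', 'gamma']);
  const sensors = useSensors(useSensor(PointerSensor));

  return (
    <DndContext
      sensors={sensors}
      collisionDetection={closestCenter}
      onDragEnd={({ active, over }) => {
        if (over && active.id !== over.id) {
          setIds((current) =>
            arrayMove(current, current.indexOf(String(active.id)), current.indexOf(String(over.id)))
          );
        }
      }}
    >
      <SortableContext items={ids} strategy={rectSortingStrategy}>
        <ul>
          {ids.map((id) => (
            <SortableItem key={id} id={id} />
          ))}
        </ul>
      </SortableContext>
    </DndContext>
  );
};
```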
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/control_group.scss b/src/plugins/presentation_util/public/components/controls/control_group/control_group.scss
new file mode 100644
index 0000000000000..f49efa7aab043
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/control_group.scss
@@ -0,0 +1,184 @@
+$smallControl: $euiSize * 14;
+$mediumControl: $euiSize * 25;
+$largeControl: $euiSize * 50;
+$controlMinWidth: $euiSize * 14;
+
+.controlGroup {
+ margin-left: $euiSizeXS;
+ overflow-x: clip; // sometimes when using auto width, removing a control can cause a horizontal scrollbar to appear.
+ min-height: $euiSize * 4;
+ padding: $euiSize 0;
+}
+
+.controlFrame--cloneWrapper {
+ width: max-content;
+
+ .euiFormLabel {
+ padding-bottom: $euiSizeXS;
+ }
+
+ &-small {
+ width: $smallControl;
+ }
+
+ &-medium {
+ width: $mediumControl;
+ }
+
+ &-large {
+ width: $largeControl;
+ }
+
+ &-twoLine {
+ margin-top: -$euiSize * 1.25;
+ }
+
+ .euiFormLabel, div {
+ cursor: grabbing !important; // prevents cursor flickering while dragging the clone
+ }
+
+ .controlFrame--draggable {
+ cursor: grabbing;
+ height: $euiButtonHeight;
+ align-items: center;
+ border-radius: $euiBorderRadius;
+ @include euiFontSizeS;
+ font-weight: $euiFontWeightSemiBold;
+ @include euiFormControlDefaultShadow;
+ background-color: $euiFormInputGroupLabelBackground;
+ min-width: $controlMinWidth;
+ }
+
+ .controlFrame--formControlLayout, .controlFrame--draggable {
+ &-clone {
+ box-shadow: 0 0 0 1px $euiShadowColor,
+ 0 1px 6px 0 $euiShadowColor;
+ cursor: grabbing !important;
+ }
+
+ .controlFrame--dragHandle {
+ cursor: grabbing;
+ }
+ }
+}
+
+.controlFrame--wrapper {
+ flex-basis: auto;
+ position: relative;
+ display: block;
+
+ .controlFrame--formControlLayout {
+ width: 100%;
+ min-width: $controlMinWidth;
+ transition:background-color .1s, color .1s;
+
+ &__label {
+ @include euiTextTruncate;
+ max-width: 50%;
+ }
+
+ &:not(.controlFrame--formControlLayout-clone) {
+ .controlFrame--dragHandle {
+ cursor: grab;
+ }
+ }
+
+ .controlFrame--control {
+ height: 100%;
+ transition: opacity .1s;
+
+ &.controlFrame--twoLine {
+ width: 100%;
+ }
+ }
+ }
+
+ &-small {
+ width: $smallControl;
+ }
+
+ &-medium {
+ width: $mediumControl;
+ }
+
+ &-large {
+ width: $largeControl;
+ }
+
+ &-insertBefore,
+ &-insertAfter {
+ .controlFrame--formControlLayout:after {
+ content: '';
+ position: absolute;
+ background-color: transparentize($euiColorPrimary, .5);
+ border-radius: $euiBorderRadius;
+ top: 0;
+ bottom: 0;
+ width: 2px;
+ }
+ }
+
+ &-insertBefore {
+ .controlFrame--formControlLayout:after {
+ left: -$euiSizeS;
+ }
+ }
+
+ &-insertAfter {
+ .controlFrame--formControlLayout:after {
+ right: -$euiSizeS;
+ }
+ }
+
+ .controlFrame--floatingActions {
+ visibility: hidden;
+ opacity: 0;
+
+ // slower transition on hover leave in case the user accidentally stops hover
+ transition: visibility .3s, opacity .3s;
+
+ z-index: 1;
+ position: absolute;
+
+ &-oneLine {
+ right:$euiSizeXS;
+ top: -$euiSizeL;
+ padding: $euiSizeXS;
+ border-radius: $euiBorderRadius;
+ background-color: $euiColorEmptyShade;
+ box-shadow: 0 0 0 1pt $euiColorLightShade;
+ }
+
+ &-twoLine {
+ right:$euiSizeXS;
+ top: -$euiSizeXS;
+ }
+ }
+
+ &:hover {
+ .controlFrame--floatingActions {
+ transition:visibility .1s, opacity .1s;
+ visibility: visible;
+ opacity: 1;
+ }
+ }
+
+ &-isDragging {
+ .euiFormRow__labelWrapper {
+ opacity: 0;
+ }
+ .controlFrame--formControlLayout {
+ background-color: $euiColorEmptyShade !important;
+ color: transparent !important;
+ box-shadow: none;
+
+ .euiFormLabel {
+ opacity: 0;
+ }
+
+ .controlFrame--control {
+ opacity: 0;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/control_group_constants.ts b/src/plugins/presentation_util/public/components/controls/control_group/control_group_constants.ts
new file mode 100644
index 0000000000000..3c22b1ffbcd23
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/control_group_constants.ts
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ControlWidth } from '../types';
+import { ControlGroupStrings } from './control_group_strings';
+
+export const CONTROL_GROUP_TYPE = 'control_group';
+
+export const DEFAULT_CONTROL_WIDTH: ControlWidth = 'auto';
+
+export const CONTROL_WIDTH_OPTIONS = [
+ {
+ id: `auto`,
+ label: ControlGroupStrings.management.controlWidth.getAutoWidthTitle(),
+ },
+ {
+ id: `small`,
+ label: ControlGroupStrings.management.controlWidth.getSmallWidthTitle(),
+ },
+ {
+ id: `medium`,
+ label: ControlGroupStrings.management.controlWidth.getMediumWidthTitle(),
+ },
+ {
+ id: `large`,
+ label: ControlGroupStrings.management.controlWidth.getLargeWidthTitle(),
+ },
+];
+
+export const CONTROL_LAYOUT_OPTIONS = [
+ {
+ id: `oneLine`,
+ label: ControlGroupStrings.management.controlStyle.getSingleLineTitle(),
+ },
+ {
+ id: `twoLine`,
+ label: ControlGroupStrings.management.controlStyle.getTwoLineTitle(),
+ },
+];
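
A hedged sketch of how these option arrays are likely consumed in the editor UI: an EUI button group keyed by the option ids. The component and its location are hypothetical; only `CONTROL_WIDTH_OPTIONS`, `DEFAULT_CONTROL_WIDTH`, and `ControlWidth` come from this plugin:

```tsx
import React, { useState } from 'react';
import { EuiButtonGroup } from '@elastic/eui';
import { ControlWidth } from '../types';
import { CONTROL_WIDTH_OPTIONS, DEFAULT_CONTROL_WIDTH } from './control_group_constants';

export const ControlWidthPicker = ({ onChange }: { onChange: (width: ControlWidth) => void }) => {
  const [width, setWidth] = useState<ControlWidth>(DEFAULT_CONTROL_WIDTH);
  return (
    <EuiButtonGroup
      legend="Control width"
      options={CONTROL_WIDTH_OPTIONS}
      idSelected={width}
      onChange={(id) => {
        setWidth(id as ControlWidth);
        onChange(id as ControlWidth);
      }}
    />
  );
};
```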
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/control_group_container.tsx b/src/plugins/presentation_util/public/components/controls/control_group/control_group_container.tsx
new file mode 100644
index 0000000000000..03249889dfdea
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/control_group_container.tsx
@@ -0,0 +1,224 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React from 'react';
+import ReactDOM from 'react-dom';
+import { cloneDeep } from 'lodash';
+
+import {
+ Container,
+ EmbeddableFactory,
+ EmbeddableFactoryNotFoundError,
+} from '../../../../../embeddable/public';
+import {
+ InputControlEmbeddable,
+ InputControlInput,
+ InputControlOutput,
+ IEditableControlFactory,
+ ControlWidth,
+} from '../types';
+import { ControlsService } from '../controls_service';
+import { ControlGroupInput, ControlPanelState } from './types';
+import { ManageControlComponent } from './editor/manage_control';
+import { toMountPoint } from '../../../../../kibana_react/public';
+import { ControlGroup } from './component/control_group_component';
+import { PresentationOverlaysService } from '../../../services/overlays';
+import { CONTROL_GROUP_TYPE, DEFAULT_CONTROL_WIDTH } from './control_group_constants';
+import { ManageControlGroup } from './editor/manage_control_group_component';
+import { OverlayRef } from '../../../../../../core/public';
+import { ControlGroupStrings } from './control_group_strings';
+
+export class ControlGroupContainer extends Container<ControlGroupInput> {
+ public readonly type = CONTROL_GROUP_TYPE;
+
+ private nextControlWidth: ControlWidth = DEFAULT_CONTROL_WIDTH;
+
+ constructor(
+ initialInput: ControlGroupInput,
+ private readonly controlsService: ControlsService,
+ private readonly overlays: PresentationOverlaysService,
+ parent?: Container
+ ) {
+ super(initialInput, { embeddableLoaded: {} }, controlsService.getControlFactory, parent);
+ this.overlays = overlays;
+ this.controlsService = controlsService;
+ }
+
+ protected createNewPanelState(
+ factory: EmbeddableFactory,
+ partial: Partial = {}
+ ): ControlPanelState {
+ const panelState = super.createNewPanelState(factory, partial);
+ return {
+ order: 1,
+ width: this.nextControlWidth,
+ ...panelState,
+ } as ControlPanelState;
+ }
+
+ protected getInheritedInput(id: string): InputControlInput {
+ const { filters, query, timeRange, inheritParentState } = this.getInput();
+ return {
+ filters: inheritParentState.useFilters ? filters : undefined,
+ query: inheritParentState.useQuery ? query : undefined,
+ timeRange: inheritParentState.useTimerange ? timeRange : undefined,
+ id,
+ };
+ }
+
+ public createNewControl = async (type: string) => {
+ const factory = this.controlsService.getControlFactory(type);
+ if (!factory) throw new EmbeddableFactoryNotFoundError(type);
+
+    const initialInputPromise = new Promise<Partial<InputControlInput>>((resolve, reject) => {
+      let inputToReturn: Partial<InputControlInput> = {};
+
+ const onCancel = (ref: OverlayRef) => {
+ this.overlays
+ .openConfirm(ControlGroupStrings.management.discardNewControl.getSubtitle(), {
+ confirmButtonText: ControlGroupStrings.management.discardNewControl.getConfirm(),
+ cancelButtonText: ControlGroupStrings.management.discardNewControl.getCancel(),
+ title: ControlGroupStrings.management.discardNewControl.getTitle(),
+ buttonColor: 'danger',
+ })
+ .then((confirmed) => {
+ if (confirmed) {
+ reject();
+ ref.close();
+ }
+ });
+ };
+
+ const flyoutInstance = this.overlays.openFlyout(
+ toMountPoint(
+ (inputToReturn.title = newTitle)}
+ updateWidth={(newWidth) => (this.nextControlWidth = newWidth)}
+ controlEditorComponent={(factory as IEditableControlFactory).getControlEditor?.({
+ onChange: (partialInput) => {
+ inputToReturn = { ...inputToReturn, ...partialInput };
+ },
+ })}
+ onSave={() => {
+ resolve(inputToReturn);
+ flyoutInstance.close();
+ }}
+ onCancel={() => onCancel(flyoutInstance)}
+ />
+ ),
+ {
+ onClose: (flyout) => onCancel(flyout),
+ }
+ );
+ });
+ initialInputPromise.then(
+ async (explicitInput) => {
+ await this.addNewEmbeddable(type, explicitInput);
+ },
+ () => {} // swallow promise rejection because it can be part of normal flow
+ );
+ };
+
+ public editControl = async (embeddableId: string) => {
+ const panel = this.getInput().panels[embeddableId];
+ const factory = this.getFactory(panel.type);
+ const embeddable = await this.untilEmbeddableLoaded(embeddableId);
+
+ if (!factory) throw new EmbeddableFactoryNotFoundError(panel.type);
+
+ const initialExplicitInput = cloneDeep(panel.explicitInput);
+ const initialWidth = panel.width;
+
+ const onCancel = (ref: OverlayRef) => {
+ this.overlays
+ .openConfirm(ControlGroupStrings.management.discardChanges.getSubtitle(), {
+ confirmButtonText: ControlGroupStrings.management.discardChanges.getConfirm(),
+ cancelButtonText: ControlGroupStrings.management.discardChanges.getCancel(),
+ title: ControlGroupStrings.management.discardChanges.getTitle(),
+ buttonColor: 'danger',
+ })
+ .then((confirmed) => {
+ if (confirmed) {
+ embeddable.updateInput(initialExplicitInput);
+ this.updateInput({
+ panels: {
+ ...this.getInput().panels,
+ [embeddableId]: { ...this.getInput().panels[embeddableId], width: initialWidth },
+ },
+ });
+ ref.close();
+ }
+ });
+ };
+
+ const flyoutInstance = this.overlays.openFlyout(
+ toMountPoint(
+ <ManageControlComponent
+ width={panel.width}
+ removeControl={() => this.removeEmbeddable(embeddableId)}
+ updateTitle={(newTitle) => embeddable.updateInput({ title: newTitle })}
+ controlEditorComponent={(factory as IEditableControlFactory).getControlEditor?.({
+ onChange: (partialInput) => embeddable.updateInput(partialInput),
+ initialInput: embeddable.getInput(),
+ })}
+ onCancel={() => onCancel(flyoutInstance)}
+ onSave={() => flyoutInstance.close()}
+ updateWidth={(newWidth) =>
+ this.updateInput({
+ panels: {
+ ...this.getInput().panels,
+ [embeddableId]: { ...this.getInput().panels[embeddableId], width: newWidth },
+ },
+ })
+ }
+ />
+ ),
+ {
+ onClose: (flyout) => onCancel(flyout),
+ }
+ );
+ };
+
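+ // Opens a flyout with group-level settings: control style, per-panel widths, and deleting all controls.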
+ public editControlGroup = () => {
+ const flyoutInstance = this.overlays.openFlyout(
+ toMountPoint(
+ <EditControlGroup
+ controlStyle={this.getInput().controlStyle}
+ setControlStyle={(newStyle) => this.updateInput({ controlStyle: newStyle })}
+ deleteAllEmbeddables={() => {
+ this.overlays
+ .openConfirm(ControlGroupStrings.management.deleteAllControls.getSubtitle(), {
+ confirmButtonText: ControlGroupStrings.management.deleteAllControls.getConfirm(),
+ cancelButtonText: ControlGroupStrings.management.deleteAllControls.getCancel(),
+ title: ControlGroupStrings.management.deleteAllControls.getTitle(),
+ buttonColor: 'danger',
+ })
+ .then((confirmed) => {
+ if (confirmed) {
+ Object.keys(this.getInput().panels).forEach((id) => this.removeEmbeddable(id));
+ flyoutInstance.close();
+ }
+ });
+ }}
+ setAllPanelWidths={(newWidth) => {
+ const newPanels = cloneDeep(this.getInput().panels);
+ Object.values(newPanels).forEach((panel) => (panel.width = newWidth));
+ this.updateInput({ panels: newPanels });
+ }}
+ panels={this.getInput().panels}
+ />
+ )
+ );
+ };
+
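+ // Mounts the control group's React UI into the given DOM node.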
+ public render(dom: HTMLElement) {
+ ReactDOM.render(<ControlGroup controlGroupContainer={this} />, dom);
+ }
+}
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/control_group_container_factory.ts b/src/plugins/presentation_util/public/components/controls/control_group/control_group_container_factory.ts
new file mode 100644
index 0000000000000..97ef48e6b240c
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/control_group_container_factory.ts
@@ -0,0 +1,72 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import {
+ Container,
+ ContainerOutput,
+ EmbeddableFactory,
+ EmbeddableFactoryDefinition,
+ ErrorEmbeddable,
+} from '../../../../../embeddable/public';
+import { ControlGroupInput } from './types';
+import { ControlsService } from '../controls_service';
+import { ControlGroupStrings } from './control_group_strings';
+import { CONTROL_GROUP_TYPE } from './control_group_constants';
+import { ControlGroupContainer } from './control_group_container';
+import { PresentationOverlaysService } from '../../../services/overlays';
+
+export type DashboardContainerFactory = EmbeddableFactory<
+ ControlGroupInput,
+ ContainerOutput,
+ ControlGroupContainer
+>;
+export class ControlGroupContainerFactory
+ implements EmbeddableFactoryDefinition
+{
+ public readonly isContainerType = true;
+ public readonly type = CONTROL_GROUP_TYPE;
+ public readonly controlsService: ControlsService;
+ private readonly overlays: PresentationOverlaysService;
+
+ constructor(controlsService: ControlsService, overlays: PresentationOverlaysService) {
+ this.overlays = overlays;
+ this.controlsService = controlsService;
+ }
+
+ public isEditable = async () => false;
+
+ public readonly getDisplayName = () => {
+ return ControlGroupStrings.getEmbeddableTitle();
+ };
+
+ public getDefaultInput(): Partial<ControlGroupInput> {
+ return {
+ panels: {},
+ inheritParentState: {
+ useFilters: true,
+ useQuery: true,
+ useTimerange: true,
+ },
+ };
+ }
+
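+ // The container is given the controls service to build child controls and the overlays service to open editor flyouts and confirmations.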
+ public create = async (
+ initialInput: ControlGroupInput,
+ parent?: Container
+ ): Promise<ControlGroupContainer | ErrorEmbeddable> => {
+ return new ControlGroupContainer(initialInput, this.controlsService, this.overlays, parent);
+ };
+}
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/control_group_strings.ts b/src/plugins/presentation_util/public/components/controls/control_group/control_group_strings.ts
new file mode 100644
index 0000000000000..78e50d8651931
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/control_group_strings.ts
@@ -0,0 +1,176 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const ControlGroupStrings = {
+ getEmbeddableTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.title', {
+ defaultMessage: 'Control group',
+ }),
+ manageControl: {
+ getFlyoutTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.manageControl.flyoutTitle', {
+ defaultMessage: 'Manage control',
+ }),
+ getTitleInputTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.manageControl.titleInputTitle', {
+ defaultMessage: 'Title',
+ }),
+ getWidthInputTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.manageControl.widthInputTitle', {
+ defaultMessage: 'Control width',
+ }),
+ getSaveChangesTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.manageControl.saveChangesTitle', {
+ defaultMessage: 'Save and close',
+ }),
+ getCancelTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.manageControl.cancelTitle', {
+ defaultMessage: 'Cancel',
+ }),
+ },
+ management: {
+ getAddControlTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.addControl', {
+ defaultMessage: 'Add control',
+ }),
+ getManageButtonTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.buttonTitle', {
+ defaultMessage: 'Manage controls',
+ }),
+ getFlyoutTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.flyoutTitle', {
+ defaultMessage: 'Manage controls',
+ }),
+ getDesignTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.designTitle', {
+ defaultMessage: 'Design',
+ }),
+ getWidthTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.widthTitle', {
+ defaultMessage: 'Width',
+ }),
+ getLayoutTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layoutTitle', {
+ defaultMessage: 'Layout',
+ }),
+ getDeleteButtonTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.delete', {
+ defaultMessage: 'Delete control',
+ }),
+ getDeleteAllButtonTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteAll', {
+ defaultMessage: 'Delete all',
+ }),
+ controlWidth: {
+ getChangeAllControlWidthsTitle: () =>
+ i18n.translate(
+ 'presentationUtil.inputControls.controlGroup.management.layout.changeAllControlWidths',
+ {
+ defaultMessage: 'Set width for all controls',
+ }
+ ),
+ getWidthSwitchLegend: () =>
+ i18n.translate(
+ 'presentationUtil.inputControls.controlGroup.management.layout.controlWidthLegend',
+ {
+ defaultMessage: 'Change individual control width',
+ }
+ ),
+ getAutoWidthTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.auto', {
+ defaultMessage: 'Auto',
+ }),
+ getSmallWidthTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.small', {
+ defaultMessage: 'Small',
+ }),
+ getMediumWidthTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.medium', {
+ defaultMessage: 'Medium',
+ }),
+ getLargeWidthTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.large', {
+ defaultMessage: 'Large',
+ }),
+ },
+ controlStyle: {
+ getDesignSwitchLegend: () =>
+ i18n.translate(
+ 'presentationUtil.inputControls.controlGroup.management.layout.designSwitchLegend',
+ {
+ defaultMessage: 'Switch control designs',
+ }
+ ),
+ getSingleLineTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.singleLine', {
+ defaultMessage: 'Single line layout',
+ }),
+ getTwoLineTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.layout.twoLine', {
+ defaultMessage: 'Two line layout',
+ }),
+ },
+ deleteAllControls: {
+ getTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteAll.title', {
+ defaultMessage: 'Delete all?',
+ }),
+ getSubtitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteAll.sub', {
+ defaultMessage: 'Controls are not recoverable once removed.',
+ }),
+ getConfirm: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteAll.confirm', {
+ defaultMessage: 'Delete',
+ }),
+ getCancel: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteAll.cancel', {
+ defaultMessage: 'Cancel',
+ }),
+ },
+ discardChanges: {
+ getTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.discard.title', {
+ defaultMessage: 'Discard?',
+ }),
+ getSubtitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.discard.sub', {
+ defaultMessage:
+ 'Discard changes to this control? Controls are not recoverable once removed.',
+ }),
+ getConfirm: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.discard.confirm', {
+ defaultMessage: 'Discard',
+ }),
+ getCancel: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.discard.cancel', {
+ defaultMessage: 'Cancel',
+ }),
+ },
+ discardNewControl: {
+ getTitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteNew.title', {
+ defaultMessage: 'Discard?',
+ }),
+ getSubtitle: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteNew.sub', {
+ defaultMessage: 'Discard new control? Controls are not recoverable once removed.',
+ }),
+ getConfirm: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteNew.confirm', {
+ defaultMessage: 'Discard',
+ }),
+ getCancel: () =>
+ i18n.translate('presentationUtil.inputControls.controlGroup.management.deleteNew.cancel', {
+ defaultMessage: 'Cancel',
+ }),
+ },
+ },
+};
diff --git a/src/plugins/presentation_util/public/components/controls/control_group/editor/manage_control.tsx b/src/plugins/presentation_util/public/components/controls/control_group/editor/manage_control.tsx
new file mode 100644
index 0000000000000..6d80a6e0b31f6
--- /dev/null
+++ b/src/plugins/presentation_util/public/components/controls/control_group/editor/manage_control.tsx
@@ -0,0 +1,150 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React, { useEffect, useState } from 'react';
+import {
+ EuiFlyoutHeader,
+ EuiButtonGroup,
+ EuiFlyoutBody,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiTitle,
+ EuiFieldText,
+ EuiFlyoutFooter,
+ EuiButton,
+ EuiFormRow,
+ EuiForm,
+ EuiButtonEmpty,
+ EuiSpacer,
+} from '@elastic/eui';
+
+import { ControlGroupStrings } from '../control_group_strings';
+import { ControlEditorComponent, ControlWidth } from '../../types';
+import { CONTROL_WIDTH_OPTIONS } from '../control_group_constants';
+
+interface ManageControlProps {
+ title?: string;
+ onSave: () => void;
+ width: ControlWidth;
+ onCancel: () => void;
+ removeControl?: () => void;
+ controlEditorComponent?: ControlEditorComponent;
+ updateTitle: (title: string) => void;
+ updateWidth: (newWidth: ControlWidth) => void;
+}
+
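+ // Flyout body used when creating or editing a single control: edits the title and width, plus any control-type specific editor supplied by the factory.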
+export const ManageControlComponent = ({
+ controlEditorComponent,
+ removeControl,
+ updateTitle,
+ updateWidth,
+ onCancel,
+ onSave,
+ title,
+ width,
+}: ManageControlProps) => {
+ const [currentTitle, setCurrentTitle] = useState(title);
+ const [currentWidth, setCurrentWidth] = useState(width);
+
+ const [controlEditorValid, setControlEditorValid] = useState(false);
+ const [editorValid, setEditorValid] = useState(false);
+
+ useEffect(() => setEditorValid(Boolean(currentTitle)), [currentTitle]);
+
+ return (
+ <>
+
+
+