diff --git a/.bazelignore b/.bazelignore
index ac7a2d15a7aa4..ec61c2f3e7e75 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -8,7 +8,10 @@
.idea
.teamcity
.yarn-local-mirror
-/bazel
+bazel-bin
+bazel-kibana
+bazel-out
+bazel-testlogs
build
node_modules
target
diff --git a/.bazelrc.common b/.bazelrc.common
index fb8e8e86b9ef5..20a41c4cde9a0 100644
--- a/.bazelrc.common
+++ b/.bazelrc.common
@@ -18,17 +18,16 @@ build --disk_cache=~/.bazel-cache/disk-cache
build --repository_cache=~/.bazel-cache/repository-cache
# Bazel will create symlinks from the workspace directory to output artifacts.
-# Build results will be placed in a directory called "bazel/bin"
+# Build results will be placed in a directory called "bazel-bin"
# This will still create a bazel-out symlink in
# the project directory, which must be excluded from the
# editor's search path.
-build --symlink_prefix=bazel/
# To disable the symlinks altogether (including bazel-out) we can use
# build --symlink_prefix=/
# however this makes it harder to find outputs.
# Prevents the creation of bazel-out dir
-build --experimental_no_product_name_out_symlink
+# build --experimental_no_product_name_out_symlink
# Make direct file system calls to create symlink trees
build --experimental_inprocess_symlink_creation
@@ -83,7 +82,7 @@ test:debug --test_output=streamed --test_strategy=exclusive --test_timeout=9999
run:debug --define=VERBOSE_LOGS=1 -- --node_options=--inspect-brk
# The following option will change the build output of certain rules such as terser and may not be desirable in all cases
# It will also output both the repo cache and action cache to a folder inside the repo
-build:debug --compilation_mode=dbg --show_result=1 --disk_cache=bazel/disk-cache --repository_cache=bazel/repository-cache
+build:debug --compilation_mode=dbg --show_result=1
# Turn off legacy external runfiles
# This prevents accidentally depending on this feature, which Bazel will remove.
diff --git a/.ci/packer_cache.sh b/.ci/packer_cache.sh
index 5317b2c500b49..a63c2825816bd 100755
--- a/.ci/packer_cache.sh
+++ b/.ci/packer_cache.sh
@@ -2,8 +2,10 @@
set -e
-# cache image used by kibana-load-testing project
-docker pull "maven:3.6.3-openjdk-8-slim"
+if [[ "$(which docker)" != "" && "$(command uname -m)" != "aarch64" ]]; then
+ # cache image used by kibana-load-testing project
+ docker pull "maven:3.6.3-openjdk-8-slim"
+fi
./.ci/packer_cache_for_branch.sh master
./.ci/packer_cache_for_branch.sh 7.x
diff --git a/.eslintignore b/.eslintignore
index 4559711bb9dd3..4058d971b7642 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -21,19 +21,13 @@ snapshots.js
# plugin overrides
/src/core/lib/kbn_internal_native_observable
-/src/legacy/plugin_discovery/plugin_pack/__tests__/fixtures/plugins/broken
/src/plugins/data/common/es_query/kuery/ast/_generated_/**
/src/plugins/vis_type_timelion/common/_generated_/**
-/x-pack/legacy/plugins/**/__tests__/fixtures/**
/x-pack/plugins/apm/e2e/tmp/*
/x-pack/plugins/canvas/canvas_plugin
/x-pack/plugins/canvas/shareable_runtime/build
/x-pack/plugins/canvas/storybook/build
/x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/**
-/x-pack/legacy/plugins/infra/common/graphql/types.ts
-/x-pack/legacy/plugins/infra/public/graphql/types.ts
-/x-pack/legacy/plugins/infra/server/graphql/types.ts
-/x-pack/legacy/plugins/maps/public/vendor/**
# package overrides
/packages/elastic-eslint-config-kibana
@@ -48,4 +42,4 @@ snapshots.js
/packages/kbn-monaco/src/painless/antlr
# Bazel
-/bazel
+/bazel-*
diff --git a/.eslintrc.js b/.eslintrc.js
index a7b45534391c0..19ba7cacc3c44 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -410,11 +410,7 @@ module.exports = {
errorMessage: `Common code can not import from server or public, use a common directory.`,
},
{
- target: [
- 'src/legacy/**/*',
- '(src|x-pack)/plugins/**/(public|server)/**/*',
- 'examples/**/*',
- ],
+ target: ['(src|x-pack)/plugins/**/(public|server)/**/*', 'examples/**/*'],
from: [
'src/core/public/**/*',
'!src/core/public/index.ts', // relative import
@@ -428,8 +424,6 @@ module.exports = {
'!src/core/server/mocks{,.ts}',
'!src/core/server/types{,.ts}',
'!src/core/server/test_utils{,.ts}',
- '!src/core/server/utils', // ts alias
- '!src/core/server/utils/**/*',
// for absolute imports until fixed in
// https://github.com/elastic/kibana/issues/36096
'!src/core/server/*.test.mocks{,.ts}',
@@ -442,7 +436,6 @@ module.exports = {
},
{
target: [
- 'src/legacy/**/*',
'(src|x-pack)/plugins/**/(public|server)/**/*',
'examples/**/*',
'!(src|x-pack)/**/*.test.*',
@@ -482,7 +475,7 @@ module.exports = {
},
{
target: ['src/core/**/*'],
- from: ['plugins/**/*', 'src/plugins/**/*', 'src/legacy/ui/**/*'],
+ from: ['plugins/**/*', 'src/plugins/**/*'],
errorMessage: 'The core cannot depend on any plugins.',
},
{
@@ -490,19 +483,6 @@ module.exports = {
from: ['ui/**/*'],
errorMessage: 'Plugins cannot import legacy UI code.',
},
- {
- from: ['src/legacy/ui/**/*', 'ui/**/*'],
- target: [
- 'test/plugin_functional/plugins/**/public/np_ready/**/*',
- 'test/plugin_functional/plugins/**/server/np_ready/**/*',
- ],
- allowSameFolder: true,
- errorMessage:
- 'NP-ready code should not import from /src/legacy/ui/** folder. ' +
- 'Instead of importing from /src/legacy/ui/** deeply within a np_ready folder, ' +
- 'import those things once at the top level of your plugin and pass those down, just ' +
- 'like you pass down `core` and `plugins` objects.',
- },
],
},
],
@@ -1180,7 +1160,7 @@ module.exports = {
pathGroups: [
{
pattern:
- '{../../../../../../,../../../../../,../../../../,../../../,../../,../}{common/,*}__mocks__{*,/**}',
+ '{../../../../../../,../../../../../,../../../../,../../../,../../,../,./}{common/,*}__mocks__{*,/**}',
group: 'unknown',
},
{
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f27885c1e32c3..a8dcafeb7753c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -107,7 +107,6 @@
/x-pack/plugins/dashboard_enhanced/ @elastic/kibana-presentation
/x-pack/test/functional/apps/canvas/ @elastic/kibana-presentation
#CC# /src/plugins/kibana_react/public/code_editor/ @elastic/kibana-presentation
-#CC# /x-pack/legacy/plugins/canvas/ @elastic/kibana-presentation
#CC# /x-pack/plugins/dashboard_mode @elastic/kibana-presentation
@@ -146,7 +145,6 @@
/x-pack/test/visual_regression/tests/maps/index.js @elastic/kibana-gis
#CC# /src/plugins/maps_legacy/ @elastic/kibana-gis
#CC# /x-pack/plugins/file_upload @elastic/kibana-gis
-#CC# /x-pack/plugins/maps_legacy_licensing @elastic/kibana-gis
/src/plugins/tile_map/ @elastic/kibana-gis
/src/plugins/region_map/ @elastic/kibana-gis
@@ -165,7 +163,6 @@
/packages/kbn-utils/ @elastic/kibana-operations
/packages/kbn-cli-dev-mode/ @elastic/kibana-operations
/src/cli/keystore/ @elastic/kibana-operations
-/src/legacy/server/warnings/ @elastic/kibana-operations
/.ci/es-snapshots/ @elastic/kibana-operations
/.github/workflows/ @elastic/kibana-operations
/vars/ @elastic/kibana-operations
@@ -202,9 +199,6 @@
/packages/kbn-legacy-logging/ @elastic/kibana-core
/packages/kbn-crypto/ @elastic/kibana-core
/packages/kbn-http-tools/ @elastic/kibana-core
-/src/legacy/server/config/ @elastic/kibana-core
-/src/legacy/server/http/ @elastic/kibana-core
-/src/legacy/server/logging/ @elastic/kibana-core
/src/plugins/status_page/ @elastic/kibana-core
/src/plugins/saved_objects_management/ @elastic/kibana-core
/src/dev/run_check_published_api_changes.ts @elastic/kibana-core
@@ -214,9 +208,6 @@
/src/plugins/kibana_overview/ @elastic/kibana-core
/x-pack/plugins/global_search_bar/ @elastic/kibana-core
#CC# /src/core/server/csp/ @elastic/kibana-core
-#CC# /src/legacy/server/config/ @elastic/kibana-core
-#CC# /src/legacy/server/http/ @elastic/kibana-core
-#CC# /src/legacy/ui/public/documentation_links @elastic/kibana-core
#CC# /src/plugins/legacy_export/ @elastic/kibana-core
#CC# /src/plugins/xpack_legacy/ @elastic/kibana-core
#CC# /src/plugins/saved_objects/ @elastic/kibana-core
@@ -348,6 +339,7 @@
# Security Solution sub teams
/x-pack/plugins/case @elastic/security-threat-hunting
+/x-pack/plugins/timelines @elastic/security-threat-hunting
/x-pack/test/case_api_integration @elastic/security-threat-hunting
/x-pack/plugins/lists @elastic/security-detections-response
diff --git a/.github/ISSUE_TEMPLATE/v8_breaking_change.md b/.github/ISSUE_TEMPLATE/v8_breaking_change.md
index 86e321990d05f..67d2ee2d3286b 100644
--- a/.github/ISSUE_TEMPLATE/v8_breaking_change.md
+++ b/.github/ISSUE_TEMPLATE/v8_breaking_change.md
@@ -2,7 +2,7 @@
name: 8.0 Breaking change
about: Breaking changes from 7.x -> 8.0
title: "[Breaking change]"
-labels: Team:Elasticsearch UI, Feature:Upgrade Assistant, Breaking Change
+labels: Feature:Upgrade Assistant, Breaking Change
assignees: ''
---
@@ -12,8 +12,8 @@ assignees: ''
******* LABEL CHANGES NECESSARY ********
****************************************
-Please add a "NeededFor:${TeamName}" label to denote the team that is
-requesting the breaking change to be surfaced in the Upgrade Assistant.
+Please add a team label to denote the team that the
+breaking change is applicable to.
-->
@@ -30,16 +30,14 @@ requesting the breaking change to be surfaced in the Upgrade Assistant.
-**How can we programmatically determine whether the cluster is affected by this breaking change?**
+**Can the change be registered with the [Kibana deprecation service](https://github.com/elastic/kibana/blob/master/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md)?**
-**What can users do to address the change manually?**
+
-
-
-**How could we make migration easier with the Upgrade Assistant?**
-
-
+
**Are there any edge cases?**
diff --git a/.github/relabel.yml b/.github/relabel.yml
new file mode 100644
index 0000000000000..a737be356ce81
--- /dev/null
+++ b/.github/relabel.yml
@@ -0,0 +1,3 @@
+issues:
+ - missingLabel: needs-team
+ regex: ^((\:ml)|(Team:.*))$
\ No newline at end of file
diff --git a/.github/workflows/project-assigner.yml b/.github/workflows/project-assigner.yml
index d9d2d6d1ddb8b..37d04abda7530 100644
--- a/.github/workflows/project-assigner.yml
+++ b/.github/workflows/project-assigner.yml
@@ -11,7 +11,7 @@ jobs:
uses: elastic/github-actions/project-assigner@v2.0.0
id: project_assigner
with:
- issue-mappings: '[{"label": "Feature:Lens", "projectNumber": 32, "columnName": "Long-term goals"}, {"label": "Feature:Canvas", "projectNumber": 38, "columnName": "Inbox"}, {"label": "Feature:Dashboard", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Drilldowns", "projectNumber": 68, "columnName": "Inbox"}]'
+ issue-mappings: '[{"label": "Feature:Lens", "projectNumber": 32, "columnName": "Long-term goals"}, {"label": "Feature:Canvas", "projectNumber": 38, "columnName": "Inbox"}, {"label": "Feature:Dashboard", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Drilldowns", "projectNumber": 68, "columnName": "Inbox"}, {"label": "Feature:Input Controls", "projectNumber": 72, "columnName": "Inbox"}]'
ghToken: ${{ secrets.PROJECT_ASSIGNER_TOKEN }}
diff --git a/.gitignore b/.gitignore
index fbe28b8f1e77c..ce8fd38b18a92 100644
--- a/.gitignore
+++ b/.gitignore
@@ -75,5 +75,6 @@ report.asciidoc
.yarn-local-mirror
# Bazel
-/bazel
-/.bazelrc.user
+bazel
+bazel-*
+.bazelrc.user
diff --git a/.stylelintignore b/.stylelintignore
index a48b3adfa3632..72d9d5104a0e9 100644
--- a/.stylelintignore
+++ b/.stylelintignore
@@ -1,3 +1,4 @@
x-pack/plugins/canvas/shareable_runtime/**/*.s+(a|c)ss
build
target
+bazel-*
diff --git a/BUILD.bazel b/BUILD.bazel
index 38a478565a4af..4502daeaacb59 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -2,6 +2,7 @@
# other packages builds and need to be included as inputs
exports_files(
[
+ "tsconfig.base.json",
"tsconfig.json",
"package.json"
],
diff --git a/docs/api/actions-and-connectors/legacy/create.asciidoc b/docs/api/actions-and-connectors/legacy/create.asciidoc
index af4feddcb80fb..0361c4222986b 100644
--- a/docs/api/actions-and-connectors/legacy/create.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/create.asciidoc
@@ -4,7 +4,7 @@
Legacy Create connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Creates a connector.
diff --git a/docs/api/actions-and-connectors/legacy/delete.asciidoc b/docs/api/actions-and-connectors/legacy/delete.asciidoc
index 170fceba2d157..9ec2c0d392a96 100644
--- a/docs/api/actions-and-connectors/legacy/delete.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/delete.asciidoc
@@ -4,7 +4,7 @@
Legacy Delete connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Deletes a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/execute.asciidoc b/docs/api/actions-and-connectors/legacy/execute.asciidoc
index 200844ab72f17..f01aa1585b192 100644
--- a/docs/api/actions-and-connectors/legacy/execute.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/execute.asciidoc
@@ -4,7 +4,7 @@
Legacy Execute connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Executes a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/get.asciidoc b/docs/api/actions-and-connectors/legacy/get.asciidoc
index 1b138fb7032e0..6413fce558f5b 100644
--- a/docs/api/actions-and-connectors/legacy/get.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/get.asciidoc
@@ -4,7 +4,7 @@
Legacy Get connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves a connector by ID.
diff --git a/docs/api/actions-and-connectors/legacy/get_all.asciidoc b/docs/api/actions-and-connectors/legacy/get_all.asciidoc
index ba235955c005e..191eccb6f8d39 100644
--- a/docs/api/actions-and-connectors/legacy/get_all.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/get_all.asciidoc
@@ -4,7 +4,7 @@
Legacy Get all connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves all connectors.
diff --git a/docs/api/actions-and-connectors/legacy/list.asciidoc b/docs/api/actions-and-connectors/legacy/list.asciidoc
index 8acfd5415af57..d78838dcbe974 100644
--- a/docs/api/actions-and-connectors/legacy/list.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/list.asciidoc
@@ -4,7 +4,7 @@
Legacy List all connector types
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieves a list of all connector types.
diff --git a/docs/api/actions-and-connectors/legacy/update.asciidoc b/docs/api/actions-and-connectors/legacy/update.asciidoc
index 517daf9a40dca..6a33e765cf063 100644
--- a/docs/api/actions-and-connectors/legacy/update.asciidoc
+++ b/docs/api/actions-and-connectors/legacy/update.asciidoc
@@ -4,7 +4,7 @@
Legacy Update connector
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Updates the attributes for an existing connector.
diff --git a/docs/api/alerting/legacy/create.asciidoc b/docs/api/alerting/legacy/create.asciidoc
index 5c594d64a3f45..8363569541356 100644
--- a/docs/api/alerting/legacy/create.asciidoc
+++ b/docs/api/alerting/legacy/create.asciidoc
@@ -4,7 +4,7 @@
Legacy create alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Create {kib} alerts.
diff --git a/docs/api/alerting/legacy/delete.asciidoc b/docs/api/alerting/legacy/delete.asciidoc
index 68851973cab5b..2af420f2bc34e 100644
--- a/docs/api/alerting/legacy/delete.asciidoc
+++ b/docs/api/alerting/legacy/delete.asciidoc
@@ -4,7 +4,7 @@
Legacy delete alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Permanently remove an alert.
diff --git a/docs/api/alerting/legacy/disable.asciidoc b/docs/api/alerting/legacy/disable.asciidoc
index 56e06371570c2..1a9b928bfba78 100644
--- a/docs/api/alerting/legacy/disable.asciidoc
+++ b/docs/api/alerting/legacy/disable.asciidoc
@@ -4,7 +4,7 @@
Legacy disable alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Disable an alert.
diff --git a/docs/api/alerting/legacy/enable.asciidoc b/docs/api/alerting/legacy/enable.asciidoc
index 913d96a84352b..da4b466d6fda4 100644
--- a/docs/api/alerting/legacy/enable.asciidoc
+++ b/docs/api/alerting/legacy/enable.asciidoc
@@ -4,7 +4,7 @@
Legacy enable alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Enable an alert.
diff --git a/docs/api/alerting/legacy/find.asciidoc b/docs/api/alerting/legacy/find.asciidoc
index 94d9bc425bd21..7c493e9c8eb5b 100644
--- a/docs/api/alerting/legacy/find.asciidoc
+++ b/docs/api/alerting/legacy/find.asciidoc
@@ -4,7 +4,7 @@
Legacy find alerts
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve a paginated set of alerts based on condition.
diff --git a/docs/api/alerting/legacy/get.asciidoc b/docs/api/alerting/legacy/get.asciidoc
index f1014d18e8774..ee0f52f51005a 100644
--- a/docs/api/alerting/legacy/get.asciidoc
+++ b/docs/api/alerting/legacy/get.asciidoc
@@ -4,7 +4,7 @@
Legacy get alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve an alert by ID.
diff --git a/docs/api/alerting/legacy/health.asciidoc b/docs/api/alerting/legacy/health.asciidoc
index b25307fb5efd1..68f04cc715bd7 100644
--- a/docs/api/alerting/legacy/health.asciidoc
+++ b/docs/api/alerting/legacy/health.asciidoc
@@ -4,7 +4,7 @@
Legacy get Alerting framework health
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve the health status of the Alerting framework.
diff --git a/docs/api/alerting/legacy/list.asciidoc b/docs/api/alerting/legacy/list.asciidoc
index e9ef3bbc27cd9..be37be36cd0e8 100644
--- a/docs/api/alerting/legacy/list.asciidoc
+++ b/docs/api/alerting/legacy/list.asciidoc
@@ -4,7 +4,7 @@
Legacy list all alert types
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Retrieve a list of all alert types.
diff --git a/docs/api/alerting/legacy/mute.asciidoc b/docs/api/alerting/legacy/mute.asciidoc
index dff42f5911e53..cf7adc446a2fd 100644
--- a/docs/api/alerting/legacy/mute.asciidoc
+++ b/docs/api/alerting/legacy/mute.asciidoc
@@ -4,7 +4,7 @@
Legacy mute alert instance
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Mute an alert instance.
diff --git a/docs/api/alerting/legacy/mute_all.asciidoc b/docs/api/alerting/legacy/mute_all.asciidoc
index df89fa15d1590..bc865480340e2 100644
--- a/docs/api/alerting/legacy/mute_all.asciidoc
+++ b/docs/api/alerting/legacy/mute_all.asciidoc
@@ -4,7 +4,7 @@
Legacy mute all alert instances
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Mute all alert instances.
diff --git a/docs/api/alerting/legacy/unmute.asciidoc b/docs/api/alerting/legacy/unmute.asciidoc
index 0be7e40dc1a19..300cf71b57a01 100644
--- a/docs/api/alerting/legacy/unmute.asciidoc
+++ b/docs/api/alerting/legacy/unmute.asciidoc
@@ -4,7 +4,7 @@
Legacy unmute alert instance
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Unmute an alert instance.
diff --git a/docs/api/alerting/legacy/unmute_all.asciidoc b/docs/api/alerting/legacy/unmute_all.asciidoc
index 8687c2d2fe8bb..3b0a7afe5f44d 100644
--- a/docs/api/alerting/legacy/unmute_all.asciidoc
+++ b/docs/api/alerting/legacy/unmute_all.asciidoc
@@ -4,7 +4,7 @@
Legacy unmute all alert instances
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Unmute all alert instances.
diff --git a/docs/api/alerting/legacy/update.asciidoc b/docs/api/alerting/legacy/update.asciidoc
index bffdf26c31400..b9cce995660e6 100644
--- a/docs/api/alerting/legacy/update.asciidoc
+++ b/docs/api/alerting/legacy/update.asciidoc
@@ -4,7 +4,7 @@
Legacy update alert
++++
-WARNING: Deprecated in 7.13.0. Use <> instead.
+deprecated::[7.13.0,Use <> instead.]
Update the attributes for an existing alert.
diff --git a/docs/developer/architecture/core/application_service.asciidoc b/docs/developer/architecture/core/application_service.asciidoc
new file mode 100644
index 0000000000000..ba3c6bbed72be
--- /dev/null
+++ b/docs/developer/architecture/core/application_service.asciidoc
@@ -0,0 +1,40 @@
+[[application-service]]
+== Application service
+Kibana has migrated to be a Single Page Application. Plugins should use `Application service` API to instruct Kibana that an application should be loaded and rendered in the UI in response to user interactions. The service also provides utilities for controlling the navigation link state, seamlessly integrating routing between applications, and loading async chunks on demand.
+
+NOTE: The Application service is only available client side.
+
+[source,typescript]
+----
+import { AppMountParameters, CoreSetup, Plugin, DEFAULT_APP_CATEGORIES } from 'kibana/public';
+
+export class MyPlugin implements Plugin {
+ public setup(core: CoreSetup) {
+ core.application.register({ // <1>
+ category: DEFAULT_APP_CATEGORIES.kibana,
+ id: 'my-plugin',
+ title: 'my plugin title',
+ euiIconType: '/path/to/some.svg',
+ order: 100,
+ appRoute: '/app/my_plugin', // <2>
+ async mount(params: AppMountParameters) { // <3>
+ // Load application bundle
+ const { renderApp } = await import('./application');
+ // Get start services
+ const [coreStart, depsStart] = await core.getStartServices(); // <4>
+ // Render the application
+ return renderApp(coreStart, depsStart, params); // <5>
+ },
+ });
+ }
+}
+----
+<1> See {kib-repo}blob/{branch}/docs/development/core/public/kibana-plugin-core-public.applicationsetup.register.md[application.register interface]
+<2> Application specific URL.
+<3> `mount` callback is invoked when a user navigates to the application-specific URL.
+<4> `core.getStartServices` method provides API available during `start` lifecycle.
+<5> `mount` method must return a function that will be called to unmount the application, which is called when Kibana unmounts the application. You can put a clean-up logic there.
+
+NOTE: you are free to use any UI library to render a plugin application in DOM.
+However, we recommend using React and https://elastic.github.io/eui[EUI] for all your basic UI
+components to create a consistent UI experience.
diff --git a/docs/developer/architecture/core/configuration-service.asciidoc b/docs/developer/architecture/core/configuration-service.asciidoc
new file mode 100644
index 0000000000000..031135c7b790f
--- /dev/null
+++ b/docs/developer/architecture/core/configuration-service.asciidoc
@@ -0,0 +1,149 @@
+[[configuration-service]]
+== Configuration service
+{kib} provides `ConfigService` for plugin developers that want to support
+adjustable runtime behavior for their plugins.
+Plugins can only read their own configuration values, it is not possible to access the configuration values from {kib} Core or other plugins directly.
+
+NOTE: The Configuration service is only available server side.
+
+[source,js]
+----
+// in Legacy platform
+const basePath = config.get('server.basePath');
+// in Kibana Platform 'basePath' belongs to the http service
+const basePath = core.http.basePath.get(request);
+----
+
+To have access to your plugin config, you _should_:
+
+* Declare plugin-specific `configPath` (will fallback to plugin `id`
+if not specified) in {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.pluginmanifest.md[`kibana.json`] manifest file.
+* Export schema validation for the config from plugin's main file. Schema is
+mandatory. If a plugin reads from the config without schema declaration,
+`ConfigService` will throw an error.
+
+*my_plugin/server/index.ts*
+[source,typescript]
+----
+import { schema, TypeOf } from '@kbn/config-schema';
+export const plugin = …
+export const config = {
+ schema: schema.object(…),
+};
+export type MyPluginConfigType = TypeOf;
+----
+
+* Read config value exposed via `PluginInitializerContext`:
+
+*my_plugin/server/index.ts*
+[source,typescript]
+----
+import type { PluginInitializerContext } from 'kibana/server';
+export class MyPlugin {
+ constructor(initializerContext: PluginInitializerContext) {
+ this.config$ = initializerContext.config.create();
+ // or if config is optional:
+ this.config$ = initializerContext.config.createIfExists();
+ }
+ ...
+}
+----
+
+If your plugin also has a client-side part, you can also expose
+configuration properties to it using the configuration `exposeToBrowser`
+allow-list property.
+
+*my_plugin/server/index.ts*
+[source,typescript]
+----
+import { schema, TypeOf } from '@kbn/config-schema';
+import type { PluginConfigDescriptor } from 'kibana/server';
+
+const configSchema = schema.object({
+ secret: schema.string({ defaultValue: 'Only on server' }),
+ uiProp: schema.string({ defaultValue: 'Accessible from client' }),
+});
+
+type ConfigType = TypeOf;
+
+export const config: PluginConfigDescriptor = {
+ exposeToBrowser: {
+ uiProp: true,
+ },
+ schema: configSchema,
+};
+----
+
+Configuration containing only the exposed properties will be then
+available on the client-side using the plugin's `initializerContext`:
+
+*my_plugin/public/index.ts*
+[source,typescript]
+----
+interface ClientConfigType {
+ uiProp: string;
+}
+
+export class MyPlugin implements Plugin {
+ constructor(private readonly initializerContext: PluginInitializerContext) {}
+
+ public async setup(core: CoreSetup, deps: {}) {
+ const config = this.initializerContext.config.get();
+ }
+----
+
+All plugins are considered enabled by default. If you want to disable
+your plugin, you could declare the `enabled` flag in the plugin
+config. This is a special {kib} Platform key. {kib} reads its
+value and won’t create a plugin instance if `enabled: false`.
+
+[source,js]
+----
+export const config = {
+ schema: schema.object({ enabled: schema.boolean({ defaultValue: false }) }),
+};
+----
+[[handle-plugin-configuration-deprecations]]
+=== Handle plugin configuration deprecations
+If your plugin has deprecated configuration keys, you can describe them using
+the `deprecations` config descriptor field.
+Deprecations are managed on a per-plugin basis, meaning you don’t need to specify
+the whole property path, but use the relative path from your plugin’s
+configuration root.
+
+*my_plugin/server/index.ts*
+[source,typescript]
+----
+import { schema, TypeOf } from '@kbn/config-schema';
+import type { PluginConfigDescriptor } from 'kibana/server';
+
+const configSchema = schema.object({
+ newProperty: schema.string({ defaultValue: 'Some string' }),
+});
+
+type ConfigType = TypeOf;
+
+export const config: PluginConfigDescriptor = {
+ schema: configSchema,
+ deprecations: ({ rename, unused }) => [
+ rename('oldProperty', 'newProperty'),
+ unused('someUnusedProperty'),
+ ],
+};
+----
+
+In some cases, accessing the whole configuration for deprecations is
+necessary. For these edge cases, `renameFromRoot` and `unusedFromRoot`
+are also accessible when declaring deprecations.
+
+*my_plugin/server/index.ts*
+[source,typescript]
+----
+export const config: PluginConfigDescriptor = {
+ schema: configSchema,
+ deprecations: ({ renameFromRoot, unusedFromRoot }) => [
+ renameFromRoot('oldplugin.property', 'myplugin.property'),
+ unusedFromRoot('oldplugin.deprecated'),
+ ],
+};
+----
diff --git a/docs/developer/architecture/core/elasticsearch-service.asciidoc b/docs/developer/architecture/core/elasticsearch-service.asciidoc
new file mode 100644
index 0000000000000..55632c0117938
--- /dev/null
+++ b/docs/developer/architecture/core/elasticsearch-service.asciidoc
@@ -0,0 +1,30 @@
+[[elasticsearch-service]]
+== Elasticsearch service
+`Elasticsearch service` provides an `elasticsearch.client` programmatic API to communicate with the Elasticsearch server HTTP API.
+
+NOTE: The Elasticsearch service is only available server side. You can use the {kib-repo}blob/{branch}/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md[Data plugin] APIs on the client side.
+
+`elasticsearch.client` interacts with Elasticsearch service on behalf of:
+
+- `kibana_system` user via `elasticsearch.client.asInternalUser.*` methods.
+- a current end-user via `elasticsearch.client.asCurrentUser.*` methods. In this case Elasticsearch client should be given the current user credentials.
+See <> and <>.
+
+{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.elasticsearchservicestart.md[Elasticsearch service API docs]
+
+[source,typescript]
+----
+import { CoreStart, Plugin } from 'kibana/public';
+
+export class MyPlugin implements Plugin {
+ public start(core: CoreStart) {
+ async function asyncTask() {
+ const result = await core.elasticsearch.client.asInternalUser.ping(…);
+ }
+ asyncTask();
+ }
+}
+----
+
+For advanced use-cases, such as a search, use {kib-repo}blob/{branch}/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md[Data plugin]
+
diff --git a/docs/developer/architecture/core/http-service.asciidoc b/docs/developer/architecture/core/http-service.asciidoc
new file mode 100644
index 0000000000000..45468d618dd09
--- /dev/null
+++ b/docs/developer/architecture/core/http-service.asciidoc
@@ -0,0 +1,67 @@
+[[http-service]]
+== HTTP service
+
+NOTE: The HTTP service is available both server and client side.
+
+=== Server side usage
+
+The server-side HttpService allows server-side plugins to register endpoints with built-in support for request validation. These endpoints may be used by client-side code or be exposed as a public API for users. Most plugins integrate directly with this service.
+
+The service allows plugins to:
+* extend the {kib} server with a custom HTTP API.
+* execute custom logic on an incoming request or server response.
+* implement a custom authentication and authorization strategy.
+
+See {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.httpservicesetup.md[HTTP service API docs]
+
+[source,typescript]
+----
+import { schema } from '@kbn/config-schema';
+import type { CoreSetup, Plugin } from 'kibana/server';
+
+export class MyPlugin implements Plugin {
+ public setup(core: CoreSetup) {
+ const router = core.http.createRouter();
+
+ const validate = {
+ params: schema.object({
+ id: schema.string(),
+ }),
+ };
+
+ router.get({
+ path: 'my_plugin/{id}',
+ validate
+ },
+ async (context, request, response) => {
+ const data = await findObject(request.params.id);
+ if (!data) return response.notFound();
+ return response.ok({
+ body: data,
+ headers: {
+ 'content-type': 'application/json'
+ }
+ });
+ });
+ }
+}
+----
+
+=== Client side usage
+
+The HTTP service is also offered on the client side and provides an API to communicate with the {kib} server via HTTP interface.
+The client-side HttpService is a preconfigured wrapper around `window.fetch` that includes some default behavior and automatically handles common errors (such as session expiration). The service should only be used for access to backend endpoints registered by the same plugin. Feel free to use another HTTP client library to request 3rd party services.
+
+[source,typescript]
+----
+import { CoreStart } from 'kibana/public';
+interface ResponseType {…};
+interface MyPluginData {…};
+async function fetchData(core: CoreStart) {
+ return await core.http.get(
+ '/api/my_plugin/',
+ { query: … },
+ );
+}
+----
+See {kib-repo}blob/{branch}/docs/development/core/public/kibana-plugin-core-public.httpsetup.md[for all available API].
diff --git a/docs/developer/architecture/core/index.asciidoc b/docs/developer/architecture/core/index.asciidoc
index 4a86c90cf8c10..53720a593d3f2 100644
--- a/docs/developer/architecture/core/index.asciidoc
+++ b/docs/developer/architecture/core/index.asciidoc
@@ -27,421 +27,18 @@ export class MyPlugin {
}
----
-=== Server-side
-[[configuration-service]]
-==== Configuration service
-{kib} provides `ConfigService` if a plugin developer may want to support
-adjustable runtime behavior for their plugins.
-Plugins can only read their own configuration values, it is not possible to access the configuration values from {kib} Core or other plugins directly.
+The services that core provides are:
-[source,js]
-----
-// in Legacy platform
-const basePath = config.get('server.basePath');
-// in Kibana Platform 'basePath' belongs to the http service
-const basePath = core.http.basePath.get(request);
-----
-
-To have access to your plugin config, you _should_:
+* <<application-service,Application service>>
+* <<configuration-service,Configuration service>>
+* <<elasticsearch-service,Elasticsearch service>>
+* <<http-service,HTTP service>>
+* <<logging-service,Logging service>>
+* <<saved-objects-service,Saved Objects service>>
+* <<ui-settings-service,UI settings service>>
-* Declare plugin-specific `configPath` (will fallback to plugin `id`
-if not specified) in {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.pluginmanifest.md[`kibana.json`] manifest file.
-* Export schema validation for the config from plugin's main file. Schema is
-mandatory. If a plugin reads from the config without schema declaration,
-`ConfigService` will throw an error.
-
-*my_plugin/server/index.ts*
-[source,typescript]
-----
-import { schema, TypeOf } from '@kbn/config-schema';
-export const plugin = …
-export const config = {
- schema: schema.object(…),
-};
-export type MyPluginConfigType = TypeOf;
-----
-
-* Read config value exposed via `PluginInitializerContext`.
-*my_plugin/server/index.ts*
-[source,typescript]
-----
-import type { PluginInitializerContext } from 'kibana/server';
-export class MyPlugin {
- constructor(initializerContext: PluginInitializerContext) {
- this.config$ = initializerContext.config.create();
- // or if config is optional:
- this.config$ = initializerContext.config.createIfExists();
- }
-----
-
-If your plugin also has a client-side part, you can also expose
-configuration properties to it using the configuration `exposeToBrowser`
-allow-list property.
-
-*my_plugin/server/index.ts*
-[source,typescript]
-----
-import { schema, TypeOf } from '@kbn/config-schema';
-import type { PluginConfigDescriptor } from 'kibana/server';
-const configSchema = schema.object({
- secret: schema.string({ defaultValue: 'Only on server' }),
- uiProp: schema.string({ defaultValue: 'Accessible from client' }),
-});
-type ConfigType = TypeOf;
-export const config: PluginConfigDescriptor = {
- exposeToBrowser: {
- uiProp: true,
- },
- schema: configSchema,
-};
-----
-
-Configuration containing only the exposed properties will be then
-available on the client-side using the plugin's `initializerContext`:
-
-*my_plugin/public/index.ts*
-[source,typescript]
-----
-interface ClientConfigType {
- uiProp: string;
-}
-
-export class MyPlugin implements Plugin {
- constructor(private readonly initializerContext: PluginInitializerContext) {}
-
- public async setup(core: CoreSetup, deps: {}) {
- const config = this.initializerContext.config.get();
- }
-----
-
-All plugins are considered enabled by default. If you want to disable
-your plugin, you could declare the `enabled` flag in the plugin
-config. This is a special {kib} Platform key. {kib} reads its
-value and won’t create a plugin instance if `enabled: false`.
-
-[source,js]
-----
-export const config = {
- schema: schema.object({ enabled: schema.boolean({ defaultValue: false }) }),
-};
-----
-[[handle-plugin-configuration-deprecations]]
-===== Handle plugin configuration deprecations
-If your plugin has deprecated configuration keys, you can describe them using
-the `deprecations` config descriptor field.
-Deprecations are managed on a per-plugin basis, meaning you don’t need to specify
-the whole property path, but use the relative path from your plugin’s
-configuration root.
-
-*my_plugin/server/index.ts*
-[source,typescript]
-----
-import { schema, TypeOf } from '@kbn/config-schema';
-import type { PluginConfigDescriptor } from 'kibana/server';
-
-const configSchema = schema.object({
- newProperty: schema.string({ defaultValue: 'Some string' }),
-});
-
-type ConfigType = TypeOf;
-
-export const config: PluginConfigDescriptor = {
- schema: configSchema,
- deprecations: ({ rename, unused }) => [
- rename('oldProperty', 'newProperty'),
- unused('someUnusedProperty'),
- ],
-};
-----
-
-In some cases, accessing the whole configuration for deprecations is
-necessary. For these edge cases, `renameFromRoot` and `unusedFromRoot`
-are also accessible when declaring deprecations.
-
-*my_plugin/server/index.ts*
-[source,typescript]
-----
-export const config: PluginConfigDescriptor = {
- schema: configSchema,
- deprecations: ({ renameFromRoot, unusedFromRoot }) => [
- renameFromRoot('oldplugin.property', 'myplugin.property'),
- unusedFromRoot('oldplugin.deprecated'),
- ],
-};
-----
-==== Logging service
-Allows a plugin to provide status and diagnostic information.
-For detailed instructions see the {kib-repo}blob/{branch}/src/core/server/logging/README.md[logging service documentation].
-
-[source,typescript]
-----
-import type { PluginInitializerContext, CoreSetup, Plugin, Logger } from 'kibana/server';
-
-export class MyPlugin implements Plugin {
- private readonly logger: Logger;
-
- constructor(initializerContext: PluginInitializerContext) {
- this.logger = initializerContext.logger.get();
- }
-
- public setup(core: CoreSetup) {
- try {
- this.logger.debug('doing something...');
- // …
- } catch (e) {
- this.logger.error('failed doing something...');
- }
- }
-}
-----
-
-==== Elasticsearch service
-`Elasticsearch service` provides `elasticsearch.client` program API to communicate with Elasticsearch server REST API.
-`elasticsearch.client` interacts with Elasticsearch service on behalf of:
-
-- `kibana_system` user via `elasticsearch.client.asInternalUser.*` methods.
-- a current end-user via `elasticsearch.client.asCurrentUser.*` methods. In this case Elasticsearch client should be given the current user credentials.
-See <> and <>.
-
-{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.elasticsearchservicestart.md[Elasticsearch service API docs]
-
-[source,typescript]
-----
-import { CoreStart, Plugin } from 'kibana/public';
-
-export class MyPlugin implements Plugin {
- public start(core: CoreStart) {
- async function asyncTask() {
- const result = await core.elasticsearch.client.asInternalUser.ping(…);
- }
- asyncTask();
- }
-}
-----
-For advanced use-cases, such as a search, use {kib-repo}blob/{branch}/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md[Data plugin]
-include::saved-objects-service.asciidoc[leveloffset=+1]
-
-==== HTTP service
-Allows plugins:
-
-* to extend the {kib} server with custom REST API.
-* to execute custom logic on an incoming request or server response.
-* implement custom authentication and authorization strategy.
-
-See {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.httpservicesetup.md[HTTP service API docs]
-
-[source,typescript]
-----
-import { schema } from '@kbn/config-schema';
-import type { CoreSetup, Plugin } from 'kibana/server';
-
-export class MyPlugin implements Plugin {
- public setup(core: CoreSetup) {
- const router = core.http.createRouter();
-
- const validate = {
- params: schema.object({
- id: schema.string(),
- }),
- };
-
- router.get({
- path: 'my_plugin/{id}',
- validate
- },
- async (context, request, response) => {
- const data = await findObject(request.params.id);
- if (!data) return response.notFound();
- return response.ok({
- body: data,
- headers: {
- 'content-type': 'application/json'
- }
- });
- });
- }
-}
-----
-
-==== UI settings service
-The program interface to <>.
-It makes it possible for Kibana plugins to extend Kibana UI Settings Management with custom settings.
-
-See:
-
-- {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.uisettingsservicesetup.register.md[UI settings service Setup API docs]
-- {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.uisettingsservicestart.register.md[UI settings service Start API docs]
-
-[source,typescript]
-----
-import { schema } from '@kbn/config-schema';
-import type { CoreSetup,Plugin } from 'kibana/server';
-
-export class MyPlugin implements Plugin {
- public setup(core: CoreSetup) {
- core.uiSettings.register({
- custom: {
- value: '42',
- schema: schema.string(),
- },
- });
- const router = core.http.createRouter();
- router.get({
- path: 'my_plugin/{id}',
- validate: …,
- },
- async (context, request, response) => {
- const customSetting = await context.uiSettings.client.get('custom');
- …
- });
- }
-}
-
-----
-
-=== Client-side
-==== Application service
-Kibana has migrated to be a Single Page Application. Plugins should use `Application service` API to instruct Kibana what an application should be loaded & rendered in the UI in response to user interactions.
-[source,typescript]
-----
-import { AppMountParameters, CoreSetup, Plugin, DEFAULT_APP_CATEGORIES } from 'kibana/public';
-
-export class MyPlugin implements Plugin {
- public setup(core: CoreSetup) {
- core.application.register({ // <1>
- category: DEFAULT_APP_CATEGORIES.kibana,
- id: 'my-plugin',
- title: 'my plugin title',
- euiIconType: '/path/to/some.svg',
- order: 100,
- appRoute: '/app/my_plugin', // <2>
- async mount(params: AppMountParameters) { // <3>
- // Load application bundle
- const { renderApp } = await import('./application');
- // Get start services
- const [coreStart, depsStart] = await core.getStartServices(); // <4>
- // Render the application
- return renderApp(coreStart, depsStart, params); // <5>
- },
- });
- }
-}
-----
-<1> See {kib-repo}blob/{branch}/docs/development/core/public/kibana-plugin-core-public.applicationsetup.register.md[application.register interface]
-<2> Application specific URL.
-<3> `mount` callback is invoked when a user navigates to the application-specific URL.
-<4> `core.getStartServices` method provides API available during `start` lifecycle.
-<5> `mount` method must return a function that will be called to unmount the application.
-
-NOTE:: you are free to use any UI library to render a plugin application in DOM.
-However, we recommend using React and https://elastic.github.io/eui[EUI] for all your basic UI
-components to create a consistent UI experience.
-
-==== HTTP service
-Provides API to communicate with the {kib} server. Feel free to use another HTTP client library to request 3rd party services.
-
-[source,typescript]
-----
-import { CoreStart } from 'kibana/public';
-interface ResponseType {…};
-async function fetchData(core: CoreStart) {
- return await core.http.get<>(
- '/api/my_plugin/',
- { query: … },
- );
-}
-----
-See {kib-repo}blob/{branch}/docs/development/core/public/kibana-plugin-core-public.httpsetup.md[for all available API].
-
-==== Elasticsearch service
-Not available in the browser. Use {kib-repo}blob/{branch}/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md[Data plugin] instead.
-
-== Patterns
-[[scoped-services]]
-=== Scoped services
-Whenever Kibana needs to get access to data saved in elasticsearch, it
-should perform a check whether an end-user has access to the data. In
-the legacy platform, Kibana requires binding elasticsearch related API
-with an incoming request to access elasticsearch service on behalf of a
-user.
-
-[source,js]
-----
-async function handler(req, res) {
- const dataCluster = server.plugins.elasticsearch.getCluster('data');
- const data = await dataCluster.callWithRequest(req, 'ping');
-}
-----
-
-The Kibana Platform introduced a handler interface on the server-side to perform that association
-internally. Core services, that require impersonation with an incoming
-request, are exposed via `context` argument of
-{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.requesthandler.md[the
-request handler interface.] The above example looks in the Kibana Platform
-as
-
-[source,js]
-----
-async function handler(context, req, res) {
- const data = await context.core.elasticsearch.client.asCurrentUser('ping');
-}
-----
-
-The
-{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.requesthandlercontext.md[request
-handler context] exposed the next scoped *core* services:
-
-[width="100%",cols="30%,70%",options="header",]
-|===
-|Legacy Platform |Kibana Platform
-|`request.getSavedObjectsClient`
-|{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.md[`context.savedObjects.client`]
-
-|`server.plugins.elasticsearch.getCluster('admin')`
-|{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.iscopedclusterclient.md[`context.elasticsearch.client`]
-
-|`server.plugins.elasticsearch.getCluster('data')`
-|{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.iscopedclusterclient.md[`context.elasticsearch.client`]
-
-|`request.getUiSettingsService`
-|{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.iuisettingsclient.md[`context.uiSettings.client`]
-|===
-
-==== Declare a custom scoped service
-
-Plugins can extend the handler context with a custom API that will be
-available to the plugin itself and all dependent plugins. For example,
-the plugin creates a custom elasticsearch client and wants to use it via
-the request handler context:
-
-[source,typescript]
-----
-import type { CoreSetup, RequestHandlerContext, IScopedClusterClient } from 'kibana/server';
-
-interface MyRequestHandlerContext extends RequestHandlerContext {
- myPlugin: {
- client: IScopedClusterClient;
- };
-}
-
-class MyPlugin {
- setup(core: CoreSetup) {
- const client = core.elasticsearch.createClient('myClient');
- core.http.registerRouteHandlerContext('myPlugin', (context, req, res) => {
- return { client: client.asScoped(req) };
- });
- const router = core.http.createRouter();
- router.get(
- { path: '/api/my-plugin/', validate: … },
- async (context, req, res) => {
- // context type is inferred as MyPluginContext
- const data = await context.myPlugin.client.asCurrentUser('endpoint');
- }
- );
- }
-----
diff --git a/docs/developer/architecture/core/logging-configuration-migration.asciidoc b/docs/developer/architecture/core/logging-configuration-migration.asciidoc
new file mode 100644
index 0000000000000..19f10a881d5e8
--- /dev/null
+++ b/docs/developer/architecture/core/logging-configuration-migration.asciidoc
@@ -0,0 +1,84 @@
+[[logging-configuration-migration]]
+== Logging configuration migration
+
+Compatibility with the legacy logging system is assured until the end of the `v7` version.
+All log messages handled by the `root` context are forwarded to the legacy logging service. If you re-write
+the root appenders, make sure that they contain the `default` appender to provide backward compatibility.
+
+NOTE: When you switch to the new logging configuration, you will start seeing duplicate log entries in both formats.
+These will be removed when the `default` appender is no longer required. If you define an appender for a logger,
+the log messages aren't handled by the `root` logger anymore and are not forwarded to the legacy logging service.
+
+[[logging-pattern-format-old-and-new-example]]
+[options="header"]
+|===
+
+| Parameter | Platform log record in **pattern** format | Legacy Platform log record **text** format
+
+| @timestamp | ISO8601_TZ `2012-01-31T23:33:22.011-05:00` | Absolute `23:33:22.011`
+
+| logger | `parent.child` | `['parent', 'child']`
+
+| level | `DEBUG` | `['debug']`
+
+| meta | stringified JSON object `{"to": "v8"}`| N/A
+
+| pid | can be configured as `%pid` | N/A
+
+|===
+
+[[logging-json-format-old-and-new-example]]
+[options="header"]
+|===
+
+| Parameter | Platform log record in **json** format | Legacy Platform log record **json** format
+
+| @timestamp | ISO8601_TZ `2012-01-31T23:33:22.011-05:00` | ISO8601 `2012-01-31T23:33:22.011Z`
+
+| logger | `log.logger: parent.child` | `tags: ['parent', 'child']`
+
+| level | `log.level: DEBUG` | `tags: ['debug']`
+
+| meta | merged in log record `{... "to": "v8"}` | merged in log record `{... "to": "v8"}`
+
+| pid | `process.pid: 12345` | `pid: 12345`
+
+| type | N/A | `type: log`
+
+| error | `{ message, name, stack }` | `{ message, name, stack, code, signal }`
+
+|===
+
+[[logging-cli-migration]]
+=== Logging configuration via CLI
+
+As is the case for any of {kib}'s config settings, you can specify your logging configuration via the CLI. For convenience, the `--verbose` and `--silent` flags exist as shortcuts and will continue to be supported beyond v7.
+
+If you wish to override these flags, you can always do so by passing your preferred logging configuration directly to the CLI. For example, with the following configuration:
+
+[source,yaml]
+----
+logging:
+ appenders:
+ custom:
+ type: console
+ layout:
+ type: pattern
+ pattern: "[%date][%level] %message"
+----
+
+you can override the flags with:
+
+[options="header"]
+|===
+
+| legacy logging | {kib} Platform logging | cli shortcuts
+
+|--verbose| --logging.root.level=debug --logging.root.appenders[0]=default --logging.root.appenders[1]=custom | --verbose
+
+|--quiet| --logging.root.level=error --logging.root.appenders[0]=default --logging.root.appenders[1]=custom | not supported
+
+|--silent| --logging.root.level=off | --silent
+|===
+
+NOTE: To preserve backwards compatibility, you are required to pass the root `default` appender until the legacy logging system is removed in `v8.0`.
diff --git a/docs/developer/architecture/core/logging-service.asciidoc b/docs/developer/architecture/core/logging-service.asciidoc
new file mode 100644
index 0000000000000..7dc2a4ca1f4ce
--- /dev/null
+++ b/docs/developer/architecture/core/logging-service.asciidoc
@@ -0,0 +1,545 @@
+[[logging-service]]
+== Logging service
+Allows a plugin to provide status and diagnostic information.
+
+NOTE: The Logging service is only available server side.
+
+[source,typescript]
+----
+import type { PluginInitializerContext, CoreSetup, Plugin, Logger } from 'kibana/server';
+
+export class MyPlugin implements Plugin {
+ private readonly logger: Logger;
+
+ constructor(initializerContext: PluginInitializerContext) {
+ this.logger = initializerContext.logger.get();
+ }
+
+ public setup(core: CoreSetup) {
+ try {
+ this.logger.debug('doing something...');
+ // …
+ } catch (e) {
+ this.logger.error('failed doing something...');
+ }
+ }
+}
+----
+
+The way logging works in {kib} is inspired by the `log4j 2` logging framework used by {ref-bare}/current/logging.html[Elasticsearch].
+The main idea is to have consistent logging behavior (configuration, log format etc.) across the entire Elastic Stack where possible.
+
+=== Loggers, Appenders and Layouts
+
+The {kib} logging system has three main components: _loggers_, _appenders_ and _layouts_. These components allow us to log
+messages according to message type and level, to control how these messages are formatted and where the final logs
+will be displayed or stored.
+
+__Loggers__ define what logging settings should be applied to a particular logger.
+
+__<<logging-appenders,Appenders>>__ define where log messages are displayed (eg. stdout or console) and stored (eg. file on the disk).
+
+__<<logging-layouts,Layouts>>__ define how log messages are formatted and what type of information they include.
+
+[[log-level]]
+=== Log level
+
+Currently we support the following log levels: _all_, _fatal_, _error_, _warn_, _info_, _debug_, _trace_, _off_.
+
+Levels are ordered, so _all_ > _fatal_ > _error_ > _warn_ > _info_ > _debug_ > _trace_ > _off_.
+
+A log record is being logged by the logger if its level is higher than or equal to the level of its logger. Otherwise,
+the log record is ignored.
+
+The _all_ and _off_ levels can be used only in configuration and are just handy shortcuts that allow you to log every
+log record or disable logging entirely or for a specific logger. These levels are also configurable as <<logging-cli-migration,cli arguments>>.
+
+[[logging-layouts]]
+=== Layouts
+
+Every appender should know exactly how to format log messages before they are written to the console or file on the disk.
+This behavior is controlled by the layouts and configured through `appender.layout` configuration property for every
+custom appender. Currently we don't define any default layout for the
+custom appenders, so one should always make the choice explicitly.
+
+There are two types of layout supported at the moment: <<pattern-layout,`pattern`>> and <<json-layout,`json`>>.
+
+[[pattern-layout]]
+==== Pattern layout
+
+With `pattern` layout it's possible to define a string pattern with special placeholders `%conversion_pattern` that will be replaced with data from the actual log message. By default the following pattern is used: `[%date][%level][%logger] %message`.
+
+NOTE: The `pattern` layout uses a sub-set of https://logging.apache.org/log4j/2.x/manual/layouts.html#PatternLayout[log4j 2 pattern syntax] and **doesn't implement** all `log4j 2` capabilities.
+
+The conversions that are provided out of the box are:
+
+**level**
+Outputs the <<log-level,log level>> of the logging event.
+Example of `%level` output: `TRACE`, `DEBUG`, `INFO`.
+
+**logger**
+Outputs the name of the logger that published the logging event.
+Example of `%logger` output: `server`, `server.http`, `server.http.kibana`.
+
+**message**
+Outputs the application supplied message associated with the logging event.
+
+**meta**
+Outputs the entries of `meta` object data in **json** format, if one is present in the event.
+Example of `%meta` output:
+[source,bash]
+----
+// Meta{from: 'v7', to: 'v8'}
+'{"from":"v7","to":"v8"}'
+// Meta empty object
+'{}'
+// no Meta provided
+''
+----
+
+[[date-format]]
+**date**
+Outputs the date of the logging event. The date conversion specifier may be followed by a set of braces containing a name of predefined date format and canonical timezone name.
+Timezone name is expected to be one from https://en.wikipedia.org/wiki/List_of_tz_database_time_zones[TZ database name].
+Timezone defaults to the host timezone when not explicitly specified.
+Example of `%date` output:
+
+[[date-conversion-pattern-examples]]
+[options="header"]
+|===
+
+| Conversion pattern | Example
+
+| `%date`
+| `2012-02-01T14:30:22.011Z` uses `ISO8601` format by default
+
+| `%date{ISO8601}`
+| `2012-02-01T14:30:22.011Z`
+
+| `%date{ISO8601_TZ}`
+| `2012-02-01T09:30:22.011-05:00` `ISO8601` with timezone
+
+| `%date{ISO8601_TZ}{America/Los_Angeles}`
+| `2012-02-01T06:30:22.011-08:00`
+
+| `%date{ABSOLUTE}`
+| `09:30:22.011`
+
+| `%date{ABSOLUTE}{America/Los_Angeles}`
+| `06:30:22.011`
+
+| `%date{UNIX}`
+| `1328106622`
+
+| `%date{UNIX_MILLIS}`
+| `1328106622011`
+
+|===
+
+**pid**
+Outputs the process ID.
+
+The pattern layout also offers a `highlight` option that allows you to highlight
+some parts of the log message with different colors. Highlighting is quite handy if log messages are forwarded
+to a terminal with color support.
+
+[[json-layout]]
+==== JSON layout
+With `json` layout log messages will be formatted as JSON strings in https://www.elastic.co/guide/en/ecs/current/ecs-reference.html[ECS format] that includes a timestamp, log level, logger, message text and any other metadata that may be associated with the log message itself.
+
+[[logging-appenders]]
+=== Appenders
+
+[[rolling-file-appender]]
+==== Rolling File Appender
+
+Similar to Log4j's `RollingFileAppender`, this appender will log into a file, and rotate it following a rolling
+strategy when the configured policy triggers.
+
+===== Triggering Policies
+
+The triggering policy determines when a rollover should occur.
+
+There are currently two policies supported: `size-limit` and `time-interval`.
+
+[[size-limit-triggering-policy]]
+**SizeLimitTriggeringPolicy**
+
+This policy will rotate the file when it reaches a predetermined size.
+
+[source,yaml]
+----
+logging:
+ appenders:
+ rolling-file:
+ type: rolling-file
+ fileName: /var/logs/kibana.log
+ policy:
+ type: size-limit
+ size: 50mb
+ strategy:
+ //...
+ layout:
+ type: pattern
+----
+
+The options are:
+
+- `size`
+
+The maximum size the log file should reach before a rollover should be performed. The default value is `100mb`
+
+[[time-interval-triggering-policy]]
+**TimeIntervalTriggeringPolicy**
+
+This policy will rotate the file every given interval of time.
+
+[source,yaml]
+----
+logging:
+ appenders:
+ rolling-file:
+ type: rolling-file
+ fileName: /var/logs/kibana.log
+ policy:
+ type: time-interval
+ interval: 10s
+ modulate: true
+ strategy:
+ //...
+ layout:
+ type: pattern
+----
+
+The options are:
+
+- `interval`
+
+How often a rollover should occur. The default value is `24h`
+
+- `modulate`
+
+Whether the interval should be adjusted to cause the next rollover to occur on the interval boundary.
+
+For example, if modulate is true and the interval is `4h`, if the current hour is 3 am then the first rollover will occur at 4 am
+and the next ones will occur at 8 am, noon, 4 pm, etc. The default value is `true`.
+
+===== Rolling strategies
+
+The rolling strategy determines how the rollover should occur: both the naming of the rolled files,
+and their retention policy.
+
+There is currently one strategy supported: `numeric`.
+
+**NumericRollingStrategy**
+
+This strategy will suffix the file with a given pattern when rolling,
+and will retain a fixed number of rolled files.
+
+[source,yaml]
+----
+logging:
+ appenders:
+ rolling-file:
+ type: rolling-file
+ fileName: /var/logs/kibana.log
+ policy:
+ // ...
+ strategy:
+ type: numeric
+ pattern: '-%i'
+ max: 2
+ layout:
+ type: pattern
+----
+
+For example, with this configuration:
+
+- During the first rollover kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts
+ being written to.
+- During the second rollover kibana-1.log is renamed to kibana-2.log and kibana.log is renamed to kibana-1.log.
+ A new kibana.log file is created and starts being written to.
+- During the third and subsequent rollovers, kibana-2.log is deleted, kibana-1.log is renamed to kibana-2.log and
+ kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts being written to.
+
+The options are:
+
+- `pattern`
+
+The suffix to append to the file path when rolling. Must include `%i`, as this is the value
+that will be converted to the file index.
+
+For example, with `fileName: /var/logs/kibana.log` and `pattern: '-%i'`, the rolling files created
+will be `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log`, and so on. The default value is `-%i`
+
+- `max`
+
+The maximum number of files to keep. Once this number is reached, oldest files will be deleted. The default value is `7`
+
+==== Rewrite Appender
+
+WARNING: This appender is currently considered experimental and is not intended
+for public consumption. The API is subject to change at any time.
+
+Similar to log4j's `RewriteAppender`, this appender serves as a sort of middleware,
+modifying the provided log events before passing them along to another
+appender.
+
+[source,yaml]
+----
+logging:
+ appenders:
+ my-rewrite-appender:
+ type: rewrite
+ appenders: [console, file] # name of "destination" appender(s)
+ policy:
+ # ...
+----
+
+The most common use case for the `RewriteAppender` is when you want to
+filter or censor sensitive data that may be contained in a log entry.
+In fact, with a default configuration, {kib} will automatically redact
+any `authorization`, `cookie`, or `set-cookie` headers when logging http
+requests & responses.
+
+To configure additional rewrite rules, you'll need to specify a <<rewrite-policies,rewrite policy>>.
+
+[[rewrite-policies]]
+===== Rewrite Policies
+
+Rewrite policies exist to indicate which parts of a log record can be
+modified within the rewrite appender.
+
+**Meta**
+
+The `meta` rewrite policy can read and modify any data contained in the
+`LogMeta` before passing it along to a destination appender.
+
+Meta policies must specify one of two modes, which indicate which action
+to perform on the configured properties:
+
+- `update` updates an existing property at the provided `path`.
+- `remove` removes an existing property at the provided `path`.
+
+The `properties` are listed as a `path` and `value` pair, where `path` is
+the dot-delimited path to the target property in the `LogMeta` object, and
+`value` is the value to add or update in that target property. When using
+the `remove` mode, a `value` is not necessary.
+
+Here's an example of how you would replace any `cookie` header values with `[REDACTED]`:
+
+[source,yaml]
+----
+logging:
+ appenders:
+ my-rewrite-appender:
+ type: rewrite
+ appenders: [console]
+ policy:
+ type: meta # indicates that we want to rewrite the LogMeta
+ mode: update # will update an existing property only
+ properties:
+ - path: "http.request.headers.cookie" # path to property
+ value: "[REDACTED]" # value to replace at path
+----
+
+Rewrite appenders can even be passed to other rewrite appenders to apply
+multiple filter policies/modes, as long as it doesn't create a circular
+reference. Each rewrite appender is applied sequentially (one after the other).
+
+[source,yaml]
+----
+logging:
+ appenders:
+ remove-request-headers:
+ type: rewrite
+ appenders: [censor-response-headers] # redirect to the next rewrite appender
+ policy:
+ type: meta
+ mode: remove
+ properties:
+ - path: "http.request.headers" # remove all request headers
+ censor-response-headers:
+ type: rewrite
+ appenders: [console] # output to console
+ policy:
+ type: meta
+ mode: update
+ properties:
+ - path: "http.response.headers.set-cookie"
+ value: "[REDACTED]"
+----
+
+===== Complete Example For Rewrite Appender
+
+[source,yaml]
+----
+logging:
+ appenders:
+ custom_console:
+ type: console
+ layout:
+ type: pattern
+ highlight: true
+ pattern: "[%date][%level][%logger] %message %meta"
+ file:
+ type: file
+ fileName: ./kibana.log
+ layout:
+ type: json
+ censor:
+ type: rewrite
+ appenders: [custom_console, file]
+ policy:
+ type: meta
+ mode: update
+ properties:
+ - path: "http.request.headers.cookie"
+ value: "[REDACTED]"
+ loggers:
+ - name: http.server.response
+ appenders: [censor] # pass these logs to our rewrite appender
+ level: debug
+----
+
+[[logger-hierarchy]]
+=== Logger hierarchy
+
+Every logger has a unique name that follows a hierarchical naming rule. The logger is considered to be an
+ancestor of another logger if its name followed by a `.` is a prefix of the descendant logger. For example, a logger
+named `a.b` is an ancestor of logger `a.b.c`. All top-level loggers are descendants of a special `root` logger at the top of the logger hierarchy. The `root` logger always exists and
+is fully configured.
+
+You can configure _<<log-level,log levels>>_ and _appenders_ for a specific logger. If a logger only has a _log level_ configured, then the _appenders_ configuration applied to the logger is inherited from the ancestor logger.
+
+NOTE: In the current implementation we __don't support__ so called _appender additivity_ when log messages are forwarded to _every_ distinct appender within the
+ancestor chain including `root`. That means that log messages are only forwarded to appenders that are configured for a particular logger. If a logger doesn't have any appenders configured, the configuration of that particular logger will be inherited from its closest ancestor.
+
+[[dedicated-loggers]]
+==== Dedicated loggers
+
+**Root**
+
+The `root` logger has a dedicated configuration node since this logger is special and should always exist. By default `root` is configured with `info` level and `default` appender that is also always available. This is the configuration that all custom loggers will use unless they're re-configured explicitly.
+
+For example to see _all_ log messages that fall back on the `root` logger configuration, just add one line to the configuration:
+
+[source,yaml]
+----
+logging.root.level: all
+----
+
+Or disable logging entirely with `off`:
+
+[source,yaml]
+----
+logging.root.level: off
+----
+
+**Metrics Logs**
+
+The `metrics.ops` logger is configured with `debug` level and will automatically output sample system and process information at a regular interval.
+The metrics that are logged are a subset of the data collected and are formatted in the log message as follows:
+
+[options="header"]
+|===
+
+| Ops formatted log property | Location in metrics service | Log units
+
+| memory | process.memory.heap.used_in_bytes | http://numeraljs.com/#format[depends on the value], typically MB or GB
+
+| uptime | process.uptime_in_millis | HH:mm:ss
+
+| load | os.load | [ "load for the last 1 min" "load for the last 5 min" "load for the last 15 min"]
+
+| delay | process.event_loop_delay | ms
+|===
+
+The log interval is the same as the interval at which system and process information is refreshed and is configurable under `ops.interval`:
+
+[source,yaml]
+----
+ops.interval: 5000
+----
+
+The interval defaults to 5000ms and has a minimum allowed value of 100ms.
+
+[[request-response-logger]]
+**Request and Response Logs**
+
+The `http.server.response` logger is configured with `debug` level and will automatically output
+data about http requests and responses occurring on the {kib} server.
+The message contains some high-level information, and the corresponding log meta contains the following:
+
+[options="header"]
+|===
+
+| Meta property | Description | Format
+
+| client.ip | IP address of the requesting client | ip
+
+| http.request.method | http verb for the request (uppercase) | string
+
+| http.request.mime_type | (optional) mime as specified in the headers | string
+
+| http.request.referrer | (optional) referrer | string
+
+| http.request.headers | request headers | object
+
+| http.response.body.bytes | (optional) Calculated response payload size in bytes | number
+
+| http.response.status_code | status code returned | number
+
+| http.response.headers | response headers | object
+
+| http.response.responseTime | (optional) Calculated response time in ms | number
+
+| url.path | request path | string
+
+| url.query | (optional) request query string | string
+
+| user_agent.original | raw user-agent string provided in request headers | string
+
+|===
+
+=== Usage
+
+Usage is very straightforward: get a logger for a specific context and use it to log messages with
+different log levels.
+
+[source,typescript]
+----
+const logger = kibana.logger.get('server');
+
+logger.trace('Message with `trace` log level.');
+logger.debug('Message with `debug` log level.');
+logger.info('Message with `info` log level.');
+logger.warn('Message with `warn` log level.');
+logger.error('Message with `error` log level.');
+logger.fatal('Message with `fatal` log level.');
+
+const loggerWithNestedContext = kibana.logger.get('server', 'http');
+loggerWithNestedContext.trace('Message with `trace` log level.');
+loggerWithNestedContext.debug('Message with `debug` log level.');
+----
+
+And assuming logger for `server` name with `console` appender and `trace` level was used, console output will look like this:
+[source,bash]
+----
+[2017-07-25T11:54:41.639-07:00][TRACE][server] Message with `trace` log level.
+[2017-07-25T11:54:41.639-07:00][DEBUG][server] Message with `debug` log level.
+[2017-07-25T11:54:41.639-07:00][INFO ][server] Message with `info` log level.
+[2017-07-25T11:54:41.639-07:00][WARN ][server] Message with `warn` log level.
+[2017-07-25T11:54:41.639-07:00][ERROR][server] Message with `error` log level.
+[2017-07-25T11:54:41.639-07:00][FATAL][server] Message with `fatal` log level.
+
+[2017-07-25T11:54:41.639-07:00][TRACE][server.http] Message with `trace` log level.
+[2017-07-25T11:54:41.639-07:00][DEBUG][server.http] Message with `debug` log level.
+----
+
+The log will be less verbose with `warn` level for the `server` logger:
+[source,bash]
+----
+[2017-07-25T11:54:41.639-07:00][WARN ][server] Message with `warn` log level.
+[2017-07-25T11:54:41.639-07:00][ERROR][server] Message with `error` log level.
+[2017-07-25T11:54:41.639-07:00][FATAL][server] Message with `fatal` log level.
+----
diff --git a/docs/developer/architecture/core/patterns-scoped-services.asciidoc b/docs/developer/architecture/core/patterns-scoped-services.asciidoc
new file mode 100644
index 0000000000000..d4618684fc7e4
--- /dev/null
+++ b/docs/developer/architecture/core/patterns-scoped-services.asciidoc
@@ -0,0 +1,61 @@
+[[patterns]]
+== Patterns
+[[scoped-services]]
+=== Scoped services
+Whenever Kibana needs to get access to data saved in Elasticsearch, it
+should perform a check whether an end-user has access to the data.
+The Kibana Platform introduced a handler interface on the server-side to perform that association
+internally. Core services that require impersonation with an incoming
+request are exposed via the `context` argument of
+{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.requesthandler.md[the
+request handler interface.]
+as
+
+[source,js]
+----
+async function handler(context, req, res) {
+ const data = await context.core.elasticsearch.client.asCurrentUser('ping');
+}
+----
+
+The
+{kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.requesthandlercontext.md[request
+handler context] exposes the following scoped *core* services:
+
+* {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.savedobjectsclient.md[`context.savedObjects.client`]
+* {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.iscopedclusterclient.md[`context.elasticsearch.client`]
+* {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.iuisettingsclient.md[`context.uiSettings.client`]
+
+==== Declare a custom scoped service
+
+Plugins can extend the handler context with a custom API that will be
+available to the plugin itself and all dependent plugins. For example,
+the plugin creates a custom Elasticsearch client and wants to use it via
+the request handler context:
+
+[source,typescript]
+----
+import type { CoreSetup, RequestHandlerContext, IScopedClusterClient } from 'kibana/server';
+
+interface MyRequestHandlerContext extends RequestHandlerContext {
+ myPlugin: {
+ client: IScopedClusterClient;
+ };
+}
+
+class MyPlugin {
+ setup(core: CoreSetup) {
+ const client = core.elasticsearch.createClient('myClient');
+ core.http.registerRouteHandlerContext('myPlugin', (context, req, res) => {
+ return { client: client.asScoped(req) };
+ });
+ const router = core.http.createRouter();
+ router.get(
+ { path: '/api/my-plugin/', validate: … },
+ async (context, req, res) => {
+      // context type is inferred as MyRequestHandlerContext
+ const data = await context.myPlugin.client.asCurrentUser('endpoint');
+ }
+ );
+ }
+----
diff --git a/docs/developer/architecture/core/saved-objects-service.asciidoc b/docs/developer/architecture/core/saved-objects-service.asciidoc
index 047c3dffa6358..fa7fc4233259d 100644
--- a/docs/developer/architecture/core/saved-objects-service.asciidoc
+++ b/docs/developer/architecture/core/saved-objects-service.asciidoc
@@ -1,6 +1,8 @@
[[saved-objects-service]]
== Saved Objects service
+NOTE: The Saved Objects service is available both server and client side.
+
`Saved Objects service` allows {kib} plugins to use {es} like a primary
database. Think of it as an Object Document Mapper for {es}. Once a
plugin has registered one or more Saved Object types, the Saved Objects client
@@ -28,7 +30,9 @@ spaces.
This document contains developer guidelines and best-practices for plugins
wanting to use Saved Objects.
-=== Registering a Saved Object type
+=== Server side usage
+
+==== Registering a Saved Object type
Saved object type definitions should be defined in their own `my_plugin/server/saved_objects` directory.
The folder should contain a file per type, named after the snake_case name of the type, and an `index.ts` file exporting all the types.
@@ -83,7 +87,7 @@ export class MyPlugin implements Plugin {
}
----
-=== Mappings
+==== Mappings
Each Saved Object type can define it's own {es} field mappings.
Because multiple Saved Object types can share the same index, mappings defined
by a type will be nested under a top-level field that matches the type name.
@@ -149,59 +153,6 @@ should carefully consider the fields they add to the mappings. Similarly,
Saved Object types should never use `dynamic: true` as this can cause an
arbitrary amount of fields to be added to the `.kibana` index.
-=== References
-When a Saved Object declares `references` to other Saved Objects, the
-Saved Objects Export API will automatically export the target object with all
-of it's references. This makes it easy for users to export the entire
-reference graph of an object.
-
-If a Saved Object can't be used on it's own, that is, it needs other objects
-to exist for a feature to function correctly, that Saved Object should declare
-references to all the objects it requires. For example, a `dashboard`
-object might have panels for several `visualization` objects. When these
-`visualization` objects don't exist, the dashboard cannot be rendered
-correctly. The `dashboard` object should declare references to all it's
-visualizations.
-
-However, `visualization` objects can continue to be rendered or embedded into
-other dashboards even if the `dashboard` it was originally embedded into
-doesn't exist. As a result, `visualization` objects should not declare
-references to `dashboard` objects.
-
-For each referenced object, an `id`, `type` and `name` are added to the
-`references` array:
-
-[source, typescript]
-----
-router.get(
- { path: '/some-path', validate: false },
- async (context, req, res) => {
- const object = await context.core.savedObjects.client.create(
- 'dashboard',
- {
- title: 'my dashboard',
- panels: [
- { visualization: 'vis1' }, // <1>
- ],
- indexPattern: 'indexPattern1'
- },
- { references: [
- { id: '...', type: 'visualization', name: 'vis1' },
- { id: '...', type: 'index_pattern', name: 'indexPattern1' },
- ]
- }
- )
- ...
- }
-);
-----
-<1> Note how `dashboard.panels[0].visualization` stores the `name` property of
-the reference (not the `id` directly) to be able to uniquely identify this
-reference. This guarantees that the id the reference points to always remains
-up to date. If a visualization `id` was directly stored in
-`dashboard.panels[0].visualization` there is a risk that this `id` gets
-updated without updating the reference in the references array.
-
==== Writing Migrations
Saved Objects support schema changes between Kibana versions, which we call
@@ -308,4 +259,60 @@ point in time.
It is critical that you have extensive tests to ensure that migrations behave
as expected with all possible input documents. Given how simple it is to test
all the branch conditions in a migration function and the high impact of a bug
-in this code, there's really no reason not to aim for 100% test code coverage.
\ No newline at end of file
+in this code, there's really no reason not to aim for 100% test code coverage.
+
+=== Client side usage
+
+==== References
+
+When a Saved Object declares `references` to other Saved Objects, the
+Saved Objects Export API will automatically export the target object with all
+of its references. This makes it easy for users to export the entire
+reference graph of an object.
+
+If a Saved Object can't be used on its own, that is, it needs other objects
+to exist for a feature to function correctly, that Saved Object should declare
+references to all the objects it requires. For example, a `dashboard`
+object might have panels for several `visualization` objects. When these
+`visualization` objects don't exist, the dashboard cannot be rendered
+correctly. The `dashboard` object should declare references to all its
+visualizations.
+
+However, `visualization` objects can continue to be rendered or embedded into
+other dashboards even if the `dashboard` it was originally embedded into
+doesn't exist. As a result, `visualization` objects should not declare
+references to `dashboard` objects.
+
+For each referenced object, an `id`, `type` and `name` are added to the
+`references` array:
+
+[source, typescript]
+----
+router.get(
+ { path: '/some-path', validate: false },
+ async (context, req, res) => {
+ const object = await context.core.savedObjects.client.create(
+ 'dashboard',
+ {
+ title: 'my dashboard',
+ panels: [
+ { visualization: 'vis1' }, // <1>
+ ],
+ indexPattern: 'indexPattern1'
+ },
+ { references: [
+ { id: '...', type: 'visualization', name: 'vis1' },
+ { id: '...', type: 'index_pattern', name: 'indexPattern1' },
+ ]
+ }
+ )
+ ...
+ }
+);
+----
+<1> Note how `dashboard.panels[0].visualization` stores the `name` property of
+the reference (not the `id` directly) to be able to uniquely identify this
+reference. This guarantees that the id the reference points to always remains
+up to date. If a visualization `id` was directly stored in
+`dashboard.panels[0].visualization` there is a risk that this `id` gets
+updated without updating the reference in the references array.
diff --git a/docs/developer/architecture/core/uisettings-service.asciidoc b/docs/developer/architecture/core/uisettings-service.asciidoc
new file mode 100644
index 0000000000000..85ed9c9eabc72
--- /dev/null
+++ b/docs/developer/architecture/core/uisettings-service.asciidoc
@@ -0,0 +1,40 @@
+[[ui-settings-service]]
+== UI settings service
+
+NOTE: The UI settings service is available both server and client side.
+
+=== Server side usage
+
+The program interface to UI settings.
+It makes it possible for Kibana plugins to extend Kibana UI Settings Management with custom settings.
+
+See:
+
+- {kib-repo}blob/{branch}/docs/development/core/server/kibana-plugin-core-server.uisettingsservicesetup.register.md[UI settings service Setup API docs]
+
+[source,typescript]
+----
+import { schema } from '@kbn/config-schema';
+import type { CoreSetup,Plugin } from 'kibana/server';
+
+export class MyPlugin implements Plugin {
+ public setup(core: CoreSetup) {
+ core.uiSettings.register({
+ custom: {
+ value: '42',
+ schema: schema.string(),
+ },
+ });
+ const router = core.http.createRouter();
+ router.get({
+ path: 'my_plugin/{id}',
+ validate: …,
+ },
+ async (context, request, response) => {
+ const customSetting = await context.uiSettings.client.get('custom');
+ …
+ });
+ }
+}
+
+----
diff --git a/docs/developer/architecture/index.asciidoc b/docs/developer/architecture/index.asciidoc
index 4bdd693979b49..1a0e7bab2f8f8 100644
--- a/docs/developer/architecture/index.asciidoc
+++ b/docs/developer/architecture/index.asciidoc
@@ -29,6 +29,24 @@ include::kibana-platform-plugin-api.asciidoc[leveloffset=+1]
include::core/index.asciidoc[leveloffset=+1]
+include::core/application_service.asciidoc[leveloffset=+1]
+
+include::core/configuration-service.asciidoc[leveloffset=+1]
+
+include::core/elasticsearch-service.asciidoc[leveloffset=+1]
+
+include::core/http-service.asciidoc[leveloffset=+1]
+
+include::core/logging-service.asciidoc[leveloffset=+1]
+
+include::core/logging-configuration-migration.asciidoc[leveloffset=+1]
+
+include::core/saved-objects-service.asciidoc[leveloffset=+1]
+
+include::core/uisettings-service.asciidoc[leveloffset=+1]
+
+include::core/patterns-scoped-services.asciidoc[leveloffset=+1]
+
include::security/index.asciidoc[leveloffset=+1]
include::add-data-tutorials.asciidoc[leveloffset=+1]
diff --git a/docs/developer/contributing/index.asciidoc b/docs/developer/contributing/index.asciidoc
index bbf2903491bf6..1cf96d19bfb2b 100644
--- a/docs/developer/contributing/index.asciidoc
+++ b/docs/developer/contributing/index.asciidoc
@@ -1,7 +1,7 @@
[[contributing]]
== Contributing
-Whether you want to fix a bug, implement a feature, or add some other improvements or apis, the following sections will
+Whether you want to fix a bug, implement a feature, add an improvement, or add APIs, the following sections will
guide you on the process. After committing your code, check out the link:https://www.elastic.co/community/contributor[Elastic Contributor Program] where you can earn points and rewards for your contributions.
Read <> to get your environment up and running, then read <>.
@@ -53,24 +53,27 @@ To use a single paragraph of text, enter a `Release note:` or `## Release note`
When you create the Release Notes text, use the following best practices:
-* Use present tense.
+* Use active voice.
* Use sentence case.
-* When you create a feature PR, start with `Adds`.
-* When you create an enhancement PR, start with `Improves`.
-* When you create a bug fix PR, start with `Fixes`.
-* When you create a deprecation PR, start with `Deprecates`.
+* When you create a PR that adds a feature, start with `Adds`.
+* When you create a PR that improves an existing feature, start with `Improves`.
+* When you create a PR that fixes existing functionality, start with `Fixes`.
+* When you create a PR that deprecates functionality, start with `Deprecates`.
[discrete]
==== Add your labels
+To make sure that your PR is included in the Release Notes, add the right label.
+
[arabic]
. Label the PR with the targeted version (ex: `v7.3.0`).
. Label the PR with the appropriate GitHub labels:
- * For a new feature or functionality, use `release_note:enhancement`.
- * For an external-facing fix, use `release_note:fix`. We do not include docs, build, and test fixes in the Release Notes, or unreleased issues that are only on `master`.
- * For a deprecated feature, use `release_note:deprecation`.
- * For a breaking change, use `release_note:breaking`.
- * To **NOT** include your changes in the Release Notes, use `release_note:skip`.
+ * `release_note:feature` — New user-facing features, significant enhancements to features, and significant bug fixes (in rare cases).
+ * `release_note:enhancement` — Minor UI changes and enhancements.
+ * `release_note:fix` — Fixes for bugs that existed in the previous release.
+ * `release_note:deprecation` — Deprecates functionality that existed in previous releases.
+ * `release_note:breaking` — Breaking changes that weren't present in previous releases.
+ * `release_note:skip` — Changes that should not appear in the Release Notes. For example, docs, build, and test fixes, or unreleased issues that are only in `master`.
include::development-github.asciidoc[leveloffset=+1]
diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc
index e1c2c40a31384..691d7fb82f3bc 100644
--- a/docs/developer/plugin-list.asciidoc
+++ b/docs/developer/plugin-list.asciidoc
@@ -452,10 +452,6 @@ using the CURL scripts in the scripts folder.
|Visualize geo data from Elasticsearch or 3rd party geo-services.
-|{kib-repo}blob/{branch}/x-pack/plugins/maps_legacy_licensing/README.md[mapsLegacyLicensing]
-|This plugin provides access to the detailed tile map services from Elastic.
-
-
|{kib-repo}blob/{branch}/x-pack/plugins/ml/readme.md[ml]
|This plugin provides access to the machine learning features provided by
Elastic.
@@ -537,6 +533,10 @@ Documentation: https://www.elastic.co/guide/en/kibana/master/task-manager-produc
|Gathers all usage collection, retrieving them from both: OSS and X-Pack plugins.
+|{kib-repo}blob/{branch}/x-pack/plugins/timelines/README.md[timelines]
+|Timelines is a plugin that provides a grid component with accompanying server side apis to help users identify events of interest and perform root cause analysis within Kibana.
+
+
|{kib-repo}blob/{branch}/x-pack/plugins/transform/readme.md[transform]
|This plugin provides access to the transforms features provided by Elastic.
diff --git a/docs/development/core/public/kibana-plugin-core-public.corestart.deprecations.md b/docs/development/core/public/kibana-plugin-core-public.corestart.deprecations.md
new file mode 100644
index 0000000000000..624c4868d54a7
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.corestart.deprecations.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [CoreStart](./kibana-plugin-core-public.corestart.md) > [deprecations](./kibana-plugin-core-public.corestart.deprecations.md)
+
+## CoreStart.deprecations property
+
+[DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md)
+
+Signature:
+
+```typescript
+deprecations: DeprecationsServiceStart;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.corestart.md b/docs/development/core/public/kibana-plugin-core-public.corestart.md
index a7b45b318d2c9..6ad9adca53ef5 100644
--- a/docs/development/core/public/kibana-plugin-core-public.corestart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.corestart.md
@@ -18,6 +18,7 @@ export interface CoreStart
| --- | --- | --- |
| [application](./kibana-plugin-core-public.corestart.application.md) | ApplicationStart
| [ApplicationStart](./kibana-plugin-core-public.applicationstart.md) |
| [chrome](./kibana-plugin-core-public.corestart.chrome.md) | ChromeStart
| [ChromeStart](./kibana-plugin-core-public.chromestart.md) |
+| [deprecations](./kibana-plugin-core-public.corestart.deprecations.md) | DeprecationsServiceStart
| [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) |
| [docLinks](./kibana-plugin-core-public.corestart.doclinks.md) | DocLinksStart
| [DocLinksStart](./kibana-plugin-core-public.doclinksstart.md) |
| [fatalErrors](./kibana-plugin-core-public.corestart.fatalerrors.md) | FatalErrorsStart
| [FatalErrorsStart](./kibana-plugin-core-public.fatalerrorsstart.md) |
| [http](./kibana-plugin-core-public.corestart.http.md) | HttpStart
| [HttpStart](./kibana-plugin-core-public.httpstart.md) |
diff --git a/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getalldeprecations.md b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getalldeprecations.md
new file mode 100644
index 0000000000000..8175da8a1893a
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getalldeprecations.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) > [getAllDeprecations](./kibana-plugin-core-public.deprecationsservicestart.getalldeprecations.md)
+
+## DeprecationsServiceStart.getAllDeprecations property
+
+Grabs deprecations details for all domains.
+
+Signature:
+
+```typescript
+getAllDeprecations: () => Promise;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getdeprecations.md b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getdeprecations.md
new file mode 100644
index 0000000000000..6e3472b7c3fe3
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.getdeprecations.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) > [getDeprecations](./kibana-plugin-core-public.deprecationsservicestart.getdeprecations.md)
+
+## DeprecationsServiceStart.getDeprecations property
+
+Grabs deprecations for a specific domain.
+
+Signature:
+
+```typescript
+getDeprecations: (domainId: string) => Promise;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.isdeprecationresolvable.md b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.isdeprecationresolvable.md
new file mode 100644
index 0000000000000..842761f6b7cea
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.isdeprecationresolvable.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) > [isDeprecationResolvable](./kibana-plugin-core-public.deprecationsservicestart.isdeprecationresolvable.md)
+
+## DeprecationsServiceStart.isDeprecationResolvable property
+
+Returns a boolean indicating whether the provided deprecation can be automatically resolved.
+
+Signature:
+
+```typescript
+isDeprecationResolvable: (details: DomainDeprecationDetails) => boolean;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.md b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.md
new file mode 100644
index 0000000000000..0d2c963ec5547
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.md
@@ -0,0 +1,23 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md)
+
+## DeprecationsServiceStart interface
+
+DeprecationsService provides methods to fetch domain deprecation details from the Kibana server.
+
+Signature:
+
+```typescript
+export interface DeprecationsServiceStart
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [getAllDeprecations](./kibana-plugin-core-public.deprecationsservicestart.getalldeprecations.md) | () => Promise<DomainDeprecationDetails[]>
| Grabs deprecations details for all domains. |
+| [getDeprecations](./kibana-plugin-core-public.deprecationsservicestart.getdeprecations.md) | (domainId: string) => Promise<DomainDeprecationDetails[]>
| Grabs deprecations for a specific domain. |
+| [isDeprecationResolvable](./kibana-plugin-core-public.deprecationsservicestart.isdeprecationresolvable.md) | (details: DomainDeprecationDetails) => boolean
| Returns a boolean if the provided deprecation can be automatically resolvable. |
+| [resolveDeprecation](./kibana-plugin-core-public.deprecationsservicestart.resolvedeprecation.md) | (details: DomainDeprecationDetails) => Promise<ResolveDeprecationResponse>
| Calls the correctiveActions.api to automatically resolve the depprecation. |
+
diff --git a/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.resolvedeprecation.md b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.resolvedeprecation.md
new file mode 100644
index 0000000000000..fae623fed3cc2
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.deprecationsservicestart.resolvedeprecation.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) > [resolveDeprecation](./kibana-plugin-core-public.deprecationsservicestart.resolvedeprecation.md)
+
+## DeprecationsServiceStart.resolveDeprecation property
+
+Calls the correctiveActions.api to automatically resolve the deprecation.
+
+Signature:
+
+```typescript
+resolveDeprecation: (details: DomainDeprecationDetails) => Promise;
+```
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
index 6ca7a83ac0a03..860f7c3c74892 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
@@ -144,6 +144,7 @@ readonly links: {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record;
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
index 3847ab0c6183a..a9cb6729b214e 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
@@ -17,5 +17,5 @@ export interface DocLinksStart
| --- | --- | --- |
| [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string
| |
| [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string
| |
-| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
};
readonly addData: string;
readonly kibana: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly eql: string;
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
}
| |
+| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
};
readonly addData: string;
readonly kibana: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly eql: string;
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
}
| |
diff --git a/docs/development/core/public/kibana-plugin-core-public.md b/docs/development/core/public/kibana-plugin-core-public.md
index e9d08dcd3bf4c..32f17d5488f66 100644
--- a/docs/development/core/public/kibana-plugin-core-public.md
+++ b/docs/development/core/public/kibana-plugin-core-public.md
@@ -59,6 +59,7 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [ChromeUserBanner](./kibana-plugin-core-public.chromeuserbanner.md) | |
| [CoreSetup](./kibana-plugin-core-public.coresetup.md) | Core services exposed to the Plugin
setup lifecycle |
| [CoreStart](./kibana-plugin-core-public.corestart.md) | Core services exposed to the Plugin
start lifecycle |
+| [DeprecationsServiceStart](./kibana-plugin-core-public.deprecationsservicestart.md) | DeprecationsService provides methods to fetch domain deprecation details from the Kibana server. |
| [DocLinksStart](./kibana-plugin-core-public.doclinksstart.md) | |
| [ErrorToastOptions](./kibana-plugin-core-public.errortoastoptions.md) | Options available for [IToasts](./kibana-plugin-core-public.itoasts.md) error APIs. |
| [FatalErrorInfo](./kibana-plugin-core-public.fatalerrorinfo.md) | Represents the message
and stack
of a fatal Error |
@@ -164,6 +165,7 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [PublicAppMetaInfo](./kibana-plugin-core-public.publicappmetainfo.md) | Public information about a registered app's [keywords](./kibana-plugin-core-public.appmeta.md) |
| [PublicAppSearchDeepLinkInfo](./kibana-plugin-core-public.publicappsearchdeeplinkinfo.md) | Public information about a registered app's [searchDeepLinks](./kibana-plugin-core-public.appsearchdeeplink.md) |
| [PublicUiSettingsParams](./kibana-plugin-core-public.publicuisettingsparams.md) | A sub-set of [UiSettingsParams](./kibana-plugin-core-public.uisettingsparams.md) exposed to the client-side. |
+| [ResolveDeprecationResponse](./kibana-plugin-core-public.resolvedeprecationresponse.md) | |
| [SavedObjectAttribute](./kibana-plugin-core-public.savedobjectattribute.md) | Type definition for a Saved Object attribute value |
| [SavedObjectAttributeSingle](./kibana-plugin-core-public.savedobjectattributesingle.md) | Don't use this type, it's simply a helper type for [SavedObjectAttribute](./kibana-plugin-core-public.savedobjectattribute.md) |
| [SavedObjectsClientContract](./kibana-plugin-core-public.savedobjectsclientcontract.md) | SavedObjectsClientContract as implemented by the [SavedObjectsClient](./kibana-plugin-core-public.savedobjectsclient.md) |
diff --git a/docs/development/core/public/kibana-plugin-core-public.resolvedeprecationresponse.md b/docs/development/core/public/kibana-plugin-core-public.resolvedeprecationresponse.md
new file mode 100644
index 0000000000000..928bf8c07004e
--- /dev/null
+++ b/docs/development/core/public/kibana-plugin-core-public.resolvedeprecationresponse.md
@@ -0,0 +1,16 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-public](./kibana-plugin-core-public.md) > [ResolveDeprecationResponse](./kibana-plugin-core-public.resolvedeprecationresponse.md)
+
+## ResolveDeprecationResponse type
+
+Signature:
+
+```typescript
+export declare type ResolveDeprecationResponse = {
+ status: 'ok';
+} | {
+ status: 'fail';
+ reason: string;
+};
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.coresetup.deprecations.md b/docs/development/core/server/kibana-plugin-core-server.coresetup.deprecations.md
new file mode 100644
index 0000000000000..436cc29b6e343
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.coresetup.deprecations.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [CoreSetup](./kibana-plugin-core-server.coresetup.md) > [deprecations](./kibana-plugin-core-server.coresetup.deprecations.md)
+
+## CoreSetup.deprecations property
+
+[DeprecationsServiceSetup](./kibana-plugin-core-server.deprecationsservicesetup.md)
+
+Signature:
+
+```typescript
+deprecations: DeprecationsServiceSetup;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.coresetup.md b/docs/development/core/server/kibana-plugin-core-server.coresetup.md
index 1171dbad570ce..b37ac80db87d6 100644
--- a/docs/development/core/server/kibana-plugin-core-server.coresetup.md
+++ b/docs/development/core/server/kibana-plugin-core-server.coresetup.md
@@ -18,6 +18,7 @@ export interface CoreSetupCapabilitiesSetup | [CapabilitiesSetup](./kibana-plugin-core-server.capabilitiessetup.md) |
| [context](./kibana-plugin-core-server.coresetup.context.md) | ContextSetup
| [ContextSetup](./kibana-plugin-core-server.contextsetup.md) |
+| [deprecations](./kibana-plugin-core-server.coresetup.deprecations.md) | DeprecationsServiceSetup
| [DeprecationsServiceSetup](./kibana-plugin-core-server.deprecationsservicesetup.md) |
| [elasticsearch](./kibana-plugin-core-server.coresetup.elasticsearch.md) | ElasticsearchServiceSetup
| [ElasticsearchServiceSetup](./kibana-plugin-core-server.elasticsearchservicesetup.md) |
| [getStartServices](./kibana-plugin-core-server.coresetup.getstartservices.md) | StartServicesAccessor<TPluginsStart, TStart>
| [StartServicesAccessor](./kibana-plugin-core-server.startservicesaccessor.md) |
| [http](./kibana-plugin-core-server.coresetup.http.md) | HttpServiceSetup & {
resources: HttpResources;
}
| [HttpServiceSetup](./kibana-plugin-core-server.httpservicesetup.md) |
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md
new file mode 100644
index 0000000000000..e362bc4e0329c
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md
@@ -0,0 +1,20 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md)
+
+## DeprecationsDetails.correctiveActions property
+
+Signature:
+
+```typescript
+correctiveActions: {
+ api?: {
+ path: string;
+ method: 'POST' | 'PUT';
+ body?: {
+ [key: string]: any;
+ };
+ };
+ manualSteps?: string[];
+ };
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md
new file mode 100644
index 0000000000000..467d6d76cf842
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.documentationurl.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md)
+
+## DeprecationsDetails.documentationUrl property
+
+Signature:
+
+```typescript
+documentationUrl?: string;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.level.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.level.md
new file mode 100644
index 0000000000000..64ad22e2c87fb
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.level.md
@@ -0,0 +1,13 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [level](./kibana-plugin-core-server.deprecationsdetails.level.md)
+
+## DeprecationsDetails.level property
+
+levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain.
+
+Signature:
+
+```typescript
+level: 'warning' | 'critical' | 'fetch_error';
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md
new file mode 100644
index 0000000000000..bb77e4247711f
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md
@@ -0,0 +1,21 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md)
+
+## DeprecationsDetails interface
+
+Signature:
+
+```typescript
+export interface DeprecationsDetails
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | {
api?: {
path: string;
method: 'POST' | 'PUT';
body?: {
[key: string]: any;
};
};
manualSteps?: string[];
}
| |
+| [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string
| |
+| [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error'
| levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. |
+| [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string
| |
+
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md
new file mode 100644
index 0000000000000..d79a4c9bd7995
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [message](./kibana-plugin-core-server.deprecationsdetails.message.md)
+
+## DeprecationsDetails.message property
+
+Signature:
+
+```typescript
+message: string;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md
new file mode 100644
index 0000000000000..7d9d3dcdda4da
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md
@@ -0,0 +1,95 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsServiceSetup](./kibana-plugin-core-server.deprecationsservicesetup.md)
+
+## DeprecationsServiceSetup interface
+
+The deprecations service provides a way for the Kibana platform to communicate deprecated features and configs with its users. These deprecations are only communicated if the deployment is using these features, allowing for a user-tailored experience when upgrading the stack version.
+
+The Deprecation service is consumed by the upgrade assistant to assist with the upgrade experience.
+
+If a deprecated feature can be resolved without manual user intervention, using correctiveActions.api allows the Upgrade Assistant to call this API to correct the deprecation upon a user trigger.
+
+Signature:
+
+```typescript
+export interface DeprecationsServiceSetup
+```
+
+## Example
+
+
+```ts
+import { DeprecationsDetails, GetDeprecationsContext, CoreSetup } from 'src/core/server';
+
+async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecationsContext): Promise {
+ const deprecations: DeprecationsDetails[] = [];
+ const count = await getTimelionSheetsCount(savedObjectsClient);
+
+ if (count > 0) {
+ // Example of a manual correctiveAction
+ deprecations.push({
+ message: `You have ${count} Timelion worksheets. The Timelion app will be removed in 8.0. To continue using your Timelion worksheets, migrate them to a dashboard.`,
+ documentationUrl:
+ 'https://www.elastic.co/guide/en/kibana/current/create-panels-with-timelion.html',
+ level: 'warning',
+ correctiveActions: {
+ manualSteps: [
+ 'Navigate to the Kibana Dashboard and click "Create dashboard".',
+ 'Select Timelion from the "New Visualization" window.',
+ 'Open a new tab, open the Timelion app, select the chart you want to copy, then copy the chart expression.',
+ 'Go to Timelion, paste the chart expression in the Timelion expression field, then click Update.',
+ 'In the toolbar, click Save.',
+ 'On the Save visualization window, enter the visualization Title, then click Save and return.',
+ ],
+ },
+ });
+ }
+
+ // Example of an api correctiveAction
+ deprecations.push({
+ "message": "User 'test_dashboard_user' is using a deprecated role: 'kibana_user'",
+ "documentationUrl": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html",
+ "level": "critical",
+ "correctiveActions": {
+ "api": {
+ "path": "/internal/security/users/test_dashboard_user",
+ "method": "POST",
+ "body": {
+ "username": "test_dashboard_user",
+ "roles": [
+ "machine_learning_user",
+ "enrich_user",
+ "kibana_admin"
+ ],
+ "full_name": "Alison Goryachev",
+ "email": "alisongoryachev@gmail.com",
+ "metadata": {},
+ "enabled": true
+ }
+ },
+ "manualSteps": [
+ "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.",
+        "Using Kibana role-mapping management, change all role-mappings which assign the kibana_user role to the kibana_admin role."
+ ]
+ },
+ });
+
+ return deprecations;
+}
+
+
+export class Plugin {
+ setup: (core: CoreSetup) => {
+ core.deprecations.registerDeprecations({ getDeprecations });
+ }
+}
+
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [registerDeprecations](./kibana-plugin-core-server.deprecationsservicesetup.registerdeprecations.md) | (deprecationContext: RegisterDeprecationsConfig) => void
| |
+
diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.registerdeprecations.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.registerdeprecations.md
new file mode 100644
index 0000000000000..07c2a3ad0ce55
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.registerdeprecations.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsServiceSetup](./kibana-plugin-core-server.deprecationsservicesetup.md) > [registerDeprecations](./kibana-plugin-core-server.deprecationsservicesetup.registerdeprecations.md)
+
+## DeprecationsServiceSetup.registerDeprecations property
+
+Signature:
+
+```typescript
+registerDeprecations: (deprecationContext: RegisterDeprecationsConfig) => void;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.esclient.md b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.esclient.md
new file mode 100644
index 0000000000000..70c1864bf905f
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.esclient.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [GetDeprecationsContext](./kibana-plugin-core-server.getdeprecationscontext.md) > [esClient](./kibana-plugin-core-server.getdeprecationscontext.esclient.md)
+
+## GetDeprecationsContext.esClient property
+
+Signature:
+
+```typescript
+esClient: IScopedClusterClient;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md
new file mode 100644
index 0000000000000..1018444f0849a
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.md
@@ -0,0 +1,19 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [GetDeprecationsContext](./kibana-plugin-core-server.getdeprecationscontext.md)
+
+## GetDeprecationsContext interface
+
+Signature:
+
+```typescript
+export interface GetDeprecationsContext
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [esClient](./kibana-plugin-core-server.getdeprecationscontext.esclient.md) | IScopedClusterClient
| |
+| [savedObjectsClient](./kibana-plugin-core-server.getdeprecationscontext.savedobjectsclient.md) | SavedObjectsClientContract
| |
+
diff --git a/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.savedobjectsclient.md b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.savedobjectsclient.md
new file mode 100644
index 0000000000000..66da52d3b5824
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.getdeprecationscontext.savedobjectsclient.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [GetDeprecationsContext](./kibana-plugin-core-server.getdeprecationscontext.md) > [savedObjectsClient](./kibana-plugin-core-server.getdeprecationscontext.savedobjectsclient.md)
+
+## GetDeprecationsContext.savedObjectsClient property
+
+Signature:
+
+```typescript
+savedObjectsClient: SavedObjectsClientContract;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md b/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
index 395c26a6e4bf6..8ddc0da5f1b28 100644
--- a/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
+++ b/docs/development/core/server/kibana-plugin-core-server.kibanaresponsefactory.md
@@ -10,10 +10,10 @@ Set of helpers used to create `KibanaResponse` to form HTTP response on an incom
```typescript
kibanaResponseFactory: {
- custom: | Error | Buffer | {
+ custom: | Error | Buffer | Stream | {
message: string | Error;
attributes?: Record | undefined;
- } | Stream | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
+ } | undefined>(options: CustomHttpResponseOptions) => KibanaResponse;
badRequest: (options?: ErrorHttpResponseOptions) => KibanaResponse;
unauthorized: (options?: ErrorHttpResponseOptions) => KibanaResponse;
forbidden: (options?: ErrorHttpResponseOptions) => KibanaResponse;
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md
deleted file mode 100644
index 67f2cf0cdcc7c..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.core.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [core](./kibana-plugin-core-server.legacyservicesetupdeps.core.md)
-
-## LegacyServiceSetupDeps.core property
-
-Signature:
-
-```typescript
-core: LegacyCoreSetup;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md
deleted file mode 100644
index a5c1d59be06d3..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.md
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md)
-
-## LegacyServiceSetupDeps interface
-
-> Warning: This API is now obsolete.
->
->
-
-Signature:
-
-```typescript
-export interface LegacyServiceSetupDeps
-```
-
-## Properties
-
-| Property | Type | Description |
-| --- | --- | --- |
-| [core](./kibana-plugin-core-server.legacyservicesetupdeps.core.md) | LegacyCoreSetup
| |
-| [plugins](./kibana-plugin-core-server.legacyservicesetupdeps.plugins.md) | Record<string, unknown>
| |
-| [uiPlugins](./kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md) | UiPlugins
| |
-
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md
deleted file mode 100644
index 032762904640b..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.plugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [plugins](./kibana-plugin-core-server.legacyservicesetupdeps.plugins.md)
-
-## LegacyServiceSetupDeps.plugins property
-
-Signature:
-
-```typescript
-plugins: Record;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md
deleted file mode 100644
index d19a7dfcbfcfa..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) > [uiPlugins](./kibana-plugin-core-server.legacyservicesetupdeps.uiplugins.md)
-
-## LegacyServiceSetupDeps.uiPlugins property
-
-Signature:
-
-```typescript
-uiPlugins: UiPlugins;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md
deleted file mode 100644
index 17369e00a7068..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.core.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) > [core](./kibana-plugin-core-server.legacyservicestartdeps.core.md)
-
-## LegacyServiceStartDeps.core property
-
-Signature:
-
-```typescript
-core: LegacyCoreStart;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md
deleted file mode 100644
index d6f6b38b79f84..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.md
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md)
-
-## LegacyServiceStartDeps interface
-
-> Warning: This API is now obsolete.
->
->
-
-Signature:
-
-```typescript
-export interface LegacyServiceStartDeps
-```
-
-## Properties
-
-| Property | Type | Description |
-| --- | --- | --- |
-| [core](./kibana-plugin-core-server.legacyservicestartdeps.core.md) | LegacyCoreStart
| |
-| [plugins](./kibana-plugin-core-server.legacyservicestartdeps.plugins.md) | Record<string, unknown>
| |
-
diff --git a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md b/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md
deleted file mode 100644
index 4634bf21fb42c..0000000000000
--- a/docs/development/core/server/kibana-plugin-core-server.legacyservicestartdeps.plugins.md
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) > [plugins](./kibana-plugin-core-server.legacyservicestartdeps.plugins.md)
-
-## LegacyServiceStartDeps.plugins property
-
-Signature:
-
-```typescript
-plugins: Record;
-```
diff --git a/docs/development/core/server/kibana-plugin-core-server.md b/docs/development/core/server/kibana-plugin-core-server.md
index 4bf00d2da6e23..3bbdf8c703ab1 100644
--- a/docs/development/core/server/kibana-plugin-core-server.md
+++ b/docs/development/core/server/kibana-plugin-core-server.md
@@ -69,13 +69,16 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [DeprecationAPIClientParams](./kibana-plugin-core-server.deprecationapiclientparams.md) | |
| [DeprecationAPIResponse](./kibana-plugin-core-server.deprecationapiresponse.md) | |
| [DeprecationInfo](./kibana-plugin-core-server.deprecationinfo.md) | |
+| [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) | |
| [DeprecationSettings](./kibana-plugin-core-server.deprecationsettings.md) | UiSettings deprecation field options. |
+| [DeprecationsServiceSetup](./kibana-plugin-core-server.deprecationsservicesetup.md) | The deprecations service provides a way for the Kibana platform to communicate deprecated features and configs with its users. These deprecations are only communicated if the deployment is using these features. Allowing for a user tailored experience for upgrading the stack version.The Deprecation service is consumed by the upgrade assistant to assist with the upgrade experience.If a deprecated feature can be resolved without manual user intervention. Using correctiveActions.api allows the Upgrade Assistant to use this api to correct the deprecation upon a user trigger. |
| [DiscoveredPlugin](./kibana-plugin-core-server.discoveredplugin.md) | Small container object used to expose information about discovered plugins that may or may not have been started. |
| [ElasticsearchServiceSetup](./kibana-plugin-core-server.elasticsearchservicesetup.md) | |
| [ElasticsearchServiceStart](./kibana-plugin-core-server.elasticsearchservicestart.md) | |
| [ElasticsearchStatusMeta](./kibana-plugin-core-server.elasticsearchstatusmeta.md) | |
| [ErrorHttpResponseOptions](./kibana-plugin-core-server.errorhttpresponseoptions.md) | HTTP response parameters |
| [FakeRequest](./kibana-plugin-core-server.fakerequest.md) | Fake request object created manually by Kibana plugins. |
+| [GetDeprecationsContext](./kibana-plugin-core-server.getdeprecationscontext.md) | |
| [GetResponse](./kibana-plugin-core-server.getresponse.md) | |
| [HttpAuth](./kibana-plugin-core-server.httpauth.md) | |
| [HttpResources](./kibana-plugin-core-server.httpresources.md) | HttpResources service is responsible for serving static & dynamic assets for Kibana application via HTTP. Provides API allowing plug-ins to respond with: - a pre-configured HTML page bootstrapping Kibana client app - custom HTML page - custom JS script file. |
@@ -107,8 +110,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [LegacyCallAPIOptions](./kibana-plugin-core-server.legacycallapioptions.md) | The set of options that defines how API call should be made and result be processed. |
| [LegacyElasticsearchError](./kibana-plugin-core-server.legacyelasticsearcherror.md) | @deprecated. The new elasticsearch client doesn't wrap errors anymore. |
| [LegacyRequest](./kibana-plugin-core-server.legacyrequest.md) | |
-| [LegacyServiceSetupDeps](./kibana-plugin-core-server.legacyservicesetupdeps.md) | |
-| [LegacyServiceStartDeps](./kibana-plugin-core-server.legacyservicestartdeps.md) | |
| [LoggerContextConfigInput](./kibana-plugin-core-server.loggercontextconfiginput.md) | |
| [LoggingServiceSetup](./kibana-plugin-core-server.loggingservicesetup.md) | Provides APIs to plugins for customizing the plugin's logger. |
| [MetricsServiceSetup](./kibana-plugin-core-server.metricsservicesetup.md) | APIs to retrieves metrics gathered and exposed by the core platform. |
@@ -128,6 +129,7 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [PluginConfigDescriptor](./kibana-plugin-core-server.pluginconfigdescriptor.md) | Describes a plugin configuration properties. |
| [PluginInitializerContext](./kibana-plugin-core-server.plugininitializercontext.md) | Context that's available to plugins during initialization stage. |
| [PluginManifest](./kibana-plugin-core-server.pluginmanifest.md) | Describes the set of required and optional properties plugin can define in its mandatory JSON manifest file. |
+| [RegisterDeprecationsConfig](./kibana-plugin-core-server.registerdeprecationsconfig.md) | |
| [RequestHandlerContext](./kibana-plugin-core-server.requesthandlercontext.md) | Plugin specific context passed to a route handler.Provides the following clients and services: - [savedObjects.client](./kibana-plugin-core-server.savedobjectsclient.md) - Saved Objects client which uses the credentials of the incoming request - [savedObjects.typeRegistry](./kibana-plugin-core-server.isavedobjecttyperegistry.md) - Type registry containing all the registered types. - [elasticsearch.client](./kibana-plugin-core-server.iscopedclusterclient.md) - Elasticsearch data client which uses the credentials of the incoming request - [elasticsearch.legacy.client](./kibana-plugin-core-server.legacyscopedclusterclient.md) - The legacy Elasticsearch data client which uses the credentials of the incoming request - [uiSettings.client](./kibana-plugin-core-server.iuisettingsclient.md) - uiSettings client which uses the credentials of the incoming request |
| [ResolveCapabilitiesOptions](./kibana-plugin-core-server.resolvecapabilitiesoptions.md) | Defines a set of additional options for the resolveCapabilities
method of [CapabilitiesStart](./kibana-plugin-core-server.capabilitiesstart.md). |
| [RouteConfig](./kibana-plugin-core-server.routeconfig.md) | Route specific configuration. |
diff --git a/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.getdeprecations.md b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.getdeprecations.md
new file mode 100644
index 0000000000000..cf008725ff15b
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.getdeprecations.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [RegisterDeprecationsConfig](./kibana-plugin-core-server.registerdeprecationsconfig.md) > [getDeprecations](./kibana-plugin-core-server.registerdeprecationsconfig.getdeprecations.md)
+
+## RegisterDeprecationsConfig.getDeprecations property
+
+Signature:
+
+```typescript
+getDeprecations: (context: GetDeprecationsContext) => MaybePromise;
+```
diff --git a/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md
new file mode 100644
index 0000000000000..59e6d406f84bf
--- /dev/null
+++ b/docs/development/core/server/kibana-plugin-core-server.registerdeprecationsconfig.md
@@ -0,0 +1,18 @@
+
+
+[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [RegisterDeprecationsConfig](./kibana-plugin-core-server.registerdeprecationsconfig.md)
+
+## RegisterDeprecationsConfig interface
+
+Signature:
+
+```typescript
+export interface RegisterDeprecationsConfig
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [getDeprecations](./kibana-plugin-core-server.registerdeprecationsconfig.getdeprecations.md) | (context: GetDeprecationsContext) => MaybePromise<DeprecationsDetails[]>
| |
+
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.fieldistimefield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.fieldistimefield.md
index a1fde4dec25b1..6e7b753320270 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.fieldistimefield.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.fieldistimefield.md
@@ -7,9 +7,9 @@
Signature:
```typescript
-fieldIsTimeField(): boolean | "" | undefined;
+fieldIsTimeField(): boolean;
```
Returns:
-`boolean | "" | undefined`
+`boolean`
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md
index c0ba1bbeea334..22f8994747aa2 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md
@@ -23,6 +23,7 @@ export declare class AggConfigs
| [aggs](./kibana-plugin-plugins-data-public.aggconfigs.aggs.md) | | IAggConfig[]
| |
| [createAggConfig](./kibana-plugin-plugins-data-public.aggconfigs.createaggconfig.md) | | <T extends AggConfig = AggConfig>(params: CreateAggConfigParams, { addToAggConfigs }?: {
addToAggConfigs?: boolean | undefined;
}) => T
| |
| [indexPattern](./kibana-plugin-plugins-data-public.aggconfigs.indexpattern.md) | | IndexPattern
| |
+| [timeFields](./kibana-plugin-plugins-data-public.aggconfigs.timefields.md) | | string[]
| |
| [timeRange](./kibana-plugin-plugins-data-public.aggconfigs.timerange.md) | | TimeRange
| |
## Methods
@@ -43,6 +44,7 @@ export declare class AggConfigs
| [getResponseAggs()](./kibana-plugin-plugins-data-public.aggconfigs.getresponseaggs.md) | | Gets the AggConfigs (and possibly ResponseAggConfigs) that represent the values that will be produced when all aggs are run.With multi-value metric aggs it is possible for a single agg request to result in multiple agg values, which is why the length of a vis' responseValuesAggs may be different than the vis' aggs {array\[AggConfig\]} |
| [jsonDataEquals(aggConfigs)](./kibana-plugin-plugins-data-public.aggconfigs.jsondataequals.md) | | Data-by-data comparison of this Aggregation Ignores the non-array indexes |
| [onSearchRequestStart(searchSource, options)](./kibana-plugin-plugins-data-public.aggconfigs.onsearchrequeststart.md) | | |
+| [setTimeFields(timeFields)](./kibana-plugin-plugins-data-public.aggconfigs.settimefields.md) | | |
| [setTimeRange(timeRange)](./kibana-plugin-plugins-data-public.aggconfigs.settimerange.md) | | |
| [toDsl(hierarchical)](./kibana-plugin-plugins-data-public.aggconfigs.todsl.md) | | |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.settimefields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.settimefields.md
new file mode 100644
index 0000000000000..31eadc5756d3d
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.settimefields.md
@@ -0,0 +1,22 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [setTimeFields](./kibana-plugin-plugins-data-public.aggconfigs.settimefields.md)
+
+## AggConfigs.setTimeFields() method
+
+Signature:
+
+```typescript
+setTimeFields(timeFields: string[] | undefined): void;
+```
+
+## Parameters
+
+| Parameter | Type | Description |
+| --- | --- | --- |
+| timeFields | string[] | undefined
| |
+
+Returns:
+
+`void`
+
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.timefields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.timefields.md
new file mode 100644
index 0000000000000..903370fd8eb84
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.timefields.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [timeFields](./kibana-plugin-plugins-data-public.aggconfigs.timefields.md)
+
+## AggConfigs.timeFields property
+
+Signature:
+
+```typescript
+timeFields?: string[];
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md
new file mode 100644
index 0000000000000..4e432b8d365a3
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggFunctionsMapping](./kibana-plugin-plugins-data-public.aggfunctionsmapping.md) > [aggSinglePercentile](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md)
+
+## AggFunctionsMapping.aggSinglePercentile property
+
+Signature:
+
+```typescript
+aggSinglePercentile: ReturnType;
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
index 05388e2b86d7b..852c6d5f1c00b 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggfunctionsmapping.md
@@ -45,6 +45,7 @@ export interface AggFunctionsMapping
| [aggRange](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggrange.md) | ReturnType<typeof aggRange>
| |
| [aggSerialDiff](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggserialdiff.md) | ReturnType<typeof aggSerialDiff>
| |
| [aggSignificantTerms](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsignificantterms.md) | ReturnType<typeof aggSignificantTerms>
| |
+| [aggSinglePercentile](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsinglepercentile.md) | ReturnType<typeof aggSinglePercentile>
| |
| [aggStdDeviation](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggstddeviation.md) | ReturnType<typeof aggStdDeviation>
| |
| [aggSum](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggsum.md) | ReturnType<typeof aggSum>
| |
| [aggTerms](./kibana-plugin-plugins-data-public.aggfunctionsmapping.aggterms.md) | ReturnType<typeof aggTerms>
| |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
index 3b5cecf1a0b82..bdae3ec738ac3 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.metric_types.md
@@ -32,6 +32,7 @@ export declare enum METRIC_TYPES
| PERCENTILE\_RANKS | "percentile_ranks"
| |
| PERCENTILES | "percentiles"
| |
| SERIAL\_DIFF | "serial_diff"
| |
+| SINGLE\_PERCENTILE | "single_percentile"
| |
| STD\_DEV | "std_dev"
| |
| SUM | "sum"
| |
| SUM\_BUCKET | "sum_bucket"
| |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
index 440fd25993d64..cfaad01c029ea 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.search.md
@@ -14,7 +14,7 @@ search: {
intervalOptions: ({
display: string;
val: string;
- enabled(agg: import("../common").IBucketAggConfig): boolean | "" | undefined;
+ enabled(agg: import("../common").IBucketAggConfig): boolean;
} | {
display: string;
val: string;
@@ -47,6 +47,11 @@ search: {
intervalLabel: string;
})[];
getNumberHistogramIntervalByDatatableColumn: (column: import("../../expressions").DatatableColumn) => number | undefined;
+ getDateHistogramMetaDataByDatatableColumn: (column: import("../../expressions").DatatableColumn) => {
+ interval: string | undefined;
+ timeZone: string | undefined;
+ timeRange: import("../common").TimeRange | undefined;
+ } | undefined;
};
getRequestInspectorStats: typeof getRequestInspectorStats;
getResponseInspectorStats: typeof getResponseInspectorStats;
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md
new file mode 100644
index 0000000000000..d1418d7245d73
--- /dev/null
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [AggFunctionsMapping](./kibana-plugin-plugins-data-server.aggfunctionsmapping.md) > [aggSinglePercentile](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md)
+
+## AggFunctionsMapping.aggSinglePercentile property
+
+Signature:
+
+```typescript
+aggSinglePercentile: ReturnType;
+```
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
index 86bf797572b09..6b5f854c155f3 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.aggfunctionsmapping.md
@@ -45,6 +45,7 @@ export interface AggFunctionsMapping
| [aggRange](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggrange.md) | ReturnType<typeof aggRange>
| |
| [aggSerialDiff](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggserialdiff.md) | ReturnType<typeof aggSerialDiff>
| |
| [aggSignificantTerms](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsignificantterms.md) | ReturnType<typeof aggSignificantTerms>
| |
+| [aggSinglePercentile](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsinglepercentile.md) | ReturnType<typeof aggSinglePercentile>
| |
| [aggStdDeviation](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggstddeviation.md) | ReturnType<typeof aggStdDeviation>
| |
| [aggSum](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggsum.md) | ReturnType<typeof aggSum>
| |
| [aggTerms](./kibana-plugin-plugins-data-server.aggfunctionsmapping.aggterms.md) | ReturnType<typeof aggTerms>
| |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
index d408f00e33c9e..b5c7d8931ad4b 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.md
@@ -14,6 +14,6 @@ export declare class IndexPatternsServiceProvider implements PluginSignature:
```typescript
-setup(core: CoreSetup, { expressions }: IndexPatternsServiceSetupDeps): void;
+setup(core: CoreSetup, { expressions, usageCollection }: IndexPatternsServiceSetupDeps): void;
```
## Parameters
| Parameter | Type | Description |
| --- | --- | --- |
-| core | CoreSetup<DataPluginStartDependencies, DataPluginStart>
| |
-| { expressions } | IndexPatternsServiceSetupDeps
| |
+| core | CoreSetup<IndexPatternsServiceStartDeps, DataPluginStart>
| |
+| { expressions, usageCollection } | IndexPatternsServiceSetupDeps
| |
Returns:
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
index 98f9310c6d98c..88079bb2fa3cb 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpatternsserviceprovider.start.md
@@ -8,7 +8,7 @@
```typescript
start(core: CoreStart, { fieldFormats, logger }: IndexPatternsServiceStartDeps): {
- indexPatternsServiceFactory: (savedObjectsClient: SavedObjectsClientContract, elasticsearchClient: ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: ElasticsearchClient) => Promise;
};
```
@@ -22,6 +22,6 @@ start(core: CoreStart, { fieldFormats, logger }: IndexPatternsServiceStartDeps):
Returns:
`{
- indexPatternsServiceFactory: (savedObjectsClient: SavedObjectsClientContract, elasticsearchClient: ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: ElasticsearchClient) => Promise;
}`
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
index 250173d11a056..37f53af8971b3 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.metric_types.md
@@ -32,6 +32,7 @@ export declare enum METRIC_TYPES
| PERCENTILE\_RANKS | "percentile_ranks"
| |
| PERCENTILES | "percentiles"
| |
| SERIAL\_DIFF | "serial_diff"
| |
+| SINGLE\_PERCENTILE | "single_percentile"
| |
| STD\_DEV | "std_dev"
| |
| SUM | "sum"
| |
| SUM\_BUCKET | "sum_bucket"
| |
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
index 025cab9f48c1a..f4404521561d2 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.plugin.start.md
@@ -12,7 +12,7 @@ start(core: CoreStart): {
fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise;
};
indexPatterns: {
- indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
};
search: ISearchStart>;
};
@@ -31,7 +31,7 @@ start(core: CoreStart): {
fieldFormatServiceFactory: (uiSettings: import("../../../core/server").IUiSettingsClient) => Promise;
};
indexPatterns: {
- indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
+ indexPatternsServiceFactory: (savedObjectsClient: Pick, elasticsearchClient: import("../../../core/server").ElasticsearchClient) => Promise;
};
search: ISearchStart>;
}`
diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
index 4f8a0beefa421..0911c3e86964d 100644
--- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
+++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.search.md
@@ -14,7 +14,7 @@ search: {
intervalOptions: ({
display: string;
val: string;
- enabled(agg: import("../common").IBucketAggConfig): boolean | "" | undefined;
+ enabled(agg: import("../common").IBucketAggConfig): boolean;
} | {
display: string;
val: string;
diff --git a/docs/discover/search.asciidoc b/docs/discover/search.asciidoc
index 9971a6f574f9c..0306be3eb670d 100644
--- a/docs/discover/search.asciidoc
+++ b/docs/discover/search.asciidoc
@@ -110,7 +110,7 @@ image::discover/images/read-only-badge.png[Example of Discover's read only acces
==== Save a search
To save the current search:
-. Click *Save* in the Kibana toolbar.
+. Click *Save* in the toolbar.
. Enter a name for the search and click *Save*.
To import, export, and delete saved searches, open the main menu,
@@ -119,7 +119,7 @@ then click *Stack Management > Saved Objects*.
==== Open a saved search
To load a saved search into Discover:
-. Click *Open* in the Kibana toolbar.
+. Click *Open* in the toolbar.
. Select the search you want to open.
If the saved search is associated with a different index pattern than is currently
diff --git a/docs/management/advanced-options.asciidoc b/docs/management/advanced-options.asciidoc
index 446b6a2cfd851..a9de1888465f7 100644
--- a/docs/management/advanced-options.asciidoc
+++ b/docs/management/advanced-options.asciidoc
@@ -209,6 +209,32 @@ from *{stack-monitor-app}*.
Turns off all unnecessary animations in the {kib} UI. Refresh the page to apply
the changes.
+[float]
+[[kibana-banners-settings]]
+==== Banners
+
+[NOTE]
+====
+Banners are a https://www.elastic.co/subscriptions[subscription feature].
+====
+
+[horizontal]
+[[banners-placement]]`banners:placement`::
+Set to `Top` to display a banner above the Elastic header for this space. Defaults to the value of
+the `xpack.banners.placement` configuration property.
+
+[[banners-textcontent]]`banners:textContent`::
+The text to display inside the banner for this space, either plain text or Markdown.
+Defaults to the value of the `xpack.banners.textContent` configuration property.
+
+[[banners-textcolor]]`banners:textColor`::
+The color for the banner text for this space. Defaults to the value of
+the `xpack.banners.textColor` configuration property.
+
+[[banners-backgroundcolor]]`banners:backgroundColor`::
+The color of the banner background for this space. Defaults to the value of
+the `xpack.banners.backgroundColor` configuration property.
+
[float]
[[kibana-dashboard-settings]]
==== Dashboard
diff --git a/docs/management/managing-fields.asciidoc b/docs/management/managing-fields.asciidoc
index 5cd5c1ffd6248..505f6853c7906 100644
--- a/docs/management/managing-fields.asciidoc
+++ b/docs/management/managing-fields.asciidoc
@@ -78,6 +78,7 @@ include::field-formatters/color-formatter.asciidoc[]
[[scripted-fields]]
=== Scripted fields
+deprecated::[7.13,Use {ref}/runtime.html[runtime fields] instead of scripted fields. Runtime fields support Painless scripts and provide greater flexibility.]
Scripted fields compute data on the fly from the data in your {es} indices. The data is shown on
the Discover tab as part of the document data, and you can use scripted fields in your visualizations. You query scripted fields with the <>, and can filter them using the filter bar. The scripted field values are computed at query time, so they aren't indexed and cannot be searched using the {kib} default
@@ -87,7 +88,7 @@ WARNING: Computing data on the fly with scripted fields can be very resource int
{kib} performance. Keep in mind that there's no built-in validation of a scripted field. If your scripts are
buggy, you'll get exceptions whenever you try to view the dynamically generated data.
-When you define a scripted field in {kib}, you have a choice of the {ref}/modules-scripting-expression.html[Lucene expressions] or the
+When you define a scripted field in {kib}, you have a choice of the {ref}/modules-scripting-expression.html[Lucene expressions] or the
{ref}/modules-scripting-painless.html[Painless] scripting language.
You can reference any single value numeric field in your expressions, for example:
diff --git a/docs/maps/import-geospatial-data.asciidoc b/docs/maps/import-geospatial-data.asciidoc
index fb4250368086e..0218bac58815a 100644
--- a/docs/maps/import-geospatial-data.asciidoc
+++ b/docs/maps/import-geospatial-data.asciidoc
@@ -6,6 +6,30 @@ To import geospatial data into the Elastic Stack, the data must be indexed as {
Geospatial data comes in many formats.
Choose an import tool based on the format of your geospatial data.
+[discrete]
+[[import-geospatial-privileges]]
+=== Security privileges
+
+The {stack-security-features} provide roles and privileges that control which users can upload files.
+You can manage your roles, privileges, and
+spaces in **{stack-manage-app}** in {kib}. For more information, see
+{ref}/security-privileges.html[Security privileges],
+<>, and <>.
+
+To upload GeoJSON files in {kib} with *Maps*, you must have:
+
+* The `all` {kib} privilege for *Maps*.
+* The `all` {kib} privilege for *Index Pattern Management*.
+* The `create` and `create_index` index privileges for destination indices.
+* To use the index in *Maps*, you must also have the `read` and `view_index_metadata` index privileges for destination indices.
+
+To upload CSV files in {kib} with the *{file-data-viz}*, you must have privileges to upload GeoJSON files and:
+
+* The `manage_pipeline` cluster privilege.
+* The `read` {kib} privilege for *Machine Learning*.
+* The `machine_learning_admin` or `machine_learning_user` role.
+
+
[discrete]
=== Upload CSV with latitude and longitude columns
diff --git a/docs/maps/maps-aggregations.asciidoc b/docs/maps/maps-aggregations.asciidoc
index 265bf6bfaea30..7f4af952653e7 100644
--- a/docs/maps/maps-aggregations.asciidoc
+++ b/docs/maps/maps-aggregations.asciidoc
@@ -76,9 +76,8 @@ then accumulates the most relevant documents based on sort order for each entry
To enable top hits:
-. Click *Add layer*, then select the *Documents* layer.
+. Click *Add layer*, then select the *Top hits per entity* layer.
. Configure *Index pattern* and *Geospatial field*.
-. In *Scaling*, select *Show top hits per entity*.
. Set *Entity* to the field that identifies entities in your documents.
This field will be used in the terms aggregation to group your documents into entity buckets.
. Set *Documents per entity* to configure the maximum number of documents accumulated per entity.
diff --git a/docs/maps/vector-layer.asciidoc b/docs/maps/vector-layer.asciidoc
index 6a2228161845e..2115c16a889c6 100644
--- a/docs/maps/vector-layer.asciidoc
+++ b/docs/maps/vector-layer.asciidoc
@@ -23,8 +23,6 @@ Select the appropriate *Scaling* option for your use case.
* *Limit results to 10000.* The layer displays features from the first `index.max_result_window` documents.
Results exceeding `index.max_result_window` are not displayed.
-* *Show top hits per entity.* The layer displays the <>.
-
* *Show clusters when results exceed 10000.* When results exceed `index.max_result_window`, the layer uses {ref}/search-aggregations-bucket-geotilegrid-aggregation.html[GeoTile grid aggregation] to group your documents into clusters and displays metrics for each cluster. When results are less then `index.max_result_window`, the layer displays features from individual documents.
* *Use vector tiles.* Vector tiles partition your map into 6 to 8 tiles.
@@ -36,6 +34,9 @@ Tiles exceeding `index.max_result_window` have a visual indicator when there are
*Point to point*:: Aggregated data paths between the source and destination.
The index must contain at least 2 fields mapped as {ref}/geo-point.html[geo_point], source and destination.
+*Top hits per entity*:: The layer displays the <>.
+The index must contain at least one field mapped as {ref}/geo-point.html[geo_point] or {ref}/geo-shape.html[geo_shape].
+
*Tracks*:: Create lines from points.
The index must contain at least one field mapped as {ref}/geo-point.html[geo_point].
diff --git a/docs/migration/migrate_8_0.asciidoc b/docs/migration/migrate_8_0.asciidoc
index 52d1d63ce0653..acb343191609d 100644
--- a/docs/migration/migrate_8_0.asciidoc
+++ b/docs/migration/migrate_8_0.asciidoc
@@ -50,46 +50,56 @@ for example, `logstash-*`.
[float]
==== Default logging timezone is now the system's timezone
-*Details:* In prior releases the timezone used in logs defaulted to UTC. We now use the host machine's timezone by default.
+*Details:* In prior releases the timezone used in logs defaulted to UTC. We now use the host machine's timezone by default.
*Impact:* To restore the previous behavior, in kibana.yml use the pattern layout, with a date modifier:
[source,yaml]
-------------------
logging:
appenders:
- console:
- kind: console
+ custom:
+ type: console
layout:
- kind: pattern
+ type: pattern
pattern: "%date{ISO8601_TZ}{UTC}"
-------------------
See https://github.com/elastic/kibana/pull/90368 for more details.
[float]
==== Responses are never logged by default
-*Details:* Previously responses would be logged if either `logging.json` was true, `logging.dest` was specified, or a `TTY` was detected.
+*Details:* Previously responses would be logged if either `logging.json` was true, `logging.dest` was specified, or a `TTY` was detected. With the new logging configuration, these are provided by a dedicated logger.
-*Impact:* To restore the previous behavior, in kibana.yml enable `debug` logs for the `http.server.response` context under `logging.loggers`:
+*Impact:* To restore the previous behavior, in `kibana.yml` enable `debug` for the `http.server.response` logger:
[source,yaml]
-------------------
logging:
+ appenders:
+ custom:
+ type: console
+ layout:
+ type: pattern
loggers:
- - context: http.server.response
- appenders: [console]
+ - name: http.server.response
+ appenders: [custom]
level: debug
-------------------
See https://github.com/elastic/kibana/pull/87939 for more details.
[float]
==== Logging destination is specified by the appender
-*Details:* Previously log destination would be `stdout` and could be changed to `file` using `logging.dest`.
+*Details:* Previously log destination would be `stdout` and could be changed to `file` using `logging.dest`. With the new logging configuration, you can specify the destination using appenders.
-*Impact:* To restore the previous behavior, in `kibana.yml` use the `console` appender to send logs to `stdout`.
+*Impact:* To restore the previous behavior and log records to *stdout*, in `kibana.yml` use an appender with `type: console`.
[source,yaml]
-------------------
logging:
+ appenders:
+ custom:
+ type: console
+ layout:
+ type: pattern
root:
- appenders: [default, console]
+ appenders: [default, custom]
-------------------
To send logs to `file` with a given file path, you should define a custom appender with `type:file`:
@@ -107,16 +117,15 @@ logging:
-------------------
[float]
-==== Specify log event output with root
-*Details:* Previously logging output would be specified by `logging.silent` (none), 'logging.quiet' (error messages only) and `logging.verbose` (all).
+==== Set log verbosity with root
+*Details:* Previously logging output would be specified by `logging.silent` (none), `logging.quiet` (error messages only) and `logging.verbose` (all). With the new logging configuration, set the minimum required log level.
-*Impact:* To restore the previous behavior, in `kibana.yml` specify `logging.root.level` as one of `off`, `error`, `all`:
+*Impact:* To restore the previous behavior, in `kibana.yml` specify `logging.root.level`:
[source,yaml]
-------------------
# suppress all logs
logging:
root:
- appenders: [default]
level: off
-------------------
@@ -125,7 +134,6 @@ logging:
# only log error messages
logging:
root:
- appenders: [default]
level: error
-------------------
@@ -134,54 +142,14 @@ logging:
# log all events
logging:
root:
- appenders: [default]
level: all
-------------------
[float]
-==== Suppress all log output with root
-*Details:* Previously all logging output would be suppressed if `logging.silent` was true.
-
-*Impact:* To restore the previous behavior, in `kibana.yml` turn `logging.root.level` to 'off'.
-[source,yaml]
--------------------
-logging:
- root:
- appenders: [default]
- level: off
--------------------
+==== Declare log message format
+*Details:* Previously all events would be logged in `json` format when `logging.json` was true. With the new logging configuration you can specify the output format with layouts. You can choose between the `json` and `pattern` formats, depending on your needs.
-[float]
-==== Suppress log output with root
-*Details:* Previously all logging output other than error messages would be suppressed if `logging.quiet` was true.
-
-*Impact:* To restore the previous behavior, in `kibana.yml` turn `logging.root.level` to 'error'.
-[source,yaml]
--------------------
-logging:
- root:
- appenders: [default]
- level: error
--------------------
-
-[float]
-==== Log all output with root
-*Details:* Previously all events would be logged if `logging.verbose` was true.
-
-*Impact:* To restore the previous behavior, in `kibana.yml` turn `logging.root.level` to 'all'.
-[source,yaml]
--------------------
-logging:
- root:
- appenders: [default]
- level: all
--------------------
-
-[float]
-==== Declare log message format for each custom appender
-*Details:* Previously all events would be logged in `json` format when `logging.json` was true.
-
-*Impact:* To restore the previous behavior, in `kibana.yml` configure the logging format for each custom appender with the `appender.layout` property. There is no default for custom appenders and each one must be configured expilictly.
+*Impact:* To restore the previous behavior, in `kibana.yml` configure the logging format for each custom appender with the `appender.layout` property. There is no default for custom appenders and each one must be configured explicitly.
[source,yaml]
-------------------
@@ -352,6 +320,15 @@ All supported operating systems support using systemd service files. Any system
*Impact:*
Any installations using `.deb` or `.rpm` packages using SysV will need to migrate to systemd.
+[float]
+=== TLS v1.0 and v1.1 are disabled by default
+
+*Details:*
+Support can be re-enabled by setting `--tls-min-1.0` in the `node.options` config file that can be found inside `kibana/config` folder or any other configured with the environment variable `KBN_PATH_CONF` (for example in Debian based system would be `/etc/kibana`).
+
+*Impact:*
+Browser and proxy clients communicating over TLS v1.0 and v1.1.
+
[float]
=== Platform removed from root folder name for `.tar.gz` and `.zip` archives
diff --git a/docs/settings/banners-settings.asciidoc b/docs/settings/banners-settings.asciidoc
index 2a68cbe82f9f2..ce56d4dbe7a4d 100644
--- a/docs/settings/banners-settings.asciidoc
+++ b/docs/settings/banners-settings.asciidoc
@@ -9,6 +9,11 @@ Banners are disabled by default. You need to manually configure them in order to
You can configure the `xpack.banners` settings in your `kibana.yml` file.
+[NOTE]
+====
+Banners are a https://www.elastic.co/subscriptions[subscription feature].
+====
+
[[general-banners-settings-kb]]
==== General banner settings
@@ -16,7 +21,7 @@ You can configure the `xpack.banners` settings in your `kibana.yml` file.
|===
| `xpack.banners.placement`
-| Set to `header` to enable the header banner. Defaults to `disabled`.
+| Set to `top` to display a banner above the Elastic header. Defaults to `disabled`.
| `xpack.banners.textContent`
| The text to display inside the banner, either plain text or Markdown.
@@ -27,9 +32,7 @@ You can configure the `xpack.banners` settings in your `kibana.yml` file.
| `xpack.banners.backgroundColor`
| The color of the banner background. Defaults to `#FFF9E8`.
-|===
+| `xpack.banners.disableSpaceBanners`
+| If true, per-space banner overrides will be disabled. Defaults to `false`.
-[NOTE]
-====
-The `banners` plugin is a https://www.elastic.co/subscriptions[subscription feature]
-====
\ No newline at end of file
+|===
diff --git a/docs/settings/logging-settings.asciidoc b/docs/settings/logging-settings.asciidoc
new file mode 100644
index 0000000000000..aa38d54305eec
--- /dev/null
+++ b/docs/settings/logging-settings.asciidoc
@@ -0,0 +1,173 @@
+[[logging-settings]]
+=== Logging settings in {kib}
+++++
+Logging settings
+++++
+
+Compatibility with the legacy logging system is assured until the end of the `v7` version.
+All log messages handled by `root` context (default) are forwarded to the legacy logging service.
+The logging configuration is validated against the predefined schema and if there are
+any issues with it, {kib} will fail to start with the detailed error message.
+
+NOTE: When you switch to the new logging configuration, you will start seeing duplicate log entries in both formats.
+These will be removed when the `default` appender is no longer required.
+
+Here are some configuration examples for the most common logging use cases:
+
+[[log-to-file-example]]
+==== Log to a file
+
+Log the default log format to a file instead of to stdout (the default).
+
+[source,yaml]
+----
+logging:
+ appenders:
+ file:
+ type: file
+ fileName: /var/log/kibana.log
+ layout:
+ type: pattern
+ root:
+ appenders: [default, file]
+----
+
+[[log-in-json-ECS-example]]
+==== Log in json format
+
+Log the default log format to json layout instead of pattern (the default).
+With `json` layout log messages will be formatted as JSON strings in https://www.elastic.co/guide/en/ecs/current/ecs-reference.html[ECS format] that includes a timestamp, log level, logger, message text and any other metadata that may be associated with the log message itself.
+
+[source,yaml]
+----
+logging:
+ appenders:
+ json-layout:
+ type: console
+ layout:
+ type: json
+ root:
+ appenders: [default, json-layout]
+----
+
+[[log-with-meta-to-stdout]]
+==== Log with meta to stdout
+
+Include `%meta` in your pattern layout:
+
+[source,yaml]
+----
+logging:
+ appenders:
+ console-meta:
+ type: console
+ layout:
+ type: pattern
+ pattern: "[%date] [%level] [%logger] [%meta] %message"
+ root:
+ appenders: [default, console-meta]
+----
+
+[[log-elasticsearch-queries]]
+==== Log {es} queries
+
+[source,yaml]
+--
+logging:
+ appenders:
+ console_appender:
+ type: console
+ layout:
+ type: pattern
+ highlight: true
+ root:
+ appenders: [default, console_appender]
+ level: warn
+ loggers:
+ - name: elasticsearch.query
+ level: debug
+--
+
+[[change-overall-log-level]]
+==== Change overall log level
+
+[source,yaml]
+----
+logging:
+ root:
+ level: debug
+----
+
+[[customize-specific-log-records]]
+==== Customize specific log records
+Here is a detailed configuration example that can be used to configure _loggers_, _appenders_ and _layouts_:
+
+[source,yaml]
+----
+logging:
+ appenders:
+ console:
+ type: console
+ layout:
+ type: pattern
+ highlight: true
+ file:
+ type: file
+ fileName: /var/log/kibana.log
+ custom:
+ type: console
+ layout:
+ type: pattern
+ pattern: "[%date][%level] %message"
+ json-file-appender:
+ type: file
+ fileName: /var/log/kibana-json.log
+ layout:
+ type: json
+
+ root:
+ appenders: [default, console, file]
+ level: error
+
+ loggers:
+ - name: plugins
+ appenders: [custom]
+ level: warn
+ - name: plugins.myPlugin
+ level: info
+ - name: server
+ level: fatal
+ - name: optimize
+ appenders: [console]
+ - name: telemetry
+ appenders: [json-file-appender]
+ level: all
+ - name: metrics.ops
+ appenders: [console]
+ level: debug
+----
+
+Here is what we get with the config above:
+[options="header"]
+|===
+
+| Context name | Appenders | Level
+
+| root | console, file | error
+
+| plugins | custom | warn
+
+| plugins.myPlugin | custom | info
+
+| server | console, file | fatal
+
+| optimize | console | error
+
+| telemetry | json-file-appender | all
+
+| metrics.ops | console | debug
+|===
+
+NOTE: If you modify `root.appenders`, make sure to include `default`.
+
+// For more details about logging configuration, refer to the logging system documentation (update to include a link).
diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc
index cef5a953fded4..9bb11f3f99a15 100644
--- a/docs/settings/reporting-settings.asciidoc
+++ b/docs/settings/reporting-settings.asciidoc
@@ -260,19 +260,21 @@ For information about {kib} memory limits, see <> setting. Defaults to `.reporting`.
-
| `xpack.reporting.capture.networkPolicy`
| Capturing a screenshot from a {kib} page involves sending out requests for all the linked web assets. For example, a Markdown
visualization can show an image from a remote server. You can configure what type of requests to allow or filter by setting a
<> for Reporting.
+| `xpack.reporting.index`
+ | deprecated:[7.11.0,This setting will be removed in 8.0.] Multitenancy by
+ changing `kibana.index` will not be supported starting in 8.0. See
+ https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes] for more
+ details. Reporting uses a weekly index in {es} to store the reporting job and
+ the report content. The index is automatically created if it does not already
+ exist. Configure this to a unique value, beginning with `.reporting-`, for
+ every {kib} instance that has a unique <>
+ setting. Defaults to `.reporting`.
+
| `xpack.reporting.roles.allow`
| Specifies the roles in addition to superusers that can use reporting.
Defaults to `[ "reporting_user" ]`. +
diff --git a/docs/settings/search-sessions-settings.asciidoc b/docs/settings/search-sessions-settings.asciidoc
index c9a9e709ac7f8..cf64d08e4806c 100644
--- a/docs/settings/search-sessions-settings.asciidoc
+++ b/docs/settings/search-sessions-settings.asciidoc
@@ -11,15 +11,15 @@ Configure the search session settings in your `kibana.yml` configuration file.
[cols="2*<"]
|===
a| `xpack.data_enhanced.`
-`search.sessions:enabled`
- | Set to `true` (default) to enable search sessions.
+`search.sessions.enabled`
+| Set to `true` (default) to enable search sessions.
-a| `xpack.data.enhanced.`
-`search.sessions:trackingInterval`
- | The frequency for updating the state of a search session. The default is 10s.
+a| `xpack.data_enhanced.`
+`search.sessions.trackingInterval`
+| The frequency for updating the state of a search session. The default is 10s.
-a| `xpack.data.enhanced.`
-`search.sessions:defaultExpiration`
- | How long search session results are stored before they are deleted.
- Extending a search session resets the expiration by the same value. The default is 7d.
+a| `xpack.data_enhanced.`
+`search.sessions.defaultExpiration`
+| How long search session results are stored before they are deleted.
+Extending a search session resets the expiration by the same value. The default is 7d.
|===
diff --git a/docs/setup/docker.asciidoc b/docs/setup/docker.asciidoc
index 25883307e69f0..31e7b25eb66b1 100644
--- a/docs/setup/docker.asciidoc
+++ b/docs/setup/docker.asciidoc
@@ -39,11 +39,13 @@ docker pull {docker-repo}:{version}
=== Run Kibana on Docker for development
Kibana can be quickly started and connected to a local Elasticsearch container for development
or testing use with the following command:
---------------------------------------------
+
+[source,sh,subs="attributes"]
+----
docker run --link YOUR_ELASTICSEARCH_CONTAINER_NAME_OR_ID:elasticsearch -p 5601:5601 {docker-repo}:{version}
---------------------------------------------
-endif::[]
+----
+endif::[]
[float]
[[configuring-kibana-docker]]
=== Configure Kibana on Docker
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc
index 62e0f0847cbac..73b268e1e48b3 100644
--- a/docs/setup/settings.asciidoc
+++ b/docs/setup/settings.asciidoc
@@ -25,12 +25,14 @@ which may cause a delay before pages start being served.
Set to `false` to disable Console. *Default: `true`*
| `cpu.cgroup.path.override:`
- | *deprecated* This setting has been renamed to <>
-and the old name will no longer be supported as of 8.0.
+ | deprecated:[7.10.0,"This setting will no longer be supported as of 8.0."]
+ This setting has been renamed to
+ <>.
| `cpuacct.cgroup.path.override:`
- | *deprecated* This setting has been renamed to <>
-and the old name will no longer be supported as of 8.0.
+ | deprecated:[7.10.0,"This setting will no longer be supported as of 8.0."]
+ This setting has been renamed to
+ <>.
| `csp.rules:`
| A https://w3c.github.io/webappsec-csp/[content-security-policy] template
@@ -64,10 +66,35 @@ To enable SSL/TLS for outbound connections to {es}, use the `https` protocol
in this setting.
| `elasticsearch.logQueries:`
- | *deprecated* This setting is no longer used and will get removed in Kibana 8.0. Instead, set <> to `true`
-This is useful for seeing the query DSL generated by applications that
-currently do not have an inspector, for example Timelion and Monitoring.
-*Default: `false`*
+ | deprecated:[7.12.0,"This setting is no longer used and will be removed in Kibana 8.0."]
+ Instead, configure the `elasticsearch.query` logger.
+ +
+ This is useful for seeing the query DSL generated by applications that
+ currently do not have an inspector, for example Timelion and Monitoring.
+ *Default: `false`*
+
+The following example shows a valid `elasticsearch.query` logger configuration:
+|===
+
+[source,text]
+--
+logging:
+ appenders:
+ console_appender:
+ type: console
+ layout:
+ type: pattern
+ highlight: true
+ root:
+ appenders: [default, console_appender]
+ level: warn
+ loggers:
+ - name: elasticsearch.query
+ level: debug
+--
+
+[cols="2*<"]
+|===
|[[elasticsearch-pingTimeout]] `elasticsearch.pingTimeout:`
| Time in milliseconds to wait for {es} to respond to pings.
@@ -217,18 +244,22 @@ on the {kib} index at startup. {kib} users still need to authenticate with
| Enables use of interpreter in Visualize. *Default: `true`*
| `kibana.defaultAppId:`
- | *deprecated* This setting is deprecated and will get removed in Kibana 8.0.
-Please use the `defaultRoute` advanced setting instead.
-The default application to load. *Default: `"home"`*
+ | deprecated:[7.9.0,This setting will be removed in Kibana 8.0.]
+ Instead, use the <>.
+ +
+ The default application to load. *Default: `"home"`*
|[[kibana-index]] `kibana.index:`
- | *deprecated* This setting is deprecated and will be removed in 8.0. Multitenancy by changing
-`kibana.index` will not be supported starting in 8.0. See https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes]
-for more details. {kib} uses an index in {es} to store saved searches, visualizations, and
-dashboards. {kib} creates a new index if the index doesn’t already exist.
-If you configure a custom index, the name must be lowercase, and conform to the
-{es} {ref}/indices-create-index.html[index name limitations].
-*Default: `".kibana"`*
+ | deprecated:[7.11.0,This setting will be removed in 8.0.] Multitenancy by
+ changing `kibana.index` will not be supported starting in 8.0. See
+ https://ela.st/kbn-remove-legacy-multitenancy[8.0 Breaking Changes] for more
+ details.
+ +
+ {kib} uses an index in {es} to store saved searches, visualizations, and
+ dashboards. {kib} creates a new index if the index doesn’t already exist. If
+ you configure a custom index, the name must be lowercase, and conform to the
+ {es} {ref}/indices-create-index.html[index name limitations].
+ *Default: `".kibana"`*
| `kibana.autocompleteTimeout:` {ess-icon}
| Time in milliseconds to wait for autocomplete suggestions from {es}.
@@ -249,77 +280,44 @@ To reload the logging settings, send a SIGHUP signal to {kib}.
[cols="2*<"]
|===
-|[[logging-dest]] `logging.dest:`
- | Enables you to specify a file where {kib} stores log output.
-*Default: `stdout`*
+|[[logging-root]] `logging.root:`
+| The `root` logger has a dedicated configuration node since this context name is special and is pre-configured for logging by default.
+// TODO: add link to the advanced logging documentation.
-| `logging.json:`
- | Logs output as JSON. When set to `true`, the logs are formatted as JSON
-strings that include timestamp, log level, context, message text, and any other
-metadata that may be associated with the log message.
-When <> is set, and there is no interactive terminal ("TTY"),
-this setting defaults to `true`. *Default: `false`*
+|[[logging-root-appenders]] `logging.root.appenders:`
+| A list of logging appenders to forward the root level logger instance to. By default `root` is configured with the `default` appender that must be included in the list. This is the configuration that all custom loggers will use unless they're re-configured explicitly. Additional appenders, if configured, can be included in the list.
-| `logging.quiet:`
- | Set the value of this setting to `true` to suppress all logging output other
-than error messages. *Default: `false`*
+|[[logging-root-level]] `logging.root.level:` {ess-icon}
+| Level at which a log record should be logged. Supported levels are: _all_, _fatal_, _error_, _warn_, _info_, _debug_, _trace_, _off_. Levels are ordered from _all_ (highest) to _off_ and a log record will be logged if its level is higher than or equal to the level of its logger, otherwise the log record is ignored. Use this value to <>. Set to `all` to log all events, including system usage information and all requests. Set to `off` to silence all logs. *Default: `info`*.
-| `logging.rotate:`
- | experimental[] Specifies the options for the logging rotate feature.
-When not defined, all the sub options defaults would be applied.
-The following example shows a valid logging rotate configuration:
+|[[logging-loggers]] `logging.loggers:`
+ | Allows you to <>.
-|===
+| `logging.loggers.name:`
+| Specific logger instance.
-[source,text]
---
- logging.rotate:
- enabled: true
- everyBytes: 10485760
- keepFiles: 10
---
+| `logging.loggers.level:`
+| Level at which a log record should be shown. Supported levels are: _all_, _fatal_, _error_, _warn_, _info_, _debug_, _trace_, _off_.
-[cols="2*<"]
-|===
+| `logging.loggers.appenders:`
+| Specific appender format to apply for a particular logger context.
+
+| `logging.appenders:`
+| Define how and where log messages are displayed (e.g. *stdout* or console) and stored (e.g. file on the disk).
+// TODO: add link to the advanced logging documentation.
+
+| `logging.appenders.console:`
+| Appender to use for logging records to *stdout*. By default, uses the `[%date][%level][%logger] %message` **pattern** layout. To use a **json**, set the <>.
+
+| `logging.appenders.file:`
+| Allows you to specify a fileName to send log records to on disk. To send <>, add the file appender to `root.appenders`.
+
+| `logging.appenders.rolling-file:`
+| Similar to Log4j's `RollingFileAppender`, this appender will log into a file and rotate it following a rolling strategy when the configured policy triggers. There are currently two policies supported: `size-limit` and `time-interval`.
+
+The size limit policy will perform a rollover when the log file reaches a maximum `size`. *Default 100mb*
-| `logging.rotate.enabled:`
- | experimental[] Set the value of this setting to `true` to
-enable log rotation. If you do not have a <> set that is different from `stdout`
-that feature would not take any effect. *Default: `false`*
-
-| `logging.rotate.everyBytes:`
- | experimental[] The maximum size of a log file (that is `not an exact` limit). After the
-limit is reached, a new log file is generated. The default size limit is 10485760 (10 MB) and
-this option should be in the range of 1048576 (1 MB) to 1073741824 (1 GB). *Default: `10485760`*
-
-| `logging.rotate.keepFiles:`
- | experimental[] The number of most recent rotated log files to keep
-on disk. Older files are deleted during log rotation. The default value is 7. The `logging.rotate.keepFiles`
-option has to be in the range of 2 to 1024 files. *Default: `7`*
-
-| `logging.rotate.pollingInterval:`
- | experimental[] The number of milliseconds for the polling strategy in case
-the <> is enabled. `logging.rotate.usePolling` must be in the 5000 to 3600000 millisecond range. *Default: `10000`*
-
-|[[logging-rotate-usePolling]] `logging.rotate.usePolling:`
- | experimental[] By default we try to understand the best way to monitoring
-the log file and warning about it. Please be aware there are some systems where watch api is not accurate. In those cases, in order to get the feature working,
-the `polling` method could be used enabling that option. *Default: `false`*
-
-| `logging.silent:`
- | Set the value of this setting to `true` to
-suppress all logging output. *Default: `false`*
-
-| `logging.timezone`
- | Set to the canonical time zone ID
-(for example, `America/Los_Angeles`) to log events using that time zone.
-For possible values, refer to
-https://en.wikipedia.org/wiki/List_of_tz_database_time_zones[database time zones].
-When not set, log events use the host timezone
-
-| [[logging-verbose]] `logging.verbose:` {ess-icon}
- | Set to `true` to log all events, including system usage information and all
-requests. *Default: `false`*
+The time interval policy will rotate the log file every given interval of time. *Default 24h*
| [[regionmap-ES-map]] `map.includeElasticMapsService:` {ess-icon}
| Set to `false` to disable connections to Elastic Maps Service.
@@ -690,6 +688,7 @@ include::{kib-repo-dir}/settings/dev-settings.asciidoc[]
include::{kib-repo-dir}/settings/graph-settings.asciidoc[]
include::{kib-repo-dir}/settings/fleet-settings.asciidoc[]
include::{kib-repo-dir}/settings/i18n-settings.asciidoc[]
+include::{kib-repo-dir}/settings/logging-settings.asciidoc[]
include::{kib-repo-dir}/settings/logs-ui-settings.asciidoc[]
include::{kib-repo-dir}/settings/infrastructure-ui-settings.asciidoc[]
include::{kib-repo-dir}/settings/ml-settings.asciidoc[]
diff --git a/docs/user/dashboard/timelion.asciidoc b/docs/user/dashboard/timelion.asciidoc
index 676c46368a6ee..80ce77f30c75e 100644
--- a/docs/user/dashboard/timelion.asciidoc
+++ b/docs/user/dashboard/timelion.asciidoc
@@ -4,17 +4,7 @@
Instead of using a visual editor to create charts, you define a graph by chaining functions together, using the *Timelion*-specific syntax.
The syntax enables some features that classical point series charts don't offer, such as pulling data from different indices or data sources into one graph.
-[NOTE]
-====
-Timelion app deprecation
-
-*Timelion* is still supported, the *Timelion app* is deprecated in 7.0, replaced by
-dashboard features. In 8.0 and later, the *Timelion app* is removed from {kib}.
-To prepare for the removal of *Timelion app*, you must migrate *Timelion app* worksheets to a dashboard.
-
-For information on how to migrate *Timelion app* worksheets, refer to the
-link:https://www.elastic.co/guide/en/kibana/7.10/release-notes-7.10.0.html#deprecation-v7.10.0[7.10.0 Release Notes].
-====
+deprecated::[7.0.0,"*Timelion* is still supported. The *Timelion app* is deprecated in 7.0, replaced by dashboard features. In 8.0 and later, the *Timelion app* is removed from {kib}. To prepare for the removal of *Timelion app*, you must migrate *Timelion app* worksheets to a dashboard. For information on how to migrate *Timelion app* worksheets, refer to the link:https://www.elastic.co/guide/en/kibana/7.10/release-notes-7.10.0.html#deprecation-v7.10.0[7.10.0 Release Notes]."]
[float]
==== Timelion expressions
diff --git a/docs/user/reporting/reporting-troubleshooting.asciidoc b/docs/user/reporting/reporting-troubleshooting.asciidoc
index ebe095e0881b3..c43e9210dd7c8 100644
--- a/docs/user/reporting/reporting-troubleshooting.asciidoc
+++ b/docs/user/reporting/reporting-troubleshooting.asciidoc
@@ -126,10 +126,10 @@ all, the full logs from Reporting will be the first place to look. In `kibana.ym
[source,yaml]
--------------------------------------------------------------------------------
-logging.verbose: true
+logging.root.level: all
--------------------------------------------------------------------------------
-For more information about logging, see <>.
+For more information about logging, see <>.
[float]
[[reporting-troubleshooting-puppeteer-debug-logs]]
diff --git a/jest.config.js b/jest.config.js
index 03dc832ba170c..bd1e865a7e64a 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -12,7 +12,6 @@ module.exports = {
projects: [
'/packages/*/jest.config.js',
'/src/*/jest.config.js',
- '/src/legacy/*/jest.config.js',
'/src/plugins/*/jest.config.js',
'/test/*/jest.config.js',
'/x-pack/plugins/*/jest.config.js',
diff --git a/kibana.d.ts b/kibana.d.ts
index a2c670c96a699..8a7a531890057 100644
--- a/kibana.d.ts
+++ b/kibana.d.ts
@@ -13,18 +13,3 @@ import * as Public from 'src/core/public';
import * as Server from 'src/core/server';
export { Public, Server };
-
-/**
- * All exports from TS ambient definitions (where types are added for JS source in a .d.ts file).
- */
-import * as LegacyKibanaServer from './src/legacy/server/kbn_server';
-
-/**
- * Re-export legacy types under a namespace.
- */
-export namespace Legacy {
- export type KibanaConfig = LegacyKibanaServer.KibanaConfig;
- export type Request = LegacyKibanaServer.Request;
- export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit;
- export type Server = LegacyKibanaServer.Server;
-}
diff --git a/package.json b/package.json
index 2654c433ac5fa..f3846bd0f71a2 100644
--- a/package.json
+++ b/package.json
@@ -76,6 +76,7 @@
"**/cross-fetch/node-fetch": "^2.6.1",
"**/deepmerge": "^4.2.2",
"**/fast-deep-equal": "^3.1.1",
+ "globby/fast-glob": "3.2.5",
"**/graphql-toolkit/lodash": "^4.17.21",
"**/hoist-non-react-statics": "^3.3.2",
"**/isomorphic-fetch/node-fetch": "^2.6.1",
@@ -97,8 +98,8 @@
"dependencies": {
"@elastic/apm-rum": "^5.6.1",
"@elastic/apm-rum-react": "^1.2.5",
- "@elastic/charts": "26.0.0",
- "@elastic/datemath": "link:packages/elastic-datemath",
+ "@elastic/charts": "27.0.0",
+ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath/npm_module",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.4",
"@elastic/ems-client": "7.12.0",
"@elastic/eui": "31.10.0",
@@ -240,7 +241,7 @@
"github-markdown-css": "^2.10.0",
"glob": "^7.1.2",
"glob-all": "^3.2.1",
- "globby": "^8.0.1",
+ "globby": "^11.0.3",
"graphql": "^0.13.2",
"graphql-fields": "^1.0.2",
"graphql-tag": "^2.10.3",
@@ -409,10 +410,10 @@
"utility-types": "^3.10.0",
"uuid": "3.3.2",
"vega": "^5.19.1",
- "vega-lite": "^4.17.0",
+ "vega-lite": "^5.0.0",
"vega-schema-url-parser": "^2.1.0",
"vega-spec-injector": "^0.0.2",
- "vega-tooltip": "^0.25.0",
+ "vega-tooltip": "^0.25.1",
"venn.js": "0.2.20",
"vinyl": "^2.2.0",
"vt-pbf": "^3.1.1",
@@ -441,6 +442,7 @@
"@babel/traverse": "^7.12.12",
"@babel/types": "^7.12.12",
"@bazel/ibazel": "^0.14.0",
+ "@bazel/typescript": "^3.2.3",
"@cypress/snapshot": "^2.1.7",
"@cypress/webpack-preprocessor": "^5.5.0",
"@elastic/apm-rum": "^5.6.1",
@@ -533,7 +535,6 @@
"@types/getos": "^3.0.0",
"@types/git-url-parse": "^9.0.0",
"@types/glob": "^7.1.2",
- "@types/globby": "^8.0.0",
"@types/graphql": "^0.13.2",
"@types/gulp": "^4.0.6",
"@types/gulp-zip": "^4.0.1",
diff --git a/packages/BUILD.bazel b/packages/BUILD.bazel
index 1f1eba0747ab7..31894fcb1bb5d 100644
--- a/packages/BUILD.bazel
+++ b/packages/BUILD.bazel
@@ -2,5 +2,7 @@
# targets so we can build them all at once
filegroup(
name = "build",
- srcs = [],
+ srcs = [
+ "//packages/elastic-datemath:build"
+ ],
)
diff --git a/packages/elastic-datemath/.npmignore b/packages/elastic-datemath/.npmignore
index 591be7afd1669..cb8c40d17ea04 100644
--- a/packages/elastic-datemath/.npmignore
+++ b/packages/elastic-datemath/.npmignore
@@ -1,2 +1,3 @@
+/index.test.js
+/jest.config.js
/tsconfig.json
-/__tests__
diff --git a/packages/elastic-datemath/BUILD.bazel b/packages/elastic-datemath/BUILD.bazel
new file mode 100644
index 0000000000000..6a80556d4eed5
--- /dev/null
+++ b/packages/elastic-datemath/BUILD.bazel
@@ -0,0 +1,76 @@
+load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+
+PKG_BASE_NAME = "elastic-datemath"
+PKG_REQUIRE_NAME = "@elastic/datemath"
+
+SOURCE_FILES = [
+ "src/index.ts",
+]
+
+SRCS = SOURCE_FILES
+
+filegroup(
+ name = "srcs",
+ srcs = glob(SOURCE_FILES),
+)
+
+NPM_MODULE_EXTRA_FILES = [
+ "package.json",
+ "README.md",
+]
+
+SRC_DEPS = [
+ "@npm//moment",
+]
+
+TYPES_DEPS = [
+ "@npm//@types/node",
+]
+
+DEPS = SRC_DEPS + TYPES_DEPS
+
+ts_config(
+ name = "tsconfig",
+ src = "tsconfig.json",
+ deps = [
+ "//:tsconfig.base.json",
+ ],
+)
+
+ts_project(
+ name = "tsc",
+ srcs = SRCS,
+ deps = DEPS,
+ declaration = True,
+ declaration_map = True,
+ incremental = True,
+ out_dir = "target",
+ source_map = True,
+ root_dir = "src",
+ tsconfig = ":tsconfig",
+)
+
+js_library(
+ name = PKG_BASE_NAME,
+ srcs = [],
+ deps = [":tsc"] + DEPS,
+ package_name = PKG_REQUIRE_NAME,
+ visibility = ["//visibility:public"],
+)
+
+pkg_npm(
+ name = "npm_module",
+ srcs = NPM_MODULE_EXTRA_FILES,
+ deps = [
+ ":%s" % PKG_BASE_NAME,
+ ]
+)
+
+filegroup(
+ name = "build",
+ srcs = [
+ ":npm_module",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/elastic-datemath/readme b/packages/elastic-datemath/README.md
similarity index 100%
rename from packages/elastic-datemath/readme
rename to packages/elastic-datemath/README.md
diff --git a/packages/elastic-datemath/index.d.ts b/packages/elastic-datemath/index.d.ts
deleted file mode 100644
index 319c598e3e4ab..0000000000000
--- a/packages/elastic-datemath/index.d.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import moment from 'moment';
-export type Unit = 'ms' | 's' | 'm' | 'h' | 'd' | 'w' | 'M' | 'y';
-
-declare const datemath: {
- unitsMap: {
- [k in Unit]: {
- weight: number;
- type: 'calendar' | 'fixed' | 'mixed';
- base: number;
- };
- };
- units: Unit[];
- unitsAsc: Unit[];
- unitsDesc: Unit[];
-
- /**
- * Parses a string into a moment object. The string can be something like "now - 15m".
- * @param options.forceNow If this optional parameter is supplied, "now" will be treated as this
- * date, rather than the real "now".
- */
- parse(
- input: string,
- options?: {
- roundUp?: boolean;
- forceNow?: Date;
- momentInstance?: typeof moment;
- }
- ): moment.Moment | undefined;
-};
-
-// eslint-disable-next-line import/no-default-export
-export default datemath;
diff --git a/packages/elastic-datemath/package.json b/packages/elastic-datemath/package.json
index 0d8f936ae6358..67fbb74eb223c 100644
--- a/packages/elastic-datemath/package.json
+++ b/packages/elastic-datemath/package.json
@@ -3,6 +3,9 @@
"version": "5.0.3",
"description": "elasticsearch datemath parser, used in kibana",
"license": "Apache-2.0",
- "main": "index.js",
- "typings": "index.d.ts"
+ "main": "./target/index.js",
+ "types": "./target/index.d.ts",
+ "peerDependencies": {
+ "moment": "^2.24.0"
+ }
}
\ No newline at end of file
diff --git a/packages/elastic-datemath/index.test.js b/packages/elastic-datemath/src/index.test.js
similarity index 100%
rename from packages/elastic-datemath/index.test.js
rename to packages/elastic-datemath/src/index.test.js
diff --git a/packages/elastic-datemath/index.js b/packages/elastic-datemath/src/index.ts
similarity index 71%
rename from packages/elastic-datemath/index.js
rename to packages/elastic-datemath/src/index.ts
index 8a69d251d057d..a513af800b7c3 100644
--- a/packages/elastic-datemath/index.js
+++ b/packages/elastic-datemath/src/index.ts
@@ -17,9 +17,18 @@
* under the License.
*/
-const moment = require('moment');
+import moment from 'moment';
+
+export type Unit = 'ms' | 's' | 'm' | 'h' | 'd' | 'w' | 'M' | 'y';
+export type UnitsMap = {
+ [k in Unit]: {
+ weight: number;
+ type: 'calendar' | 'fixed' | 'mixed';
+ base: number;
+ };
+};
-const unitsMap = {
+export const unitsMap: UnitsMap = {
ms: { weight: 1, type: 'fixed', base: 1 },
s: { weight: 2, type: 'fixed', base: 1000 },
m: { weight: 3, type: 'mixed', base: 1000 * 60 },
@@ -30,13 +39,14 @@ const unitsMap = {
// q: { weight: 8, type: 'calendar' }, // TODO: moment duration does not support quarter
y: { weight: 9, type: 'calendar', base: NaN },
};
-const units = Object.keys(unitsMap).sort((a, b) => unitsMap[b].weight - unitsMap[a].weight);
-const unitsDesc = [...units];
-const unitsAsc = [...units].reverse();
-
-const isDate = (d) => Object.prototype.toString.call(d) === '[object Date]';
+export const units: Unit[] = Object.keys(unitsMap).sort(
+ (a, b) => unitsMap[b as Unit].weight - unitsMap[a as Unit].weight
+) as Unit[];
+export const unitsDesc: Unit[] = [...units] as Unit[];
+export const unitsAsc: Unit[] = [...units].reverse() as Unit[];
-const isValidDate = (d) => isDate(d) && !isNaN(d.valueOf());
+const isDate = (d: string) => Object.prototype.toString.call(d) === '[object Date]';
+const isValidDate = (d: string) => isDate(d) && !isNaN(d.valueOf() as any);
/*
* This is a simplified version of elasticsearch's date parser.
@@ -44,11 +54,17 @@ const isValidDate = (d) => isDate(d) && !isNaN(d.valueOf());
* will be done using this (and its locale settings) instead of the one bundled
* with this library.
*/
-function parse(text, { roundUp = false, momentInstance = moment, forceNow } = {}) {
+export function parse(
+ input: string,
+ options: { roundUp?: boolean; momentInstance?: typeof moment; forceNow?: Date } = {}
+) {
+ const text = input;
+ const { roundUp = false, momentInstance = moment, forceNow } = options;
+
if (!text) return undefined;
if (momentInstance.isMoment(text)) return text;
if (isDate(text)) return momentInstance(text);
- if (forceNow !== undefined && !isValidDate(forceNow)) {
+ if (forceNow !== undefined && !isValidDate(forceNow as any)) {
throw new Error('forceNow must be a valid Date');
}
@@ -80,7 +96,7 @@ function parse(text, { roundUp = false, momentInstance = moment, forceNow } = {}
return parseDateMath(mathString, time, roundUp);
}
-function parseDateMath(mathString, time, roundUp) {
+function parseDateMath(mathString: string, time: moment.Moment, roundUp: boolean) {
const dateTime = time;
const len = mathString.length;
let i = 0;
@@ -89,7 +105,7 @@ function parseDateMath(mathString, time, roundUp) {
const c = mathString.charAt(i++);
let type;
let num;
- let unit;
+ let unit: Unit;
if (c === '/') {
type = 0;
@@ -101,13 +117,13 @@ function parseDateMath(mathString, time, roundUp) {
return;
}
- if (isNaN(mathString.charAt(i))) {
+ if (isNaN(mathString.charAt(i) as any)) {
num = 1;
} else if (mathString.length === 2) {
num = mathString.charAt(i);
} else {
const numFrom = i;
- while (!isNaN(mathString.charAt(i))) {
+ while (!isNaN(mathString.charAt(i) as any)) {
i++;
if (i >= len) return;
}
@@ -121,7 +137,7 @@ function parseDateMath(mathString, time, roundUp) {
}
}
- unit = mathString.charAt(i++);
+ unit = mathString.charAt(i++) as Unit;
// append additional characters in the unit
for (let j = i; j < len; j++) {
@@ -138,12 +154,12 @@ function parseDateMath(mathString, time, roundUp) {
return;
} else {
if (type === 0) {
- if (roundUp) dateTime.endOf(unit);
- else dateTime.startOf(unit);
+ if (roundUp) dateTime.endOf(unit as any);
+ else dateTime.startOf(unit as any);
} else if (type === 1) {
- dateTime.add(num, unit);
+ dateTime.add(num as any, unit);
} else if (type === 2) {
- dateTime.subtract(num, unit);
+ dateTime.subtract(num as any, unit);
}
}
}
@@ -151,8 +167,9 @@ function parseDateMath(mathString, time, roundUp) {
return dateTime;
}
-module.exports = {
- parse: parse,
+// eslint-disable-next-line import/no-default-export
+export default {
+ parse,
unitsMap: Object.freeze(unitsMap),
units: Object.freeze(units),
unitsAsc: Object.freeze(unitsAsc),
diff --git a/packages/elastic-datemath/tsconfig.json b/packages/elastic-datemath/tsconfig.json
index cbfe1e8047433..d0fa806ed411b 100644
--- a/packages/elastic-datemath/tsconfig.json
+++ b/packages/elastic-datemath/tsconfig.json
@@ -1,9 +1,17 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "tsBuildInfoFile": "../../build/tsbuildinfo/packages/elastic-datemath"
+ "declaration": true,
+ "declarationMap": true,
+ "outDir": "target",
+ "rootDir": "src",
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/elastic-datemath/src",
+ "types": [
+ "node"
+ ]
},
"include": [
- "index.d.ts"
+ "src/index.ts"
]
}
diff --git a/packages/kbn-ace/package.json b/packages/kbn-ace/package.json
index f7ca76b35e7c2..30f37b4786f36 100644
--- a/packages/kbn-ace/package.json
+++ b/packages/kbn-ace/package.json
@@ -3,6 +3,7 @@
"version": "1.0.0",
"private": true,
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"license": "SSPL-1.0 OR Elastic License 2.0",
"scripts": {
"build": "node ./scripts/build.js",
diff --git a/packages/kbn-ace/tsconfig.json b/packages/kbn-ace/tsconfig.json
index 6d3f433c6a6d1..9eef1ec56c6a2 100644
--- a/packages/kbn-ace/tsconfig.json
+++ b/packages/kbn-ace/tsconfig.json
@@ -1,13 +1,15 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "./target",
"declaration": true,
+ "declarationMap": true,
"sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-ace/src",
"types": [
- "jest",
"node"
- ]
+ ],
},
"include": [
"src/**/*"
diff --git a/packages/kbn-analytics/tsconfig.json b/packages/kbn-analytics/tsconfig.json
index 861e0204a31a2..c2e579e7fdbea 100644
--- a/packages/kbn-analytics/tsconfig.json
+++ b/packages/kbn-analytics/tsconfig.json
@@ -1,20 +1,19 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
- "emitDeclarationOnly": true,
+ "incremental": false,
"outDir": "./target/types",
"stripInternal": true,
+ "emitDeclarationOnly": true,
+ "declaration": true,
"declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../../packages/kbn-analytics/src",
"types": [
- "jest",
"node"
]
},
"include": [
"src/**/*"
- ],
- "exclude": [
- "target"
]
}
diff --git a/packages/kbn-apm-config-loader/__fixtures__/config.yml b/packages/kbn-apm-config-loader/src/__fixtures__/config.yml
similarity index 100%
rename from packages/kbn-apm-config-loader/__fixtures__/config.yml
rename to packages/kbn-apm-config-loader/src/__fixtures__/config.yml
diff --git a/packages/kbn-apm-config-loader/__fixtures__/config_flat.yml b/packages/kbn-apm-config-loader/src/__fixtures__/config_flat.yml
similarity index 100%
rename from packages/kbn-apm-config-loader/__fixtures__/config_flat.yml
rename to packages/kbn-apm-config-loader/src/__fixtures__/config_flat.yml
diff --git a/packages/kbn-apm-config-loader/__fixtures__/en_var_ref_config.yml b/packages/kbn-apm-config-loader/src/__fixtures__/en_var_ref_config.yml
similarity index 100%
rename from packages/kbn-apm-config-loader/__fixtures__/en_var_ref_config.yml
rename to packages/kbn-apm-config-loader/src/__fixtures__/en_var_ref_config.yml
diff --git a/packages/kbn-apm-config-loader/__fixtures__/one.yml b/packages/kbn-apm-config-loader/src/__fixtures__/one.yml
similarity index 100%
rename from packages/kbn-apm-config-loader/__fixtures__/one.yml
rename to packages/kbn-apm-config-loader/src/__fixtures__/one.yml
diff --git a/packages/kbn-apm-config-loader/__fixtures__/two.yml b/packages/kbn-apm-config-loader/src/__fixtures__/two.yml
similarity index 100%
rename from packages/kbn-apm-config-loader/__fixtures__/two.yml
rename to packages/kbn-apm-config-loader/src/__fixtures__/two.yml
diff --git a/packages/kbn-apm-config-loader/src/utils/read_config.test.ts b/packages/kbn-apm-config-loader/src/utils/read_config.test.ts
index 16fbb5ce7aed8..2838738c0ab6c 100644
--- a/packages/kbn-apm-config-loader/src/utils/read_config.test.ts
+++ b/packages/kbn-apm-config-loader/src/utils/read_config.test.ts
@@ -9,7 +9,7 @@
import { relative, resolve } from 'path';
import { getConfigFromFiles } from './read_config';
-const fixtureFile = (name: string) => resolve(__dirname, '..', '..', '__fixtures__', name);
+const fixtureFile = (name: string) => resolve(__dirname, '..', '__fixtures__', name);
test('reads single yaml from file system and parses to json', () => {
const config = getConfigFromFiles([fixtureFile('config.yml')]);
diff --git a/packages/kbn-apm-config-loader/tsconfig.json b/packages/kbn-apm-config-loader/tsconfig.json
index ba00ddfa6adb6..250195785b931 100644
--- a/packages/kbn-apm-config-loader/tsconfig.json
+++ b/packages/kbn-apm-config-loader/tsconfig.json
@@ -1,12 +1,19 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"outDir": "./target",
"stripInternal": false,
+ "declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-apm-config-loader/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*.ts"],
- "exclude": ["target"]
+ "include": [
+ "src/**/*.ts"
+ ]
}
diff --git a/packages/kbn-apm-utils/tsconfig.json b/packages/kbn-apm-utils/tsconfig.json
index e1f79b5ef394d..e08769aab6543 100644
--- a/packages/kbn-apm-utils/tsconfig.json
+++ b/packages/kbn-apm-utils/tsconfig.json
@@ -1,18 +1,18 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"outDir": "./target",
"stripInternal": false,
+ "declaration": true,
"declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-apm-utils/src",
"types": [
"node"
]
},
"include": [
"./src/**/*.ts"
- ],
- "exclude": [
- "target"
]
}
diff --git a/packages/kbn-cli-dev-mode/src/base_path_proxy_server.ts b/packages/kbn-cli-dev-mode/src/base_path_proxy_server.ts
index 40841c8327cc2..6d12d5d05f07c 100644
--- a/packages/kbn-cli-dev-mode/src/base_path_proxy_server.ts
+++ b/packages/kbn-cli-dev-mode/src/base_path_proxy_server.ts
@@ -63,8 +63,6 @@ export class BasePathProxyServer {
}
public async start(options: BasePathProxyServerOptions) {
- this.log.write('starting basepath proxy server');
-
const serverOptions = getServerOptions(this.httpConfig);
const listenerOptions = getListenerOptions(this.httpConfig);
this.server = createServer(serverOptions, listenerOptions);
@@ -101,7 +99,6 @@ export class BasePathProxyServer {
return;
}
- this.log.write('stopping basepath proxy server');
await this.server.stop();
this.server = undefined;
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
index d5bafe7280bd9..7b45a2639c668 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
@@ -135,6 +135,7 @@ it('passes correct args to sub-classes', () => {
"repoRoot": ,
"runExamples": false,
"silent": false,
+ "verbose": false,
"watch": true,
},
],
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
index 94dbcb9654e8a..e867a7276989c 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
@@ -7,6 +7,8 @@
*/
import Path from 'path';
+import { EventEmitter } from 'events';
+
import * as Rx from 'rxjs';
import {
map,
@@ -17,6 +19,7 @@ import {
distinctUntilChanged,
switchMap,
concatMap,
+ takeUntil,
} from 'rxjs/operators';
import { CliArgs } from '@kbn/config';
import { REPO_ROOT, CiStatsReporter } from '@kbn/dev-utils';
@@ -30,6 +33,16 @@ import { shouldRedirectFromOldBasePath } from './should_redirect_from_old_base_p
import { getServerWatchPaths } from './get_server_watch_paths';
import { CliDevConfig } from './config';
+// signal that emits undefined once a termination signal has been sent
+const exitSignal$ = new Rx.ReplaySubject<void>(1);
+Rx.merge(
+ Rx.fromEvent(process as EventEmitter, 'exit'),
+ Rx.fromEvent(process as EventEmitter, 'SIGINT'),
+ Rx.fromEvent(process as EventEmitter, 'SIGTERM')
+)
+ .pipe(mapTo(undefined), take(1))
+ .subscribe(exitSignal$);
+
// timeout where the server is allowed to exit gracefully
const GRACEFUL_TIMEOUT = 5000;
@@ -37,6 +50,7 @@ export type SomeCliArgs = Pick<
CliArgs,
| 'quiet'
| 'silent'
+ | 'verbose'
| 'disableOptimizer'
| 'watch'
| 'oss'
@@ -148,6 +162,7 @@ export class CliDevMode {
dist: cliArgs.dist,
quiet: !!cliArgs.quiet,
silent: !!cliArgs.silent,
+ verbose: !!cliArgs.verbose,
watch: cliArgs.watch,
});
}
@@ -216,9 +231,36 @@ export class CliDevMode {
this.log.warn('no-base-path', '='.repeat(100));
}
- this.subscription.add(this.optimizer.run$.subscribe(this.observer('@kbn/optimizer')));
- this.subscription.add(this.watcher.run$.subscribe(this.observer('watcher')));
- this.subscription.add(this.devServer.run$.subscribe(this.observer('dev server')));
+ this.subscription.add(
+ this.optimizer.run$
+ .pipe(
+ // stop the optimizer as soon as we get an exit signal
+ takeUntil(exitSignal$)
+ )
+ .subscribe(this.observer('@kbn/optimizer'))
+ );
+
+ this.subscription.add(
+ this.watcher.run$
+ .pipe(
+ // stop the watcher as soon as we get an exit signal
+ takeUntil(exitSignal$)
+ )
+ .subscribe(this.observer('watcher'))
+ );
+
+ this.subscription.add(
+ this.devServer.run$
+ .pipe(
+ tap({
+ complete: () => {
+ // when the devServer gracefully exits because of an exit signal stop the cli dev mode to trigger full shutdown
+ this.stop();
+ },
+ })
+ )
+ .subscribe(this.observer('dev server'))
+ );
}
private reportTimings(reporter: CiStatsReporter) {
diff --git a/packages/kbn-cli-dev-mode/src/config/load_config.ts b/packages/kbn-cli-dev-mode/src/config/load_config.ts
index 46129834ca2d9..073cd3dbd4b4c 100644
--- a/packages/kbn-cli-dev-mode/src/config/load_config.ts
+++ b/packages/kbn-cli-dev-mode/src/config/load_config.ts
@@ -28,13 +28,13 @@ export const loadConfig = async ({
const configService = new ConfigService(rawConfigService, env, logger);
configService.setSchema('dev', devConfigSchema);
configService.setSchema('plugins', pluginsConfigSchema);
- configService.setSchema('http', httpConfigSchema);
+ configService.setSchema('server', httpConfigSchema);
await configService.validate();
const devConfig = configService.atPathSync('dev');
const pluginsConfig = configService.atPathSync('plugins');
- const httpConfig = configService.atPathSync('http');
+ const httpConfig = configService.atPathSync('server');
return {
dev: new DevConfig(devConfig),
diff --git a/packages/kbn-cli-dev-mode/src/dev_server.ts b/packages/kbn-cli-dev-mode/src/dev_server.ts
index 3daf298c82324..60a279e456e3d 100644
--- a/packages/kbn-cli-dev-mode/src/dev_server.ts
+++ b/packages/kbn-cli-dev-mode/src/dev_server.ts
@@ -249,5 +249,11 @@ export class DevServer {
)
.subscribe(subscriber)
);
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.phase$.complete();
+ this.ready$.complete();
+ });
});
}
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
index ab113b96a5f03..ff25f2a7bf55e 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
@@ -27,8 +27,6 @@ it('produces the right watch and ignore list', () => {
expect(watchPaths).toMatchInlineSnapshot(`
Array [
/src/core,
- /src/legacy/server,
- /src/legacy/utils,
/config,
/x-pack/test/plugin_functional/plugins/resolver_test,
/src/plugins,
diff --git a/packages/kbn-cli-dev-mode/src/optimizer.test.ts b/packages/kbn-cli-dev-mode/src/optimizer.test.ts
index c270a00329897..ee8ea5f38ae84 100644
--- a/packages/kbn-cli-dev-mode/src/optimizer.test.ts
+++ b/packages/kbn-cli-dev-mode/src/optimizer.test.ts
@@ -46,6 +46,7 @@ const defaultOptions: Options = {
pluginScanDirs: ['/some-scan-path'],
quiet: true,
silent: true,
+ verbose: false,
repoRoot: '/app',
runExamples: true,
watch: true,
@@ -179,6 +180,7 @@ it('is ready when optimizer phase is success or issue and logs in familiar forma
"ready: false",
"",
"ready: true",
+ "complete",
]
`);
diff --git a/packages/kbn-cli-dev-mode/src/optimizer.ts b/packages/kbn-cli-dev-mode/src/optimizer.ts
index 5e2f16fcf7daa..fab566829f7a6 100644
--- a/packages/kbn-cli-dev-mode/src/optimizer.ts
+++ b/packages/kbn-cli-dev-mode/src/optimizer.ts
@@ -25,6 +25,7 @@ export interface Options {
repoRoot: string;
quiet: boolean;
silent: boolean;
+ verbose: boolean;
watch: boolean;
cache: boolean;
dist: boolean;
@@ -80,6 +81,7 @@ export class Optimizer {
const { flags: levelFlags } = parseLogLevel(
pickLevelFromFlags({
+ verbose: options.verbose,
quiet: options.quiet,
silent: options.silent,
})
@@ -105,14 +107,26 @@ export class Optimizer {
},
]);
- this.run$ = runOptimizer(config).pipe(
- logOptimizerState(log, config),
- tap(({ state }) => {
- this.phase$.next(state.phase);
- this.ready$.next(state.phase === 'success' || state.phase === 'issue');
- }),
- ignoreElements()
- );
+ this.run$ = new Rx.Observable((subscriber) => {
+ subscriber.add(
+ runOptimizer(config)
+ .pipe(
+ logOptimizerState(log, config),
+ tap(({ state }) => {
+ this.phase$.next(state.phase);
+ this.ready$.next(state.phase === 'success' || state.phase === 'issue');
+ }),
+ ignoreElements()
+ )
+ .subscribe(subscriber)
+ );
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.phase$.complete();
+ this.ready$.complete();
+ });
+ });
}
getPhase$() {
diff --git a/packages/kbn-cli-dev-mode/src/watcher.ts b/packages/kbn-cli-dev-mode/src/watcher.ts
index 8e8d2db1b20bb..17993326cfcf3 100644
--- a/packages/kbn-cli-dev-mode/src/watcher.ts
+++ b/packages/kbn-cli-dev-mode/src/watcher.ts
@@ -103,6 +103,11 @@ export class Watcher {
.pipe(ignoreElements())
.subscribe(subscriber)
);
+
+ // complete state subjects when run$ completes
+ subscriber.add(() => {
+ this.restart$.complete();
+ });
});
serverShouldRestart$() {
diff --git a/packages/kbn-cli-dev-mode/tsconfig.json b/packages/kbn-cli-dev-mode/tsconfig.json
index b2bdaf8ceea36..4436d27dbff88 100644
--- a/packages/kbn-cli-dev-mode/tsconfig.json
+++ b/packages/kbn-cli-dev-mode/tsconfig.json
@@ -1,11 +1,18 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"outDir": "./target",
+ "declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-cli-dev-mode/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*.ts"],
- "exclude": ["target"]
+ "include": [
+ "./src/**/*.ts"
+ ],
}
diff --git a/packages/kbn-config-schema/tsconfig.json b/packages/kbn-config-schema/tsconfig.json
index 6a268f2e7c016..d33683acded16 100644
--- a/packages/kbn-config-schema/tsconfig.json
+++ b/packages/kbn-config-schema/tsconfig.json
@@ -1,21 +1,21 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
- "declarationDir": "./target/types",
+ "incremental": false,
"outDir": "./target/out",
+ "declarationDir": "./target/types",
"stripInternal": true,
+ "declaration": true,
"declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../../packages/kbn-config-schema/src",
"types": [
"jest",
"node"
]
},
"include": [
- "./types/joi.d.ts",
- "./src/**/*.ts"
- ],
- "exclude": [
- "target"
+ "types/joi.d.ts",
+ "src/**/*.ts"
]
}
diff --git a/packages/kbn-config/__fixtures__/config.yml b/packages/kbn-config/src/__fixtures__/config.yml
similarity index 100%
rename from packages/kbn-config/__fixtures__/config.yml
rename to packages/kbn-config/src/__fixtures__/config.yml
diff --git a/packages/kbn-config/__fixtures__/config_flat.yml b/packages/kbn-config/src/__fixtures__/config_flat.yml
similarity index 100%
rename from packages/kbn-config/__fixtures__/config_flat.yml
rename to packages/kbn-config/src/__fixtures__/config_flat.yml
diff --git a/packages/kbn-config/__fixtures__/en_var_ref_config.yml b/packages/kbn-config/src/__fixtures__/en_var_ref_config.yml
similarity index 100%
rename from packages/kbn-config/__fixtures__/en_var_ref_config.yml
rename to packages/kbn-config/src/__fixtures__/en_var_ref_config.yml
diff --git a/packages/kbn-config/__fixtures__/one.yml b/packages/kbn-config/src/__fixtures__/one.yml
similarity index 100%
rename from packages/kbn-config/__fixtures__/one.yml
rename to packages/kbn-config/src/__fixtures__/one.yml
diff --git a/packages/kbn-config/__fixtures__/two.yml b/packages/kbn-config/src/__fixtures__/two.yml
similarity index 100%
rename from packages/kbn-config/__fixtures__/two.yml
rename to packages/kbn-config/src/__fixtures__/two.yml
diff --git a/packages/kbn-config/src/config_service.mock.ts b/packages/kbn-config/src/config_service.mock.ts
index 638627caf1e50..83fbf20b5c0b3 100644
--- a/packages/kbn-config/src/config_service.mock.ts
+++ b/packages/kbn-config/src/config_service.mock.ts
@@ -25,13 +25,16 @@ const createConfigServiceMock = ({
setSchema: jest.fn(),
addDeprecationProvider: jest.fn(),
validate: jest.fn(),
+ getHandledDeprecatedConfigs: jest.fn(),
};
+
mocked.atPath.mockReturnValue(new BehaviorSubject(atPath));
mocked.atPathSync.mockReturnValue(atPath);
mocked.getConfig$.mockReturnValue(new BehaviorSubject(new ObjectToConfigAdapter(getConfig$)));
mocked.getUsedPaths.mockResolvedValue([]);
mocked.getUnusedPaths.mockResolvedValue([]);
mocked.isEnabledAtPath.mockResolvedValue(true);
+ mocked.getHandledDeprecatedConfigs.mockReturnValue([]);
return mocked;
};
diff --git a/packages/kbn-config/src/config_service.test.mocks.ts b/packages/kbn-config/src/config_service.test.mocks.ts
index 99539726c3e43..d8da2852b9251 100644
--- a/packages/kbn-config/src/config_service.test.mocks.ts
+++ b/packages/kbn-config/src/config_service.test.mocks.ts
@@ -7,9 +7,15 @@
*/
export const mockPackage = new Proxy({ raw: {} as any }, { get: (obj, prop) => obj.raw[prop] });
+import type { applyDeprecations } from './deprecation/apply_deprecations';
+
jest.mock('../../../package.json', () => mockPackage);
-export const mockApplyDeprecations = jest.fn((config, deprecations, log) => config);
+export const mockApplyDeprecations = jest.fn<
+  Record<string, any>,
+  Parameters<typeof applyDeprecations>
+>((config, deprecations, createAddDeprecation) => config);
+
jest.mock('./deprecation/apply_deprecations', () => ({
applyDeprecations: mockApplyDeprecations,
}));
diff --git a/packages/kbn-config/src/config_service.test.ts b/packages/kbn-config/src/config_service.test.ts
index e38fff866df89..64404341bc64d 100644
--- a/packages/kbn-config/src/config_service.test.ts
+++ b/packages/kbn-config/src/config_service.test.ts
@@ -72,10 +72,10 @@ test('throws if config at path does not match schema', async () => {
);
await expect(valuesReceived).toMatchInlineSnapshot(`
- Array [
- [Error: [config validation of [key]]: expected value of type [string] but got [number]],
- ]
- `);
+ Array [
+ [Error: [config validation of [key]]: expected value of type [string] but got [number]],
+ ]
+ `);
});
test('re-validate config when updated', async () => {
@@ -97,11 +97,11 @@ test('re-validate config when updated', async () => {
rawConfig$.next({ key: 123 });
- await expect(valuesReceived).toMatchInlineSnapshot(`
- Array [
- "value",
- [Error: [config validation of [key]]: expected value of type [string] but got [number]],
- ]
+ expect(valuesReceived).toMatchInlineSnapshot(`
+ Array [
+ "value",
+ [Error: [config validation of [key]]: expected value of type [string] but got [number]],
+ ]
`);
});
@@ -416,10 +416,10 @@ test('throws during validation is any schema is invalid', async () => {
test('logs deprecation warning during validation', async () => {
const rawConfig = getRawConfigProvider({});
const configService = new ConfigService(rawConfig, defaultEnv, logger);
-
- mockApplyDeprecations.mockImplementationOnce((config, deprecations, log) => {
- log('some deprecation message');
- log('another deprecation message');
+ mockApplyDeprecations.mockImplementationOnce((config, deprecations, createAddDeprecation) => {
+ const addDeprecation = createAddDeprecation!('');
+ addDeprecation({ message: 'some deprecation message' });
+ addDeprecation({ message: 'another deprecation message' });
return config;
});
@@ -437,6 +437,37 @@ test('logs deprecation warning during validation', async () => {
`);
});
+test('does not log warnings for silent deprecations during validation', async () => {
+ const rawConfig = getRawConfigProvider({});
+ const configService = new ConfigService(rawConfig, defaultEnv, logger);
+
+ mockApplyDeprecations
+ .mockImplementationOnce((config, deprecations, createAddDeprecation) => {
+ const addDeprecation = createAddDeprecation!('');
+ addDeprecation({ message: 'some deprecation message', silent: true });
+ addDeprecation({ message: 'another deprecation message' });
+ return config;
+ })
+ .mockImplementationOnce((config, deprecations, createAddDeprecation) => {
+ const addDeprecation = createAddDeprecation!('');
+ addDeprecation({ message: 'I am silent', silent: true });
+ return config;
+ });
+
+ loggerMock.clear(logger);
+ await configService.validate();
+ expect(loggerMock.collect(logger).warn).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "another deprecation message",
+ ],
+ ]
+ `);
+ loggerMock.clear(logger);
+ await configService.validate();
+ expect(loggerMock.collect(logger).warn).toMatchInlineSnapshot(`Array []`);
+});
+
describe('atPathSync', () => {
test('returns the value at path', async () => {
const rawConfig = getRawConfigProvider({ key: 'foo' });
@@ -477,3 +508,36 @@ describe('atPathSync', () => {
expect(configService.atPathSync('key')).toEqual('new-value');
});
});
+
+describe('getHandledDeprecatedConfigs', () => {
+ it('returns all handled deprecated configs', async () => {
+ const rawConfig = getRawConfigProvider({ base: { unused: 'unusedConfig' } });
+ const configService = new ConfigService(rawConfig, defaultEnv, logger);
+
+ configService.addDeprecationProvider('base', ({ unused }) => [unused('unused')]);
+
+ mockApplyDeprecations.mockImplementationOnce((config, deprecations, createAddDeprecation) => {
+ deprecations.forEach((deprecation) => {
+ const addDeprecation = createAddDeprecation!(deprecation.path);
+ addDeprecation({ message: `some deprecation message`, documentationUrl: 'some-url' });
+ });
+ return config;
+ });
+
+ await configService.validate();
+
+ expect(configService.getHandledDeprecatedConfigs()).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "base",
+ Array [
+ Object {
+ "documentationUrl": "some-url",
+ "message": "some deprecation message",
+ },
+ ],
+ ],
+ ]
+ `);
+ });
+});
diff --git a/packages/kbn-config/src/config_service.ts b/packages/kbn-config/src/config_service.ts
index d71327350d212..91927b4c7b5c9 100644
--- a/packages/kbn-config/src/config_service.ts
+++ b/packages/kbn-config/src/config_service.ts
@@ -21,6 +21,7 @@ import {
ConfigDeprecationWithContext,
ConfigDeprecationProvider,
configDeprecationFactory,
+ DeprecatedConfigDetails,
} from './deprecation';
import { LegacyObjectToConfigAdapter } from './legacy';
@@ -43,6 +44,7 @@ export class ConfigService {
private readonly handledPaths: Set<string> = new Set();
private readonly schemas = new Map<string, Type<unknown>>();
private readonly deprecations = new BehaviorSubject<ConfigDeprecationWithContext[]>([]);
+ private readonly handledDeprecatedConfigs = new Map<string, DeprecatedConfigDetails[]>();
constructor(
private readonly rawConfigProvider: RawConfigurationProvider,
@@ -91,6 +93,13 @@ export class ConfigService {
]);
}
+ /**
+ * returns all handled deprecated configs
+ */
+ public getHandledDeprecatedConfigs() {
+ return [...this.handledDeprecatedConfigs.entries()];
+ }
+
/**
* Validate the whole configuration and log the deprecation warnings.
*
@@ -186,8 +195,16 @@ export class ConfigService {
const rawConfig = await this.rawConfigProvider.getConfig$().pipe(take(1)).toPromise();
const deprecations = await this.deprecations.pipe(take(1)).toPromise();
const deprecationMessages: string[] = [];
- const logger = (msg: string) => deprecationMessages.push(msg);
- applyDeprecations(rawConfig, deprecations, logger);
+ const createAddDeprecation = (domainId: string) => (context: DeprecatedConfigDetails) => {
+ if (!context.silent) {
+ deprecationMessages.push(context.message);
+ }
+ const handledDeprecatedConfig = this.handledDeprecatedConfigs.get(domainId) || [];
+ handledDeprecatedConfig.push(context);
+ this.handledDeprecatedConfigs.set(domainId, handledDeprecatedConfig);
+ };
+
+ applyDeprecations(rawConfig, deprecations, createAddDeprecation);
deprecationMessages.forEach((msg) => {
this.deprecationLog.warn(msg);
});
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
index 9e058faf68052..f2c0a43916343 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
@@ -32,8 +32,31 @@ describe('applyDeprecations', () => {
expect(handlerC).toHaveBeenCalledTimes(1);
});
+ it('passes path to addDeprecation factory', () => {
+ const addDeprecation = jest.fn();
+ const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
+ const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
+ const alteredConfig = { foo: 'bar' };
+
+ const handlerA = jest.fn().mockReturnValue(alteredConfig);
+ const handlerB = jest.fn().mockImplementation((conf) => conf);
+
+ applyDeprecations(
+ initialConfig,
+ [wrapHandler(handlerA, 'pathA'), wrapHandler(handlerB, 'pathB')],
+ createAddDeprecation
+ );
+
+ expect(handlerA).toHaveBeenCalledWith(initialConfig, 'pathA', addDeprecation);
+ expect(handlerB).toHaveBeenCalledWith(alteredConfig, 'pathB', addDeprecation);
+ expect(createAddDeprecation).toBeCalledTimes(2);
+ expect(createAddDeprecation).toHaveBeenNthCalledWith(1, 'pathA');
+ expect(createAddDeprecation).toHaveBeenNthCalledWith(2, 'pathB');
+ });
+
it('calls handlers with correct arguments', () => {
- const logger = () => undefined;
+ const addDeprecation = jest.fn();
+ const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
const alteredConfig = { foo: 'bar' };
@@ -43,11 +66,11 @@ describe('applyDeprecations', () => {
applyDeprecations(
initialConfig,
[wrapHandler(handlerA, 'pathA'), wrapHandler(handlerB, 'pathB')],
- logger
+ createAddDeprecation
);
- expect(handlerA).toHaveBeenCalledWith(initialConfig, 'pathA', logger);
- expect(handlerB).toHaveBeenCalledWith(alteredConfig, 'pathB', logger);
+ expect(handlerA).toHaveBeenCalledWith(initialConfig, 'pathA', addDeprecation);
+ expect(handlerB).toHaveBeenCalledWith(alteredConfig, 'pathB', addDeprecation);
});
it('returns the migrated config', () => {
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.ts b/packages/kbn-config/src/deprecation/apply_deprecations.ts
index 0813440adb57c..6aced541dc30d 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.ts
@@ -7,23 +7,24 @@
*/
import { cloneDeep } from 'lodash';
-import { ConfigDeprecationWithContext, ConfigDeprecationLogger } from './types';
-
-const noopLogger = (msg: string) => undefined;
+import { ConfigDeprecationWithContext, AddConfigDeprecation } from './types';
+const noopAddDeprecationFactory: () => AddConfigDeprecation = () => () => undefined;
/**
- * Applies deprecations on given configuration and logs any deprecation warning using provided logger.
+ * Applies deprecations on given configuration and passes addDeprecation hook.
+ * This hook is used for logging any deprecation warning using provided logger.
+ * This hook is used for exposing deprecated configs that must be handled by the user before upgrading to next major.
*
* @internal
*/
export const applyDeprecations = (
config: Record,
deprecations: ConfigDeprecationWithContext[],
- logger: ConfigDeprecationLogger = noopLogger
+ createAddDeprecation: (pluginId: string) => AddConfigDeprecation = noopAddDeprecationFactory
) => {
let processed = cloneDeep(config);
deprecations.forEach(({ deprecation, path }) => {
- processed = deprecation(processed, path, logger);
+ processed = deprecation(processed, path, createAddDeprecation(path));
});
return processed;
};
diff --git a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
index ba8a0cbf7ca57..11a49ed79d170 100644
--- a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
+++ b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts
@@ -6,17 +6,16 @@
* Side Public License, v 1.
*/
-import { ConfigDeprecationLogger } from './types';
+import { DeprecatedConfigDetails } from './types';
import { configDeprecationFactory } from './deprecation_factory';
describe('DeprecationFactory', () => {
const { rename, unused, renameFromRoot, unusedFromRoot } = configDeprecationFactory;
- let deprecationMessages: string[];
- const logger: ConfigDeprecationLogger = (msg) => deprecationMessages.push(msg);
+ const addDeprecation = jest.fn();
beforeEach(() => {
- deprecationMessages = [];
+ addDeprecation.mockClear();
});
describe('rename', () => {
@@ -30,7 +29,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', logger);
+ const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
renamed: 'toberenamed',
@@ -40,9 +39,18 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Replace \\"myplugin.deprecated\\" with \\"myplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).",
+ ],
+ },
+ "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"",
+ },
+ ],
]
`);
});
@@ -56,7 +64,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = rename('deprecated', 'new')(rawConfig, 'myplugin', logger);
+ const processed = rename('deprecated', 'new')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
new: 'new',
@@ -66,7 +74,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages.length).toEqual(0);
+ expect(addDeprecation).toHaveBeenCalledTimes(0);
});
it('handles nested keys', () => {
const rawConfig = {
@@ -83,7 +91,7 @@ describe('DeprecationFactory', () => {
const processed = rename('oldsection.deprecated', 'newsection.renamed')(
rawConfig,
'myplugin',
- logger
+ addDeprecation
);
expect(processed).toEqual({
myplugin: {
@@ -97,9 +105,18 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"myplugin.oldsection.deprecated\\" is deprecated and has been replaced by \\"myplugin.newsection.renamed\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Replace \\"myplugin.oldsection.deprecated\\" with \\"myplugin.newsection.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).",
+ ],
+ },
+ "message": "\\"myplugin.oldsection.deprecated\\" is deprecated and has been replaced by \\"myplugin.newsection.renamed\\"",
+ },
+ ],
]
`);
});
@@ -110,15 +127,25 @@ describe('DeprecationFactory', () => {
renamed: 'renamed',
},
};
- const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', logger);
+ const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
renamed: 'renamed',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Make sure \\"myplugin.renamed\\" contains the correct value in the config file, CLI flag, or environment variable (in Docker only).",
+ "Remove \\"myplugin.deprecated\\" from the config.",
+ ],
+ },
+ "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"",
+ },
+ ],
]
`);
});
@@ -138,7 +165,7 @@ describe('DeprecationFactory', () => {
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
- logger
+ addDeprecation
);
expect(processed).toEqual({
myplugin: {
@@ -149,9 +176,18 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Replace \\"myplugin.deprecated\\" with \\"myplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).",
+ ],
+ },
+ "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"",
+ },
+ ],
]
`);
});
@@ -169,7 +205,7 @@ describe('DeprecationFactory', () => {
const processed = renameFromRoot('oldplugin.deprecated', 'newplugin.renamed')(
rawConfig,
'does-not-matter',
- logger
+ addDeprecation
);
expect(processed).toEqual({
oldplugin: {
@@ -180,9 +216,18 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"oldplugin.deprecated\\" is deprecated and has been replaced by \\"newplugin.renamed\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Replace \\"oldplugin.deprecated\\" with \\"newplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).",
+ ],
+ },
+ "message": "\\"oldplugin.deprecated\\" is deprecated and has been replaced by \\"newplugin.renamed\\"",
+ },
+ ],
]
`);
});
@@ -200,7 +245,7 @@ describe('DeprecationFactory', () => {
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.new')(
rawConfig,
'does-not-matter',
- logger
+ addDeprecation
);
expect(processed).toEqual({
myplugin: {
@@ -211,7 +256,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages.length).toEqual(0);
+ expect(addDeprecation).toBeCalledTimes(0);
});
it('remove the old property but does not overrides the new one if they both exist, and logs a specific message', () => {
@@ -224,16 +269,27 @@ describe('DeprecationFactory', () => {
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
- logger
+ addDeprecation
);
expect(processed).toEqual({
myplugin: {
renamed: 'renamed',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Make sure \\"myplugin.renamed\\" contains the correct value in the config file, CLI flag, or environment variable (in Docker only).",
+ "Remove \\"myplugin.deprecated\\" from the config.",
+ ],
+ },
+ "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"",
+ },
+ ],
]
`);
});
@@ -250,7 +306,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = unused('deprecated')(rawConfig, 'myplugin', logger);
+ const processed = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
@@ -259,9 +315,18 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "myplugin.deprecated is deprecated and is no longer used",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)",
+ ],
+ },
+ "message": "myplugin.deprecated is deprecated and is no longer used",
+ },
+ ],
]
`);
});
@@ -278,7 +343,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = unused('section.deprecated')(rawConfig, 'myplugin', logger);
+ const processed = unused('section.deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
@@ -288,9 +353,19 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "myplugin.section.deprecated is deprecated and is no longer used",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Remove \\"myplugin.section.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)",
+ ],
+ },
+ "message": "myplugin.section.deprecated is deprecated and is no longer used",
+ },
+ ],
]
`);
});
@@ -304,7 +379,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = unused('deprecated')(rawConfig, 'myplugin', logger);
+ const processed = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
@@ -313,7 +388,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages.length).toEqual(0);
+ expect(addDeprecation).toBeCalledTimes(0);
});
});
@@ -328,7 +403,11 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = unusedFromRoot('myplugin.deprecated')(rawConfig, 'does-not-matter', logger);
+ const processed = unusedFromRoot('myplugin.deprecated')(
+ rawConfig,
+ 'does-not-matter',
+ addDeprecation
+ );
expect(processed).toEqual({
myplugin: {
valid: 'valid',
@@ -337,9 +416,19 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages).toMatchInlineSnapshot(`
+
+ expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
- "myplugin.deprecated is deprecated and is no longer used",
+ Array [
+ Object {
+ "correctiveActions": Object {
+ "manualSteps": Array [
+ "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)",
+ ],
+ },
+ "message": "myplugin.deprecated is deprecated and is no longer used",
+ },
+ ],
]
`);
});
@@ -353,7 +442,11 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
- const processed = unusedFromRoot('myplugin.deprecated')(rawConfig, 'does-not-matter', logger);
+ const processed = unusedFromRoot('myplugin.deprecated')(
+ rawConfig,
+ 'does-not-matter',
+ addDeprecation
+ );
expect(processed).toEqual({
myplugin: {
valid: 'valid',
@@ -362,7 +455,7 @@ describe('DeprecationFactory', () => {
property: 'value',
},
});
- expect(deprecationMessages.length).toEqual(0);
+ expect(addDeprecation).toBeCalledTimes(0);
});
});
});
diff --git a/packages/kbn-config/src/deprecation/deprecation_factory.ts b/packages/kbn-config/src/deprecation/deprecation_factory.ts
index 73196dc897a51..140846d86ae0b 100644
--- a/packages/kbn-config/src/deprecation/deprecation_factory.ts
+++ b/packages/kbn-config/src/deprecation/deprecation_factory.ts
@@ -9,15 +9,20 @@
import { get } from 'lodash';
import { set } from '@elastic/safer-lodash-set';
import { unset } from '@kbn/std';
-import { ConfigDeprecation, ConfigDeprecationLogger, ConfigDeprecationFactory } from './types';
+import {
+ ConfigDeprecation,
+ AddConfigDeprecation,
+ ConfigDeprecationFactory,
+ DeprecatedConfigDetails,
+} from './types';
const _rename = (
config: Record,
rootPath: string,
- log: ConfigDeprecationLogger,
+ addDeprecation: AddConfigDeprecation,
oldKey: string,
newKey: string,
- silent?: boolean
+ details?: Partial
) => {
const fullOldPath = getPath(rootPath, oldKey);
const oldValue = get(config, fullOldPath);
@@ -32,48 +37,80 @@ const _rename = (
if (newValue === undefined) {
set(config, fullNewPath, oldValue);
- if (!silent) {
- log(`"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}"`);
- }
+ addDeprecation({
+ message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}"`,
+ correctiveActions: {
+ manualSteps: [
+ `Replace "${fullOldPath}" with "${fullNewPath}" in the Kibana config file, CLI flag, or environment variable (in Docker only).`,
+ ],
+ },
+ ...details,
+ });
} else {
- if (!silent) {
- log(
- `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}". However both key are present, ignoring "${fullOldPath}"`
- );
- }
+ addDeprecation({
+ message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}". However both key are present, ignoring "${fullOldPath}"`,
+ correctiveActions: {
+ manualSteps: [
+ `Make sure "${fullNewPath}" contains the correct value in the config file, CLI flag, or environment variable (in Docker only).`,
+ `Remove "${fullOldPath}" from the config.`,
+ ],
+ },
+ ...details,
+ });
}
+
return config;
};
const _unused = (
config: Record,
rootPath: string,
- log: ConfigDeprecationLogger,
- unusedKey: string
+ addDeprecation: AddConfigDeprecation,
+ unusedKey: string,
+ details?: Partial
) => {
const fullPath = getPath(rootPath, unusedKey);
if (get(config, fullPath) === undefined) {
return config;
}
unset(config, fullPath);
- log(`${fullPath} is deprecated and is no longer used`);
+ addDeprecation({
+ message: `${fullPath} is deprecated and is no longer used`,
+ correctiveActions: {
+ manualSteps: [
+ `Remove "${fullPath}" from the Kibana config file, CLI flag, or environment variable (in Docker only)`,
+ ],
+ },
+ ...details,
+ });
return config;
};
-const rename = (oldKey: string, newKey: string): ConfigDeprecation => (config, rootPath, log) =>
- _rename(config, rootPath, log, oldKey, newKey);
+const rename = (
+ oldKey: string,
+ newKey: string,
+ details?: Partial
+): ConfigDeprecation => (config, rootPath, addDeprecation) =>
+ _rename(config, rootPath, addDeprecation, oldKey, newKey, details);
-const renameFromRoot = (oldKey: string, newKey: string, silent?: boolean): ConfigDeprecation => (
- config,
- rootPath,
- log
-) => _rename(config, '', log, oldKey, newKey, silent);
+const renameFromRoot = (
+ oldKey: string,
+ newKey: string,
+ details?: Partial
+): ConfigDeprecation => (config, rootPath, addDeprecation) =>
+ _rename(config, '', addDeprecation, oldKey, newKey, details);
-const unused = (unusedKey: string): ConfigDeprecation => (config, rootPath, log) =>
- _unused(config, rootPath, log, unusedKey);
+const unused = (
+ unusedKey: string,
+ details?: Partial
+): ConfigDeprecation => (config, rootPath, addDeprecation) =>
+ _unused(config, rootPath, addDeprecation, unusedKey, details);
-const unusedFromRoot = (unusedKey: string): ConfigDeprecation => (config, rootPath, log) =>
- _unused(config, '', log, unusedKey);
+const unusedFromRoot = (
+ unusedKey: string,
+ details?: Partial
+): ConfigDeprecation => (config, rootPath, addDeprecation) =>
+ _unused(config, '', addDeprecation, unusedKey, details);
const getPath = (rootPath: string, subPath: string) =>
rootPath !== '' ? `${rootPath}.${subPath}` : subPath;
diff --git a/packages/kbn-config/src/deprecation/index.ts b/packages/kbn-config/src/deprecation/index.ts
index 6fe1a53efecbc..3286acca9e584 100644
--- a/packages/kbn-config/src/deprecation/index.ts
+++ b/packages/kbn-config/src/deprecation/index.ts
@@ -6,12 +6,13 @@
* Side Public License, v 1.
*/
-export {
+export type {
ConfigDeprecation,
ConfigDeprecationWithContext,
- ConfigDeprecationLogger,
ConfigDeprecationFactory,
+ AddConfigDeprecation,
ConfigDeprecationProvider,
+ DeprecatedConfigDetails,
} from './types';
export { configDeprecationFactory } from './deprecation_factory';
export { applyDeprecations } from './apply_deprecations';
diff --git a/packages/kbn-config/src/deprecation/types.ts b/packages/kbn-config/src/deprecation/types.ts
index 6e1816867abcf..3b1d004d7ec76 100644
--- a/packages/kbn-config/src/deprecation/types.ts
+++ b/packages/kbn-config/src/deprecation/types.ts
@@ -7,11 +7,33 @@
*/
/**
- * Logger interface used when invoking a {@link ConfigDeprecation}
+ * Config deprecation hook used when invoking a {@link ConfigDeprecation}
*
* @public
*/
-export type ConfigDeprecationLogger = (message: string) => void;
+export type AddConfigDeprecation = (details: DeprecatedConfigDetails) => void;
+
+/**
+ * Deprecated Config Details
+ *
+ * @public
+ */
+export interface DeprecatedConfigDetails {
+ /* The message to be displayed for the deprecation. */
+ message: string;
+ /* (optional) set false to prevent the config service from logging the deprecation message. */
+ silent?: boolean;
+ /* (optional) link to the documentation for more details on the deprecation. */
+ documentationUrl?: string;
+ /* (optional) corrective action needed to fix this deprecation. */
+ correctiveActions?: {
+ /**
+ * Specify a list of manual steps our users need to follow
+ * to fix the deprecation before upgrade.
+ */
+ manualSteps: string[];
+ };
+}
/**
* Configuration deprecation returned from {@link ConfigDeprecationProvider} that handles a single deprecation from the configuration.
@@ -25,7 +47,7 @@ export type ConfigDeprecationLogger = (message: string) => void;
export type ConfigDeprecation = (
config: Record,
fromPath: string,
- logger: ConfigDeprecationLogger
+ addDeprecation: AddConfigDeprecation
) => Record;
/**
@@ -62,6 +84,7 @@ export type ConfigDeprecationProvider = (factory: ConfigDeprecationFactory) => C
*
* @public
*/
+
export interface ConfigDeprecationFactory {
/**
* Rename a configuration property from inside a plugin's configuration path.
@@ -75,7 +98,11 @@ export interface ConfigDeprecationFactory {
* ]
* ```
*/
- rename(oldKey: string, newKey: string): ConfigDeprecation;
+ rename(
+ oldKey: string,
+ newKey: string,
+ details?: Partial
+ ): ConfigDeprecation;
/**
* Rename a configuration property from the root configuration.
* Will log a deprecation warning if the oldKey was found and deprecation applied.
@@ -91,7 +118,11 @@ export interface ConfigDeprecationFactory {
* ]
* ```
*/
- renameFromRoot(oldKey: string, newKey: string, silent?: boolean): ConfigDeprecation;
+ renameFromRoot(
+ oldKey: string,
+ newKey: string,
+ details?: Partial
+ ): ConfigDeprecation;
/**
* Remove a configuration property from inside a plugin's configuration path.
* Will log a deprecation warning if the unused key was found and deprecation applied.
@@ -104,7 +135,7 @@ export interface ConfigDeprecationFactory {
* ]
* ```
*/
- unused(unusedKey: string): ConfigDeprecation;
+ unused(unusedKey: string, details?: Partial): ConfigDeprecation;
/**
* Remove a configuration property from the root configuration.
* Will log a deprecation warning if the unused key was found and deprecation applied.
@@ -120,7 +151,7 @@ export interface ConfigDeprecationFactory {
* ]
* ```
*/
- unusedFromRoot(unusedKey: string): ConfigDeprecation;
+ unusedFromRoot(unusedKey: string, details?: Partial): ConfigDeprecation;
}
/** @internal */
diff --git a/packages/kbn-config/src/env.ts b/packages/kbn-config/src/env.ts
index c4845ab429c57..053bb93ce158c 100644
--- a/packages/kbn-config/src/env.ts
+++ b/packages/kbn-config/src/env.ts
@@ -24,6 +24,7 @@ export interface CliArgs {
/** @deprecated */
quiet?: boolean;
silent?: boolean;
+ verbose?: boolean;
watch: boolean;
basePath: boolean;
oss: boolean;
diff --git a/packages/kbn-config/src/index.ts b/packages/kbn-config/src/index.ts
index 8b0bdb0befbfd..a9ea8265a3768 100644
--- a/packages/kbn-config/src/index.ts
+++ b/packages/kbn-config/src/index.ts
@@ -6,16 +6,16 @@
* Side Public License, v 1.
*/
-export {
- applyDeprecations,
- ConfigDeprecation,
+export type {
ConfigDeprecationFactory,
- configDeprecationFactory,
- ConfigDeprecationLogger,
+ AddConfigDeprecation,
ConfigDeprecationProvider,
ConfigDeprecationWithContext,
+ ConfigDeprecation,
} from './deprecation';
+export { applyDeprecations, configDeprecationFactory } from './deprecation';
+
export {
RawConfigurationProvider,
RawConfigService,
diff --git a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap b/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
index 2801e0a0688cc..17ac75e9f3d9e 100644
--- a/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
+++ b/packages/kbn-config/src/legacy/__snapshots__/legacy_object_to_config_adapter.test.ts.snap
@@ -1,69 +1,5 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`#get correctly handles server config.: default 1`] = `
-Object {
- "autoListen": true,
- "basePath": "/abc",
- "compression": Object {
- "enabled": true,
- },
- "cors": false,
- "customResponseHeaders": Object {
- "custom-header": "custom-value",
- },
- "host": "host",
- "keepaliveTimeout": 5000,
- "maxPayload": 1000,
- "name": "kibana-hostname",
- "port": 1234,
- "publicBaseUrl": "https://myhost.com/abc",
- "rewriteBasePath": false,
- "socketTimeout": 2000,
- "ssl": Object {
- "enabled": true,
- "keyPassphrase": "some-phrase",
- "someNewValue": "new",
- },
- "uuid": undefined,
- "xsrf": Object {
- "allowlist": Array [],
- "disableProtection": false,
- },
-}
-`;
-
-exports[`#get correctly handles server config.: disabled ssl 1`] = `
-Object {
- "autoListen": true,
- "basePath": "/abc",
- "compression": Object {
- "enabled": true,
- },
- "cors": false,
- "customResponseHeaders": Object {
- "custom-header": "custom-value",
- },
- "host": "host",
- "keepaliveTimeout": 5000,
- "maxPayload": 1000,
- "name": "kibana-hostname",
- "port": 1234,
- "publicBaseUrl": "http://myhost.com/abc",
- "rewriteBasePath": false,
- "socketTimeout": 2000,
- "ssl": Object {
- "certificate": "cert",
- "enabled": false,
- "key": "key",
- },
- "uuid": undefined,
- "xsrf": Object {
- "allowlist": Array [],
- "disableProtection": false,
- },
-}
-`;
-
exports[`#get correctly handles silent logging config. 1`] = `
Object {
"appenders": Object {
@@ -78,6 +14,7 @@ Object {
"root": Object {
"level": "off",
},
+ "silent": true,
}
`;
@@ -93,10 +30,13 @@ Object {
"type": "legacy-appender",
},
},
+ "dest": "/some/path.log",
+ "json": true,
"loggers": undefined,
"root": Object {
"level": "all",
},
+ "verbose": true,
}
`;
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
index 5dd1941545708..47151503e1634 100644
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
+++ b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.test.ts
@@ -65,59 +65,6 @@ describe('#get', () => {
expect(configAdapter.get('logging')).toMatchSnapshot();
});
-
- test('correctly handles server config.', () => {
- const configAdapter = new LegacyObjectToConfigAdapter({
- server: {
- name: 'kibana-hostname',
- autoListen: true,
- basePath: '/abc',
- cors: false,
- customResponseHeaders: { 'custom-header': 'custom-value' },
- host: 'host',
- maxPayloadBytes: 1000,
- keepaliveTimeout: 5000,
- socketTimeout: 2000,
- port: 1234,
- publicBaseUrl: 'https://myhost.com/abc',
- rewriteBasePath: false,
- ssl: { enabled: true, keyPassphrase: 'some-phrase', someNewValue: 'new' },
- compression: { enabled: true },
- someNotSupportedValue: 'val',
- xsrf: {
- disableProtection: false,
- allowlist: [],
- },
- },
- });
-
- const configAdapterWithDisabledSSL = new LegacyObjectToConfigAdapter({
- server: {
- name: 'kibana-hostname',
- autoListen: true,
- basePath: '/abc',
- cors: false,
- customResponseHeaders: { 'custom-header': 'custom-value' },
- host: 'host',
- maxPayloadBytes: 1000,
- keepaliveTimeout: 5000,
- socketTimeout: 2000,
- port: 1234,
- publicBaseUrl: 'http://myhost.com/abc',
- rewriteBasePath: false,
- ssl: { enabled: false, certificate: 'cert', key: 'key' },
- compression: { enabled: true },
- someNotSupportedValue: 'val',
- xsrf: {
- disableProtection: false,
- allowlist: [],
- },
- },
- });
-
- expect(configAdapter.get('server')).toMatchSnapshot('default');
- expect(configAdapterWithDisabledSSL.get('server')).toMatchSnapshot('disabled ssl');
- });
});
describe('#set', () => {
diff --git a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
index 8ec26ff1f8e71..bc6fd49e2498a 100644
--- a/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
+++ b/packages/kbn-config/src/legacy/legacy_object_to_config_adapter.ts
@@ -9,15 +9,6 @@
import { ConfigPath } from '../config';
import { ObjectToConfigAdapter } from '../object_to_config_adapter';
-// TODO: fix once core schemas are moved to this package
-type LoggingConfigType = any;
-
-/**
- * @internal
- * @deprecated
- */
-export type LegacyVars = Record;
-
/**
* Represents logging config supported by the legacy platform.
*/
@@ -30,7 +21,7 @@ export interface LegacyLoggingConfig {
events?: Record;
}
-type MixedLoggingConfig = LegacyLoggingConfig & Partial;
+type MixedLoggingConfig = LegacyLoggingConfig & Record;
/**
* Represents adapter between config provided by legacy platform and `Config`
@@ -48,6 +39,7 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter {
},
root: { level: 'info', ...root },
loggers,
+ ...legacyLoggingConfig,
};
if (configValue.silent) {
@@ -61,47 +53,11 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter {
return loggingConfig;
}
- private static transformServer(configValue: any = {}) {
- // TODO: New platform uses just a subset of `server` config from the legacy platform,
- // new values will be exposed once we need them
- return {
- autoListen: configValue.autoListen,
- basePath: configValue.basePath,
- cors: configValue.cors,
- customResponseHeaders: configValue.customResponseHeaders,
- host: configValue.host,
- maxPayload: configValue.maxPayloadBytes,
- name: configValue.name,
- port: configValue.port,
- publicBaseUrl: configValue.publicBaseUrl,
- rewriteBasePath: configValue.rewriteBasePath,
- ssl: configValue.ssl,
- keepaliveTimeout: configValue.keepaliveTimeout,
- socketTimeout: configValue.socketTimeout,
- compression: configValue.compression,
- uuid: configValue.uuid,
- xsrf: configValue.xsrf,
- };
- }
-
- private static transformPlugins(configValue: LegacyVars = {}) {
- // These properties are the only ones we use from the existing `plugins` config node
- // since `scanDirs` isn't respected by new platform plugin discovery.
- return {
- initialize: configValue.initialize,
- paths: configValue.paths,
- };
- }
-
public get(configPath: ConfigPath) {
const configValue = super.get(configPath);
switch (configPath) {
case 'logging':
return LegacyObjectToConfigAdapter.transformLogging(configValue as LegacyLoggingConfig);
- case 'server':
- return LegacyObjectToConfigAdapter.transformServer(configValue);
- case 'plugins':
- return LegacyObjectToConfigAdapter.transformPlugins(configValue as LegacyVars);
default:
return configValue;
}
diff --git a/packages/kbn-config/src/raw/read_config.test.ts b/packages/kbn-config/src/raw/read_config.test.ts
index 3b56c69098d2c..d428fa6b0a2a1 100644
--- a/packages/kbn-config/src/raw/read_config.test.ts
+++ b/packages/kbn-config/src/raw/read_config.test.ts
@@ -9,7 +9,7 @@
import { relative, resolve } from 'path';
import { getConfigFromFiles } from './read_config';
-const fixtureFile = (name: string) => resolve(`${__dirname}/../../__fixtures__/${name}`);
+const fixtureFile = (name: string) => resolve(`${__dirname}/../__fixtures__/${name}`);
test('reads single yaml from file system and parses to json', () => {
const config = getConfigFromFiles([fixtureFile('config.yml')]);
diff --git a/packages/kbn-config/tsconfig.json b/packages/kbn-config/tsconfig.json
index ba00ddfa6adb6..4e1bf573f488a 100644
--- a/packages/kbn-config/tsconfig.json
+++ b/packages/kbn-config/tsconfig.json
@@ -1,12 +1,22 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"outDir": "./target",
"stripInternal": false,
+ "declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-config/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*.ts"],
- "exclude": ["target"]
+ "include": [
+ "src/**/*.ts"
+ ],
+ "exclude": [
+ "**/__fixtures__/**/*"
+ ]
}
diff --git a/packages/kbn-crypto/package.json b/packages/kbn-crypto/package.json
index 6c7b3f3b0c719..7e26b96218319 100644
--- a/packages/kbn-crypto/package.json
+++ b/packages/kbn-crypto/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"scripts": {
"build": "../../node_modules/.bin/tsc",
"kbn:bootstrap": "yarn build",
diff --git a/packages/kbn-crypto/tsconfig.json b/packages/kbn-crypto/tsconfig.json
index e9dd6313e6f79..5005152cac754 100644
--- a/packages/kbn-crypto/tsconfig.json
+++ b/packages/kbn-crypto/tsconfig.json
@@ -1,9 +1,12 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "outDir": "target",
+ "incremental": false,
+ "outDir": "./target",
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-crypto/src"
},
"include": [
"src/**/*"
diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json
index 7a2c3ce45a57d..e1990fca4e0bb 100644
--- a/packages/kbn-dev-utils/package.json
+++ b/packages/kbn-dev-utils/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"scripts": {
"build": "../../node_modules/.bin/tsc",
"kbn:bootstrap": "yarn build",
diff --git a/packages/kbn-dev-utils/tsconfig.json b/packages/kbn-dev-utils/tsconfig.json
index 1c6c671d0b768..65536c576b679 100644
--- a/packages/kbn-dev-utils/tsconfig.json
+++ b/packages/kbn-dev-utils/tsconfig.json
@@ -1,10 +1,18 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
+ "stripInternal": false,
"target": "ES2019",
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-dev-utils/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
"include": [
"src/**/*"
diff --git a/packages/kbn-docs-utils/package.json b/packages/kbn-docs-utils/package.json
index 089732e9e6b40..26a7fa0e8c957 100644
--- a/packages/kbn-docs-utils/package.json
+++ b/packages/kbn-docs-utils/package.json
@@ -4,6 +4,7 @@
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": "true",
"main": "target/index.js",
+ "types": "target/index.d.ts",
"kibana": {
"devOnly": true
},
diff --git a/packages/kbn-docs-utils/tsconfig.json b/packages/kbn-docs-utils/tsconfig.json
index 3c683f487b9f2..6f4a6fa2af8a5 100644
--- a/packages/kbn-docs-utils/tsconfig.json
+++ b/packages/kbn-docs-utils/tsconfig.json
@@ -1,10 +1,17 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "./target",
+ "target": "ES2019",
"declaration": true,
+ "declarationMap": true,
"sourceMap": true,
- "target": "ES2019"
+ "sourceRoot": "../../../../packages/kbn-docs-utils/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
"include": [
"src/**/*"
diff --git a/packages/kbn-es-archiver/package.json b/packages/kbn-es-archiver/package.json
index 03ecee34be7e2..047d1dd675d26 100644
--- a/packages/kbn-es-archiver/package.json
+++ b/packages/kbn-es-archiver/package.json
@@ -4,6 +4,7 @@
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": "true",
"main": "target/index.js",
+ "types": "target/index.d.ts",
"kibana": {
"devOnly": true
},
diff --git a/packages/kbn-es-archiver/tsconfig.json b/packages/kbn-es-archiver/tsconfig.json
index 02209a29e5817..0950cd39d0bee 100644
--- a/packages/kbn-es-archiver/tsconfig.json
+++ b/packages/kbn-es-archiver/tsconfig.json
@@ -1,10 +1,17 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "./target",
+ "target": "ES2019",
"declaration": true,
+ "declarationMap": true,
"sourceMap": true,
- "target": "ES2019"
+ "sourceRoot": "../../../../packages/kbn-es-archiver/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
"include": [
"src/**/*"
diff --git a/packages/kbn-i18n/tsconfig.json b/packages/kbn-i18n/tsconfig.json
index c6380f1cde969..9d4cb8c9b0972 100644
--- a/packages/kbn-i18n/tsconfig.json
+++ b/packages/kbn-i18n/tsconfig.json
@@ -1,5 +1,18 @@
{
"extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "incremental": false,
+ "outDir": "./target/types",
+ "emitDeclarationOnly": true,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../../packages/kbn-i18n/src",
+ "types": [
+ "jest",
+ "node"
+ ]
+ },
"include": [
"src/**/*.ts",
"src/**/*.tsx",
@@ -7,15 +20,6 @@
"types/intl_relativeformat.d.ts"
],
"exclude": [
- "target"
- ],
- "compilerOptions": {
- "declaration": true,
- "emitDeclarationOnly": true,
- "outDir": "./target/types",
- "types": [
- "jest",
- "node"
- ]
- }
+ "**/__fixtures__/**/*"
+ ]
}
diff --git a/packages/kbn-interpreter/common/package.json b/packages/kbn-interpreter/common/package.json
index b569e42220f04..62061138234d9 100644
--- a/packages/kbn-interpreter/common/package.json
+++ b/packages/kbn-interpreter/common/package.json
@@ -1,5 +1,6 @@
{
"private": true,
"main": "../target/common/index.js",
+ "types": "../target/common/index.d.ts",
"jsnext:main": "../src/common/index.js"
}
\ No newline at end of file
diff --git a/packages/kbn-legacy-logging/package.json b/packages/kbn-legacy-logging/package.json
index 1e3752eca6755..96edeccad6658 100644
--- a/packages/kbn-legacy-logging/package.json
+++ b/packages/kbn-legacy-logging/package.json
@@ -4,12 +4,14 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"scripts": {
"build": "tsc",
"kbn:bootstrap": "yarn build",
"kbn:watch": "yarn build --watch"
},
"dependencies": {
- "@kbn/utils": "link:../kbn-utils"
+ "@kbn/utils": "link:../kbn-utils",
+ "@kbn/config-schema": "link:../kbn-config-schema"
}
}
diff --git a/packages/kbn-legacy-logging/src/legacy_logging_server.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.ts
index e1edd06a4b4a2..3ece0f6f1ee47 100644
--- a/packages/kbn-legacy-logging/src/legacy_logging_server.ts
+++ b/packages/kbn-legacy-logging/src/legacy_logging_server.ts
@@ -88,7 +88,7 @@ export class LegacyLoggingServer {
// We set `ops.interval` to max allowed number and `ops` filter to value
// that doesn't exist to avoid logging of ops at all, if turned on it will be
// logged by the "legacy" Kibana.
- const { value: loggingConfig } = legacyLoggingConfigSchema.validate({
+ const loggingConfig = legacyLoggingConfigSchema.validate({
...legacyLoggingConfig,
events: {
...legacyLoggingConfig.events,
diff --git a/packages/kbn-legacy-logging/src/schema.ts b/packages/kbn-legacy-logging/src/schema.ts
index 76d7381ee8728..0330708e746c0 100644
--- a/packages/kbn-legacy-logging/src/schema.ts
+++ b/packages/kbn-legacy-logging/src/schema.ts
@@ -6,11 +6,8 @@
* Side Public License, v 1.
*/
-import Joi from 'joi';
+import { schema } from '@kbn/config-schema';
-const HANDLED_IN_KIBANA_PLATFORM = Joi.any().description(
- 'This key is handled in the new platform ONLY'
-);
/**
* @deprecated
*
@@ -36,46 +33,65 @@ export interface LegacyLoggingConfig {
};
}
-export const legacyLoggingConfigSchema = Joi.object()
- .keys({
- appenders: HANDLED_IN_KIBANA_PLATFORM,
- loggers: HANDLED_IN_KIBANA_PLATFORM,
- root: HANDLED_IN_KIBANA_PLATFORM,
-
- silent: Joi.boolean().default(false),
- quiet: Joi.boolean().when('silent', {
- is: true,
- then: Joi.boolean().default(true).valid(true),
- otherwise: Joi.boolean().default(false),
+export const legacyLoggingConfigSchema = schema.object({
+ silent: schema.boolean({ defaultValue: false }),
+ quiet: schema.conditional(
+ schema.siblingRef('silent'),
+ true,
+ schema.boolean({
+ defaultValue: true,
+ validate: (quiet) => {
+ if (!quiet) {
+ return 'must be true when `silent` is true';
+ }
+ },
+ }),
+ schema.boolean({ defaultValue: false })
+ ),
+ verbose: schema.conditional(
+ schema.siblingRef('quiet'),
+ true,
+ schema.boolean({
+ defaultValue: false,
+ validate: (verbose) => {
+ if (verbose) {
+ return 'must be false when `quiet` is true';
+ }
+ },
+ }),
+ schema.boolean({ defaultValue: false })
+ ),
+ events: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
+ dest: schema.string({ defaultValue: 'stdout' }),
+ filter: schema.recordOf(schema.string(), schema.any(), { defaultValue: {} }),
+ json: schema.conditional(
+ schema.siblingRef('dest'),
+ 'stdout',
+ schema.boolean({
+ defaultValue: !process.stdout.isTTY,
+ }),
+ schema.boolean({
+ defaultValue: true,
+ })
+ ),
+ timezone: schema.maybe(schema.string()),
+ rotate: schema.object({
+ enabled: schema.boolean({ defaultValue: false }),
+ everyBytes: schema.number({
+      min: 1048576, // >= 1MB (inclusive; the old Joi rule used exclusive greater())
+      max: 1073741825, // <= 1073741825 bytes, ~1GB (inclusive; old Joi used exclusive less())
+ defaultValue: 10485760, // 10MB
}),
- verbose: Joi.boolean().when('quiet', {
- is: true,
- then: Joi.valid(false).default(false),
- otherwise: Joi.boolean().default(false),
+ keepFiles: schema.number({
+ min: 2,
+ max: 1024,
+ defaultValue: 7,
}),
- events: Joi.any().default({}),
- dest: Joi.string().default('stdout'),
- filter: Joi.any().default({}),
- json: Joi.boolean().when('dest', {
- is: 'stdout',
- then: Joi.boolean().default(!process.stdout.isTTY),
- otherwise: Joi.boolean().default(true),
+ pollingInterval: schema.number({
+ min: 5000,
+ max: 3600000,
+ defaultValue: 10000,
}),
- timezone: Joi.string(),
- rotate: Joi.object()
- .keys({
- enabled: Joi.boolean().default(false),
- everyBytes: Joi.number()
- // > 1MB
- .greater(1048576)
- // < 1GB
- .less(1073741825)
- // 10MB
- .default(10485760),
- keepFiles: Joi.number().greater(2).less(1024).default(7),
- pollingInterval: Joi.number().greater(5000).less(3600000).default(10000),
- usePolling: Joi.boolean().default(false),
- })
- .default(),
- })
- .default();
+ usePolling: schema.boolean({ defaultValue: false }),
+ }),
+});
diff --git a/packages/kbn-legacy-logging/tsconfig.json b/packages/kbn-legacy-logging/tsconfig.json
index 8fd202a2dce8b..5f8d38ec90bcd 100644
--- a/packages/kbn-legacy-logging/tsconfig.json
+++ b/packages/kbn-legacy-logging/tsconfig.json
@@ -1,11 +1,19 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
"stripInternal": false,
"declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-legacy-logging/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*"]
+ "include": [
+ "src/**/*"
+ ]
}
diff --git a/packages/kbn-logging/package.json b/packages/kbn-logging/package.json
index 8d3ffa09b083e..c7db148c75a2a 100644
--- a/packages/kbn-logging/package.json
+++ b/packages/kbn-logging/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"scripts": {
"build": "../../node_modules/.bin/tsc",
"kbn:bootstrap": "yarn build",
diff --git a/packages/kbn-logging/tsconfig.json b/packages/kbn-logging/tsconfig.json
index c55c05de30a52..adec4c1966036 100644
--- a/packages/kbn-logging/tsconfig.json
+++ b/packages/kbn-logging/tsconfig.json
@@ -1,11 +1,19 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
"stripInternal": false,
"declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-logging/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*.ts"]
+ "include": [
+ "src/**/*.ts"
+ ]
}
diff --git a/packages/kbn-monaco/package.json b/packages/kbn-monaco/package.json
index e99661f8db598..bdf36915bab3a 100644
--- a/packages/kbn-monaco/package.json
+++ b/packages/kbn-monaco/package.json
@@ -3,6 +3,7 @@
"version": "1.0.0",
"private": true,
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"license": "SSPL-1.0 OR Elastic License 2.0",
"scripts": {
"build": "node ./scripts/build.js",
diff --git a/packages/kbn-monaco/tsconfig.json b/packages/kbn-monaco/tsconfig.json
index 6d3f433c6a6d1..e6ec96b12c6cf 100644
--- a/packages/kbn-monaco/tsconfig.json
+++ b/packages/kbn-monaco/tsconfig.json
@@ -1,9 +1,12 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "./target",
"declaration": true,
+ "declarationMap": true,
"sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-monaco/src",
"types": [
"jest",
"node"
diff --git a/packages/kbn-optimizer/index.d.ts b/packages/kbn-optimizer/index.d.ts
deleted file mode 100644
index 004ac67f4b0c4..0000000000000
--- a/packages/kbn-optimizer/index.d.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export * from './src/index';
diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml
index f93849e011d41..a027768ad66a0 100644
--- a/packages/kbn-optimizer/limits.yml
+++ b/packages/kbn-optimizer/limits.yml
@@ -9,7 +9,7 @@ pageLoadAssetSize:
charts: 195358
cloud: 21076
console: 46091
- core: 692106
+ core: 397521
crossClusterReplication: 65408
dashboard: 374194
dashboardEnhanced: 65646
@@ -24,13 +24,13 @@ pageLoadAssetSize:
enterpriseSearch: 35741
esUiShared: 326654
expressions: 224136
- features: 31211
- globalSearch: 43548
- globalSearchBar: 62888
+ features: 21723
+ globalSearch: 29696
+ globalSearchBar: 50403
globalSearchProviders: 25554
graph: 31504
grokdebugger: 26779
- home: 41661
+ home: 30182
indexLifecycleManagement: 107090
indexManagement: 140608
indexPatternManagement: 28222
@@ -45,13 +45,12 @@ pageLoadAssetSize:
kibanaUtils: 198829
lens: 96624
licenseManagement: 41817
- licensing: 39008
+ licensing: 29004
lists: 202261
logstash: 53548
management: 46112
- maps: 183610
+ maps: 80000
mapsLegacy: 87859
- mapsLegacyLicensing: 20214
ml: 82187
monitoring: 80000
navigation: 37269
@@ -73,8 +72,8 @@ pageLoadAssetSize:
share: 99061
snapshotRestore: 79032
spaces: 387915
- telemetry: 91832
- telemetryManagementSection: 52443
+ telemetry: 51957
+ telemetryManagementSection: 38586
tileMap: 65337
timelion: 29920
transform: 41007
@@ -108,3 +107,4 @@ pageLoadAssetSize:
fileUpload: 25664
banners: 17946
mapsEms: 26072
+ timelines: 28613
diff --git a/packages/kbn-optimizer/package.json b/packages/kbn-optimizer/package.json
index 29e33bf23424b..ac73fbc0fc16a 100644
--- a/packages/kbn-optimizer/package.json
+++ b/packages/kbn-optimizer/package.json
@@ -4,8 +4,9 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"scripts": {
- "build": "../../node_modules/.bin/babel src --out-dir target --copy-files --delete-dir-on-start --extensions .ts --ignore *.test.ts --source-maps=inline",
+ "build": "../../node_modules/.bin/tsc",
"kbn:bootstrap": "yarn build",
"kbn:watch": "yarn build --watch"
},
diff --git a/packages/kbn-optimizer/src/cli.ts b/packages/kbn-optimizer/src/cli.ts
index 6e3106dbc2af7..d5b9996dfb2cd 100644
--- a/packages/kbn-optimizer/src/cli.ts
+++ b/packages/kbn-optimizer/src/cli.ts
@@ -6,8 +6,6 @@
* Side Public License, v 1.
*/
-import 'source-map-support/register';
-
import Path from 'path';
import { REPO_ROOT } from '@kbn/utils';
diff --git a/packages/kbn-optimizer/src/common/bundle.test.ts b/packages/kbn-optimizer/src/common/bundle.test.ts
index ff9aa6fd90628..9dbaae9f36f20 100644
--- a/packages/kbn-optimizer/src/common/bundle.test.ts
+++ b/packages/kbn-optimizer/src/common/bundle.test.ts
@@ -42,7 +42,6 @@ it('creates cache keys', () => {
"id": "bar",
"manifestPath": undefined,
"outputDir": "/foo/bar/target",
- "pageLoadAssetSizeLimit": undefined,
"publicDirNames": Array [
"public",
],
diff --git a/packages/kbn-optimizer/src/common/bundle.ts b/packages/kbn-optimizer/src/common/bundle.ts
index 64b44de0dd1b3..08946deec0b47 100644
--- a/packages/kbn-optimizer/src/common/bundle.ts
+++ b/packages/kbn-optimizer/src/common/bundle.ts
@@ -11,6 +11,7 @@ import Fs from 'fs';
import { BundleCache } from './bundle_cache';
import { UnknownVals } from './ts_helpers';
+import { omit } from './obj_helpers';
import { includes, ascending, entriesToObject } from './array_helpers';
const VALID_BUNDLE_TYPES = ['plugin' as const, 'entry' as const];
@@ -90,7 +91,7 @@ export class Bundle {
*/
createCacheKey(files: string[], mtimes: Map): unknown {
return {
- spec: this.toSpec(),
+ spec: omit(this.toSpec(), ['pageLoadAssetSizeLimit']),
mtimes: entriesToObject(
files.map((p) => [p, mtimes.get(p)] as const).sort(ascending((e) => e[0]))
),
diff --git a/packages/kbn-optimizer/src/common/index.ts b/packages/kbn-optimizer/src/common/index.ts
index 0f2c6a3517603..7914d74fa9299 100644
--- a/packages/kbn-optimizer/src/common/index.ts
+++ b/packages/kbn-optimizer/src/common/index.ts
@@ -18,3 +18,4 @@ export * from './array_helpers';
export * from './event_stream_helpers';
export * from './parse_path';
export * from './theme_tags';
+export * from './obj_helpers';
diff --git a/packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts b/packages/kbn-optimizer/src/common/obj_helpers.ts
similarity index 58%
rename from packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts
rename to packages/kbn-optimizer/src/common/obj_helpers.ts
index 90786bc0ea55e..f238eb22e93a1 100644
--- a/packages/kbn-pm/src/utils/bazel/ensure_yarn_integrity_exists.ts
+++ b/packages/kbn-optimizer/src/common/obj_helpers.ts
@@ -6,13 +6,12 @@
* Side Public License, v 1.
*/
-import { join } from 'path';
-import { writeFile } from '../fs';
-
-export async function ensureYarnIntegrityFileExists(nodeModulesPath: string) {
- try {
- await writeFile(join(nodeModulesPath, '.yarn-integrity'), '', { flag: 'wx' });
- } catch {
- // no-op
+export function omit(obj: T, keys: K[]): Omit {
+ const result: any = {};
+ for (const [key, value] of Object.entries(obj) as any) {
+ if (!keys.includes(key)) {
+ result[key] = value;
+ }
}
+ return result as Omit;
}
diff --git a/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts b/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts
index a86f231b79806..50c9e7e12904f 100644
--- a/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts
+++ b/packages/kbn-optimizer/src/integration_tests/basic_optimization.test.ts
@@ -16,7 +16,7 @@ import del from 'del';
import { tap, filter } from 'rxjs/operators';
import { REPO_ROOT } from '@kbn/utils';
import { ToolingLog } from '@kbn/dev-utils';
-import { runOptimizer, OptimizerConfig, OptimizerUpdate, logOptimizerState } from '@kbn/optimizer';
+import { runOptimizer, OptimizerConfig, OptimizerUpdate, logOptimizerState } from '../index';
import { allValuesFrom } from '../common';
@@ -135,7 +135,7 @@ it('builds expected bundles, saves bundle counts to metadata', async () => {
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/foo/public/ext.ts,
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/foo/public/index.ts,
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/foo/public/lib.ts,
- /packages/kbn-optimizer/target/worker/entry_point_creator.js,
+ /packages/kbn-optimizer/src/worker/entry_point_creator.ts,
/packages/kbn-ui-shared-deps/public_path_module_creator.js,
]
`);
@@ -161,7 +161,7 @@ it('builds expected bundles, saves bundle counts to metadata', async () => {
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/plugins/bar/public/lib.ts,
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/src/core/public/core_app/styles/_globals_v8dark.scss,
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/src/core/public/core_app/styles/_globals_v8light.scss,
- /packages/kbn-optimizer/target/worker/entry_point_creator.js,
+ /packages/kbn-optimizer/src/worker/entry_point_creator.ts,
/packages/kbn-ui-shared-deps/public_path_module_creator.js,
]
`);
@@ -175,7 +175,7 @@ it('builds expected bundles, saves bundle counts to metadata', async () => {
Array [
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/x-pack/baz/kibana.json,
/packages/kbn-optimizer/src/__fixtures__/__tmp__/mock_repo/x-pack/baz/public/index.ts,
- /packages/kbn-optimizer/target/worker/entry_point_creator.js,
+ /packages/kbn-optimizer/src/worker/entry_point_creator.ts,
/packages/kbn-ui-shared-deps/public_path_module_creator.js,
]
`);
diff --git a/packages/kbn-optimizer/src/optimizer/observe_worker.ts b/packages/kbn-optimizer/src/optimizer/observe_worker.ts
index edf4545ae52b3..9f0ed9af556fb 100644
--- a/packages/kbn-optimizer/src/optimizer/observe_worker.ts
+++ b/packages/kbn-optimizer/src/optimizer/observe_worker.ts
@@ -61,18 +61,26 @@ function usingWorkerProc(
) {
return Rx.using(
(): ProcResource => {
- const proc = execa.node(require.resolve('../worker/run_worker'), [], {
- nodeOptions: [
- ...process.execArgv,
- ...(inspectFlag && config.inspectWorkers
- ? [`${inspectFlag}=${inspectPortCounter++}`]
- : []),
- ...(config.maxWorkerCount <= 3 ? ['--max-old-space-size=2048'] : []),
- ],
- buffer: false,
- stderr: 'pipe',
- stdout: 'pipe',
- });
+ const workerPath = require.resolve('../worker/run_worker');
+ const proc = execa.node(
+ workerPath.endsWith('.ts')
+ ? require.resolve('../worker/run_worker_from_source') // workerFromSourcePath
+ : workerPath,
+ [],
+ {
+ nodeOptions: [
+ '--preserve-symlinks',
+ '--preserve-symlinks-main',
+ ...(inspectFlag && config.inspectWorkers
+ ? [`${inspectFlag}=${inspectPortCounter++}`]
+ : []),
+ ...(config.maxWorkerCount <= 3 ? ['--max-old-space-size=2048'] : []),
+ ],
+ buffer: false,
+ stderr: 'pipe',
+ stdout: 'pipe',
+ }
+ );
return {
proc,
diff --git a/packages/kbn-optimizer/src/optimizer/optimizer_config.test.ts b/packages/kbn-optimizer/src/optimizer/optimizer_config.test.ts
index c546a0c6cf992..8becc76a23ca2 100644
--- a/packages/kbn-optimizer/src/optimizer/optimizer_config.test.ts
+++ b/packages/kbn-optimizer/src/optimizer/optimizer_config.test.ts
@@ -457,7 +457,7 @@ describe('OptimizerConfig::create()', () => {
[Window],
],
"invocationCallOrder": Array [
- 22,
+ 25,
],
"results": Array [
Object {
@@ -480,7 +480,7 @@ describe('OptimizerConfig::create()', () => {
[Window],
],
"invocationCallOrder": Array [
- 25,
+ 28,
],
"results": Array [
Object {
@@ -505,7 +505,7 @@ describe('OptimizerConfig::create()', () => {
[Window],
],
"invocationCallOrder": Array [
- 23,
+ 26,
],
"results": Array [
Object {
diff --git a/packages/kbn-optimizer/src/optimizer/optimizer_config.ts b/packages/kbn-optimizer/src/optimizer/optimizer_config.ts
index 9110b6db27e92..2dbe48c15483f 100644
--- a/packages/kbn-optimizer/src/optimizer/optimizer_config.ts
+++ b/packages/kbn-optimizer/src/optimizer/optimizer_config.ts
@@ -17,6 +17,7 @@ import {
ThemeTag,
ThemeTags,
parseThemeTags,
+ omit,
} from '../common';
import { findKibanaPlatformPlugins, KibanaPlatformPlugin } from './kibana_platform_plugins';
@@ -40,16 +41,6 @@ function pickMaxWorkerCount(dist: boolean) {
return Math.max(maxWorkers, 2);
}
-function omit(obj: T, keys: K[]): Omit {
- const result: any = {};
- for (const [key, value] of Object.entries(obj) as any) {
- if (!keys.includes(key)) {
- result[key] = value;
- }
- }
- return result as Omit;
-}
-
interface Options {
/** absolute path to root of the repo/build */
repoRoot: string;
diff --git a/packages/kbn-optimizer/src/worker/bundle_ref_module.ts b/packages/kbn-optimizer/src/worker/bundle_ref_module.ts
index 563b4ecb4bc37..f7604f0f78f71 100644
--- a/packages/kbn-optimizer/src/worker/bundle_ref_module.ts
+++ b/packages/kbn-optimizer/src/worker/bundle_ref_module.ts
@@ -16,7 +16,6 @@ export class BundleRefModule extends Module {
public built = false;
public buildMeta?: any;
public buildInfo?: any;
- public exportsArgument = '__webpack_exports__';
constructor(public readonly ref: BundleRef) {
super('kbn/bundleRef', null);
@@ -45,7 +44,9 @@ export class BundleRefModule extends Module {
build(_: any, __: any, ___: any, ____: any, callback: () => void) {
this.built = true;
this.buildMeta = {};
- this.buildInfo = {};
+ this.buildInfo = {
+ exportsArgument: '__webpack_exports__',
+ };
callback();
}
diff --git a/src/core/server/utils/index.ts b/packages/kbn-optimizer/src/worker/run_worker_from_source.js
similarity index 80%
rename from src/core/server/utils/index.ts
rename to packages/kbn-optimizer/src/worker/run_worker_from_source.js
index b0776c48f3bed..bebe984a447d6 100644
--- a/src/core/server/utils/index.ts
+++ b/packages/kbn-optimizer/src/worker/run_worker_from_source.js
@@ -6,5 +6,5 @@
* Side Public License, v 1.
*/
-export * from './from_root';
-export * from './package_json';
+require('@kbn/optimizer').registerNodeAutoTranspilation();
+require('./run_worker');
diff --git a/packages/kbn-optimizer/tsconfig.json b/packages/kbn-optimizer/tsconfig.json
index 20b06b5658cbc..f2d508cf14a55 100644
--- a/packages/kbn-optimizer/tsconfig.json
+++ b/packages/kbn-optimizer/tsconfig.json
@@ -1,10 +1,17 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-optimizer"
+ "incremental": false,
+ "outDir": "./target",
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-optimizer/src"
},
"include": [
- "index.d.ts",
"src/**/*"
+ ],
+ "exclude": [
+ "**/__fixtures__/**/*"
]
}
diff --git a/packages/kbn-plugin-generator/package.json b/packages/kbn-plugin-generator/package.json
index a0a18bfe7d1cb..ae4dfbc670f19 100644
--- a/packages/kbn-plugin-generator/package.json
+++ b/packages/kbn-plugin-generator/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "target/index.js",
+ "types": "target/index.d.ts",
"scripts": {
"kbn:bootstrap": "node scripts/build",
"kbn:watch": "node scripts/build --watch"
diff --git a/packages/kbn-plugin-generator/tsconfig.json b/packages/kbn-plugin-generator/tsconfig.json
index c54ff041d7065..5e885527a7608 100644
--- a/packages/kbn-plugin-generator/tsconfig.json
+++ b/packages/kbn-plugin-generator/tsconfig.json
@@ -1,12 +1,22 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
"target": "ES2019",
"declaration": true,
"declarationMap": true,
- "sourceMap": true
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-plugin-generator/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["src/**/*"],
- "exclude": ["src/template/*"]
+ "include": [
+ "src/**/*"
+ ],
+ "exclude": [
+ "src/template/*"
+  ]
}
diff --git a/packages/kbn-plugin-helpers/package.json b/packages/kbn-plugin-helpers/package.json
index cc845ef9d027f..6b9dd4d51baf9 100644
--- a/packages/kbn-plugin-helpers/package.json
+++ b/packages/kbn-plugin-helpers/package.json
@@ -8,6 +8,7 @@
"devOnly": true
},
"main": "target/index.js",
+ "types": "target/index.d.ts",
"bin": {
"plugin-helpers": "bin/plugin-helpers.js"
},
diff --git a/packages/kbn-plugin-helpers/tsconfig.json b/packages/kbn-plugin-helpers/tsconfig.json
index 651bc79d6e707..87d11843f398a 100644
--- a/packages/kbn-plugin-helpers/tsconfig.json
+++ b/packages/kbn-plugin-helpers/tsconfig.json
@@ -1,10 +1,17 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
+ "target": "ES2018",
"declaration": true,
+ "declarationMap": true,
"sourceMap": true,
- "target": "ES2018"
+ "sourceRoot": "../../../../packages/kbn-plugin-helpers/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
"include": [
"src/**/*"
diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js
index 9bf332bf82319..7c5d0390d9fba 100644
--- a/packages/kbn-pm/dist/index.js
+++ b/packages/kbn-pm/dist/index.js
@@ -209,7 +209,7 @@ async function run(argv) {
},
default: {
cache: true,
- 'force-install': true,
+ 'force-install': false,
offline: false,
validate: true
},
@@ -550,7 +550,7 @@ Object.defineProperty(exports, "pickLevelFromFlags", { enumerable: true, get: fu
Object.defineProperty(exports, "parseLogLevel", { enumerable: true, get: function () { return log_levels_1.parseLogLevel; } });
var tooling_log_collecting_writer_1 = __webpack_require__(127);
Object.defineProperty(exports, "ToolingLogCollectingWriter", { enumerable: true, get: function () { return tooling_log_collecting_writer_1.ToolingLogCollectingWriter; } });
-
+//# sourceMappingURL=index.js.map
/***/ }),
/* 6 */
@@ -628,7 +628,7 @@ class ToolingLog {
}
}
exports.ToolingLog = ToolingLog;
-
+//# sourceMappingURL=tooling_log.js.map
/***/ }),
/* 7 */
@@ -6749,7 +6749,7 @@ class ToolingLogTextWriter {
}
}
exports.ToolingLogTextWriter = ToolingLogTextWriter;
-
+//# sourceMappingURL=tooling_log_text_writer.js.map
/***/ }),
/* 112 */
@@ -8790,7 +8790,7 @@ function parseLogLevel(name) {
};
}
exports.parseLogLevel = parseLogLevel;
-
+//# sourceMappingURL=log_levels.js.map
/***/ }),
/* 127 */
@@ -8823,7 +8823,7 @@ class ToolingLogCollectingWriter extends tooling_log_text_writer_1.ToolingLogTex
}
}
exports.ToolingLogCollectingWriter = ToolingLogCollectingWriter;
-
+//# sourceMappingURL=tooling_log_collecting_writer.js.map
/***/ }),
/* 128 */
@@ -8910,8 +8910,11 @@ const BootstrapCommand = {
const nonBazelProjectsOnly = await Object(_utils_projects__WEBPACK_IMPORTED_MODULE_4__["getNonBazelProjectsOnly"])(projects);
const batchedNonBazelProjects = Object(_utils_projects__WEBPACK_IMPORTED_MODULE_4__["topologicallyBatchProjects"])(nonBazelProjectsOnly, projectGraph);
const kibanaProjectPath = ((_projects$get = projects.get('kibana')) === null || _projects$get === void 0 ? void 0 : _projects$get.path) || '';
- const runOffline = (options === null || options === void 0 ? void 0 : options.offline) === true;
- const forceInstall = !!options && options['force-install'] === true; // Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
+ const runOffline = (options === null || options === void 0 ? void 0 : options.offline) === true; // Force install is set in case a flag is passed or
+ // if the `.yarn-integrity` file is not found which
+ // will be indicated by the return of yarnIntegrityFileExists.
+
+ const forceInstall = !!options && options['force-install'] === true || !(await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["yarnIntegrityFileExists"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["resolve"])(kibanaProjectPath, 'node_modules'))); // Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
// for bazel to know it has to re-install the node_modules after a reset or a clean
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["ensureYarnIntegrityFileExists"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["resolve"])(kibanaProjectPath, 'node_modules')); // Install bazel machinery tools if needed
@@ -8925,9 +8928,6 @@ const BootstrapCommand = {
// That way non bazel projects could depend on bazel projects but not the other way around
// That is only intended during the migration process while non Bazel projects are not removed at all.
//
- // Until we have our first package build within Bazel we will always need to directly call the yarn rule
- // otherwise yarn install won't trigger as we don't have any npm dependency within Bazel
- // TODO: Change CLI default in order to not force install as soon as we have our first Bazel package being built
if (forceInstall) {
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_9__["runBazel"])(['run', '@nodejs//:yarn'], runOffline);
@@ -9105,6 +9105,7 @@ __webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isDirectory", function() { return isDirectory; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isFile", function() { return isFile; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createSymlink", function() { return createSymlink; });
+/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tryRealpath", function() { return tryRealpath; });
/* harmony import */ var cmd_shim__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(132);
/* harmony import */ var cmd_shim__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(cmd_shim__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var del__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(143);
@@ -9137,6 +9138,7 @@ const symlink = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPA
const chmod = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.chmod);
const cmdShim = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(cmd_shim__WEBPACK_IMPORTED_MODULE_0___default.a);
const mkdir = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.mkdir);
+const realpathNative = Object(util__WEBPACK_IMPORTED_MODULE_5__["promisify"])(fs__WEBPACK_IMPORTED_MODULE_2___default.a.realpath.native);
const mkdirp = async path => await mkdir(path, {
recursive: true
});
@@ -9220,6 +9222,20 @@ async function forceCreate(src, dest, type) {
await symlink(src, dest, type);
}
+async function tryRealpath(path) {
+ let calculatedPath = path;
+
+ try {
+ calculatedPath = await realpathNative(path);
+ } catch (error) {
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+
+ return calculatedPath;
+}
+
/***/ }),
/* 132 */
/***/ (function(module, exports, __webpack_require__) {
@@ -14439,6 +14455,7 @@ module.exports = FastGlob;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
const utils = __webpack_require__(165);
function generate(patterns, settings) {
const positivePatterns = getPositivePatterns(patterns);
@@ -14510,6 +14527,7 @@ exports.convertPatternGroupToTask = convertPatternGroupToTask;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
const array = __webpack_require__(166);
exports.array = array;
const errno = __webpack_require__(167);
@@ -14533,6 +14551,7 @@ exports.string = string;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.splitWhen = exports.flatten = void 0;
function flatten(items) {
return items.reduce((collection, item) => [].concat(collection, item), []);
}
@@ -14561,6 +14580,7 @@ exports.splitWhen = splitWhen;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.isEnoentCodeError = void 0;
function isEnoentCodeError(error) {
return error.code === 'ENOENT';
}
@@ -14574,6 +14594,7 @@ exports.isEnoentCodeError = isEnoentCodeError;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
@@ -14599,6 +14620,7 @@ exports.createDirentFromStats = createDirentFromStats;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
const path = __webpack_require__(4);
const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
@@ -14638,6 +14660,7 @@ exports.removeLeadingDotSegment = removeLeadingDotSegment;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
const path = __webpack_require__(4);
const globParent = __webpack_require__(171);
const micromatch = __webpack_require__(174);
@@ -14654,6 +14677,14 @@ function isStaticPattern(pattern, options = {}) {
}
exports.isStaticPattern = isStaticPattern;
function isDynamicPattern(pattern, options = {}) {
+ /**
+ * A special case with an empty string is necessary for matching patterns that start with a forward slash.
+ * An empty string cannot be a dynamic pattern.
+ * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
+ */
+ if (pattern === '') {
+ return false;
+ }
/**
* When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
* filepath directly (without read directory).
@@ -14728,12 +14759,23 @@ function expandBraceExpansion(pattern) {
}
exports.expandBraceExpansion = expandBraceExpansion;
function getPatternParts(pattern, options) {
- const info = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
- // See micromatch/picomatch#58 for more details
- if (info.parts.length === 0) {
- return [pattern];
+ let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
+ /**
+ * The scan method returns an empty array in some cases.
+ * See micromatch/picomatch#58 for more details.
+ */
+ if (parts.length === 0) {
+ parts = [pattern];
+ }
+ /**
+ * The scan method does not return an empty part for the pattern with a forward slash.
+ * This is another part of micromatch/picomatch#58.
+ */
+ if (parts[0].startsWith('/')) {
+ parts[0] = parts[0].slice(1);
+ parts.unshift('');
}
- return info.parts;
+ return parts;
}
exports.getPatternParts = getPatternParts;
function makeRe(pattern, options) {
@@ -18947,6 +18989,7 @@ module.exports = parse;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.merge = void 0;
const merge2 = __webpack_require__(146);
function merge(streams) {
const mergedStream = merge2(streams);
@@ -18970,6 +19013,7 @@ function propagateCloseEventToSources(streams) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.isEmpty = exports.isString = void 0;
function isString(input) {
return typeof input === 'string';
}
@@ -20298,8 +20342,7 @@ class DeepFilter {
return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
_filter(basePath, entry, matcher, negativeRe) {
- const depth = this._getEntryLevel(basePath, entry.path);
- if (this._isSkippedByDeep(depth)) {
+ if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
@@ -20311,22 +20354,31 @@ class DeepFilter {
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
- _isSkippedByDeep(entryDepth) {
- return entryDepth >= this._settings.deep;
- }
- _isSkippedSymbolicLink(entry) {
- return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
+ _isSkippedByDeep(basePath, entryPath) {
+ /**
+ * Avoid unnecessary depth calculations when it doesn't matter.
+ */
+ if (this._settings.deep === Infinity) {
+ return false;
+ }
+ return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
_getEntryLevel(basePath, entryPath) {
- const basePathDepth = basePath.split('/').length;
const entryPathDepth = entryPath.split('/').length;
- return entryPathDepth - (basePath === '' ? 0 : basePathDepth);
+ if (basePath === '') {
+ return entryPathDepth;
+ }
+ const basePathDepth = basePath.split('/').length;
+ return entryPathDepth - basePathDepth;
+ }
+ _isSkippedSymbolicLink(entry) {
+ return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
- _isSkippedByNegativePatterns(entryPath, negativeRe) {
- return !utils.pattern.matchAny(entryPath, negativeRe);
+ _isSkippedByNegativePatterns(entryPath, patternsRe) {
+ return !utils.pattern.matchAny(entryPath, patternsRe);
}
}
exports.default = DeepFilter;
@@ -20454,20 +20506,21 @@ class EntryFilter {
return (entry) => this._filter(entry, positiveRe, negativeRe);
}
_filter(entry, positiveRe, negativeRe) {
- if (this._settings.unique) {
- if (this._isDuplicateEntry(entry)) {
- return false;
- }
- this._createIndexRecord(entry);
+ if (this._settings.unique && this._isDuplicateEntry(entry)) {
+ return false;
}
if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
return false;
}
- if (this._isSkippedByAbsoluteNegativePatterns(entry, negativeRe)) {
+ if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
return false;
}
const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
- return this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
+ const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
+ if (this._settings.unique && isMatched) {
+ this._createIndexRecord(entry);
+ }
+ return isMatched;
}
_isDuplicateEntry(entry) {
return this.index.has(entry.path);
@@ -20481,12 +20534,12 @@ class EntryFilter {
_onlyDirectoryFilter(entry) {
return this._settings.onlyDirectories && !entry.dirent.isDirectory();
}
- _isSkippedByAbsoluteNegativePatterns(entry, negativeRe) {
+ _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
if (!this._settings.absolute) {
return false;
}
- const fullpath = utils.path.makeAbsolute(this._settings.cwd, entry.path);
- return this._isMatchToPatterns(fullpath, negativeRe);
+ const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
+ return utils.pattern.matchAny(fullpath, patternsRe);
}
_isMatchToPatterns(entryPath, patternsRe) {
const filepath = utils.path.removeLeadingDotSegment(entryPath);
@@ -20676,9 +20729,14 @@ exports.default = ReaderSync;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
const fs = __webpack_require__(134);
const os = __webpack_require__(121);
-const CPU_COUNT = os.cpus().length;
+/**
+ * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
+ */
+const CPU_COUNT = Math.max(os.cpus().length, 1);
exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
lstatSync: fs.lstatSync,
@@ -22981,11 +23039,11 @@ class Project {
ensureValidProjectDependency(project) {
const relativePathToProject = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, project.path));
- const relativePathToProjectIfBazelPkg = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, `bazel/bin/packages/${path__WEBPACK_IMPORTED_MODULE_1___default.a.basename(project.path)}`));
+ const relativePathToProjectIfBazelPkg = normalizePath(path__WEBPACK_IMPORTED_MODULE_1___default.a.relative(this.path, `${__dirname}/../../../bazel-bin/packages/${path__WEBPACK_IMPORTED_MODULE_1___default.a.basename(project.path)}/npm_module`));
const versionInPackageJson = this.allDependencies[project.name];
const expectedVersionInPackageJson = `link:${relativePathToProject}`;
const expectedVersionInPackageJsonIfBazelPkg = `link:${relativePathToProjectIfBazelPkg}`; // TODO: after introduce bazel to build all the packages and completely remove the support for kbn packages
- // do not allow child projects to hold dependencies
+ // do not allow child projects to hold dependencies, unless they are meant to be published externally
if (versionInPackageJson === expectedVersionInPackageJson || versionInPackageJson === expectedVersionInPackageJsonIfBazelPkg) {
return;
@@ -23143,7 +23201,7 @@ const createProductionPackageJson = pkgJson => _objectSpread(_objectSpread({}, p
dependencies: transformDependencies(pkgJson.dependencies)
});
const isLinkDependency = depVersion => depVersion.startsWith('link:');
-const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel/bin/');
+const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel-bin/');
/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
* dependencies, these `file:` dependencies will be copied into `node_modules`
@@ -23153,7 +23211,7 @@ const isBazelPackageDependency = depVersion => depVersion.startsWith('link:bazel
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
- * Additionally it also taken care of replacing `link:bazel/bin/` with
+ * Additionally it also takes care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
@@ -23170,7 +23228,7 @@ function transformDependencies(dependencies = {}) {
}
if (isBazelPackageDependency(depVersion)) {
- newDeps[name] = depVersion.replace('link:bazel/bin/', 'file:');
+ newDeps[name] = depVersion.replace('link:bazel-bin/', 'file:').replace('/npm_module', '');
continue;
}
@@ -48065,8 +48123,10 @@ function addProjectToTree(tree, pathParts, project) {
"use strict";
__webpack_require__.r(__webpack_exports__);
-/* harmony import */ var _ensure_yarn_integrity_exists__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(373);
-/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return _ensure_yarn_integrity_exists__WEBPACK_IMPORTED_MODULE_0__["ensureYarnIntegrityFileExists"]; });
+/* harmony import */ var _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(373);
+/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "yarnIntegrityFileExists", function() { return _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__["yarnIntegrityFileExists"]; });
+
+/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return _yarn_integrity__WEBPACK_IMPORTED_MODULE_0__["ensureYarnIntegrityFileExists"]; });
/* harmony import */ var _get_cache_folders__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(374);
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getBazelDiskCacheFolder", function() { return _get_cache_folders__WEBPACK_IMPORTED_MODULE_1__["getBazelDiskCacheFolder"]; });
@@ -48099,6 +48159,7 @@ __webpack_require__.r(__webpack_exports__);
"use strict";
__webpack_require__.r(__webpack_exports__);
+/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "yarnIntegrityFileExists", function() { return yarnIntegrityFileExists; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ensureYarnIntegrityFileExists", function() { return ensureYarnIntegrityFileExists; });
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(4);
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_0__);
@@ -48112,9 +48173,27 @@ __webpack_require__.r(__webpack_exports__);
*/
+async function yarnIntegrityFileExists(nodeModulesPath) {
+ try {
+ const nodeModulesRealPath = await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["tryRealpath"])(nodeModulesPath);
+ const yarnIntegrityFilePath = Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesRealPath, '.yarn-integrity'); // check if the file already exists
+
+ if (await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["isFile"])(yarnIntegrityFilePath)) {
+ return true;
+ }
+ } catch {// no-op
+ }
+
+ return false;
+}
async function ensureYarnIntegrityFileExists(nodeModulesPath) {
try {
- await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["writeFile"])(Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesPath, '.yarn-integrity'), '', {
+ const nodeModulesRealPath = await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["tryRealpath"])(nodeModulesPath);
+ const yarnIntegrityFilePath = Object(path__WEBPACK_IMPORTED_MODULE_0__["join"])(nodeModulesRealPath, '.yarn-integrity'); // ensure node_modules folder is created
+
+ await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["mkdirp"])(nodeModulesRealPath); // write a blank file in case it doesn't exist
+
+ await Object(_fs__WEBPACK_IMPORTED_MODULE_1__["writeFile"])(yarnIntegrityFilePath, '', {
flag: 'wx'
});
} catch {// no-op
@@ -54377,7 +54456,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = __webpack_require__(7);
tslib_1.__exportStar(__webpack_require__(476), exports);
tslib_1.__exportStar(__webpack_require__(477), exports);
-
+//# sourceMappingURL=index.js.map
/***/ }),
/* 476 */
@@ -54436,7 +54515,7 @@ function observeLines(readable) {
operators_1.catchError(() => Rx.empty())));
}
exports.observeLines = observeLines;
-
+//# sourceMappingURL=observe_lines.js.map
/***/ }),
/* 477 */
@@ -54465,7 +54544,7 @@ function observeReadable(readable) {
return Rx.race(Rx.fromEvent(readable, 'end').pipe(operators_1.first(), operators_1.ignoreElements()), Rx.fromEvent(readable, 'error').pipe(operators_1.first(), operators_1.mergeMap((err) => Rx.throwError(err))));
}
exports.observeReadable = observeReadable;
-
+//# sourceMappingURL=observe_readable.js.map
/***/ }),
/* 478 */
@@ -59798,7 +59877,7 @@ class CiStatsReporter {
}
}
exports.CiStatsReporter = CiStatsReporter;
-
+//# sourceMappingURL=ci_stats_reporter.js.map
/***/ }),
/* 516 */
@@ -63258,7 +63337,7 @@ function parseConfig(log) {
return;
}
exports.parseConfig = parseConfig;
-
+//# sourceMappingURL=ci_stats_config.js.map
/***/ }),
/* 557 */
@@ -63599,7 +63678,7 @@ __webpack_require__.r(__webpack_exports__);
/* harmony import */ var _build_bazel_production_projects__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(564);
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "buildBazelProductionProjects", function() { return _build_bazel_production_projects__WEBPACK_IMPORTED_MODULE_0__["buildBazelProductionProjects"]; });
-/* harmony import */ var _build_non_bazel_production_projects__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(783);
+/* harmony import */ var _build_non_bazel_production_projects__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(812);
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "buildNonBazelProductionProjects", function() { return _build_non_bazel_production_projects__WEBPACK_IMPORTED_MODULE_1__["buildNonBazelProductionProjects"]; });
/*
@@ -63625,7 +63704,7 @@ __webpack_require__.r(__webpack_exports__);
/* harmony import */ var globby__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(globby__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(4);
/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_2__);
-/* harmony import */ var _build_non_bazel_production_projects__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(783);
+/* harmony import */ var _build_non_bazel_production_projects__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(812);
/* harmony import */ var _utils_bazel__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(372);
/* harmony import */ var _utils_fs__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(131);
/* harmony import */ var _utils_log__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(246);
@@ -63656,7 +63735,7 @@ async function buildBazelProductionProjects({
const projectNames = [...projects.values()].map(project => project.name);
_utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`Preparing Bazel projects production build for [${projectNames.join(', ')}]`);
await Object(_utils_bazel__WEBPACK_IMPORTED_MODULE_4__["runBazel"])(['build', '//packages:build']);
- _utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete}]`);
+ _utils_log__WEBPACK_IMPORTED_MODULE_6__["log"].info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete`);
for (const project of projects.values()) {
await copyToBuild(project, kibanaRoot, buildRoot);
@@ -63680,7 +63759,7 @@ async function copyToBuild(project, kibanaRoot, buildRoot) {
const relativeProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["relative"])(kibanaRoot, project.path);
const buildProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, relativeProjectPath);
await cpy__WEBPACK_IMPORTED_MODULE_0___default()(['**/*'], buildProjectPath, {
- cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel', 'bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
+ cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel-bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
dot: true,
onlyFiles: true,
parents: true
@@ -63702,12 +63781,12 @@ async function applyCorrectPermissions(project, kibanaRoot, buildRoot) {
const buildProjectPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, relativeProjectPath);
const allPluginPaths = await globby__WEBPACK_IMPORTED_MODULE_1___default()([`**/*`], {
onlyFiles: false,
- cwd: Object(path__WEBPACK_IMPORTED_MODULE_2__["join"])(kibanaRoot, 'bazel', 'bin', 'packages', Object(path__WEBPACK_IMPORTED_MODULE_2__["basename"])(buildProjectPath), 'npm_module'),
+ cwd: buildProjectPath,
dot: true
});
for (const pluginPath of allPluginPaths) {
- const resolvedPluginPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildRoot, pluginPath);
+ const resolvedPluginPath = Object(path__WEBPACK_IMPORTED_MODULE_2__["resolve"])(buildProjectPath, pluginPath);
if (await Object(_utils_fs__WEBPACK_IMPORTED_MODULE_5__["isFile"])(resolvedPluginPath)) {
await Object(_utils_fs__WEBPACK_IMPORTED_MODULE_5__["chmod"])(resolvedPluginPath, 0o644);
@@ -90227,265 +90306,385 @@ module.exports = CpyError;
"use strict";
-const arrayUnion = __webpack_require__(775);
-const glob = __webpack_require__(147);
-const fastGlob = __webpack_require__(572);
-const dirGlob = __webpack_require__(776);
-const gitignore = __webpack_require__(780);
+const fs = __webpack_require__(134);
+const arrayUnion = __webpack_require__(145);
+const merge2 = __webpack_require__(146);
+const fastGlob = __webpack_require__(775);
+const dirGlob = __webpack_require__(232);
+const gitignore = __webpack_require__(810);
+const {FilterStream, UniqueStream} = __webpack_require__(811);
const DEFAULT_FILTER = () => false;
const isNegative = pattern => pattern[0] === '!';
const assertPatternsInput = patterns => {
- if (!patterns.every(x => typeof x === 'string')) {
+ if (!patterns.every(pattern => typeof pattern === 'string')) {
throw new TypeError('Patterns must be a string or an array of strings');
}
};
-const generateGlobTasks = (patterns, taskOpts) => {
- patterns = [].concat(patterns);
+const checkCwdOption = (options = {}) => {
+ if (!options.cwd) {
+ return;
+ }
+
+ let stat;
+ try {
+ stat = fs.statSync(options.cwd);
+ } catch {
+ return;
+ }
+
+ if (!stat.isDirectory()) {
+ throw new Error('The `cwd` option must be a path to a directory');
+ }
+};
+
+const getPathString = p => p.stats instanceof fs.Stats ? p.path : p;
+
+const generateGlobTasks = (patterns, taskOptions) => {
+ patterns = arrayUnion([].concat(patterns));
assertPatternsInput(patterns);
+ checkCwdOption(taskOptions);
const globTasks = [];
- taskOpts = Object.assign({
+ taskOptions = {
ignore: [],
- expandDirectories: true
- }, taskOpts);
+ expandDirectories: true,
+ ...taskOptions
+ };
- patterns.forEach((pattern, i) => {
+ for (const [index, pattern] of patterns.entries()) {
if (isNegative(pattern)) {
- return;
+ continue;
}
const ignore = patterns
- .slice(i)
- .filter(isNegative)
+ .slice(index)
+ .filter(pattern => isNegative(pattern))
.map(pattern => pattern.slice(1));
- const opts = Object.assign({}, taskOpts, {
- ignore: taskOpts.ignore.concat(ignore)
- });
+ const options = {
+ ...taskOptions,
+ ignore: taskOptions.ignore.concat(ignore)
+ };
- globTasks.push({pattern, opts});
- });
+ globTasks.push({pattern, options});
+ }
return globTasks;
};
const globDirs = (task, fn) => {
- let opts = {cwd: task.opts.cwd};
+ let options = {};
+ if (task.options.cwd) {
+ options.cwd = task.options.cwd;
+ }
- if (Array.isArray(task.opts.expandDirectories)) {
- opts = Object.assign(opts, {files: task.opts.expandDirectories});
- } else if (typeof task.opts.expandDirectories === 'object') {
- opts = Object.assign(opts, task.opts.expandDirectories);
+ if (Array.isArray(task.options.expandDirectories)) {
+ options = {
+ ...options,
+ files: task.options.expandDirectories
+ };
+ } else if (typeof task.options.expandDirectories === 'object') {
+ options = {
+ ...options,
+ ...task.options.expandDirectories
+ };
}
- return fn(task.pattern, opts);
+ return fn(task.pattern, options);
};
-const getPattern = (task, fn) => task.opts.expandDirectories ? globDirs(task, fn) : [task.pattern];
+const getPattern = (task, fn) => task.options.expandDirectories ? globDirs(task, fn) : [task.pattern];
-module.exports = (patterns, opts) => {
- let globTasks;
+const getFilterSync = options => {
+ return options && options.gitignore ?
+ gitignore.sync({cwd: options.cwd, ignore: options.ignore}) :
+ DEFAULT_FILTER;
+};
- try {
- globTasks = generateGlobTasks(patterns, opts);
- } catch (err) {
- return Promise.reject(err);
+const globToTask = task => glob => {
+ const {options} = task;
+ if (options.ignore && Array.isArray(options.ignore) && options.expandDirectories) {
+ options.ignore = dirGlob.sync(options.ignore);
}
- const getTasks = Promise.all(globTasks.map(task => Promise.resolve(getPattern(task, dirGlob))
- .then(globs => Promise.all(globs.map(glob => ({
- pattern: glob,
- opts: task.opts
- }))))
- ))
- .then(tasks => arrayUnion.apply(null, tasks));
-
- const getFilter = () => {
- return Promise.resolve(
- opts && opts.gitignore ?
- gitignore({cwd: opts.cwd, ignore: opts.ignore}) :
- DEFAULT_FILTER
- );
+ return {
+ pattern: glob,
+ options
};
-
- return getFilter()
- .then(filter => {
- return getTasks
- .then(tasks => Promise.all(tasks.map(task => fastGlob(task.pattern, task.opts))))
- .then(paths => arrayUnion.apply(null, paths))
- .then(paths => paths.filter(p => !filter(p)));
- });
};
-module.exports.sync = (patterns, opts) => {
- const globTasks = generateGlobTasks(patterns, opts);
+module.exports = async (patterns, options) => {
+ const globTasks = generateGlobTasks(patterns, options);
- const getFilter = () => {
- return opts && opts.gitignore ?
- gitignore.sync({cwd: opts.cwd, ignore: opts.ignore}) :
+ const getFilter = async () => {
+ return options && options.gitignore ?
+ gitignore({cwd: options.cwd, ignore: options.ignore}) :
DEFAULT_FILTER;
};
- const tasks = globTasks.reduce((tasks, task) => {
- const newTask = getPattern(task, dirGlob.sync).map(glob => ({
- pattern: glob,
- opts: task.opts
+ const getTasks = async () => {
+ const tasks = await Promise.all(globTasks.map(async task => {
+ const globs = await getPattern(task, dirGlob);
+ return Promise.all(globs.map(globToTask(task)));
}));
- return tasks.concat(newTask);
- }, []);
-
- const filter = getFilter();
-
- return tasks.reduce(
- (matches, task) => arrayUnion(matches, fastGlob.sync(task.pattern, task.opts)),
- []
- ).filter(p => !filter(p));
-};
-
-module.exports.generateGlobTasks = generateGlobTasks;
-
-module.exports.hasMagic = (patterns, opts) => []
- .concat(patterns)
- .some(pattern => glob.hasMagic(pattern, opts));
-
-module.exports.gitignore = gitignore;
+ return arrayUnion(...tasks);
+ };
-/***/ }),
-/* 775 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-var arrayUniq = __webpack_require__(571);
+ const [filter, tasks] = await Promise.all([getFilter(), getTasks()]);
+ const paths = await Promise.all(tasks.map(task => fastGlob(task.pattern, task.options)));
-module.exports = function () {
- return arrayUniq([].concat.apply([], arguments));
+ return arrayUnion(...paths).filter(path_ => !filter(getPathString(path_)));
};
+module.exports.sync = (patterns, options) => {
+ const globTasks = generateGlobTasks(patterns, options);
-/***/ }),
-/* 776 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-const path = __webpack_require__(4);
-const arrify = __webpack_require__(777);
-const pathType = __webpack_require__(778);
+ const tasks = [];
+ for (const task of globTasks) {
+ const newTask = getPattern(task, dirGlob.sync).map(globToTask(task));
+ tasks.push(...newTask);
+ }
-const getExtensions = extensions => extensions.length > 1 ? `{${extensions.join(',')}}` : extensions[0];
-const getPath = filepath => filepath[0] === '!' ? filepath.slice(1) : filepath;
+ const filter = getFilterSync(options);
-const addExtensions = (file, extensions) => {
- if (path.extname(file)) {
- return `**/${file}`;
+ let matches = [];
+ for (const task of tasks) {
+ matches = arrayUnion(matches, fastGlob.sync(task.pattern, task.options));
}
- return `**/${file}.${getExtensions(extensions)}`;
+ return matches.filter(path_ => !filter(path_));
};
-const getGlob = (dir, opts) => {
- opts = Object.assign({}, opts);
-
- if (opts.files && !Array.isArray(opts.files)) {
- throw new TypeError(`\`options.files\` must be an \`Array\`, not \`${typeof opts.files}\``);
- }
+module.exports.stream = (patterns, options) => {
+ const globTasks = generateGlobTasks(patterns, options);
- if (opts.extensions && !Array.isArray(opts.extensions)) {
- throw new TypeError(`\`options.extensions\` must be an \`Array\`, not \`${typeof opts.extensions}\``);
+ const tasks = [];
+ for (const task of globTasks) {
+ const newTask = getPattern(task, dirGlob.sync).map(globToTask(task));
+ tasks.push(...newTask);
}
- if (opts.files && opts.extensions) {
- return opts.files.map(x => path.join(dir, addExtensions(x, opts.extensions)));
- } else if (opts.files) {
- return opts.files.map(x => path.join(dir, `**/${x}`));
- } else if (opts.extensions) {
- return [path.join(dir, `**/*.${getExtensions(opts.extensions)}`)];
- }
+ const filter = getFilterSync(options);
+ const filterStream = new FilterStream(p => !filter(p));
+ const uniqueStream = new UniqueStream();
- return [path.join(dir, '**')];
+ return merge2(tasks.map(task => fastGlob.stream(task.pattern, task.options)))
+ .pipe(filterStream)
+ .pipe(uniqueStream);
};
-module.exports = (input, opts) => {
- return Promise.all(arrify(input).map(x => pathType.dir(getPath(x))
- .then(isDir => isDir ? getGlob(x, opts) : x)))
- .then(globs => [].concat.apply([], globs));
-};
+module.exports.generateGlobTasks = generateGlobTasks;
-module.exports.sync = (input, opts) => {
- const globs = arrify(input).map(x => pathType.dirSync(getPath(x)) ? getGlob(x, opts) : x);
- return [].concat.apply([], globs);
-};
+module.exports.hasMagic = (patterns, options) => []
+ .concat(patterns)
+ .some(pattern => fastGlob.isDynamicPattern(pattern, options));
+
+module.exports.gitignore = gitignore;
/***/ }),
-/* 777 */
+/* 775 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
-
-module.exports = function (val) {
- if (val === null || val === undefined) {
- return [];
- }
-
- return Array.isArray(val) ? val : [val];
-};
+
+const taskManager = __webpack_require__(776);
+const async_1 = __webpack_require__(796);
+const stream_1 = __webpack_require__(806);
+const sync_1 = __webpack_require__(807);
+const settings_1 = __webpack_require__(809);
+const utils = __webpack_require__(777);
+async function FastGlob(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, async_1.default, options);
+ const result = await Promise.all(works);
+ return utils.array.flatten(result);
+}
+// https://github.com/typescript-eslint/typescript-eslint/issues/60
+// eslint-disable-next-line no-redeclare
+(function (FastGlob) {
+ function sync(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, sync_1.default, options);
+ return utils.array.flatten(works);
+ }
+ FastGlob.sync = sync;
+ function stream(source, options) {
+ assertPatternsInput(source);
+ const works = getWorks(source, stream_1.default, options);
+ /**
+ * The stream returned by the provider cannot work with an asynchronous iterator.
+ * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams.
+ * This affects performance (+25%). I don't see best solution right now.
+ */
+ return utils.stream.merge(works);
+ }
+ FastGlob.stream = stream;
+ function generateTasks(source, options) {
+ assertPatternsInput(source);
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ return taskManager.generate(patterns, settings);
+ }
+ FastGlob.generateTasks = generateTasks;
+ function isDynamicPattern(source, options) {
+ assertPatternsInput(source);
+ const settings = new settings_1.default(options);
+ return utils.pattern.isDynamicPattern(source, settings);
+ }
+ FastGlob.isDynamicPattern = isDynamicPattern;
+ function escapePath(source) {
+ assertPatternsInput(source);
+ return utils.path.escape(source);
+ }
+ FastGlob.escapePath = escapePath;
+})(FastGlob || (FastGlob = {}));
+function getWorks(source, _Provider, options) {
+ const patterns = [].concat(source);
+ const settings = new settings_1.default(options);
+ const tasks = taskManager.generate(patterns, settings);
+ const provider = new _Provider(settings);
+ return tasks.map(provider.read, provider);
+}
+function assertPatternsInput(input) {
+ const source = [].concat(input);
+ const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item));
+ if (!isValidSource) {
+ throw new TypeError('Patterns must be a string (non empty) or an array of strings');
+ }
+}
+module.exports = FastGlob;
/***/ }),
-/* 778 */
+/* 776 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0;
+const utils = __webpack_require__(777);
+function generate(patterns, settings) {
+ const positivePatterns = getPositivePatterns(patterns);
+ const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore);
+ const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings));
+ const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings));
+ const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false);
+ const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true);
+ return staticTasks.concat(dynamicTasks);
+}
+exports.generate = generate;
+function convertPatternsToTasks(positive, negative, dynamic) {
+ const positivePatternsGroup = groupPatternsByBaseDirectory(positive);
+ // When we have a global group – there is no reason to divide the patterns into independent tasks.
+ // In this case, the global task covers the rest.
+ if ('.' in positivePatternsGroup) {
+ const task = convertPatternGroupToTask('.', positive, negative, dynamic);
+ return [task];
+ }
+ return convertPatternGroupsToTasks(positivePatternsGroup, negative, dynamic);
+}
+exports.convertPatternsToTasks = convertPatternsToTasks;
+function getPositivePatterns(patterns) {
+ return utils.pattern.getPositivePatterns(patterns);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getNegativePatternsAsPositive(patterns, ignore) {
+ const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore);
+ const positive = negative.map(utils.pattern.convertToPositivePattern);
+ return positive;
+}
+exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive;
+function groupPatternsByBaseDirectory(patterns) {
+ const group = {};
+ return patterns.reduce((collection, pattern) => {
+ const base = utils.pattern.getBaseDirectory(pattern);
+ if (base in collection) {
+ collection[base].push(pattern);
+ }
+ else {
+ collection[base] = [pattern];
+ }
+ return collection;
+ }, group);
+}
+exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory;
+function convertPatternGroupsToTasks(positive, negative, dynamic) {
+ return Object.keys(positive).map((base) => {
+ return convertPatternGroupToTask(base, positive[base], negative, dynamic);
+ });
+}
+exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks;
+function convertPatternGroupToTask(base, positive, negative, dynamic) {
+ return {
+ dynamic,
+ positive,
+ negative,
+ base,
+ patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern))
+ };
+}
+exports.convertPatternGroupToTask = convertPatternGroupToTask;
-const fs = __webpack_require__(134);
-const pify = __webpack_require__(779);
-
-function type(fn, fn2, fp) {
- if (typeof fp !== 'string') {
- return Promise.reject(new TypeError(`Expected a string, got ${typeof fp}`));
- }
-
- return pify(fs[fn])(fp)
- .then(stats => stats[fn2]())
- .catch(err => {
- if (err.code === 'ENOENT') {
- return false;
- }
- throw err;
- });
-}
+/***/ }),
+/* 777 */
+/***/ (function(module, exports, __webpack_require__) {
-function typeSync(fn, fn2, fp) {
- if (typeof fp !== 'string') {
- throw new TypeError(`Expected a string, got ${typeof fp}`);
- }
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0;
+const array = __webpack_require__(778);
+exports.array = array;
+const errno = __webpack_require__(779);
+exports.errno = errno;
+const fs = __webpack_require__(780);
+exports.fs = fs;
+const path = __webpack_require__(781);
+exports.path = path;
+const pattern = __webpack_require__(782);
+exports.pattern = pattern;
+const stream = __webpack_require__(794);
+exports.stream = stream;
+const string = __webpack_require__(795);
+exports.string = string;
- try {
- return fs[fn](fp)[fn2]();
- } catch (err) {
- if (err.code === 'ENOENT') {
- return false;
- }
- throw err;
- }
-}
+/***/ }),
+/* 778 */
+/***/ (function(module, exports, __webpack_require__) {
-exports.file = type.bind(null, 'stat', 'isFile');
-exports.dir = type.bind(null, 'stat', 'isDirectory');
-exports.symlink = type.bind(null, 'lstat', 'isSymbolicLink');
-exports.fileSync = typeSync.bind(null, 'statSync', 'isFile');
-exports.dirSync = typeSync.bind(null, 'statSync', 'isDirectory');
-exports.symlinkSync = typeSync.bind(null, 'lstatSync', 'isSymbolicLink');
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.splitWhen = exports.flatten = void 0;
+function flatten(items) {
+ return items.reduce((collection, item) => [].concat(collection, item), []);
+}
+exports.flatten = flatten;
+function splitWhen(items, predicate) {
+ const result = [[]];
+ let groupIndex = 0;
+ for (const item of items) {
+ if (predicate(item)) {
+ groupIndex++;
+ result[groupIndex] = [];
+ }
+ else {
+ result[groupIndex].push(item);
+ }
+ }
+ return result;
+}
+exports.splitWhen = splitWhen;
/***/ }),
@@ -90493,155 +90692,2976 @@ exports.symlinkSync = typeSync.bind(null, 'lstatSync', 'isSymbolicLink');
/***/ (function(module, exports, __webpack_require__) {
"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isEnoentCodeError = void 0;
+function isEnoentCodeError(error) {
+ return error.code === 'ENOENT';
+}
+exports.isEnoentCodeError = isEnoentCodeError;
-const processFn = (fn, opts) => function () {
- const P = opts.promiseModule;
- const args = new Array(arguments.length);
+/***/ }),
+/* 780 */
+/***/ (function(module, exports, __webpack_require__) {
- for (let i = 0; i < arguments.length; i++) {
- args[i] = arguments[i];
- }
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createDirentFromStats = void 0;
+class DirentFromStats {
+ constructor(name, stats) {
+ this.name = name;
+ this.isBlockDevice = stats.isBlockDevice.bind(stats);
+ this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
+ this.isDirectory = stats.isDirectory.bind(stats);
+ this.isFIFO = stats.isFIFO.bind(stats);
+ this.isFile = stats.isFile.bind(stats);
+ this.isSocket = stats.isSocket.bind(stats);
+ this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
+ }
+}
+function createDirentFromStats(name, stats) {
+ return new DirentFromStats(name, stats);
+}
+exports.createDirentFromStats = createDirentFromStats;
- return new P((resolve, reject) => {
- if (opts.errorFirst) {
- args.push(function (err, result) {
- if (opts.multiArgs) {
- const results = new Array(arguments.length - 1);
- for (let i = 1; i < arguments.length; i++) {
- results[i - 1] = arguments[i];
- }
+/***/ }),
+/* 781 */
+/***/ (function(module, exports, __webpack_require__) {
- if (err) {
- results.unshift(err);
- reject(results);
- } else {
- resolve(results);
- }
- } else if (err) {
- reject(err);
- } else {
- resolve(result);
- }
- });
- } else {
- args.push(function (result) {
- if (opts.multiArgs) {
- const results = new Array(arguments.length - 1);
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0;
+const path = __webpack_require__(4);
+const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\
+const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g;
+/**
+ * Designed to work only with simple paths: `dir\\file`.
+ */
+function unixify(filepath) {
+ return filepath.replace(/\\/g, '/');
+}
+exports.unixify = unixify;
+function makeAbsolute(cwd, filepath) {
+ return path.resolve(cwd, filepath);
+}
+exports.makeAbsolute = makeAbsolute;
+function escape(pattern) {
+ return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2');
+}
+exports.escape = escape;
+function removeLeadingDotSegment(entry) {
+ // We do not use `startsWith` because this is 10x slower than current implementation for some cases.
+ // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
+ if (entry.charAt(0) === '.') {
+ const secondCharactery = entry.charAt(1);
+ if (secondCharactery === '/' || secondCharactery === '\\') {
+ return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT);
+ }
+ }
+ return entry;
+}
+exports.removeLeadingDotSegment = removeLeadingDotSegment;
+
+
+/***/ }),
+/* 782 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0;
+const path = __webpack_require__(4);
+const globParent = __webpack_require__(171);
+const micromatch = __webpack_require__(783);
+const picomatch = __webpack_require__(185);
+const GLOBSTAR = '**';
+const ESCAPE_SYMBOL = '\\';
+const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/;
+const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[.*]/;
+const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\(.*\|.*\)/;
+const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\(.*\)/;
+const BRACE_EXPANSIONS_SYMBOLS_RE = /{.*(?:,|\.\.).*}/;
+function isStaticPattern(pattern, options = {}) {
+ return !isDynamicPattern(pattern, options);
+}
+exports.isStaticPattern = isStaticPattern;
+function isDynamicPattern(pattern, options = {}) {
+ /**
+ * A special case with an empty string is necessary for matching patterns that start with a forward slash.
+ * An empty string cannot be a dynamic pattern.
+ * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'.
+ */
+ if (pattern === '') {
+ return false;
+ }
+ /**
+ * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check
+ * filepath directly (without read directory).
+ */
+ if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) {
+ return true;
+ }
+ if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ if (options.braceExpansion !== false && BRACE_EXPANSIONS_SYMBOLS_RE.test(pattern)) {
+ return true;
+ }
+ return false;
+}
+exports.isDynamicPattern = isDynamicPattern;
+function convertToPositivePattern(pattern) {
+ return isNegativePattern(pattern) ? pattern.slice(1) : pattern;
+}
+exports.convertToPositivePattern = convertToPositivePattern;
+function convertToNegativePattern(pattern) {
+ return '!' + pattern;
+}
+exports.convertToNegativePattern = convertToNegativePattern;
+function isNegativePattern(pattern) {
+ return pattern.startsWith('!') && pattern[1] !== '(';
+}
+exports.isNegativePattern = isNegativePattern;
+function isPositivePattern(pattern) {
+ return !isNegativePattern(pattern);
+}
+exports.isPositivePattern = isPositivePattern;
+function getNegativePatterns(patterns) {
+ return patterns.filter(isNegativePattern);
+}
+exports.getNegativePatterns = getNegativePatterns;
+function getPositivePatterns(patterns) {
+ return patterns.filter(isPositivePattern);
+}
+exports.getPositivePatterns = getPositivePatterns;
+function getBaseDirectory(pattern) {
+ return globParent(pattern, { flipBackslashes: false });
+}
+exports.getBaseDirectory = getBaseDirectory;
+function hasGlobStar(pattern) {
+ return pattern.includes(GLOBSTAR);
+}
+exports.hasGlobStar = hasGlobStar;
+function endsWithSlashGlobStar(pattern) {
+ return pattern.endsWith('/' + GLOBSTAR);
+}
+exports.endsWithSlashGlobStar = endsWithSlashGlobStar;
+function isAffectDepthOfReadingPattern(pattern) {
+ const basename = path.basename(pattern);
+ return endsWithSlashGlobStar(pattern) || isStaticPattern(basename);
+}
+exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern;
+function expandPatternsWithBraceExpansion(patterns) {
+ return patterns.reduce((collection, pattern) => {
+ return collection.concat(expandBraceExpansion(pattern));
+ }, []);
+}
+exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion;
+function expandBraceExpansion(pattern) {
+ return micromatch.braces(pattern, {
+ expand: true,
+ nodupes: true
+ });
+}
+exports.expandBraceExpansion = expandBraceExpansion;
+function getPatternParts(pattern, options) {
+ let { parts } = picomatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true }));
+ /**
+ * The scan method returns an empty array in some cases.
+ * See micromatch/picomatch#58 for more details.
+ */
+ if (parts.length === 0) {
+ parts = [pattern];
+ }
+ /**
+ * The scan method does not return an empty part for the pattern with a forward slash.
+ * This is another part of micromatch/picomatch#58.
+ */
+ if (parts[0].startsWith('/')) {
+ parts[0] = parts[0].slice(1);
+ parts.unshift('');
+ }
+ return parts;
+}
+exports.getPatternParts = getPatternParts;
+function makeRe(pattern, options) {
+ return micromatch.makeRe(pattern, options);
+}
+exports.makeRe = makeRe;
+function convertPatternsToRe(patterns, options) {
+ return patterns.map((pattern) => makeRe(pattern, options));
+}
+exports.convertPatternsToRe = convertPatternsToRe;
+function matchAny(entry, patternsRe) {
+ return patternsRe.some((patternRe) => patternRe.test(entry));
+}
+exports.matchAny = matchAny;
+
+
+/***/ }),
+/* 783 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const util = __webpack_require__(112);
+const braces = __webpack_require__(784);
+const picomatch = __webpack_require__(185);
+const utils = __webpack_require__(188);
+const isEmptyString = val => typeof val === 'string' && (val === '' || val === './');
+
+/**
+ * Returns an array of strings that match one or more glob patterns.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm(list, patterns[, options]);
+ *
+ * console.log(mm(['a.js', 'a.txt'], ['*.js']));
+ * //=> [ 'a.js' ]
+ * ```
+ * @param {String|Array} list List of strings to match.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} options See available [options](#options)
+ * @return {Array} Returns an array of matches
+ * @summary false
+ * @api public
+ */
+
+const micromatch = (list, patterns, options) => {
+ patterns = [].concat(patterns);
+ list = [].concat(list);
+
+ let omit = new Set();
+ let keep = new Set();
+ let items = new Set();
+ let negatives = 0;
+
+ let onResult = state => {
+ items.add(state.output);
+ if (options && options.onResult) {
+ options.onResult(state);
+ }
+ };
+
+ for (let i = 0; i < patterns.length; i++) {
+ let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true);
+ let negated = isMatch.state.negated || isMatch.state.negatedExtglob;
+ if (negated) negatives++;
+
+ for (let item of list) {
+ let matched = isMatch(item, true);
+
+ let match = negated ? !matched.isMatch : matched.isMatch;
+ if (!match) continue;
+
+ if (negated) {
+ omit.add(matched.output);
+ } else {
+ omit.delete(matched.output);
+ keep.add(matched.output);
+ }
+ }
+ }
+
+ let result = negatives === patterns.length ? [...items] : [...keep];
+ let matches = result.filter(item => !omit.has(item));
+
+ if (options && matches.length === 0) {
+ if (options.failglob === true) {
+ throw new Error(`No matches found for "${patterns.join(', ')}"`);
+ }
+
+ if (options.nonull === true || options.nullglob === true) {
+ return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns;
+ }
+ }
+
+ return matches;
+};
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.match = micromatch;
+
+/**
+ * Returns a matcher function from the given glob `pattern` and `options`.
+ * The returned function takes a string to match as its only argument and returns
+ * true if the string is a match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matcher(pattern[, options]);
+ *
+ * const isMatch = mm.matcher('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @param {String} `pattern` Glob pattern
+ * @param {Object} `options`
+ * @return {Function} Returns a matcher function.
+ * @api public
+ */
+
+micromatch.matcher = (pattern, options) => picomatch(pattern, options);
+
+/**
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.isMatch(string, patterns[, options]);
+ *
+ * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(mm.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+
+/**
+ * Backwards compatibility
+ */
+
+micromatch.any = micromatch.isMatch;
+
+/**
+ * Returns a list of strings that _**do not match any**_ of the given `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.not(list, patterns[, options]);
+ *
+ * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a'));
+ * //=> ['b.b', 'c.c']
+ * ```
+ * @param {Array} `list` Array of strings to match.
+ * @param {String|Array} `patterns` One or more glob pattern to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Array} Returns an array of strings that **do not match** the given patterns.
+ * @api public
+ */
+
+micromatch.not = (list, patterns, options = {}) => {
+ patterns = [].concat(patterns).map(String);
+ let result = new Set();
+ let items = [];
+
+ let onResult = state => {
+ if (options.onResult) options.onResult(state);
+ items.push(state.output);
+ };
+
+ let matches = micromatch(list, patterns, { ...options, onResult });
+
+ for (let item of items) {
+ if (!matches.includes(item)) {
+ result.add(item);
+ }
+ }
+ return [...result];
+};
+
+/**
+ * Returns true if the given `string` contains the given pattern. Similar
+ * to [.isMatch](#isMatch) but the pattern can match any part of the string.
+ *
+ * ```js
+ * var mm = require('micromatch');
+ * // mm.contains(string, pattern[, options]);
+ *
+ * console.log(mm.contains('aa/bb/cc', '*b'));
+ * //=> true
+ * console.log(mm.contains('aa/bb/cc', '*d'));
+ * //=> false
+ * ```
+ * @param {String} `str` The string to match.
+ * @param {String|Array} `patterns` Glob pattern to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if the patter matches any part of `str`.
+ * @api public
+ */
+
+micromatch.contains = (str, pattern, options) => {
+ if (typeof str !== 'string') {
+ throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
+ }
+
+ if (Array.isArray(pattern)) {
+ return pattern.some(p => micromatch.contains(str, p, options));
+ }
+
+ if (typeof pattern === 'string') {
+ if (isEmptyString(str) || isEmptyString(pattern)) {
+ return false;
+ }
+
+ if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) {
+ return true;
+ }
+ }
+
+ return micromatch.isMatch(str, pattern, { ...options, contains: true });
+};
+
+/**
+ * Filter the keys of the given object with the given `glob` pattern
+ * and `options`. Does not attempt to match nested keys. If you need this feature,
+ * use [glob-object][] instead.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.matchKeys(object, patterns[, options]);
+ *
+ * const obj = { aa: 'a', ab: 'b', ac: 'c' };
+ * console.log(mm.matchKeys(obj, '*b'));
+ * //=> { ab: 'b' }
+ * ```
+ * @param {Object} `object` The object with keys to filter.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Object} Returns an object with only keys that match the given patterns.
+ * @api public
+ */
+
+micromatch.matchKeys = (obj, patterns, options) => {
+ if (!utils.isObject(obj)) {
+ throw new TypeError('Expected the first argument to be an object');
+ }
+ let keys = micromatch(Object.keys(obj), patterns, options);
+ let res = {};
+ for (let key of keys) res[key] = obj[key];
+ return res;
+};
+
+/**
+ * Returns true if some of the strings in the given `list` match any of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.some(list, patterns[, options]);
+ *
+ * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // true
+ * console.log(mm.some(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+micromatch.some = (list, patterns, options) => {
+ let items = [].concat(list);
+
+ for (let pattern of [].concat(patterns)) {
+ let isMatch = picomatch(String(pattern), options);
+ if (items.some(item => isMatch(item))) {
+ return true;
+ }
+ }
+ return false;
+};
+
+/**
+ * Returns true if every string in the given `list` matches
+ * any of the given glob `patterns`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.every(list, patterns[, options]);
+ *
+ * console.log(mm.every('foo.js', ['foo.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js']));
+ * // true
+ * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js']));
+ * // false
+ * console.log(mm.every(['foo.js'], ['*.js', '!foo.js']));
+ * // false
+ * ```
+ * @param {String|Array} `list` The string or array of strings to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Boolean} Returns true if every string in `list` matches at least one of the given `patterns`
+ * @api public
+ */
+
micromatch.every = (list, patterns, options) => {
  const items = [].concat(list);
  // Every pattern must be matched by every item in the list.
  return [].concat(patterns).every(pattern => {
    const isMatch = picomatch(String(pattern), options);
    return items.every(item => isMatch(item));
  });
};
+
+/**
+ * Returns true if **all** of the given `patterns` match
+ * the specified string.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.all(string, patterns[, options]);
+ *
+ * console.log(mm.all('foo.js', ['foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', '!foo.js']));
+ * // false
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'foo.js']));
+ * // true
+ *
+ * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js']));
+ * // true
+ * ```
+ * @param {String|Array} `str` The string to test.
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching.
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Boolean} Returns true if all of the given `patterns` match `str`
+ * @api public
+ */
+
micromatch.all = (str, patterns, options) => {
  if (typeof str !== 'string') {
    throw new TypeError(`Expected a string: "${util.inspect(str)}"`);
  }

  // True only when every single pattern matches the string.
  for (const pattern of [].concat(patterns)) {
    if (!picomatch(pattern, options)(str)) return false;
  }
  return true;
};
+
+/**
 * Returns an array of matches captured by `pattern` in `string`, or `undefined` if the pattern did not match.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.capture(pattern, string[, options]);
+ *
+ * console.log(mm.capture('test/*.js', 'test/foo.js'));
+ * //=> ['foo']
+ * console.log(mm.capture('test/*.js', 'foo/bar.css'));
 * //=> undefined
+ * ```
+ * @param {String} `glob` Glob pattern to use for matching.
+ * @param {String} `input` String to match
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed
 * @return {Array|undefined} Returns an array of captures if the input matches the glob pattern, otherwise `undefined`.
+ * @api public
+ */
+
micromatch.capture = (glob, input, options) => {
  // NOTE(review): despite the name, this holds utils.isWindows(options); when
  // true, backslashes in the input are normalized to forward slashes before
  // matching — confirm the variable naming upstream.
  let posix = utils.isWindows(options);
  // Force capture groups on so glob stars produce capture results.
  let regex = picomatch.makeRe(String(glob), { ...options, capture: true });
  let match = regex.exec(posix ? utils.toPosixSlashes(input) : input);

  if (match) {
    // Unmatched (undefined) capture groups are mapped to empty strings.
    return match.slice(1).map(v => v === void 0 ? '' : v);
  }
  // Implicitly returns undefined when the input does not match.
};
+
+/**
+ * Create a regular expression from the given glob `pattern`.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * // mm.makeRe(pattern[, options]);
+ *
+ * console.log(mm.makeRe('*.js'));
+ * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/
+ * ```
+ * @param {String} `pattern` A glob pattern to convert to regex.
+ * @param {Object} `options`
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+
micromatch.makeRe = function(...args) {
  // Thin delegation to picomatch.
  return picomatch.makeRe(...args);
};
+
+/**
+ * Scan a glob pattern to separate the pattern into segments. Used
+ * by the [split](#split) method.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm.scan(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with
+ * @api public
+ */
+
micromatch.scan = function(...args) {
  // Thin delegation to picomatch.
  return picomatch.scan(...args);
};
+
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const mm = require('micromatch');
+ * const state = mm(pattern[, options]);
+ * ```
+ * @param {String} `glob`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as regex source string.
+ * @api public
+ */
+
micromatch.parse = (patterns, options) => {
  const results = [];
  for (const pattern of [].concat(patterns || [])) {
    // Expand braces first, then parse each expanded pattern separately.
    for (const str of braces(String(pattern), options)) {
      results.push(picomatch.parse(str, options));
    }
  }
  return results;
};
+
+/**
+ * Process the given brace `pattern`.
+ *
+ * ```js
+ * const { braces } = require('micromatch');
+ * console.log(braces('foo/{a,b,c}/bar'));
+ * //=> [ 'foo/(a|b|c)/bar' ]
+ *
+ * console.log(braces('foo/{a,b,c}/bar', { expand: true }));
+ * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ]
+ * ```
+ * @param {String} `pattern` String with brace pattern to process.
+ * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options.
+ * @return {Array}
+ * @api public
+ */
+
micromatch.braces = (pattern, options) => {
  if (typeof pattern !== 'string') throw new TypeError('Expected a string');
  // Skip brace expansion when disabled, or when no brace pair is present.
  const skip = (options && options.nobrace === true) || !/\{.*\}/.test(pattern);
  return skip ? [pattern] : braces(pattern, options);
};
+
+/**
+ * Expand braces
+ */
+
micromatch.braceExpand = (pattern, options) => {
  if (typeof pattern !== 'string') throw new TypeError('Expected a string');
  // Delegates to .braces() with expansion forced on.
  const opts = { ...options, expand: true };
  return micromatch.braces(pattern, opts);
};
+
+/**
+ * Expose micromatch
+ */
+
+module.exports = micromatch;
+
+
+/***/ }),
+/* 784 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const stringify = __webpack_require__(785);
+const compile = __webpack_require__(787);
+const expand = __webpack_require__(791);
+const parse = __webpack_require__(792);
+
+/**
+ * Expand the given pattern or create a regex-compatible string.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
+ * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {String}
+ * @api public
+ */
+
const braces = (input, options = {}) => {
  let output = [];

  if (Array.isArray(input)) {
    // Process each pattern and flatten the per-pattern results.
    for (const pattern of input) {
      const result = [].concat(braces.create(pattern, options));
      output.push(...result);
    }
  } else {
    output = [].concat(braces.create(input, options));
  }

  // Dedupe only when expansion AND the nodupes flag are both enabled.
  if (options && options.expand === true && options.nodupes === true) {
    output = [...new Set(output)];
  }
  return output;
};
+
+/**
+ * Parse the given `str` with the given `options`.
+ *
+ * ```js
+ * // braces.parse(pattern, [, options]);
+ * const ast = braces.parse('a/{b,c}/d');
+ * console.log(ast);
+ * ```
+ * @param {String} pattern Brace pattern to parse
+ * @param {Object} options
+ * @return {Object} Returns an AST
+ * @api public
+ */
+
braces.parse = function(input, options = {}) {
  // Thin delegation to the parser module.
  return parse(input, options);
};
+
+/**
+ * Creates a braces string from an AST, or an AST node.
+ *
+ * ```js
+ * const braces = require('braces');
+ * let ast = braces.parse('foo/{a,b}/bar');
+ * console.log(stringify(ast.nodes[2])); //=> '{a,b}'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
 * @return {String} Returns the stringified brace pattern.
+ * @api public
+ */
+
braces.stringify = (input, options = {}) => {
  // Strings are parsed to an AST first; AST nodes are stringified directly.
  const ast = typeof input === 'string' ? braces.parse(input, options) : input;
  return stringify(ast, options);
};
+
+/**
+ * Compiles a brace pattern into a regex-compatible, optimized string.
+ * This method is called by the main [braces](#braces) function by default.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.compile('a/{b,c}/d'));
+ * //=> ['a/(b|c)/d']
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
braces.compile = (input, options = {}) => {
  // Accepts either a pattern string or a pre-parsed AST.
  const ast = typeof input === 'string' ? braces.parse(input, options) : input;
  return compile(ast, options);
};
+
+/**
+ * Expands a brace pattern into an array. This method is called by the
+ * main [braces](#braces) function when `options.expand` is true. Before
+ * using this method it's recommended that you read the [performance notes](#performance))
+ * and advantages of using [.compile](#compile) instead.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.expand('a/{b,c}/d'));
+ * //=> ['a/b/d', 'a/c/d'];
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
braces.expand = (input, options = {}) => {
  // Accepts either a pattern string or a pre-parsed AST.
  const ast = typeof input === 'string' ? braces.parse(input, options) : input;

  let result = expand(ast, options);

  // Drop empty strings when requested.
  if (options.noempty === true) {
    result = result.filter(Boolean);
  }

  // Dedupe when requested.
  if (options.nodupes === true) {
    result = [...new Set(result)];
  }

  return result;
};
+
+/**
+ * Processes a brace pattern and returns either an expanded array
+ * (if `options.expand` is true), a highly optimized regex-compatible string.
+ * This method is called by the main [braces](#braces) function.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
+ * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
braces.create = (input, options = {}) => {
  // A pattern shorter than the minimal brace form "{,}" (3 chars) cannot
  // contain a brace pair, so return it as-is. The previous `input === ''`
  // guard was redundant: '' has length 0, which is already < 3.
  if (input.length < 3) {
    return [input];
  }

  // Default is regex-style compilation; `expand: true` produces an array.
  return options.expand !== true
    ? braces.compile(input, options)
    : braces.expand(input, options);
};
+
+/**
+ * Expose "braces"
+ */
+
+module.exports = braces;
+
+
+/***/ }),
+/* 785 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const utils = __webpack_require__(786);
+
+module.exports = (ast, options = {}) => {
+ let stringify = (node, parent = {}) => {
+ let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let output = '';
+
+ if (node.value) {
+ if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
+ return '\\' + node.value;
+ }
+ return node.value;
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += stringify(child);
+ }
+ }
+ return output;
+ };
+
+ return stringify(ast);
+};
+
+
+
+/***/ }),
+/* 786 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+exports.isInteger = num => {
+ if (typeof num === 'number') {
+ return Number.isInteger(num);
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isInteger(Number(num));
+ }
+ return false;
+};
+
+/**
+ * Find a node of the given type
+ */
+
+exports.find = (node, type) => node.nodes.find(node => node.type === type);
+
/**
 * Returns true if expanding the range would exceed the given limit
 */
+
+exports.exceedsLimit = (min, max, step = 1, limit) => {
+ if (limit === false) return false;
+ if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
+ return ((Number(max) - Number(min)) / Number(step)) >= limit;
+};
+
+/**
+ * Escape the given node with '\\' before node.value
+ */
+
+exports.escapeNode = (block, n = 0, type) => {
+ let node = block.nodes[n];
+ if (!node) return;
+
+ if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
+ if (node.escaped !== true) {
+ node.value = '\\' + node.value;
+ node.escaped = true;
+ }
+ }
+};
+
+/**
+ * Returns true if the given brace node should be enclosed in literal braces
+ */
+
// Returns true (and marks the node invalid) when a brace node should be
// rendered as literal braces rather than expanded.
exports.encloseBrace = node => {
  if (node.type !== 'brace') return false;
  // NOTE(review): operator precedence makes this evaluate as
  // `node.commas >> ((0 + node.ranges) >> 0)`, not as a sum of the two
  // counts — it is 0 whenever `commas` is 0. The quirk appears load-bearing
  // (braces containing both commas and ranges become literal); confirm
  // upstream intent before "fixing".
  if ((node.commas >> 0 + node.ranges >> 0) === 0) {
    node.invalid = true;
    return true;
  }
  return false;
};
+
+/**
+ * Returns true if a brace node is invalid.
+ */
+
exports.isInvalidBrace = block => {
  if (block.type !== 'brace') return false;
  // Already flagged, or a ${...} shell-expansion lookalike.
  if (block.invalid === true || block.dollar) return true;
  // NOTE(review): same precedence caveat as encloseBrace — this parses as
  // `block.commas >> ((0 + block.ranges) >> 0)`, not a sum of the counts.
  if ((block.commas >> 0 + block.ranges >> 0) === 0) {
    block.invalid = true;
    return true;
  }
  // A brace must have been both opened and closed to be valid.
  if (block.open !== true || block.close !== true) {
    block.invalid = true;
    return true;
  }
  return false;
};
+
+/**
+ * Returns true if a node is an open or close node
+ */
+
+exports.isOpenOrClose = node => {
+ if (node.type === 'open' || node.type === 'close') {
+ return true;
+ }
+ return node.open === true || node.close === true;
+};
+
+/**
+ * Reduce an array of text nodes.
+ */
+
+exports.reduce = nodes => nodes.reduce((acc, node) => {
+ if (node.type === 'text') acc.push(node.value);
+ if (node.type === 'range') node.type = 'text';
+ return acc;
+}, []);
+
+/**
+ * Flatten an array
+ */
+
+exports.flatten = (...args) => {
+ const result = [];
+ const flat = arr => {
+ for (let i = 0; i < arr.length; i++) {
+ let ele = arr[i];
+ Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
+ }
+ return result;
+ };
+ flat(args);
+ return result;
+};
+
+
+/***/ }),
+/* 787 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const fill = __webpack_require__(788);
+const utils = __webpack_require__(786);
+
// Compile a brace AST into a regex-compatible source string
// (e.g. '{a,b}' -> '(a|b)', '{1..3}' -> '([1-3])').
const compile = (ast, options = {}) => {
  let walk = (node, parent = {}) => {
    let invalidBlock = utils.isInvalidBrace(parent);
    let invalidNode = node.invalid === true && options.escapeInvalid === true;
    let invalid = invalidBlock === true || invalidNode === true;
    let prefix = options.escapeInvalid === true ? '\\' : '';
    let output = '';

    if (node.isOpen === true) {
      return prefix + node.value;
    }
    if (node.isClose === true) {
      return prefix + node.value;
    }

    // Valid braces become a group; invalid ones are emitted literally
    // (optionally escaped).
    if (node.type === 'open') {
      return invalid ? (prefix + node.value) : '(';
    }

    if (node.type === 'close') {
      return invalid ? (prefix + node.value) : ')';
    }

    // Consecutive commas collapse; otherwise a comma becomes an alternation.
    if (node.type === 'comma') {
      return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
    }

    if (node.value) {
      return node.value;
    }

    // Range braces ({1..9}) are compiled via fill-range into a regex range.
    if (node.nodes && node.ranges > 0) {
      let args = utils.reduce(node.nodes);
      let range = fill(...args, { ...options, wrap: false, toRegex: true });

      if (range.length !== 0) {
        return args.length > 1 && range.length > 1 ? `(${range})` : range;
      }
    }

    // Otherwise concatenate the compiled children.
    if (node.nodes) {
      for (let child of node.nodes) {
        output += walk(child, node);
      }
    }
    return output;
  };

  return walk(ast);
};
+
+module.exports = compile;
+
+
+/***/ }),
+/* 788 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * fill-range
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+
+
+const util = __webpack_require__(112);
+const toRegexRange = __webpack_require__(789);
+
const isObject = val => {
  // Plain objects only: excludes null and arrays.
  return val !== null && typeof val === 'object' && !Array.isArray(val);
};
+
// Build a formatter that casts values to Number (when toNumber === true)
// or to String otherwise.
const transform = toNumber => {
  return value => (toNumber === true ? Number(value) : String(value));
};
+
const isValidValue = value => {
  // Numbers are always valid; strings must be non-empty.
  if (typeof value === 'number') return true;
  return typeof value === 'string' && value !== '';
};
+
// True when the value coerces to an integer (e.g. 5, '5', '-3').
const isNumber = num => Number.isInteger(Number(num));
+
const zeros = input => {
  // Detects zero-padding: a leading '0' (after an optional minus) on any
  // value other than plain '0'.
  let value = `${input}`;
  if (value[0] === '-') value = value.slice(1);
  if (value === '0') return false;
  return value.startsWith('0');
};
+
const stringify = (start, end, options) => {
  // Force string output if either endpoint is a string, or when requested.
  if (typeof start === 'string' || typeof end === 'string') return true;
  return options.stringify === true;
};
+
const pad = (input, maxLength, toNumber) => {
  if (maxLength > 0) {
    // Pad with leading zeros, keeping a minus sign in front of the padding.
    const dash = input[0] === '-' ? '-' : '';
    if (dash) input = input.slice(1);
    input = dash + input.padStart(dash ? maxLength - 1 : maxLength, '0');
  }
  return toNumber === false ? String(input) : input;
};
+
const toMaxLen = (input, maxLength) => {
  // Left-pad a string with zeros up to maxLength, honoring a minus sign.
  // Non-string inputs pass through untouched (they have no usable .length).
  const isNegative = input[0] === '-';
  let value = isNegative ? input.slice(1) : input;
  const width = isNegative ? maxLength - 1 : maxLength;
  while (value.length < width) value = '0' + value;
  return isNegative ? '-' + value : value;
};
+
const toSequence = (parts, options) => {
  // Sort both sign groups before joining into alternations.
  parts.negatives.sort((a, b) => (a < b ? -1 : a > b ? 1 : 0));
  parts.positives.sort((a, b) => (a < b ? -1 : a > b ? 1 : 0));

  const prefix = options.capture ? '' : '?:';
  const positives = parts.positives.length ? parts.positives.join('|') : '';
  // Negative values share one leading '-' via a group.
  const negatives = parts.negatives.length
    ? `-(${prefix}${parts.negatives.join('|')})`
    : '';

  let result;
  if (positives && negatives) {
    result = `${positives}|${negatives}`;
  } else {
    result = positives || negatives;
  }

  return options.wrap ? `(${prefix}${result})` : result;
};
+
const toRange = (a, b, isNumbers, options) => {
  if (isNumbers) {
    // Numeric endpoints: delegate to to-regex-range.
    return toRegexRange(a, b, { wrap: false, ...options });
  }

  // Character-code endpoints: a literal char or a character class.
  const start = String.fromCharCode(a);
  if (a === b) return start;
  return `[${start}-${String.fromCharCode(b)}]`;
};
+
const toRegex = (start, end, options) => {
  if (Array.isArray(start)) {
    // An array of alternatives, optionally wrapped in a (capture) group.
    const wrap = options.wrap === true;
    const prefix = options.capture ? '' : '?:';
    const body = start.join('|');
    return wrap ? `(${prefix}${body})` : body;
  }
  return toRegexRange(start, end, options);
};
+
const rangeError = (...args) => {
  // util.inspect gives a readable dump of the offending arguments.
  const details = util.inspect(...args);
  return new RangeError('Invalid range arguments: ' + details);
};
+
const invalidRange = (start, end, options) => {
  // Strict mode surfaces the problem; otherwise fail soft with [].
  if (options.strictRanges === true) throw rangeError([start, end]);
  return [];
};
+
const invalidStep = (step, options) => {
  // Non-strict mode fails soft with an empty result.
  if (options.strictRanges !== true) return [];
  throw new TypeError(`Expected step "${step}" to be a number`);
};
+
// Expand a numeric range [start, end] by `step`, as an array of values or
// (with options.toRegex) a regex source string.
const fillNumbers = (start, end, step = 1, options = {}) => {
  let a = Number(start);
  let b = Number(end);

  if (!Number.isInteger(a) || !Number.isInteger(b)) {
    // Non-integer endpoints: throw in strict mode, else return no results.
    if (options.strictRanges === true) throw rangeError([start, end]);
    return [];
  }

  // fix negative zero
  if (a === 0) a = 0;
  if (b === 0) b = 0;

  let descending = a > b;
  let startString = String(start);
  let endString = String(end);
  let stepString = String(step);
  step = Math.max(Math.abs(step), 1);

  // Zero-padded endpoints (e.g. '01') force padded string output.
  let padded = zeros(startString) || zeros(endString) || zeros(stepString);
  let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
  // Emit numbers only when nothing is padded and stringification wasn't requested.
  let toNumber = padded === false && stringify(start, end, options) === false;
  let format = options.transform || transform(toNumber);

  if (options.toRegex && step === 1) {
    // NOTE(review): toMaxLen receives the ORIGINAL start/end values, so a
    // numeric endpoint passes through unpadded even when maxLen > 0 —
    // confirm this matches upstream intent.
    return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
  }

  let parts = { negatives: [], positives: [] };
  let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
  let range = [];
  let index = 0;

  while (descending ? a >= b : a <= b) {
    if (options.toRegex === true && step > 1) {
      // Stepped regex output: bucket values by sign for toSequence below.
      push(a);
    } else {
      range.push(pad(format(a, index), maxLen, toNumber));
    }
    a = descending ? a - step : a + step;
    index++;
  }

  if (options.toRegex === true) {
    return step > 1
      ? toSequence(parts, options)
      : toRegex(range, null, { wrap: false, ...options });
  }

  return range;
};
+
// Expand a character range [start, end] by `step`, as an array of characters
// or (with options.toRegex) a regex source string.
const fillLetters = (start, end, step = 1, options = {}) => {
  // Only single-character, non-numeric endpoints form a valid letter range.
  if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
    return invalidRange(start, end, options);
  }

  let format = options.transform || (val => String.fromCharCode(val));
  let a = `${start}`.charCodeAt(0);
  let b = `${end}`.charCodeAt(0);

  let descending = a > b;
  let min = Math.min(a, b);
  let max = Math.max(a, b);

  if (options.toRegex && step === 1) {
    return toRange(min, max, false, options);
  }

  let range = [];
  let index = 0;

  while (descending ? a >= b : a <= b) {
    range.push(format(a, index));
    a = descending ? a - step : a + step;
    index++;
  }

  if (options.toRegex === true) {
    // Bug fix: this previously passed `{ wrap: false, options }`, which
    // nested the caller's options under an `options` key and silently
    // discarded flags such as `capture`. Spread them like fillNumbers does.
    return toRegex(range, null, { wrap: false, ...options });
  }

  return range;
};
+
// Entry point: dispatch on argument shapes, then expand numbers or letters.
const fill = (start, end, step, options = {}) => {
  // Single-argument form: fill(5) -> [5].
  if (end == null && isValidValue(start)) {
    return [start];
  }

  if (!isValidValue(start) || !isValidValue(end)) {
    return invalidRange(start, end, options);
  }

  // fill(a, b, fn): use fn as the value transform.
  if (typeof step === 'function') {
    return fill(start, end, 1, { transform: step });
  }

  // fill(a, b, opts): the step argument was omitted.
  if (isObject(step)) {
    return fill(start, end, 0, step);
  }

  let opts = { ...options };
  if (opts.capture === true) opts.wrap = true;
  step = step || opts.step || 1;

  if (!isNumber(step)) {
    // Invalid step: throw in strict mode, otherwise bail out / re-dispatch.
    if (step != null && !isObject(step)) return invalidStep(step, opts);
    return fill(start, end, 1, step);
  }

  // Two numeric endpoints expand to numbers; anything else to characters.
  if (isNumber(start) && isNumber(end)) {
    return fillNumbers(start, end, step, opts);
  }

  return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
};
+
+module.exports = fill;
+
+
+/***/ }),
+/* 789 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * to-regex-range
+ *
+ * Copyright (c) 2015-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+const isNumber = __webpack_require__(790);
+
// Build a regex source string matching all integers in [min, max].
const toRegexRange = (min, max, options) => {
  if (isNumber(min) === false) {
    throw new TypeError('toRegexRange: expected the first argument to be a number');
  }

  // A single endpoint (or equal endpoints) matches exactly that number.
  if (max === void 0 || min === max) {
    return String(min);
  }

  if (isNumber(max) === false) {
    throw new TypeError('toRegexRange: expected the second argument to be a number.');
  }

  // `strictZeros` (newer flag) overrides the legacy `relaxZeros` default.
  let opts = { relaxZeros: true, ...options };
  if (typeof opts.strictZeros === 'boolean') {
    opts.relaxZeros = opts.strictZeros === false;
  }

  // The cache key encodes both endpoints and every output-affecting option.
  let relax = String(opts.relaxZeros);
  let shorthand = String(opts.shorthand);
  let capture = String(opts.capture);
  let wrap = String(opts.wrap);
  let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap;

  if (toRegexRange.cache.hasOwnProperty(cacheKey)) {
    return toRegexRange.cache[cacheKey].result;
  }

  let a = Math.min(min, max);
  let b = Math.max(min, max);

  // Adjacent values: a plain alternation beats a range pattern.
  if (Math.abs(a - b) === 1) {
    let result = min + '|' + max;
    if (opts.capture) {
      return `(${result})`;
    }
    if (opts.wrap === false) {
      return result;
    }
    return `(?:${result})`;
  }

  let isPadded = hasPadding(min) || hasPadding(max);
  let state = { min, max, a, b };
  let positives = [];
  let negatives = [];

  if (isPadded) {
    state.isPadded = isPadded;
    state.maxLen = String(state.max).length;
  }

  // Negative spans are matched as positive patterns prefixed with '-'.
  if (a < 0) {
    let newMin = b < 0 ? Math.abs(b) : 1;
    negatives = splitToPatterns(newMin, Math.abs(a), state, opts);
    a = state.a = 0;
  }

  if (b >= 0) {
    positives = splitToPatterns(a, b, state, opts);
  }

  state.negatives = negatives;
  state.positives = positives;
  state.result = collatePatterns(negatives, positives, opts);

  if (opts.capture === true) {
    state.result = `(${state.result})`;
  } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) {
    state.result = `(?:${state.result})`;
  }

  toRegexRange.cache[cacheKey] = state;
  return state.result;
};
+
function collatePatterns(neg, pos, options) {
  // Patterns unique to the negatives get a '-' prefix, unique positives get
  // none, and patterns present in both get an optional '-?'.
  const onlyNegative = filterPatterns(neg, pos, '-', false, options) || [];
  const onlyPositive = filterPatterns(pos, neg, '', false, options) || [];
  const intersected = filterPatterns(neg, pos, '-?', true, options) || [];
  return onlyNegative.concat(intersected, onlyPositive).join('|');
}
+
// Break [min, max] at "round number" boundaries (runs of trailing nines going
// up from min, runs of trailing zeros coming down from max) so that each
// sub-range maps onto a single digit-class pattern.
function splitToRanges(min, max) {
  let nines = 1;
  let zeros = 1;

  let stop = countNines(min, nines);
  let stops = new Set([max]);

  while (min <= stop && stop <= max) {
    stops.add(stop);
    nines += 1;
    stop = countNines(min, nines);
  }

  stop = countZeros(max + 1, zeros) - 1;

  while (min < stop && stop <= max) {
    stops.add(stop);
    zeros += 1;
    stop = countZeros(max + 1, zeros) - 1;
  }

  // Return the boundary values in ascending numeric order.
  stops = [...stops];
  stops.sort(compare);
  return stops;
}
+
+/**
+ * Convert a range to a regex pattern
+ * @param {Number} `start`
+ * @param {Number} `stop`
 * @return {Object} Returns `{ pattern, count, digits }` for the range.
+ */
+
function rangeToPattern(start, stop, options) {
  if (start === stop) {
    return { pattern: start, count: [], digits: 0 };
  }

  // Compare the two bounds digit by digit.
  let zipped = zip(start, stop);
  let digits = zipped.length;
  let pattern = '';
  let count = 0;

  for (let i = 0; i < digits; i++) {
    let [startDigit, stopDigit] = zipped[i];

    // Identical digits are matched literally.
    if (startDigit === stopDigit) {
      pattern += startDigit;

    // Partial digit spans become a character class like [2-6].
    } else if (startDigit !== '0' || stopDigit !== '9') {
      pattern += toCharacterClass(startDigit, stopDigit, options);

    // Full 0-9 spans are counted and collapsed into one class below.
    } else {
      count++;
    }
  }

  if (count) {
    pattern += options.shorthand === true ? '\\d' : '[0-9]';
  }

  return { pattern, count: [count], digits };
}
+
// Convert [min, max] into a list of pattern tokens, one per sub-range.
function splitToPatterns(min, max, tok, options) {
  let ranges = splitToRanges(min, max);
  let tokens = [];
  let start = min;
  let prev;

  for (let i = 0; i < ranges.length; i++) {
    let max = ranges[i];
    let obj = rangeToPattern(String(start), String(max), options);
    let zeros = '';

    // Merge with the previous token when the digit pattern repeats, so the
    // quantifier grows ({2} -> {2,3}) instead of emitting a duplicate.
    if (!tok.isPadded && prev && prev.pattern === obj.pattern) {
      if (prev.count.length > 1) {
        prev.count.pop();
      }

      prev.count.push(obj.count[0]);
      prev.string = prev.pattern + toQuantifier(prev.count);
      start = max + 1;
      continue;
    }

    // Padded input gets a leading-zeros prefix (e.g. '0?').
    if (tok.isPadded) {
      zeros = padZeros(max, tok, options);
    }

    obj.string = zeros + obj.pattern + toQuantifier(obj.count);
    tokens.push(obj);
    start = max + 1;
    prev = obj;
  }

  return tokens;
}
+
function filterPatterns(arr, comparison, prefix, intersection, options) {
  const result = [];

  for (const ele of arr) {
    const { string } = ele;
    const shared = contains(comparison, 'string', string);

    // Keep either the disjoint or the shared patterns, per `intersection`.
    if (intersection ? shared : !shared) {
      result.push(prefix + string);
    }
  }
  return result;
}
+
+/**
+ * Zip strings
+ */
+
function zip(a, b) {
  // Pairwise combine by index; result length follows `a`.
  return Array.from(a, (ch, i) => [ch, b[i]]);
}
+
function compare(a, b) {
  // Standard three-way numeric comparator for Array#sort.
  if (a > b) return 1;
  if (b > a) return -1;
  return 0;
}
+
function contains(arr, key, val) {
  // True if any element's `key` property strictly equals `val`.
  for (const ele of arr) {
    if (ele[key] === val) return true;
  }
  return false;
}
+
function countNines(min, len) {
  // Replace the last `len` digits of `min` with nines (123,2 -> 199).
  const head = String(min).slice(0, -len);
  return Number(head + '9'.repeat(len));
}
+
function countZeros(integer, zeros) {
  // Round down to the nearest multiple of 10^zeros.
  const base = Math.pow(10, zeros);
  return integer - (integer % base);
}
+
function toQuantifier(digits) {
  const [start = 0, stop = ''] = digits;
  // Emit a regex quantifier only when it adds information ({1} is implied).
  if (stop || start > 1) {
    return `{${start}${stop ? ',' + stop : ''}}`;
  }
  return '';
}
+
function toCharacterClass(a, b, options) {
  // Adjacent digits need no dash: [12] instead of [1-2].
  const dash = b - a === 1 ? '' : '-';
  return `[${a}${dash}${b}]`;
}
+
function hasPadding(str) {
  // Leading zero(s) after an optional minus, followed by another digit.
  return /^-?0+\d/.test(str);
}
+
function padZeros(value, tok, options) {
  // No padding context: pass the value straight through.
  if (!tok.isPadded) {
    return value;
  }

  const diff = Math.abs(tok.maxLen - String(value).length);
  const relax = options.relaxZeros !== false;

  // Relaxed mode emits optional leading zeros; strict mode requires them.
  if (diff === 0) return '';
  if (diff === 1) return relax ? '0?' : '0';
  if (diff === 2) return relax ? '0{0,2}' : '00';
  return relax ? `0{0,${diff}}` : `0{${diff}}`;
}
+
+/**
+ * Cache
+ */
+
// Memoized results, keyed by `${min}:${max}=` + option flags (see cacheKey).
toRegexRange.cache = {};
// Reset the memo; returns the fresh empty cache object.
toRegexRange.clearCache = () => (toRegexRange.cache = {});
+
+/**
+ * Expose `toRegexRange`
+ */
+
+module.exports = toRegexRange;
+
+
+/***/ }),
+/* 790 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * is-number
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+
+
+module.exports = function(num) {
+ if (typeof num === 'number') {
+ return num - num === 0;
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isFinite ? Number.isFinite(+num) : isFinite(+num);
+ }
+ return false;
+};
+
+
+/***/ }),
+/* 791 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const fill = __webpack_require__(788);
+const stringify = __webpack_require__(785);
+const utils = __webpack_require__(786);
+
// Cross-product helper used by expand(): combine every queued prefix with
// every stashed suffix, flattening nested results.
const append = (queue = '', stash = '', enclose = false) => {
  let result = [];

  queue = [].concat(queue);
  stash = [].concat(stash);

  if (!stash.length) return queue;
  if (!queue.length) {
    // Nothing queued yet: the stash becomes the queue (optionally re-braced).
    return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
  }

  for (let item of queue) {
    if (Array.isArray(item)) {
      for (let value of item) {
        result.push(append(value, stash, enclose));
      }
    } else {
      for (let ele of stash) {
        // `enclose` restores literal braces around unexpandable segments.
        if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
        result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
      }
    }
  }
  return utils.flatten(result);
};
+
// Expand a brace AST into the full array of strings (e.g. '{a,b}c' ->
// ['ac', 'bc']), accumulating alternatives in per-brace queues.
const expand = (ast, options = {}) => {
  // Guards against pathological ranges like {1..10000000}.
  let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;

  let walk = (node, parent = {}) => {
    node.queue = [];

    // Find the nearest brace/root ancestor whose queue we append into.
    let p = parent;
    let q = parent.queue;

    while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
      p = p.parent;
      q = p.queue;
    }

    // Invalid braces (and ${...}) are emitted literally, not expanded.
    if (node.invalid || node.dollar) {
      q.push(append(q.pop(), stringify(node, options)));
      return;
    }

    // '{}' (just open + close, nothing inside) stays literal.
    if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
      q.push(append(q.pop(), ['{}']));
      return;
    }

    // Numeric/alpha ranges are expanded via fill-range.
    if (node.nodes && node.ranges > 0) {
      let args = utils.reduce(node.nodes);

      if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
        throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
      }

      let range = fill(...args, options);
      if (range.length === 0) {
        range = stringify(node, options);
      }

      q.push(append(q.pop(), range));
      node.nodes = [];
      return;
    }

    let enclose = utils.encloseBrace(node);
    let queue = node.queue;
    let block = node;

    // Find the nearest brace/root whose queue receives this node's pieces.
    while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
      block = block.parent;
      queue = block.queue;
    }

    for (let i = 0; i < node.nodes.length; i++) {
      let child = node.nodes[i];

      // Each comma starts a new alternative in the brace's queue.
      if (child.type === 'comma' && node.type === 'brace') {
        if (i === 1) queue.push('');
        queue.push('');
        continue;
      }

      // A closing brace merges this brace's alternatives into the parent.
      if (child.type === 'close') {
        q.push(append(q.pop(), queue, enclose));
        continue;
      }

      // Plain text is appended onto the current alternative.
      if (child.value && child.type !== 'open') {
        queue.push(append(queue.pop(), child.value));
        continue;
      }

      if (child.nodes) {
        walk(child, node);
      }
    }

    return queue;
  };

  return utils.flatten(walk(ast));
};
+
+module.exports = expand;
+
+
+/***/ }),
+/* 792 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const stringify = __webpack_require__(785);
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ CHAR_BACKSLASH, /* \ */
+ CHAR_BACKTICK, /* ` */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_RIGHT_SQUARE_BRACKET, /* ] */
+ CHAR_DOUBLE_QUOTE, /* " */
+ CHAR_SINGLE_QUOTE, /* ' */
+ CHAR_NO_BREAK_SPACE,
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE
+} = __webpack_require__(793);
+
+/**
+ * parse
+ */
+
+const parse = (input, options = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ let opts = options || {};
+ let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ if (input.length > max) {
+ throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
+ }
+
+ let ast = { type: 'root', input, nodes: [] };
+ let stack = [ast];
+ let block = ast;
+ let prev = ast;
+ let brackets = 0;
+ let length = input.length;
+ let index = 0;
+ let depth = 0;
+ let value;
+ let memo = {};
+
+ /**
+ * Helpers
+ */
+
+ const advance = () => input[index++];
+ const push = node => {
+ if (node.type === 'text' && prev.type === 'dot') {
+ prev.type = 'text';
+ }
+
+ if (prev && prev.type === 'text' && node.type === 'text') {
+ prev.value += node.value;
+ return;
+ }
+
+ block.nodes.push(node);
+ node.parent = block;
+ node.prev = prev;
+ prev = node;
+ return node;
+ };
+
+ push({ type: 'bos' });
+
+ while (index < length) {
+ block = stack[stack.length - 1];
+ value = advance();
+
+ /**
+ * Invalid chars
+ */
+
+ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
+ continue;
+ }
+
+ /**
+ * Escaped chars
+ */
+
+ if (value === CHAR_BACKSLASH) {
+ push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
+ continue;
+ }
+
+ /**
+ * Right square bracket (literal): ']'
+ */
+
+ if (value === CHAR_RIGHT_SQUARE_BRACKET) {
+ push({ type: 'text', value: '\\' + value });
+ continue;
+ }
+
+ /**
+ * Left square bracket: '['
+ */
+
+ if (value === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+
+ let closed = true;
+ let next;
+
+ while (index < length && (next = advance())) {
+ value += next;
+
+ if (next === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+ continue;
+ }
+
+ if (next === CHAR_BACKSLASH) {
+ value += advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ brackets--;
+
+ if (brackets === 0) {
+ break;
+ }
+ }
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === CHAR_LEFT_PARENTHESES) {
+ block = push({ type: 'paren', nodes: [] });
+ stack.push(block);
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (value === CHAR_RIGHT_PARENTHESES) {
+ if (block.type !== 'paren') {
+ push({ type: 'text', value });
+ continue;
+ }
+ block = stack.pop();
+ push({ type: 'text', value });
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Quotes: '|"|`
+ */
+
+ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
+ let open = value;
+ let next;
+
+ if (options.keepQuotes !== true) {
+ value = '';
+ }
+
+ while (index < length && (next = advance())) {
+ if (next === CHAR_BACKSLASH) {
+ value += next + advance();
+ continue;
+ }
+
+ if (next === open) {
+ if (options.keepQuotes === true) value += next;
+ break;
+ }
+
+ value += next;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Left curly brace: '{'
+ */
+
+ if (value === CHAR_LEFT_CURLY_BRACE) {
+ depth++;
+
+ let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
+ let brace = {
+ type: 'brace',
+ open: true,
+ close: false,
+ dollar,
+ depth,
+ commas: 0,
+ ranges: 0,
+ nodes: []
+ };
+
+ block = push(brace);
+ stack.push(block);
+ push({ type: 'open', value });
+ continue;
+ }
+
+ /**
+ * Right curly brace: '}'
+ */
+
+ if (value === CHAR_RIGHT_CURLY_BRACE) {
+ if (block.type !== 'brace') {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ let type = 'close';
+ block = stack.pop();
+ block.close = true;
+
+ push({ type, value });
+ depth--;
+
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Comma: ','
+ */
+
+ if (value === CHAR_COMMA && depth > 0) {
+ if (block.ranges > 0) {
+ block.ranges = 0;
+ let open = block.nodes.shift();
+ block.nodes = [open, { type: 'text', value: stringify(block) }];
+ }
+
+ push({ type: 'comma', value });
+ block.commas++;
+ continue;
+ }
+
+ /**
+ * Dot: '.'
+ */
+
+ if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
+ let siblings = block.nodes;
+
+ if (depth === 0 || siblings.length === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (prev.type === 'dot') {
+ block.range = [];
+ prev.value += value;
+ prev.type = 'range';
+
+ if (block.nodes.length !== 3 && block.nodes.length !== 5) {
+ block.invalid = true;
+ block.ranges = 0;
+ prev.type = 'text';
+ continue;
+ }
+
+ block.ranges++;
+ block.args = [];
+ continue;
+ }
+
+ if (prev.type === 'range') {
+ siblings.pop();
+
+ let before = siblings[siblings.length - 1];
+ before.value += prev.value + value;
+ prev = before;
+ block.ranges--;
+ continue;
+ }
+
+ push({ type: 'dot', value });
+ continue;
+ }
+
+ /**
+ * Text
+ */
+
+ push({ type: 'text', value });
+ }
+
+ // Mark imbalanced braces and brackets as invalid
+ do {
+ block = stack.pop();
+
+ if (block.type !== 'root') {
+ block.nodes.forEach(node => {
+ if (!node.nodes) {
+ if (node.type === 'open') node.isOpen = true;
+ if (node.type === 'close') node.isClose = true;
+ if (!node.nodes) node.type = 'text';
+ node.invalid = true;
+ }
+ });
+
+ // get the location of the block on parent.nodes (block's siblings)
+ let parent = stack[stack.length - 1];
+ let index = parent.nodes.indexOf(block);
+ // replace the (invalid) block with it's nodes
+ parent.nodes.splice(index, 1, ...block.nodes);
+ }
+ } while (stack.length > 0);
+
+ push({ type: 'eos' });
+ return ast;
+};
+
+module.exports = parse;
+
+
+/***/ }),
+/* 793 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+
+ // Digits
+ CHAR_0: '0', /* 0 */
+ CHAR_9: '9', /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 'A', /* A */
+ CHAR_LOWERCASE_A: 'a', /* a */
+ CHAR_UPPERCASE_Z: 'Z', /* Z */
+ CHAR_LOWERCASE_Z: 'z', /* z */
+
+ CHAR_LEFT_PARENTHESES: '(', /* ( */
+ CHAR_RIGHT_PARENTHESES: ')', /* ) */
+
+ CHAR_ASTERISK: '*', /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: '&', /* & */
+ CHAR_AT: '@', /* @ */
+ CHAR_BACKSLASH: '\\', /* \ */
+ CHAR_BACKTICK: '`', /* ` */
+ CHAR_CARRIAGE_RETURN: '\r', /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
+ CHAR_COLON: ':', /* : */
+ CHAR_COMMA: ',', /* , */
+ CHAR_DOLLAR: '$', /* . */
+ CHAR_DOT: '.', /* . */
+ CHAR_DOUBLE_QUOTE: '"', /* " */
+ CHAR_EQUAL: '=', /* = */
+ CHAR_EXCLAMATION_MARK: '!', /* ! */
+ CHAR_FORM_FEED: '\f', /* \f */
+ CHAR_FORWARD_SLASH: '/', /* / */
+ CHAR_HASH: '#', /* # */
+ CHAR_HYPHEN_MINUS: '-', /* - */
+ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
+ CHAR_LEFT_CURLY_BRACE: '{', /* { */
+ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
+ CHAR_LINE_FEED: '\n', /* \n */
+ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
+ CHAR_PERCENT: '%', /* % */
+ CHAR_PLUS: '+', /* + */
+ CHAR_QUESTION_MARK: '?', /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
+ CHAR_RIGHT_CURLY_BRACE: '}', /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
+ CHAR_SEMICOLON: ';', /* ; */
+ CHAR_SINGLE_QUOTE: '\'', /* ' */
+ CHAR_SPACE: ' ', /* */
+ CHAR_TAB: '\t', /* \t */
+ CHAR_UNDERSCORE: '_', /* _ */
+ CHAR_VERTICAL_LINE: '|', /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+};
+
+
+/***/ }),
+/* 794 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.merge = void 0;
+const merge2 = __webpack_require__(146);
+function merge(streams) {
+ const mergedStream = merge2(streams);
+ streams.forEach((stream) => {
+ stream.once('error', (error) => mergedStream.emit('error', error));
+ });
+ mergedStream.once('close', () => propagateCloseEventToSources(streams));
+ mergedStream.once('end', () => propagateCloseEventToSources(streams));
+ return mergedStream;
+}
+exports.merge = merge;
+function propagateCloseEventToSources(streams) {
+ streams.forEach((stream) => stream.emit('close'));
+}
+
+
+/***/ }),
+/* 795 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isEmpty = exports.isString = void 0;
+function isString(input) {
+ return typeof input === 'string';
+}
+exports.isString = isString;
+function isEmpty(input) {
+ return input === '';
+}
+exports.isEmpty = isEmpty;
+
+
+/***/ }),
+/* 796 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const stream_1 = __webpack_require__(797);
+const provider_1 = __webpack_require__(799);
+class ProviderAsync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = [];
+ return new Promise((resolve, reject) => {
+ const stream = this.api(root, task, options);
+ stream.once('error', reject);
+ stream.on('data', (entry) => entries.push(options.transform(entry)));
+ stream.once('end', () => resolve(entries));
+ });
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderAsync;
+
+
+/***/ }),
+/* 797 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const stream_1 = __webpack_require__(138);
+const fsStat = __webpack_require__(195);
+const fsWalk = __webpack_require__(200);
+const reader_1 = __webpack_require__(798);
+class ReaderStream extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkStream = fsWalk.walkStream;
+ this._stat = fsStat.stat;
+ }
+ dynamic(root, options) {
+ return this._walkStream(root, options);
+ }
+ static(patterns, options) {
+ const filepaths = patterns.map(this._getFullEntryPath, this);
+ const stream = new stream_1.PassThrough({ objectMode: true });
+ stream._write = (index, _enc, done) => {
+ return this._getEntry(filepaths[index], patterns[index], options)
+ .then((entry) => {
+ if (entry !== null && options.entryFilter(entry)) {
+ stream.push(entry);
+ }
+ if (index === filepaths.length - 1) {
+ stream.end();
+ }
+ done();
+ })
+ .catch(done);
+ };
+ for (let i = 0; i < filepaths.length; i++) {
+ stream.write(i);
+ }
+ return stream;
+ }
+ _getEntry(filepath, pattern, options) {
+ return this._getStat(filepath)
+ .then((stats) => this._makeEntry(stats, pattern))
+ .catch((error) => {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ });
+ }
+ _getStat(filepath) {
+ return new Promise((resolve, reject) => {
+ this._stat(filepath, this._fsStatSettings, (error, stats) => {
+ return error === null ? resolve(stats) : reject(error);
+ });
+ });
+ }
+}
+exports.default = ReaderStream;
+
+
+/***/ }),
+/* 798 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const path = __webpack_require__(4);
+const fsStat = __webpack_require__(195);
+const utils = __webpack_require__(777);
+class Reader {
+ constructor(_settings) {
+ this._settings = _settings;
+ this._fsStatSettings = new fsStat.Settings({
+ followSymbolicLink: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks
+ });
+ }
+ _getFullEntryPath(filepath) {
+ return path.resolve(this._settings.cwd, filepath);
+ }
+ _makeEntry(stats, pattern) {
+ const entry = {
+ name: pattern,
+ path: pattern,
+ dirent: utils.fs.createDirentFromStats(pattern, stats)
+ };
+ if (this._settings.stats) {
+ entry.stats = stats;
+ }
+ return entry;
+ }
+ _isFatalError(error) {
+ return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors;
+ }
+}
+exports.default = Reader;
+
+
+/***/ }),
+/* 799 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const path = __webpack_require__(4);
+const deep_1 = __webpack_require__(800);
+const entry_1 = __webpack_require__(803);
+const error_1 = __webpack_require__(804);
+const entry_2 = __webpack_require__(805);
+class Provider {
+ constructor(_settings) {
+ this._settings = _settings;
+ this.errorFilter = new error_1.default(this._settings);
+ this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions());
+ this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions());
+ this.entryTransformer = new entry_2.default(this._settings);
+ }
+ _getRootDirectory(task) {
+ return path.resolve(this._settings.cwd, task.base);
+ }
+ _getReaderOptions(task) {
+ const basePath = task.base === '.' ? '' : task.base;
+ return {
+ basePath,
+ pathSegmentSeparator: '/',
+ concurrency: this._settings.concurrency,
+ deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative),
+ entryFilter: this.entryFilter.getFilter(task.positive, task.negative),
+ errorFilter: this.errorFilter.getFilter(),
+ followSymbolicLinks: this._settings.followSymbolicLinks,
+ fs: this._settings.fs,
+ stats: this._settings.stats,
+ throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink,
+ transform: this.entryTransformer.getTransformer()
+ };
+ }
+ _getMicromatchOptions() {
+ return {
+ dot: this._settings.dot,
+ matchBase: this._settings.baseNameMatch,
+ nobrace: !this._settings.braceExpansion,
+ nocase: !this._settings.caseSensitiveMatch,
+ noext: !this._settings.extglob,
+ noglobstar: !this._settings.globstar,
+ posix: true,
+ strictSlashes: false
+ };
+ }
+}
+exports.default = Provider;
+
+
+/***/ }),
+/* 800 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils = __webpack_require__(777);
+const partial_1 = __webpack_require__(801);
+class DeepFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ }
+ getFilter(basePath, positive, negative) {
+ const matcher = this._getMatcher(positive);
+ const negativeRe = this._getNegativePatternsRe(negative);
+ return (entry) => this._filter(basePath, entry, matcher, negativeRe);
+ }
+ _getMatcher(patterns) {
+ return new partial_1.default(patterns, this._settings, this._micromatchOptions);
+ }
+ _getNegativePatternsRe(patterns) {
+ const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
+ return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
+ }
+ _filter(basePath, entry, matcher, negativeRe) {
+ if (this._isSkippedByDeep(basePath, entry.path)) {
+ return false;
+ }
+ if (this._isSkippedSymbolicLink(entry)) {
+ return false;
+ }
+ const filepath = utils.path.removeLeadingDotSegment(entry.path);
+ if (this._isSkippedByPositivePatterns(filepath, matcher)) {
+ return false;
+ }
+ return this._isSkippedByNegativePatterns(filepath, negativeRe);
+ }
+ _isSkippedByDeep(basePath, entryPath) {
+ /**
+ * Avoid unnecessary depth calculations when it doesn't matter.
+ */
+ if (this._settings.deep === Infinity) {
+ return false;
+ }
+ return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
+ }
+ _getEntryLevel(basePath, entryPath) {
+ const entryPathDepth = entryPath.split('/').length;
+ if (basePath === '') {
+ return entryPathDepth;
+ }
+ const basePathDepth = basePath.split('/').length;
+ return entryPathDepth - basePathDepth;
+ }
+ _isSkippedSymbolicLink(entry) {
+ return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
+ }
+ _isSkippedByPositivePatterns(entryPath, matcher) {
+ return !this._settings.baseNameMatch && !matcher.match(entryPath);
+ }
+ _isSkippedByNegativePatterns(entryPath, patternsRe) {
+ return !utils.pattern.matchAny(entryPath, patternsRe);
+ }
+}
+exports.default = DeepFilter;
+
+
+/***/ }),
+/* 801 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const matcher_1 = __webpack_require__(802);
+class PartialMatcher extends matcher_1.default {
+ match(filepath) {
+ const parts = filepath.split('/');
+ const levels = parts.length;
+ const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels);
+ for (const pattern of patterns) {
+ const section = pattern.sections[0];
+ /**
+ * In this case, the pattern has a globstar and we must read all directories unconditionally,
+ * but only if the level has reached the end of the first group.
+ *
+ * fixtures/{a,b}/**
+ * ^ true/false ^ always true
+ */
+ if (!pattern.complete && levels > section.length) {
+ return true;
+ }
+ const match = parts.every((part, index) => {
+ const segment = pattern.segments[index];
+ if (segment.dynamic && segment.patternRe.test(part)) {
+ return true;
+ }
+ if (!segment.dynamic && segment.pattern === part) {
+ return true;
+ }
+ return false;
+ });
+ if (match) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+exports.default = PartialMatcher;
+
+
+/***/ }),
+/* 802 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils = __webpack_require__(777);
+class Matcher {
+ constructor(_patterns, _settings, _micromatchOptions) {
+ this._patterns = _patterns;
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this._storage = [];
+ this._fillStorage();
+ }
+ _fillStorage() {
+ /**
+ * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level).
+ * So, before expand patterns with brace expansion into separated patterns.
+ */
+ const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns);
+ for (const pattern of patterns) {
+ const segments = this._getPatternSegments(pattern);
+ const sections = this._splitSegmentsIntoSections(segments);
+ this._storage.push({
+ complete: sections.length <= 1,
+ pattern,
+ segments,
+ sections
+ });
+ }
+ }
+ _getPatternSegments(pattern) {
+ const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions);
+ return parts.map((part) => {
+ const dynamic = utils.pattern.isDynamicPattern(part, this._settings);
+ if (!dynamic) {
+ return {
+ dynamic: false,
+ pattern: part
+ };
+ }
+ return {
+ dynamic: true,
+ pattern: part,
+ patternRe: utils.pattern.makeRe(part, this._micromatchOptions)
+ };
+ });
+ }
+ _splitSegmentsIntoSections(segments) {
+ return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern));
+ }
+}
+exports.default = Matcher;
+
+
+/***/ }),
+/* 803 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils = __webpack_require__(777);
+class EntryFilter {
+ constructor(_settings, _micromatchOptions) {
+ this._settings = _settings;
+ this._micromatchOptions = _micromatchOptions;
+ this.index = new Map();
+ }
+ getFilter(positive, negative) {
+ const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions);
+ const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions);
+ return (entry) => this._filter(entry, positiveRe, negativeRe);
+ }
+ _filter(entry, positiveRe, negativeRe) {
+ if (this._settings.unique && this._isDuplicateEntry(entry)) {
+ return false;
+ }
+ if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) {
+ return false;
+ }
+ if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) {
+ return false;
+ }
+ const filepath = this._settings.baseNameMatch ? entry.name : entry.path;
+ const isMatched = this._isMatchToPatterns(filepath, positiveRe) && !this._isMatchToPatterns(entry.path, negativeRe);
+ if (this._settings.unique && isMatched) {
+ this._createIndexRecord(entry);
+ }
+ return isMatched;
+ }
+ _isDuplicateEntry(entry) {
+ return this.index.has(entry.path);
+ }
+ _createIndexRecord(entry) {
+ this.index.set(entry.path, undefined);
+ }
+ _onlyFileFilter(entry) {
+ return this._settings.onlyFiles && !entry.dirent.isFile();
+ }
+ _onlyDirectoryFilter(entry) {
+ return this._settings.onlyDirectories && !entry.dirent.isDirectory();
+ }
+ _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) {
+ if (!this._settings.absolute) {
+ return false;
+ }
+ const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath);
+ return utils.pattern.matchAny(fullpath, patternsRe);
+ }
+ _isMatchToPatterns(entryPath, patternsRe) {
+ const filepath = utils.path.removeLeadingDotSegment(entryPath);
+ return utils.pattern.matchAny(filepath, patternsRe);
+ }
+}
+exports.default = EntryFilter;
- for (let i = 0; i < arguments.length; i++) {
- results[i] = arguments[i];
- }
- resolve(results);
- } else {
- resolve(result);
- }
- });
- }
+/***/ }),
+/* 804 */
+/***/ (function(module, exports, __webpack_require__) {
- fn.apply(this, args);
- });
-};
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils = __webpack_require__(777);
+class ErrorFilter {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getFilter() {
+ return (error) => this._isNonFatalError(error);
+ }
+ _isNonFatalError(error) {
+ return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors;
+ }
+}
+exports.default = ErrorFilter;
-module.exports = (obj, opts) => {
- opts = Object.assign({
- exclude: [/.+(Sync|Stream)$/],
- errorFirst: true,
- promiseModule: Promise
- }, opts);
- const filter = key => {
- const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key);
- return opts.include ? opts.include.some(match) : !opts.exclude.some(match);
- };
+/***/ }),
+/* 805 */
+/***/ (function(module, exports, __webpack_require__) {
- let ret;
- if (typeof obj === 'function') {
- ret = function () {
- if (opts.excludeMain) {
- return obj.apply(this, arguments);
- }
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils = __webpack_require__(777);
+class EntryTransformer {
+ constructor(_settings) {
+ this._settings = _settings;
+ }
+ getTransformer() {
+ return (entry) => this._transform(entry);
+ }
+ _transform(entry) {
+ let filepath = entry.path;
+ if (this._settings.absolute) {
+ filepath = utils.path.makeAbsolute(this._settings.cwd, filepath);
+ filepath = utils.path.unixify(filepath);
+ }
+ if (this._settings.markDirectories && entry.dirent.isDirectory()) {
+ filepath += '/';
+ }
+ if (!this._settings.objectMode) {
+ return filepath;
+ }
+ return Object.assign(Object.assign({}, entry), { path: filepath });
+ }
+}
+exports.default = EntryTransformer;
- return processFn(obj, opts).apply(this, arguments);
- };
- } else {
- ret = Object.create(Object.getPrototypeOf(obj));
- }
- for (const key in obj) { // eslint-disable-line guard-for-in
- const x = obj[key];
- ret[key] = typeof x === 'function' && filter(key) ? processFn(x, opts) : x;
- }
+/***/ }),
+/* 806 */
+/***/ (function(module, exports, __webpack_require__) {
- return ret;
-};
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const stream_1 = __webpack_require__(138);
+const stream_2 = __webpack_require__(797);
+const provider_1 = __webpack_require__(799);
+class ProviderStream extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new stream_2.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const source = this.api(root, task, options);
+ const destination = new stream_1.Readable({ objectMode: true, read: () => { } });
+ source
+ .once('error', (error) => destination.emit('error', error))
+ .on('data', (entry) => destination.emit('data', options.transform(entry)))
+ .once('end', () => destination.emit('end'));
+ destination
+ .once('close', () => source.destroy());
+ return destination;
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderStream;
/***/ }),
-/* 780 */
+/* 807 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const sync_1 = __webpack_require__(808);
+const provider_1 = __webpack_require__(799);
+class ProviderSync extends provider_1.default {
+ constructor() {
+ super(...arguments);
+ this._reader = new sync_1.default(this._settings);
+ }
+ read(task) {
+ const root = this._getRootDirectory(task);
+ const options = this._getReaderOptions(task);
+ const entries = this.api(root, task, options);
+ return entries.map(options.transform);
+ }
+ api(root, task, options) {
+ if (task.dynamic) {
+ return this._reader.dynamic(root, options);
+ }
+ return this._reader.static(task.patterns, options);
+ }
+}
+exports.default = ProviderSync;
+
+
+/***/ }),
+/* 808 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const fsStat = __webpack_require__(195);
+const fsWalk = __webpack_require__(200);
+const reader_1 = __webpack_require__(798);
+class ReaderSync extends reader_1.default {
+ constructor() {
+ super(...arguments);
+ this._walkSync = fsWalk.walkSync;
+ this._statSync = fsStat.statSync;
+ }
+ dynamic(root, options) {
+ return this._walkSync(root, options);
+ }
+ static(patterns, options) {
+ const entries = [];
+ for (const pattern of patterns) {
+ const filepath = this._getFullEntryPath(pattern);
+ const entry = this._getEntry(filepath, pattern, options);
+ if (entry === null || !options.entryFilter(entry)) {
+ continue;
+ }
+ entries.push(entry);
+ }
+ return entries;
+ }
+ _getEntry(filepath, pattern, options) {
+ try {
+ const stats = this._getStat(filepath);
+ return this._makeEntry(stats, pattern);
+ }
+ catch (error) {
+ if (options.errorFilter(error)) {
+ return null;
+ }
+ throw error;
+ }
+ }
+ _getStat(filepath) {
+ return this._statSync(filepath, this._fsStatSettings);
+ }
+}
+exports.default = ReaderSync;
+
+
+/***/ }),
+/* 809 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
+const fs = __webpack_require__(134);
+const os = __webpack_require__(121);
+/**
+ * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero.
+ * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107
+ */
+const CPU_COUNT = Math.max(os.cpus().length, 1);
+exports.DEFAULT_FILE_SYSTEM_ADAPTER = {
+ lstat: fs.lstat,
+ lstatSync: fs.lstatSync,
+ stat: fs.stat,
+ statSync: fs.statSync,
+ readdir: fs.readdir,
+ readdirSync: fs.readdirSync
+};
+class Settings {
+ constructor(_options = {}) {
+ this._options = _options;
+ this.absolute = this._getValue(this._options.absolute, false);
+ this.baseNameMatch = this._getValue(this._options.baseNameMatch, false);
+ this.braceExpansion = this._getValue(this._options.braceExpansion, true);
+ this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true);
+ this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT);
+ this.cwd = this._getValue(this._options.cwd, process.cwd());
+ this.deep = this._getValue(this._options.deep, Infinity);
+ this.dot = this._getValue(this._options.dot, false);
+ this.extglob = this._getValue(this._options.extglob, true);
+ this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true);
+ this.fs = this._getFileSystemMethods(this._options.fs);
+ this.globstar = this._getValue(this._options.globstar, true);
+ this.ignore = this._getValue(this._options.ignore, []);
+ this.markDirectories = this._getValue(this._options.markDirectories, false);
+ this.objectMode = this._getValue(this._options.objectMode, false);
+ this.onlyDirectories = this._getValue(this._options.onlyDirectories, false);
+ this.onlyFiles = this._getValue(this._options.onlyFiles, true);
+ this.stats = this._getValue(this._options.stats, false);
+ this.suppressErrors = this._getValue(this._options.suppressErrors, false);
+ this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false);
+ this.unique = this._getValue(this._options.unique, true);
+ if (this.onlyDirectories) {
+ this.onlyFiles = false;
+ }
+ if (this.stats) {
+ this.objectMode = true;
+ }
+ }
+ _getValue(option, value) {
+ return option === undefined ? value : option;
+ }
+ _getFileSystemMethods(methods = {}) {
+ return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods);
+ }
+}
+exports.default = Settings;
+
+
+/***/ }),
+/* 810 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
+const {promisify} = __webpack_require__(112);
const fs = __webpack_require__(134);
const path = __webpack_require__(4);
-const fastGlob = __webpack_require__(572);
-const gitIgnore = __webpack_require__(781);
-const pify = __webpack_require__(779);
-const slash = __webpack_require__(782);
+const fastGlob = __webpack_require__(775);
+const gitIgnore = __webpack_require__(235);
+const slash = __webpack_require__(236);
const DEFAULT_IGNORE = [
'**/node_modules/**',
- '**/bower_components/**',
'**/flow-typed/**',
'**/coverage/**',
'**/.git'
];
-const readFileP = pify(fs.readFile);
+const readFileP = promisify(fs.readFile);
const mapGitIgnorePatternTo = base => ignore => {
if (ignore.startsWith('!')) {
- return '!' + path.posix.join(base, ignore.substr(1));
+ return '!' + path.posix.join(base, ignore.slice(1));
}
return path.posix.join(base, ignore);
};
-const parseGitIgnore = (content, opts) => {
- const base = slash(path.relative(opts.cwd, path.dirname(opts.fileName)));
+const parseGitIgnore = (content, options) => {
+ const base = slash(path.relative(options.cwd, path.dirname(options.fileName)));
return content
.split(/\r?\n/)
.filter(Boolean)
- .filter(l => l.charAt(0) !== '#')
+ .filter(line => !line.startsWith('#'))
.map(mapGitIgnorePatternTo(base));
};
const reduceIgnore = files => {
- return files.reduce((ignores, file) => {
+ const ignores = gitIgnore();
+ for (const file of files) {
ignores.add(parseGitIgnore(file.content, {
cwd: file.cwd,
fileName: file.filePath
}));
- return ignores;
- }, gitIgnore());
+ }
+
+ return ignores;
+};
+
+const ensureAbsolutePathForCwd = (cwd, p) => {
+ cwd = slash(cwd);
+ if (path.isAbsolute(p)) {
+ if (slash(p).startsWith(cwd)) {
+ return p;
+ }
+
+ throw new Error(`Path ${p} is not in cwd ${cwd}`);
+ }
+
+ return path.join(cwd, p);
};
const getIsIgnoredPredecate = (ignores, cwd) => {
- return p => ignores.ignores(slash(path.relative(cwd, p)));
+ return p => ignores.ignores(slash(path.relative(cwd, ensureAbsolutePathForCwd(cwd, p.path || p))));
};
-const getFile = (file, cwd) => {
+const getFile = async (file, cwd) => {
const filePath = path.join(cwd, file);
- return readFileP(filePath, 'utf8')
- .then(content => ({
- content,
- cwd,
- filePath
- }));
+ const content = await readFileP(filePath, 'utf8');
+
+ return {
+ cwd,
+ filePath,
+ content
+ };
};
const getFileSync = (file, cwd) => {
@@ -90649,490 +93669,103 @@ const getFileSync = (file, cwd) => {
const content = fs.readFileSync(filePath, 'utf8');
return {
- content,
cwd,
- filePath
+ filePath,
+ content
};
};
-const normalizeOpts = opts => {
- opts = opts || {};
- const ignore = opts.ignore || [];
- const cwd = opts.cwd || process.cwd();
+const normalizeOptions = ({
+ ignore = [],
+ cwd = slash(process.cwd())
+} = {}) => {
return {ignore, cwd};
};
-module.exports = o => {
- const opts = normalizeOpts(o);
-
- return fastGlob('**/.gitignore', {ignore: DEFAULT_IGNORE.concat(opts.ignore), cwd: opts.cwd})
- .then(paths => Promise.all(paths.map(file => getFile(file, opts.cwd))))
- .then(files => reduceIgnore(files))
- .then(ignores => getIsIgnoredPredecate(ignores, opts.cwd));
-};
+module.exports = async options => {
+ options = normalizeOptions(options);
-module.exports.sync = o => {
- const opts = normalizeOpts(o);
+ const paths = await fastGlob('**/.gitignore', {
+ ignore: DEFAULT_IGNORE.concat(options.ignore),
+ cwd: options.cwd
+ });
- const paths = fastGlob.sync('**/.gitignore', {ignore: DEFAULT_IGNORE.concat(opts.ignore), cwd: opts.cwd});
- const files = paths.map(file => getFileSync(file, opts.cwd));
+ const files = await Promise.all(paths.map(file => getFile(file, options.cwd)));
const ignores = reduceIgnore(files);
- return getIsIgnoredPredecate(ignores, opts.cwd);
-};
-
-
-/***/ }),
-/* 781 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-module.exports = function () {
- return new IgnoreBase();
+ return getIsIgnoredPredecate(ignores, options.cwd);
};
-// A simple implementation of make-array
-function make_array(subject) {
- return Array.isArray(subject) ? subject : [subject];
-}
-
-var REGEX_BLANK_LINE = /^\s+$/;
-var REGEX_LEADING_EXCAPED_EXCLAMATION = /^\\\!/;
-var REGEX_LEADING_EXCAPED_HASH = /^\\#/;
-var SLASH = '/';
-var KEY_IGNORE = typeof Symbol !== 'undefined' ? Symbol.for('node-ignore')
-/* istanbul ignore next */
-: 'node-ignore';
-
-var IgnoreBase = function () {
- function IgnoreBase() {
- _classCallCheck(this, IgnoreBase);
-
- this._rules = [];
- this[KEY_IGNORE] = true;
- this._initCache();
- }
-
- _createClass(IgnoreBase, [{
- key: '_initCache',
- value: function _initCache() {
- this._cache = {};
- }
-
- // @param {Array.|string|Ignore} pattern
-
- }, {
- key: 'add',
- value: function add(pattern) {
- this._added = false;
-
- if (typeof pattern === 'string') {
- pattern = pattern.split(/\r?\n/g);
- }
-
- make_array(pattern).forEach(this._addPattern, this);
-
- // Some rules have just added to the ignore,
- // making the behavior changed.
- if (this._added) {
- this._initCache();
- }
-
- return this;
- }
-
- // legacy
-
- }, {
- key: 'addPattern',
- value: function addPattern(pattern) {
- return this.add(pattern);
- }
- }, {
- key: '_addPattern',
- value: function _addPattern(pattern) {
- // #32
- if (pattern && pattern[KEY_IGNORE]) {
- this._rules = this._rules.concat(pattern._rules);
- this._added = true;
- return;
- }
-
- if (this._checkPattern(pattern)) {
- var rule = this._createRule(pattern);
- this._added = true;
- this._rules.push(rule);
- }
- }
- }, {
- key: '_checkPattern',
- value: function _checkPattern(pattern) {
- // > A blank line matches no files, so it can serve as a separator for readability.
- return pattern && typeof pattern === 'string' && !REGEX_BLANK_LINE.test(pattern)
-
- // > A line starting with # serves as a comment.
- && pattern.indexOf('#') !== 0;
- }
- }, {
- key: 'filter',
- value: function filter(paths) {
- var _this = this;
-
- return make_array(paths).filter(function (path) {
- return _this._filter(path);
- });
- }
- }, {
- key: 'createFilter',
- value: function createFilter() {
- var _this2 = this;
-
- return function (path) {
- return _this2._filter(path);
- };
- }
- }, {
- key: 'ignores',
- value: function ignores(path) {
- return !this._filter(path);
- }
- }, {
- key: '_createRule',
- value: function _createRule(pattern) {
- var origin = pattern;
- var negative = false;
-
- // > An optional prefix "!" which negates the pattern;
- if (pattern.indexOf('!') === 0) {
- negative = true;
- pattern = pattern.substr(1);
- }
-
- pattern = pattern
- // > Put a backslash ("\") in front of the first "!" for patterns that begin with a literal "!", for example, `"\!important!.txt"`.
- .replace(REGEX_LEADING_EXCAPED_EXCLAMATION, '!')
- // > Put a backslash ("\") in front of the first hash for patterns that begin with a hash.
- .replace(REGEX_LEADING_EXCAPED_HASH, '#');
-
- var regex = make_regex(pattern, negative);
-
- return {
- origin: origin,
- pattern: pattern,
- negative: negative,
- regex: regex
- };
- }
-
- // @returns `Boolean` true if the `path` is NOT ignored
-
- }, {
- key: '_filter',
- value: function _filter(path, slices) {
- if (!path) {
- return false;
- }
-
- if (path in this._cache) {
- return this._cache[path];
- }
-
- if (!slices) {
- // path/to/a.js
- // ['path', 'to', 'a.js']
- slices = path.split(SLASH);
- }
-
- slices.pop();
-
- return this._cache[path] = slices.length
- // > It is not possible to re-include a file if a parent directory of that file is excluded.
- // If the path contains a parent directory, check the parent first
- ? this._filter(slices.join(SLASH) + SLASH, slices) && this._test(path)
-
- // Or only test the path
- : this._test(path);
- }
-
- // @returns {Boolean} true if a file is NOT ignored
-
- }, {
- key: '_test',
- value: function _test(path) {
- // Explicitly define variable type by setting matched to `0`
- var matched = 0;
-
- this._rules.forEach(function (rule) {
- // if matched = true, then we only test negative rules
- // if matched = false, then we test non-negative rules
- if (!(matched ^ rule.negative)) {
- matched = rule.negative ^ rule.regex.test(path);
- }
- });
-
- return !matched;
- }
- }]);
-
- return IgnoreBase;
-}();
-
-// > If the pattern ends with a slash,
-// > it is removed for the purpose of the following description,
-// > but it would only find a match with a directory.
-// > In other words, foo/ will match a directory foo and paths underneath it,
-// > but will not match a regular file or a symbolic link foo
-// > (this is consistent with the way how pathspec works in general in Git).
-// '`foo/`' will not match regular file '`foo`' or symbolic link '`foo`'
-// -> ignore-rules will not deal with it, because it costs extra `fs.stat` call
-// you could use option `mark: true` with `glob`
-
-// '`foo/`' should not continue with the '`..`'
-
-
-var DEFAULT_REPLACER_PREFIX = [
-
-// > Trailing spaces are ignored unless they are quoted with backslash ("\")
-[
-// (a\ ) -> (a )
-// (a ) -> (a)
-// (a \ ) -> (a )
-/\\?\s+$/, function (match) {
- return match.indexOf('\\') === 0 ? ' ' : '';
-}],
-
-// replace (\ ) with ' '
-[/\\\s/g, function () {
- return ' ';
-}],
-
-// Escape metacharacters
-// which is written down by users but means special for regular expressions.
-
-// > There are 12 characters with special meanings:
-// > - the backslash \,
-// > - the caret ^,
-// > - the dollar sign $,
-// > - the period or dot .,
-// > - the vertical bar or pipe symbol |,
-// > - the question mark ?,
-// > - the asterisk or star *,
-// > - the plus sign +,
-// > - the opening parenthesis (,
-// > - the closing parenthesis ),
-// > - and the opening square bracket [,
-// > - the opening curly brace {,
-// > These special characters are often called "metacharacters".
-[/[\\\^$.|?*+()\[{]/g, function (match) {
- return '\\' + match;
-}],
-
-// leading slash
-[
-
-// > A leading slash matches the beginning of the pathname.
-// > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c".
-// A leading slash matches the beginning of the pathname
-/^\//, function () {
- return '^';
-}],
-
-// replace special metacharacter slash after the leading slash
-[/\//g, function () {
- return '\\/';
-}], [
-// > A leading "**" followed by a slash means match in all directories.
-// > For example, "**/foo" matches file or directory "foo" anywhere,
-// > the same as pattern "foo".
-// > "**/foo/bar" matches file or directory "bar" anywhere that is directly under directory "foo".
-// Notice that the '*'s have been replaced as '\\*'
-/^\^*\\\*\\\*\\\//,
-
-// '**/foo' <-> 'foo'
-function () {
- return '^(?:.*\\/)?';
-}]];
-
-var DEFAULT_REPLACER_SUFFIX = [
-// starting
-[
-// there will be no leading '/' (which has been replaced by section "leading slash")
-// If starts with '**', adding a '^' to the regular expression also works
-/^(?=[^\^])/, function () {
- return !/\/(?!$)/.test(this)
- // > If the pattern does not contain a slash /, Git treats it as a shell glob pattern
- // Actually, if there is only a trailing slash, git also treats it as a shell glob pattern
- ? '(?:^|\\/)'
-
- // > Otherwise, Git treats the pattern as a shell glob suitable for consumption by fnmatch(3)
- : '^';
-}],
-
-// two globstars
-[
-// Use lookahead assertions so that we could match more than one `'/**'`
-/\\\/\\\*\\\*(?=\\\/|$)/g,
-
-// Zero, one or several directories
-// should not use '*', or it will be replaced by the next replacer
-
-// Check if it is not the last `'/**'`
-function (match, index, str) {
- return index + 6 < str.length
-
- // case: /**/
- // > A slash followed by two consecutive asterisks then a slash matches zero or more directories.
- // > For example, "a/**/b" matches "a/b", "a/x/b", "a/x/y/b" and so on.
- // '/**/'
- ? '(?:\\/[^\\/]+)*'
+module.exports.sync = options => {
+ options = normalizeOptions(options);
- // case: /**
- // > A trailing `"/**"` matches everything inside.
+ const paths = fastGlob.sync('**/.gitignore', {
+ ignore: DEFAULT_IGNORE.concat(options.ignore),
+ cwd: options.cwd
+ });
- // #21: everything inside but it should not include the current folder
- : '\\/.+';
-}],
+ const files = paths.map(file => getFileSync(file, options.cwd));
+ const ignores = reduceIgnore(files);
-// intermediate wildcards
-[
-// Never replace escaped '*'
-// ignore rule '\*' will match the path '*'
-
-// 'abc.*/' -> go
-// 'abc.*' -> skip this rule
-/(^|[^\\]+)\\\*(?=.+)/g,
-
-// '*.js' matches '.js'
-// '*.js' doesn't match 'abc'
-function (match, p1) {
- return p1 + '[^\\/]*';
-}],
-
-// trailing wildcard
-[/(\^|\\\/)?\\\*$/, function (match, p1) {
- return (p1
- // '\^':
- // '/*' does not match ''
- // '/*' does not match everything
-
- // '\\\/':
- // 'abc/*' does not match 'abc/'
- ? p1 + '[^/]+'
-
- // 'a*' matches 'a'
- // 'a*' matches 'aa'
- : '[^/]*') + '(?=$|\\/$)';
-}], [
-// unescape
-/\\\\\\/g, function () {
- return '\\';
-}]];
-
-var POSITIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [
-
-// 'f'
-// matches
-// - /f(end)
-// - /f/
-// - (start)f(end)
-// - (start)f/
-// doesn't match
-// - oof
-// - foo
-// pseudo:
-// -> (^|/)f(/|$)
-
-// ending
-[
-// 'js' will not match 'js.'
-// 'ab' will not match 'abc'
-/(?:[^*\/])$/,
-
-// 'js*' will not match 'a.js'
-// 'js/' will not match 'a.js'
-// 'js' will match 'a.js' and 'a.js/'
-function (match) {
- return match + '(?=$|\\/)';
-}]], DEFAULT_REPLACER_SUFFIX);
-
-var NEGATIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [
-
-// #24, #38
-// The MISSING rule of [gitignore docs](https://git-scm.com/docs/gitignore)
-// A negative pattern without a trailing wildcard should not
-// re-include the things inside that directory.
-
-// eg:
-// ['node_modules/*', '!node_modules']
-// should ignore `node_modules/a.js`
-[/(?:[^*])$/, function (match) {
- return match + '(?=$|\\/$)';
-}]], DEFAULT_REPLACER_SUFFIX);
+ return getIsIgnoredPredecate(ignores, options.cwd);
+};
-// A simple cache, because an ignore rule only has only one certain meaning
-var cache = {};
-// @param {pattern}
-function make_regex(pattern, negative) {
- var r = cache[pattern];
- if (r) {
- return r;
- }
+/***/ }),
+/* 811 */
+/***/ (function(module, exports, __webpack_require__) {
- var replacers = negative ? NEGATIVE_REPLACERS : POSITIVE_REPLACERS;
+"use strict";
- var source = replacers.reduce(function (prev, current) {
- return prev.replace(current[0], current[1].bind(pattern));
- }, pattern);
+const {Transform} = __webpack_require__(138);
- return cache[pattern] = new RegExp(source, 'i');
+class ObjectTransform extends Transform {
+ constructor() {
+ super({
+ objectMode: true
+ });
+ }
}
-// Windows
-// --------------------------------------------------------------
-/* istanbul ignore if */
-if (
-// Detect `process` so that it can run in browsers.
-typeof process !== 'undefined' && (process.env && process.env.IGNORE_TEST_WIN32 || process.platform === 'win32')) {
+class FilterStream extends ObjectTransform {
+ constructor(filter) {
+ super();
+ this._filter = filter;
+ }
- var filter = IgnoreBase.prototype._filter;
- var make_posix = function make_posix(str) {
- return (/^\\\\\?\\/.test(str) || /[^\x00-\x80]+/.test(str) ? str : str.replace(/\\/g, '/')
- );
- };
+ _transform(data, encoding, callback) {
+ if (this._filter(data)) {
+ this.push(data);
+ }
- IgnoreBase.prototype._filter = function (path, slices) {
- path = make_posix(path);
- return filter.call(this, path, slices);
- };
+ callback();
+ }
}
+class UniqueStream extends ObjectTransform {
+ constructor() {
+ super();
+ this._pushed = new Set();
+ }
-/***/ }),
-/* 782 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-module.exports = function (str) {
- var isExtendedLengthPath = /^\\\\\?\\/.test(str);
- var hasNonAscii = /[^\x00-\x80]+/.test(str);
+ _transform(data, encoding, callback) {
+ if (!this._pushed.has(data)) {
+ this.push(data);
+ this._pushed.add(data);
+ }
- if (isExtendedLengthPath || hasNonAscii) {
- return str;
+ callback();
}
+}
- return str.replace(/\\/g, '/');
+module.exports = {
+ FilterStream,
+ UniqueStream
};
/***/ }),
-/* 783 */
+/* 812 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
diff --git a/packages/kbn-pm/package.json b/packages/kbn-pm/package.json
index 0fa79fff6e0d9..050aadd402d8a 100644
--- a/packages/kbn-pm/package.json
+++ b/packages/kbn-pm/package.json
@@ -9,7 +9,7 @@
},
"scripts": {
"build": "../../node_modules/.bin/webpack",
- "kbn:watch": "../../node_modules/.bin/webpack --watch --progress",
+ "kbn:watch": "../../node_modules/.bin/webpack --watch",
"prettier": "../../node_modules/.bin/prettier --write './src/**/*.ts'"
},
"devDependencies": {
diff --git a/packages/kbn-pm/src/cli.ts b/packages/kbn-pm/src/cli.ts
index 6d033b4121d99..f6ea4d7124ab2 100644
--- a/packages/kbn-pm/src/cli.ts
+++ b/packages/kbn-pm/src/cli.ts
@@ -75,7 +75,7 @@ export async function run(argv: string[]) {
},
default: {
cache: true,
- 'force-install': true,
+ 'force-install': false,
offline: false,
validate: true,
},
diff --git a/packages/kbn-pm/src/commands/bootstrap.ts b/packages/kbn-pm/src/commands/bootstrap.ts
index 4a6a43ff2d91f..b383a52be63f5 100644
--- a/packages/kbn-pm/src/commands/bootstrap.ts
+++ b/packages/kbn-pm/src/commands/bootstrap.ts
@@ -17,7 +17,12 @@ import { getAllChecksums } from '../utils/project_checksums';
import { BootstrapCacheFile } from '../utils/bootstrap_cache_file';
import { readYarnLock } from '../utils/yarn_lock';
import { validateDependencies } from '../utils/validate_dependencies';
-import { ensureYarnIntegrityFileExists, installBazelTools, runBazel } from '../utils/bazel';
+import {
+ ensureYarnIntegrityFileExists,
+ installBazelTools,
+ runBazel,
+ yarnIntegrityFileExists,
+} from '../utils/bazel';
export const BootstrapCommand: ICommand = {
description: 'Install dependencies and crosslink projects',
@@ -33,7 +38,13 @@ export const BootstrapCommand: ICommand = {
const batchedNonBazelProjects = topologicallyBatchProjects(nonBazelProjectsOnly, projectGraph);
const kibanaProjectPath = projects.get('kibana')?.path || '';
const runOffline = options?.offline === true;
- const forceInstall = !!options && options['force-install'] === true;
+
+  // Force install is set when the flag is passed or
+  // when the `.yarn-integrity` file is not found, as
+  // indicated by the return value of yarnIntegrityFileExists.
+ const forceInstall =
+ (!!options && options['force-install'] === true) ||
+ !(await yarnIntegrityFileExists(resolve(kibanaProjectPath, 'node_modules')));
// Ensure we have a `node_modules/.yarn-integrity` file as we depend on it
// for bazel to know it has to re-install the node_modules after a reset or a clean
@@ -51,9 +62,6 @@ export const BootstrapCommand: ICommand = {
// That way non bazel projects could depend on bazel projects but not the other way around
// That is only intended during the migration process while non Bazel projects are not removed at all.
//
- // Until we have our first package build within Bazel we will always need to directly call the yarn rule
- // otherwise yarn install won't trigger as we don't have any npm dependency within Bazel
- // TODO: Change CLI default in order to not force install as soon as we have our first Bazel package being built
if (forceInstall) {
await runBazel(['run', '@nodejs//:yarn'], runOffline);
}
diff --git a/packages/kbn-pm/src/production/build_bazel_production_projects.ts b/packages/kbn-pm/src/production/build_bazel_production_projects.ts
index 313622d44276a..07c0b651f5ad1 100644
--- a/packages/kbn-pm/src/production/build_bazel_production_projects.ts
+++ b/packages/kbn-pm/src/production/build_bazel_production_projects.ts
@@ -37,7 +37,7 @@ export async function buildBazelProductionProjects({
log.info(`Preparing Bazel projects production build for [${projectNames.join(', ')}]`);
await runBazel(['build', '//packages:build']);
- log.info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete}]`);
+ log.info(`All Bazel projects production builds for [${projectNames.join(', ')}] are complete`);
for (const project of projects.values()) {
await copyToBuild(project, kibanaRoot, buildRoot);
@@ -62,7 +62,7 @@ async function copyToBuild(project: Project, kibanaRoot: string, buildRoot: stri
const buildProjectPath = resolve(buildRoot, relativeProjectPath);
await copy(['**/*'], buildProjectPath, {
- cwd: join(kibanaRoot, 'bazel', 'bin', 'packages', basename(buildProjectPath), 'npm_module'),
+ cwd: join(kibanaRoot, 'bazel-bin', 'packages', basename(buildProjectPath), 'npm_module'),
dot: true,
onlyFiles: true,
parents: true,
@@ -88,12 +88,12 @@ async function applyCorrectPermissions(project: Project, kibanaRoot: string, bui
const buildProjectPath = resolve(buildRoot, relativeProjectPath);
const allPluginPaths = await globby([`**/*`], {
onlyFiles: false,
- cwd: join(kibanaRoot, 'bazel', 'bin', 'packages', basename(buildProjectPath), 'npm_module'),
+ cwd: buildProjectPath,
dot: true,
});
for (const pluginPath of allPluginPaths) {
- const resolvedPluginPath = resolve(buildRoot, pluginPath);
+ const resolvedPluginPath = resolve(buildProjectPath, pluginPath);
if (await isFile(resolvedPluginPath)) {
await chmod(resolvedPluginPath, 0o644);
}
diff --git a/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap b/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
index c037c2a4976b4..8aeae04c265cf 100644
--- a/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
+++ b/packages/kbn-pm/src/utils/__snapshots__/link_project_executables.test.ts.snap
@@ -11,6 +11,7 @@ Object {
"mkdirp": Array [],
"readFile": Array [],
"rmdirp": Array [],
+ "tryRealpath": Array [],
"unlink": Array [],
"writeFile": Array [],
}
@@ -27,6 +28,7 @@ Object {
"mkdirp": Array [],
"readFile": Array [],
"rmdirp": Array [],
+ "tryRealpath": Array [],
"unlink": Array [],
"writeFile": Array [],
}
diff --git a/packages/kbn-pm/src/utils/bazel/index.ts b/packages/kbn-pm/src/utils/bazel/index.ts
index 0b755ba2446a0..a3651039161b8 100644
--- a/packages/kbn-pm/src/utils/bazel/index.ts
+++ b/packages/kbn-pm/src/utils/bazel/index.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-export * from './ensure_yarn_integrity_exists';
+export * from './yarn_integrity';
export * from './get_cache_folders';
export * from './install_tools';
export * from './run';
diff --git a/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts b/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts
new file mode 100644
index 0000000000000..3a72f5ca080b8
--- /dev/null
+++ b/packages/kbn-pm/src/utils/bazel/yarn_integrity.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { join } from 'path';
+import { isFile, mkdirp, tryRealpath, writeFile } from '../fs';
+
+export async function yarnIntegrityFileExists(nodeModulesPath: string) {
+ try {
+ const nodeModulesRealPath = await tryRealpath(nodeModulesPath);
+ const yarnIntegrityFilePath = join(nodeModulesRealPath, '.yarn-integrity');
+
+ // check if the file already exists
+ if (await isFile(yarnIntegrityFilePath)) {
+ return true;
+ }
+ } catch {
+ // no-op
+ }
+
+ return false;
+}
+
+export async function ensureYarnIntegrityFileExists(nodeModulesPath: string) {
+ try {
+ const nodeModulesRealPath = await tryRealpath(nodeModulesPath);
+ const yarnIntegrityFilePath = join(nodeModulesRealPath, '.yarn-integrity');
+
+ // ensure node_modules folder is created
+ await mkdirp(nodeModulesRealPath);
+
+  // write a blank file in case it doesn't exist
+ await writeFile(yarnIntegrityFilePath, '', { flag: 'wx' });
+ } catch {
+ // no-op
+ }
+}
diff --git a/packages/kbn-pm/src/utils/fs.ts b/packages/kbn-pm/src/utils/fs.ts
index dd961b8321446..5739d319e08e7 100644
--- a/packages/kbn-pm/src/utils/fs.ts
+++ b/packages/kbn-pm/src/utils/fs.ts
@@ -20,6 +20,7 @@ const symlink = promisify(fs.symlink);
export const chmod = promisify(fs.chmod);
const cmdShim = promisify(cmdShimCb);
const mkdir = promisify(fs.mkdir);
+const realpathNative = promisify(fs.realpath.native);
export const mkdirp = async (path: string) => await mkdir(path, { recursive: true });
export const rmdirp = async (path: string) => await del(path, { force: true });
export const unlink = promisify(fs.unlink);
@@ -96,3 +97,17 @@ async function forceCreate(src: string, dest: string, type: string) {
await symlink(src, dest, type);
}
+
+export async function tryRealpath(path: string): Promise<string> {
+ let calculatedPath = path;
+
+ try {
+ calculatedPath = await realpathNative(path);
+ } catch (error) {
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+
+ return calculatedPath;
+}
diff --git a/packages/kbn-pm/src/utils/package_json.ts b/packages/kbn-pm/src/utils/package_json.ts
index b405b544ab800..e635c2566e65a 100644
--- a/packages/kbn-pm/src/utils/package_json.ts
+++ b/packages/kbn-pm/src/utils/package_json.ts
@@ -35,7 +35,7 @@ export const createProductionPackageJson = (pkgJson: IPackageJson) => ({
export const isLinkDependency = (depVersion: string) => depVersion.startsWith('link:');
export const isBazelPackageDependency = (depVersion: string) =>
- depVersion.startsWith('link:bazel/bin/');
+ depVersion.startsWith('link:bazel-bin/');
/**
* Replaces `link:` dependencies with `file:` dependencies. When installing
@@ -46,7 +46,7 @@ export const isBazelPackageDependency = (depVersion: string) =>
* will then _copy_ the `file:` dependencies into `node_modules` instead of
* symlinking like we do in development.
*
- * Additionally it also taken care of replacing `link:bazel/bin/` with
+ * Additionally, it also takes care of replacing `link:bazel-bin/` with
* `file:` so we can also support the copy of the Bazel packages dist already into
* build/packages to be copied into the node_modules
*/
@@ -61,7 +61,7 @@ export function transformDependencies(dependencies: IPackageDependencies = {}) {
}
if (isBazelPackageDependency(depVersion)) {
- newDeps[name] = depVersion.replace('link:bazel/bin/', 'file:');
+ newDeps[name] = depVersion.replace('link:bazel-bin/', 'file:').replace('/npm_module', '');
continue;
}
diff --git a/packages/kbn-pm/src/utils/project.ts b/packages/kbn-pm/src/utils/project.ts
index 797a9a36df78f..5d2a0547b2577 100644
--- a/packages/kbn-pm/src/utils/project.ts
+++ b/packages/kbn-pm/src/utils/project.ts
@@ -92,7 +92,10 @@ export class Project {
public ensureValidProjectDependency(project: Project) {
const relativePathToProject = normalizePath(Path.relative(this.path, project.path));
const relativePathToProjectIfBazelPkg = normalizePath(
- Path.relative(this.path, `bazel/bin/packages/${Path.basename(project.path)}`)
+ Path.relative(
+ this.path,
+ `${__dirname}/../../../bazel-bin/packages/${Path.basename(project.path)}/npm_module`
+ )
);
const versionInPackageJson = this.allDependencies[project.name];
@@ -100,7 +103,7 @@ export class Project {
const expectedVersionInPackageJsonIfBazelPkg = `link:${relativePathToProjectIfBazelPkg}`;
// TODO: after introduce bazel to build all the packages and completely remove the support for kbn packages
- // do not allow child projects to hold dependencies
+ // do not allow child projects to hold dependencies, unless they are meant to be published externally
if (
versionInPackageJson === expectedVersionInPackageJson ||
versionInPackageJson === expectedVersionInPackageJsonIfBazelPkg
diff --git a/packages/kbn-pm/tsconfig.json b/packages/kbn-pm/tsconfig.json
index 175c4701f2e5b..558cff6556ff6 100644
--- a/packages/kbn-pm/tsconfig.json
+++ b/packages/kbn-pm/tsconfig.json
@@ -1,16 +1,14 @@
{
"extends": "../../tsconfig.base.json",
- "include": [
- "./index.d.ts",
- "./src/**/*.ts",
- "./dist/*.d.ts"
- ],
- "exclude": [],
"compilerOptions": {
"tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-pm",
"types": [
"jest",
"node"
]
- }
+ },
+ "include": [
+ "./index.d.ts",
+ "./src/**/*.ts"
+ ]
}
diff --git a/packages/kbn-server-http-tools/package.json b/packages/kbn-server-http-tools/package.json
index a8f99689f3335..6c65a0dd6e475 100644
--- a/packages/kbn-server-http-tools/package.json
+++ b/packages/kbn-server-http-tools/package.json
@@ -1,6 +1,7 @@
{
"name": "@kbn/server-http-tools",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": true,
diff --git a/packages/kbn-server-http-tools/tsconfig.json b/packages/kbn-server-http-tools/tsconfig.json
index ec84b963aed70..2f3e4626a04ce 100644
--- a/packages/kbn-server-http-tools/tsconfig.json
+++ b/packages/kbn-server-http-tools/tsconfig.json
@@ -1,14 +1,14 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "outDir": "target",
+ "incremental": false,
+ "outDir": "./target",
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-server-http-tools/src"
},
"include": [
"src/**/*"
- ],
- "dependencies": {
- "@kbn/std": "link:../kbn-std"
- }
+ ]
}
diff --git a/packages/kbn-std/tsconfig.json b/packages/kbn-std/tsconfig.json
index fd186a6e43d1c..d2ed46dcad6f8 100644
--- a/packages/kbn-std/tsconfig.json
+++ b/packages/kbn-std/tsconfig.json
@@ -1,13 +1,23 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"declarationDir": "./target",
"outDir": "./target",
"stripInternal": true,
+ "declaration": true,
"declarationMap": true,
- "types": ["jest", "node"]
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-std/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
- "include": ["./src/**/*.ts"],
- "exclude": ["target"]
+ "include": [
+ "./src/**/*.ts"
+ ],
+ "exclude": [
+ "**/__fixture__/**/*"
+ ]
}
diff --git a/packages/kbn-storybook/package.json b/packages/kbn-storybook/package.json
index 75801948bb20b..fdc7359aab58d 100644
--- a/packages/kbn-storybook/package.json
+++ b/packages/kbn-storybook/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"kibana": {
"devOnly": true
},
diff --git a/packages/kbn-storybook/tsconfig.json b/packages/kbn-storybook/tsconfig.json
index 814a3963c9f49..db10d4630ff9c 100644
--- a/packages/kbn-storybook/tsconfig.json
+++ b/packages/kbn-storybook/tsconfig.json
@@ -1,9 +1,19 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
- "declaration": true,
+ "incremental": false,
"outDir": "target",
- "skipLibCheck": true
+ "skipLibCheck": true,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-storybook",
+ "types": [
+ "node"
+ ]
},
- "include": ["*.ts", "lib/*.ts"]
+ "include": [
+ "*.ts",
+ "lib/*.ts"
+ ]
}
diff --git a/packages/kbn-telemetry-tools/package.json b/packages/kbn-telemetry-tools/package.json
index 28d67c73eb49e..2ae1f596a1c68 100644
--- a/packages/kbn-telemetry-tools/package.json
+++ b/packages/kbn-telemetry-tools/package.json
@@ -3,6 +3,7 @@
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"private": true,
"kibana": {
"devOnly": true
diff --git a/packages/kbn-telemetry-tools/tsconfig.json b/packages/kbn-telemetry-tools/tsconfig.json
index 98512053a5c92..39946fe9907e5 100644
--- a/packages/kbn-telemetry-tools/tsconfig.json
+++ b/packages/kbn-telemetry-tools/tsconfig.json
@@ -1,7 +1,12 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "tsBuildInfoFile": "../../build/tsbuildinfo/packages/kbn-telemetry-tools"
+ "incremental": false,
+ "outDir": "./target",
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-telemetry-tools/src"
},
"include": [
"src/**/*",
diff --git a/packages/kbn-test/index.d.ts b/packages/kbn-test/index.d.ts
deleted file mode 100644
index 004ac67f4b0c4..0000000000000
--- a/packages/kbn-test/index.d.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-export * from './src/index';
diff --git a/packages/kbn-test/jest-preset.js b/packages/kbn-test/jest-preset.js
index 4949d6d1f9fad..225f93d487823 100644
--- a/packages/kbn-test/jest-preset.js
+++ b/packages/kbn-test/jest-preset.js
@@ -107,4 +107,7 @@ module.exports = {
'!**/*.d.ts',
'!**/index.{js,ts}',
],
+
+ // A custom resolver to preserve symlinks by default
+  resolver: '<rootDir>/packages/kbn-test/target/jest/setup/preserve_symlinks_resolver.js',
};
diff --git a/packages/kbn-test/package.json b/packages/kbn-test/package.json
index 0f0ba8d79a1c1..a2dc8f84cfb51 100644
--- a/packages/kbn-test/package.json
+++ b/packages/kbn-test/package.json
@@ -4,6 +4,7 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target/index.js",
+ "types": "./target/types/index.d.ts",
"scripts": {
"build": "node scripts/build",
"kbn:bootstrap": "node scripts/build --source-maps",
diff --git a/packages/kbn-test/src/functional_test_runner/fake_mocha_types.d.ts b/packages/kbn-test/src/functional_test_runner/fake_mocha_types.ts
similarity index 100%
rename from packages/kbn-test/src/functional_test_runner/fake_mocha_types.d.ts
rename to packages/kbn-test/src/functional_test_runner/fake_mocha_types.ts
diff --git a/packages/kbn-test/src/functional_test_runner/lib/index.ts b/packages/kbn-test/src/functional_test_runner/lib/index.ts
index eef9e833fe5a8..1cb1e58a265d5 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/index.ts
+++ b/packages/kbn-test/src/functional_test_runner/lib/index.ts
@@ -10,6 +10,7 @@ export { Lifecycle } from './lifecycle';
export { LifecyclePhase } from './lifecycle_phase';
export { readConfigFile, Config } from './config';
export { readProviderSpec, ProviderCollection } from './providers';
+// @internal
export { runTests, setupMocha } from './mocha';
export { FailureMetadata } from './failure_metadata';
export * from './docker_servers';
diff --git a/packages/kbn-test/src/functional_test_runner/lib/mocha/index.ts b/packages/kbn-test/src/functional_test_runner/lib/mocha/index.ts
index de55df34fa88b..4f27980db61d1 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/mocha/index.ts
+++ b/packages/kbn-test/src/functional_test_runner/lib/mocha/index.ts
@@ -7,5 +7,7 @@
*/
// @ts-ignore will be replaced shortly
+// @internal
export { setupMocha } from './setup_mocha';
+// @internal
export { runTests } from './run_tests';
diff --git a/packages/kbn-test/src/index.ts b/packages/kbn-test/src/index.ts
index 919dc8b4477f3..ef167bc5d7819 100644
--- a/packages/kbn-test/src/index.ts
+++ b/packages/kbn-test/src/index.ts
@@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
+// @internal
import {
runTestsCli,
processRunTestsCliOptions,
@@ -14,27 +15,34 @@ import {
// @ts-ignore not typed yet
} from './functional_tests/cli';
+// @internal
export { runTestsCli, processRunTestsCliOptions, startServersCli, processStartServersCliOptions };
// @ts-ignore not typed yet
+// @internal
export { runTests, startServers } from './functional_tests/tasks';
// @ts-ignore not typed yet
+// @internal
export { KIBANA_ROOT } from './functional_tests/lib/paths';
// @ts-ignore not typed yet
+// @internal
export { esTestConfig, createLegacyEsTestCluster } from './legacy_es';
// @ts-ignore not typed yet
+// @internal
export { kbnTestConfig, kibanaServerTestUser, kibanaTestUser, adminTestUser } from './kbn';
// @ts-ignore not typed yet
+// @internal
export { setupUsers, DEFAULT_SUPERUSER_PASS } from './functional_tests/lib/auth';
export { readConfigFile } from './functional_test_runner/lib/config/read_config_file';
export { runFtrCli } from './functional_test_runner/cli';
+// @internal
export { setupJUnitReportGeneration, escapeCdata } from './mocha';
export { runFailedTestsReporterCli } from './failed_tests_reporter';
diff --git a/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js b/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js
new file mode 100644
index 0000000000000..711bf2c9aa189
--- /dev/null
+++ b/packages/kbn-test/src/jest/setup/preserve_symlinks_resolver.js
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+// Inspired by a discussion found at https://github.com/facebook/jest/issues/5356 as Jest currently doesn't
+// offer any other option to preserve symlinks.
+//
+// It would become unnecessary once https://github.com/facebook/jest/pull/9976 gets merged.
+
+const resolve = require('resolve');
+
+module.exports = (request, options) => {
+ try {
+ return resolve.sync(request, {
+ basedir: options.basedir,
+ extensions: options.extensions,
+ preserveSymlinks: true,
+ });
+ } catch (error) {
+ if (error.code === 'MODULE_NOT_FOUND') {
+ return options.defaultResolver(request, options);
+ }
+
+ throw error;
+ }
+};
diff --git a/packages/kbn-test/src/kbn_archiver_cli.ts b/packages/kbn-test/src/kbn_archiver_cli.ts
index 98bfa6eaa4046..04581a8354668 100644
--- a/packages/kbn-test/src/kbn_archiver_cli.ts
+++ b/packages/kbn-test/src/kbn_archiver_cli.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import Url from 'url';
import { RunWithCommands, createFlagError, Flags } from '@kbn/dev-utils';
-import { KbnClient } from '@kbn/test';
+import { KbnClient } from './kbn_client';
import { readConfigFile } from './functional_test_runner';
diff --git a/packages/kbn-test/src/mocha/index.ts b/packages/kbn-test/src/mocha/index.ts
index 1cff5202f33b9..4ada51c7ae013 100644
--- a/packages/kbn-test/src/mocha/index.ts
+++ b/packages/kbn-test/src/mocha/index.ts
@@ -7,8 +7,11 @@
*/
// @ts-ignore not typed yet
+// @internal
export { setupJUnitReportGeneration } from './junit_report_generation';
// @ts-ignore not typed yet
+// @internal
export { recordLog, snapshotLogsForRunnable } from './log_cache';
// @ts-ignore not typed yet
+// @internal
export { escapeCdata } from './xml';
diff --git a/packages/kbn-test/tsconfig.json b/packages/kbn-test/tsconfig.json
index 6d94389f82caa..8536ad7e0c12f 100644
--- a/packages/kbn-test/tsconfig.json
+++ b/packages/kbn-test/tsconfig.json
@@ -1,22 +1,26 @@
{
"extends": "../../tsconfig.base.json",
- "include": [
- "types/**/*",
- "src/**/*",
- "index.d.ts"
- ],
- "exclude": [
- "types/ftr_globals/**/*"
- ],
"compilerOptions": {
- "declaration": true,
- "emitDeclarationOnly": true,
+ "incremental": false,
"outDir": "./target/types",
+ "stripInternal": true,
+ "emitDeclarationOnly": true,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../../packages/kbn-test/src",
"types": [
"jest",
"node"
],
- "stripInternal": true,
- "declarationMap": true
- }
+ },
+ "include": [
+ "types/**/*",
+ "src/**/*",
+ "index.d.ts"
+ ],
+ "exclude": [
+ "types/ftr_globals/**/*",
+ "**/__fixtures__/**/*"
+ ]
}
diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js
index ede617908fd3d..f14c793d22a09 100644
--- a/packages/kbn-ui-shared-deps/entry.js
+++ b/packages/kbn-ui-shared-deps/entry.js
@@ -47,3 +47,5 @@ export const LodashFp = require('lodash/fp');
// runtime deps which don't need to be copied across all bundles
export const TsLib = require('tslib');
export const KbnAnalytics = require('@kbn/analytics');
+export const KbnStd = require('@kbn/std');
+export const SaferLodashSet = require('@elastic/safer-lodash-set');
diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js
index d1217dd8db0d4..0542bc89ff9e4 100644
--- a/packages/kbn-ui-shared-deps/index.js
+++ b/packages/kbn-ui-shared-deps/index.js
@@ -58,5 +58,7 @@ exports.externals = {
*/
tslib: '__kbnSharedDeps__.TsLib',
'@kbn/analytics': '__kbnSharedDeps__.KbnAnalytics',
+ '@kbn/std': '__kbnSharedDeps__.KbnStd',
+ '@elastic/safer-lodash-set': '__kbnSharedDeps__.SaferLodashSet',
};
exports.publicPathLoader = require.resolve('./public_path_loader');
diff --git a/packages/kbn-ui-shared-deps/webpack.config.js b/packages/kbn-ui-shared-deps/webpack.config.js
index 135884fbf13e7..76e6843bea2f8 100644
--- a/packages/kbn-ui-shared-deps/webpack.config.js
+++ b/packages/kbn-ui-shared-deps/webpack.config.js
@@ -177,22 +177,22 @@ exports.getWebpackConfig = ({ dev = false } = {}) => ({
compiler.hooks.emit.tap('MetricsPlugin', (compilation) => {
const metrics = [
{
- group: '@kbn/ui-shared-deps asset size',
- id: 'kbn-ui-shared-deps.js',
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-js',
value: compilation.assets['kbn-ui-shared-deps.js'].size(),
},
{
- group: '@kbn/ui-shared-deps asset size',
- id: 'kbn-ui-shared-deps.@elastic.js',
- value: compilation.assets['kbn-ui-shared-deps.@elastic.js'].size(),
- },
- {
- group: '@kbn/ui-shared-deps asset size',
- id: 'css',
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-css',
value:
compilation.assets['kbn-ui-shared-deps.css'].size() +
compilation.assets['kbn-ui-shared-deps.v7.light.css'].size(),
},
+ {
+ group: 'page load bundle size',
+ id: 'kbnUiSharedDeps-elastic',
+ value: compilation.assets['kbn-ui-shared-deps.@elastic.js'].size(),
+ },
];
compilation.emitAsset(
diff --git a/packages/kbn-utility-types/package.json b/packages/kbn-utility-types/package.json
index 33419ee0f1ec4..ad7dcc6b906c3 100644
--- a/packages/kbn-utility-types/package.json
+++ b/packages/kbn-utility-types/package.json
@@ -3,7 +3,7 @@
"version": "1.0.0",
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
- "main": "target",
+ "main": "target/index.js",
"types": "target/index.d.ts",
"kibana": {
"devOnly": false
diff --git a/packages/kbn-utility-types/tsconfig.json b/packages/kbn-utility-types/tsconfig.json
index c2d206526e6f4..cfa782e5d38d2 100644
--- a/packages/kbn-utility-types/tsconfig.json
+++ b/packages/kbn-utility-types/tsconfig.json
@@ -1,18 +1,22 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
- "declaration": true,
- "declarationDir": "./target",
+ "incremental": false,
"outDir": "./target",
+ "declarationDir": "./target",
"stripInternal": true,
+ "declaration": true,
"declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-utility-types",
"types": [
- "node",
- "jest"
+ "jest",
+ "node"
]
},
- "include": ["index.ts", "jest/**/*", "test-d/**/*"],
- "exclude": [
- "target"
+ "include": [
+ "index.ts",
+ "jest/**/*",
+ "test-d/**/*"
]
}
diff --git a/packages/kbn-utils/package.json b/packages/kbn-utils/package.json
index 902eef82736fe..b6bb7759c40ef 100644
--- a/packages/kbn-utils/package.json
+++ b/packages/kbn-utils/package.json
@@ -1,6 +1,7 @@
{
"name": "@kbn/utils",
"main": "./target/index.js",
+ "types": "./target/index.d.ts",
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": true,
diff --git a/packages/kbn-utils/src/package_json/index.ts b/packages/kbn-utils/src/package_json/index.ts
index 40ce353780749..d9304cee2ca38 100644
--- a/packages/kbn-utils/src/package_json/index.ts
+++ b/packages/kbn-utils/src/package_json/index.ts
@@ -14,3 +14,7 @@ export const kibanaPackageJson = {
__dirname: dirname(resolve(REPO_ROOT, 'package.json')),
...require(resolve(REPO_ROOT, 'package.json')),
};
+
+export const isKibanaDistributable = () => {
+ return kibanaPackageJson.build && kibanaPackageJson.build.distributable === true;
+};
diff --git a/packages/kbn-utils/tsconfig.json b/packages/kbn-utils/tsconfig.json
index e9dd6313e6f79..e6c83767c30dc 100644
--- a/packages/kbn-utils/tsconfig.json
+++ b/packages/kbn-utils/tsconfig.json
@@ -1,9 +1,16 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
+ "incremental": false,
"outDir": "target",
"declaration": true,
- "declarationMap": true
+ "declarationMap": true,
+ "sourceMap": true,
+ "sourceRoot": "../../../../packages/kbn-utils/src",
+ "types": [
+ "jest",
+ "node"
+ ]
},
"include": [
"src/**/*"
diff --git a/scripts/build_kibana_platform_plugins.js b/scripts/build_kibana_platform_plugins.js
index fa630e0bb1808..9038d08364400 100644
--- a/scripts/build_kibana_platform_plugins.js
+++ b/scripts/build_kibana_platform_plugins.js
@@ -7,6 +7,7 @@
*/
require('../src/setup_node_env/ensure_node_preserve_symlinks');
+require('source-map-support/register');
require('@kbn/optimizer').runKbnOptimizerCli({
defaultLimitsPath: require.resolve('../packages/kbn-optimizer/limits.yml'),
});
diff --git a/src/cli/cli.js b/src/cli/cli.js
index 4540bf4a3f93c..d3bff4f492a80 100644
--- a/src/cli/cli.js
+++ b/src/cli/cli.js
@@ -7,7 +7,7 @@
*/
import _ from 'lodash';
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Command from './command';
import serveCommand from './serve/serve';
diff --git a/src/cli/serve/serve.js b/src/cli/serve/serve.js
index 86b4ac53841f7..ad83965efde33 100644
--- a/src/cli/serve/serve.js
+++ b/src/cli/serve/serve.js
@@ -12,8 +12,7 @@ import { statSync } from 'fs';
import { resolve } from 'path';
import url from 'url';
-import { getConfigPath, fromRoot } from '@kbn/utils';
-import { IS_KIBANA_DISTRIBUTABLE } from '../../legacy/utils';
+import { getConfigPath, fromRoot, isKibanaDistributable } from '@kbn/utils';
import { readKeystore } from '../keystore/read_keystore';
function canRequire(path) {
@@ -65,9 +64,10 @@ function applyConfigOverrides(rawConfig, opts, extraCliOptions) {
delete rawConfig.xpack;
}
- if (opts.dev) {
- set('env', 'development');
+ // only used to set cliArgs.envName, we don't want to inject that into the config
+ delete extraCliOptions.env;
+ if (opts.dev) {
if (!has('elasticsearch.username')) {
set('elasticsearch.username', 'kibana_system');
}
@@ -184,7 +184,7 @@ export default function (program) {
.option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector)
.option('--optimize', 'Deprecated, running the optimizer is no longer required');
- if (!IS_KIBANA_DISTRIBUTABLE) {
+ if (!isKibanaDistributable()) {
command
.option('--oss', 'Start Kibana without X-Pack')
.option(
@@ -228,6 +228,7 @@ export default function (program) {
// no longer supported
quiet: !!opts.quiet,
silent: !!opts.silent,
+ verbose: !!opts.verbose,
watch: !!opts.watch,
runExamples: !!opts.runExamples,
// We want to run without base path when the `--run-examples` flag is given so that we can use local
diff --git a/src/cli_encryption_keys/cli_encryption_keys.js b/src/cli_encryption_keys/cli_encryption_keys.js
index e922b9354d291..acee81aabb706 100644
--- a/src/cli_encryption_keys/cli_encryption_keys.js
+++ b/src/cli_encryption_keys/cli_encryption_keys.js
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
+
import Command from '../cli/command';
import { EncryptionConfig } from './encryption_config';
diff --git a/src/cli_keystore/cli_keystore.js b/src/cli_keystore/cli_keystore.js
index b325f685766aa..9f44e5d56e9d2 100644
--- a/src/cli_keystore/cli_keystore.js
+++ b/src/cli_keystore/cli_keystore.js
@@ -7,8 +7,8 @@
*/
import _ from 'lodash';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
-import { pkg } from '../core/server/utils';
import Command from '../cli/command';
import { Keystore } from '../cli/keystore';
diff --git a/src/cli_plugin/cli.js b/src/cli_plugin/cli.js
index 24ccba6a23397..5ef142192c509 100644
--- a/src/cli_plugin/cli.js
+++ b/src/cli_plugin/cli.js
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { pkg } from '../core/server/utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Command from '../cli/command';
import { listCommand } from './list';
import { installCommand } from './install';
diff --git a/src/cli_plugin/install/index.js b/src/cli_plugin/install/index.js
index c028facc28e2b..2683dd41d2bb3 100644
--- a/src/cli_plugin/install/index.js
+++ b/src/cli_plugin/install/index.js
@@ -6,8 +6,7 @@
* Side Public License, v 1.
*/
-import { getConfigPath } from '@kbn/utils';
-import { pkg } from '../../core/server/utils';
+import { getConfigPath, kibanaPackageJson as pkg } from '@kbn/utils';
import { install } from './install';
import { Logger } from '../lib/logger';
import { parse, parseMilliseconds } from './settings';
diff --git a/src/cli_plugin/install/kibana.js b/src/cli_plugin/install/kibana.js
index 29cb8df7401b6..1de157b951d03 100644
--- a/src/cli_plugin/install/kibana.js
+++ b/src/cli_plugin/install/kibana.js
@@ -9,7 +9,7 @@
import path from 'path';
import { statSync } from 'fs';
-import { versionSatisfies, cleanVersion } from '../../legacy/utils/version';
+import { versionSatisfies, cleanVersion } from './utils/version';
export function existingInstall(settings, logger) {
try {
diff --git a/src/cli_plugin/install/settings.js b/src/cli_plugin/install/settings.js
index 94473cc12aab2..e1536d66e0529 100644
--- a/src/cli_plugin/install/settings.js
+++ b/src/cli_plugin/install/settings.js
@@ -7,10 +7,8 @@
*/
import { resolve } from 'path';
-
import expiry from 'expiry-js';
-
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
function generateUrls({ version, plugin }) {
return [
diff --git a/src/cli_plugin/install/settings.test.js b/src/cli_plugin/install/settings.test.js
index f06fd7eca7902..c7985763524ed 100644
--- a/src/cli_plugin/install/settings.test.js
+++ b/src/cli_plugin/install/settings.test.js
@@ -7,8 +7,8 @@
*/
import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { fromRoot } from '@kbn/utils';
-import { fromRoot } from '../../core/server/utils';
import { parseMilliseconds, parse } from './settings';
const SECOND = 1000;
diff --git a/src/legacy/utils/version.js b/src/cli_plugin/install/utils/version.js
similarity index 100%
rename from src/legacy/utils/version.js
rename to src/cli_plugin/install/utils/version.js
diff --git a/src/cli_plugin/list/index.js b/src/cli_plugin/list/index.js
index ce55b939b8a4c..02d1ed19f8445 100644
--- a/src/cli_plugin/list/index.js
+++ b/src/cli_plugin/list/index.js
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
import { list } from './list';
import { Logger } from '../lib/logger';
import { logWarnings } from '../lib/log_warnings';
diff --git a/src/cli_plugin/remove/settings.js b/src/cli_plugin/remove/settings.js
index 333fa7cb0f2e1..2381770ee0a65 100644
--- a/src/cli_plugin/remove/settings.js
+++ b/src/cli_plugin/remove/settings.js
@@ -7,8 +7,7 @@
*/
import { resolve } from 'path';
-
-import { fromRoot } from '../../core/server/utils';
+import { fromRoot } from '@kbn/utils';
export function parse(command, options) {
const settings = {
diff --git a/src/core/public/core_system.ts b/src/core/public/core_system.ts
index 278bbe469e862..b68a7ced118d2 100644
--- a/src/core/public/core_system.ts
+++ b/src/core/public/core_system.ts
@@ -28,6 +28,7 @@ import { DocLinksService } from './doc_links';
import { RenderingService } from './rendering';
import { SavedObjectsService } from './saved_objects';
import { IntegrationsService } from './integrations';
+import { DeprecationsService } from './deprecations';
import { CoreApp } from './core_app';
import type { InternalApplicationSetup, InternalApplicationStart } from './application/types';
@@ -82,7 +83,7 @@ export class CoreSystem {
private readonly rendering: RenderingService;
private readonly integrations: IntegrationsService;
private readonly coreApp: CoreApp;
-
+ private readonly deprecations: DeprecationsService;
private readonly rootDomElement: HTMLElement;
private readonly coreContext: CoreContext;
private fatalErrorsSetup: FatalErrorsSetup | null = null;
@@ -113,6 +114,7 @@ export class CoreSystem {
this.rendering = new RenderingService();
this.application = new ApplicationService();
this.integrations = new IntegrationsService();
+ this.deprecations = new DeprecationsService();
this.coreContext = { coreId: Symbol('core'), env: injectedMetadata.env };
this.plugins = new PluginsService(this.coreContext, injectedMetadata.uiPlugins);
@@ -195,6 +197,7 @@ export class CoreSystem {
injectedMetadata,
notifications,
});
+ const deprecations = this.deprecations.start({ http });
this.coreApp.start({ application, http, notifications, uiSettings });
@@ -210,6 +213,7 @@ export class CoreSystem {
overlays,
uiSettings,
fatalErrors,
+ deprecations,
};
await this.plugins.start(core);
@@ -252,6 +256,7 @@ export class CoreSystem {
this.chrome.stop();
this.i18n.stop();
this.application.stop();
+ this.deprecations.stop();
this.rootDomElement.textContent = '';
}
}
diff --git a/src/core/public/deprecations/deprecations_client.test.ts b/src/core/public/deprecations/deprecations_client.test.ts
new file mode 100644
index 0000000000000..2f52f7b4af195
--- /dev/null
+++ b/src/core/public/deprecations/deprecations_client.test.ts
@@ -0,0 +1,187 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { httpServiceMock } from '../http/http_service.mock';
+import { DeprecationsClient } from './deprecations_client';
+import type { DomainDeprecationDetails } from '../../server/types';
+
+describe('DeprecationsClient', () => {
+ const http = httpServiceMock.createSetupContract();
+ const mockDeprecations = [
+ { domainId: 'testPluginId-1' },
+ { domainId: 'testPluginId-1' },
+ { domainId: 'testPluginId-2' },
+ ];
+
+ beforeEach(() => {
+ http.fetch.mockReset();
+ http.fetch.mockResolvedValue({ deprecations: mockDeprecations });
+ });
+
+ describe('getAllDeprecations', () => {
+ it('returns a list of deprecations', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const deprecations = await deprecationsClient.getAllDeprecations();
+ expect(http.fetch).toBeCalledTimes(1);
+ expect(http.fetch).toBeCalledWith('/api/deprecations/', {
+ asSystemRequest: true,
+ });
+
+ expect(deprecations).toEqual(mockDeprecations);
+ });
+ });
+
+ describe('getDeprecations', () => {
+ it('returns deprecations for a single domainId', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const deprecations = await deprecationsClient.getDeprecations('testPluginId-1');
+
+ expect(deprecations.length).toBe(2);
+ expect(deprecations).toEqual([
+ { domainId: 'testPluginId-1' },
+ { domainId: 'testPluginId-1' },
+ ]);
+ });
+
+ it('returns [] if the domainId does not have any deprecations', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const deprecations = await deprecationsClient.getDeprecations('testPluginId-4');
+
+ expect(deprecations).toEqual([]);
+ });
+
+ it('calls the fetch api', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ http.fetch.mockResolvedValueOnce({
+ deprecations: [{ domainId: 'testPluginId-1' }, { domainId: 'testPluginId-1' }],
+ });
+ http.fetch.mockResolvedValueOnce({
+ deprecations: [{ domainId: 'testPluginId-2' }, { domainId: 'testPluginId-2' }],
+ });
+ const results = [
+ ...(await deprecationsClient.getDeprecations('testPluginId-1')),
+ ...(await deprecationsClient.getDeprecations('testPluginId-2')),
+ ];
+
+ expect(http.fetch).toBeCalledTimes(2);
+ expect(results).toEqual([
+ { domainId: 'testPluginId-1' },
+ { domainId: 'testPluginId-1' },
+ { domainId: 'testPluginId-2' },
+ { domainId: 'testPluginId-2' },
+ ]);
+ });
+ });
+
+ describe('isDeprecationResolvable', () => {
+ it('returns true if deprecation has correctiveActions.api', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const mockDeprecationDetails: DomainDeprecationDetails = {
+ domainId: 'testPluginId-1',
+ message: 'some-message',
+ level: 'warning',
+ correctiveActions: {
+ api: {
+ path: 'some-path',
+ method: 'POST',
+ },
+ },
+ };
+
+ const isResolvable = deprecationsClient.isDeprecationResolvable(mockDeprecationDetails);
+
+ expect(isResolvable).toBe(true);
+ });
+
+ it('returns false if deprecation is missing correctiveActions.api', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const mockDeprecationDetails: DomainDeprecationDetails = {
+ domainId: 'testPluginId-1',
+ message: 'some-message',
+ level: 'warning',
+ correctiveActions: {},
+ };
+
+ const isResolvable = deprecationsClient.isDeprecationResolvable(mockDeprecationDetails);
+
+ expect(isResolvable).toBe(false);
+ });
+ });
+
+ describe('resolveDeprecation', () => {
+ it('fails if deprecation is not resolvable', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const mockDeprecationDetails: DomainDeprecationDetails = {
+ domainId: 'testPluginId-1',
+ message: 'some-message',
+ level: 'warning',
+ correctiveActions: {},
+ };
+ const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails);
+
+ expect(result).toEqual({
+ status: 'fail',
+ reason: 'deprecation has no correctiveAction via api.',
+ });
+ });
+
+ it('fetches the deprecation api', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const mockDeprecationDetails: DomainDeprecationDetails = {
+ domainId: 'testPluginId-1',
+ message: 'some-message',
+ level: 'warning',
+ correctiveActions: {
+ api: {
+ path: 'some-path',
+ method: 'POST',
+ body: {
+ extra_param: 123,
+ },
+ },
+ },
+ };
+ const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails);
+
+ expect(http.fetch).toBeCalledTimes(1);
+ expect(http.fetch).toBeCalledWith({
+ path: 'some-path',
+ method: 'POST',
+ asSystemRequest: true,
+ body: JSON.stringify({
+ extra_param: 123,
+ deprecationDetails: { domainId: 'testPluginId-1' },
+ }),
+ });
+ expect(result).toEqual({ status: 'ok' });
+ });
+
+ it('fails when fetch fails', async () => {
+ const deprecationsClient = new DeprecationsClient({ http });
+ const mockResponse = 'Failed to fetch';
+ const mockDeprecationDetails: DomainDeprecationDetails = {
+ domainId: 'testPluginId-1',
+ message: 'some-message',
+ level: 'warning',
+ correctiveActions: {
+ api: {
+ path: 'some-path',
+ method: 'POST',
+ body: {
+ extra_param: 123,
+ },
+ },
+ },
+ };
+ http.fetch.mockRejectedValue({ body: { message: mockResponse } });
+ const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails);
+
+ expect(result).toEqual({ status: 'fail', reason: mockResponse });
+ });
+ });
+});
diff --git a/src/core/public/deprecations/deprecations_client.ts b/src/core/public/deprecations/deprecations_client.ts
new file mode 100644
index 0000000000000..e510ab1e79d17
--- /dev/null
+++ b/src/core/public/deprecations/deprecations_client.ts
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { HttpStart } from '../http';
+import type { DomainDeprecationDetails, DeprecationsGetResponse } from '../../server/types';
+
+/* @internal */
+export interface DeprecationsClientDeps {
+ http: Pick<HttpStart, 'fetch'>;
+}
+
+/* @internal */
+export type ResolveDeprecationResponse = { status: 'ok' } | { status: 'fail'; reason: string };
+
+export class DeprecationsClient {
+ private readonly http: Pick<HttpStart, 'fetch'>;
+ constructor({ http }: DeprecationsClientDeps) {
+ this.http = http;
+ }
+
+ private fetchDeprecations = async (): Promise<DomainDeprecationDetails[]> => {
+ const { deprecations } = await this.http.fetch<DeprecationsGetResponse>('/api/deprecations/', {
+ asSystemRequest: true,
+ });
+
+ return deprecations;
+ };
+
+ public getAllDeprecations = async () => {
+ return await this.fetchDeprecations();
+ };
+
+ public getDeprecations = async (domainId: string) => {
+ const deprecations = await this.fetchDeprecations();
+ return deprecations.filter((deprecation) => deprecation.domainId === domainId);
+ };
+
+ public isDeprecationResolvable = (details: DomainDeprecationDetails) => {
+ return typeof details.correctiveActions.api === 'object';
+ };
+
+ public resolveDeprecation = async (
+ details: DomainDeprecationDetails
+ ): Promise<ResolveDeprecationResponse> => {
+ const { domainId, correctiveActions } = details;
+ // explicit check required for TS type guard
+ if (typeof correctiveActions.api !== 'object') {
+ return {
+ status: 'fail',
+ reason: 'deprecation has no correctiveAction via api.',
+ };
+ }
+
+ const { body, method, path } = correctiveActions.api;
+ try {
+ await this.http.fetch({
+ path,
+ method,
+ asSystemRequest: true,
+ body: JSON.stringify({
+ ...body,
+ deprecationDetails: { domainId },
+ }),
+ });
+ return { status: 'ok' };
+ } catch (err) {
+ return {
+ status: 'fail',
+ reason: err.body.message,
+ };
+ }
+ };
+}
diff --git a/src/core/public/deprecations/deprecations_service.mock.ts b/src/core/public/deprecations/deprecations_service.mock.ts
new file mode 100644
index 0000000000000..5bcd52982d513
--- /dev/null
+++ b/src/core/public/deprecations/deprecations_service.mock.ts
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { PublicMethodsOf } from '@kbn/utility-types';
+import { DeprecationsService } from './deprecations_service';
+import type { DeprecationsServiceStart } from './deprecations_service';
+
+const createServiceMock = (): jest.Mocked<DeprecationsServiceStart> => ({
+ getAllDeprecations: jest.fn().mockResolvedValue([]),
+ getDeprecations: jest.fn().mockResolvedValue([]),
+ isDeprecationResolvable: jest.fn().mockReturnValue(false),
+ resolveDeprecation: jest.fn().mockResolvedValue({ status: 'ok', payload: {} }),
+});
+
+const createMock = () => {
+ const mocked: jest.Mocked<PublicMethodsOf<DeprecationsService>> = {
+ setup: jest.fn(),
+ start: jest.fn(),
+ stop: jest.fn(),
+ };
+
+ mocked.setup.mockReturnValue(void 0);
+ mocked.start.mockReturnValue(createServiceMock());
+ return mocked;
+};
+
+export const deprecationsServiceMock = {
+ create: createMock,
+ createSetupContract: () => void 0,
+ createStartContract: createServiceMock,
+};
diff --git a/src/core/public/deprecations/deprecations_service.ts b/src/core/public/deprecations/deprecations_service.ts
new file mode 100644
index 0000000000000..d06e0071d2bc7
--- /dev/null
+++ b/src/core/public/deprecations/deprecations_service.ts
@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import type { CoreService } from '../../types';
+import type { HttpStart } from '../http';
+import { DeprecationsClient, ResolveDeprecationResponse } from './deprecations_client';
+import type { DomainDeprecationDetails } from '../../server/types';
+
+/**
+ * DeprecationsService provides methods to fetch domain deprecation details from
+ * the Kibana server.
+ *
+ * @public
+ */
+export interface DeprecationsServiceStart {
+ /**
+ * Grabs deprecations details for all domains.
+ */
+ getAllDeprecations: () => Promise<DomainDeprecationDetails[]>;
+ /**
+ * Grabs deprecations for a specific domain.
+ *
+ * @param {string} domainId
+ */
+ getDeprecations: (domainId: string) => Promise<DomainDeprecationDetails[]>;
+ /**
+ * Returns a boolean indicating whether the provided deprecation can be automatically resolved.
+ *
+ * @param {DomainDeprecationDetails} details
+ */
+ isDeprecationResolvable: (details: DomainDeprecationDetails) => boolean;
+ /**
+ * Calls the correctiveActions.api to automatically resolve the deprecation.
+ *
+ * @param {DomainDeprecationDetails} details
+ */
+ resolveDeprecation: (details: DomainDeprecationDetails) => Promise<ResolveDeprecationResponse>;
+}
+
+export class DeprecationsService implements CoreService<void, DeprecationsServiceStart> {
+ public setup(): void {}
+
+ public start({ http }: { http: HttpStart }): DeprecationsServiceStart {
+ const deprecationsClient = new DeprecationsClient({ http });
+
+ return {
+ getAllDeprecations: deprecationsClient.getAllDeprecations,
+ getDeprecations: deprecationsClient.getDeprecations,
+ isDeprecationResolvable: deprecationsClient.isDeprecationResolvable,
+ resolveDeprecation: deprecationsClient.resolveDeprecation,
+ };
+ }
+
+ public stop(): void {}
+}
diff --git a/src/core/server/utils/from_root.ts b/src/core/public/deprecations/index.ts
similarity index 63%
rename from src/core/server/utils/from_root.ts
rename to src/core/public/deprecations/index.ts
index 377f4d0e29ca5..092cbed613ac2 100644
--- a/src/core/server/utils/from_root.ts
+++ b/src/core/public/deprecations/index.ts
@@ -6,9 +6,6 @@
* Side Public License, v 1.
*/
-import { resolve } from 'path';
-import { pkg } from './package_json';
-
-export function fromRoot(...args: string[]) {
- return resolve(pkg.__dirname, ...args);
-}
+export { DeprecationsService } from './deprecations_service';
+export type { DeprecationsServiceStart } from './deprecations_service';
+export type { ResolveDeprecationResponse } from './deprecations_client';
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index ef3172b620b23..b179c998f1126 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -216,6 +216,7 @@ export class DocLinksService {
},
maps: {
guide: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/maps.html`,
+ importGeospatialPrivileges: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/import-geospatial-data.html#import-geospatial-privileges`,
},
monitoring: {
alertsKibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kibana-alerts.html`,
@@ -271,8 +272,10 @@ export class DocLinksService {
painlessExecute: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-execute-api.html`,
painlessExecuteAPIContexts: `${ELASTIC_WEBSITE_URL}guide/en/elasticsearch/painless/${DOC_LINK_VERSION}/painless-execute-api.html#_contexts`,
putComponentTemplateMetadata: `${ELASTICSEARCH_DOCS}indices-component-template.html#component-templates-metadata`,
+ putEnrichPolicy: `${ELASTICSEARCH_DOCS}put-enrich-policy-api.html`,
putSnapshotLifecyclePolicy: `${ELASTICSEARCH_DOCS}slm-api-put-policy.html`,
- putWatch: `${ELASTICSEARCH_DOCS}/watcher-api-put-watch.html`,
+ putWatch: `${ELASTICSEARCH_DOCS}watcher-api-put-watch.html`,
+ simulatePipeline: `${ELASTICSEARCH_DOCS}simulate-pipeline-api.html`,
updateTransform: `${ELASTICSEARCH_DOCS}update-transform.html`,
},
plugins: {
@@ -293,9 +296,47 @@ export class DocLinksService {
restoreSnapshotApi: `${ELASTICSEARCH_DOCS}restore-snapshot-api.html#restore-snapshot-api-request-body`,
},
ingest: {
+ append: `${ELASTICSEARCH_DOCS}append-processor.html`,
+ bytes: `${ELASTICSEARCH_DOCS}bytes-processor.html`,
+ circle: `${ELASTICSEARCH_DOCS}ingest-circle-processor.html`,
+ convert: `${ELASTICSEARCH_DOCS}convert-processor.html`,
+ csv: `${ELASTICSEARCH_DOCS}csv-processor.html`,
+ date: `${ELASTICSEARCH_DOCS}date-processor.html`,
+ dateIndexName: `${ELASTICSEARCH_DOCS}date-index-name-processor.html`,
+ dissect: `${ELASTICSEARCH_DOCS}dissect-processor.html`,
+ dissectKeyModifiers: `${ELASTICSEARCH_DOCS}dissect-processor.html#dissect-key-modifiers`,
+ dotExpander: `${ELASTICSEARCH_DOCS}dot-expand-processor.html`,
+ drop: `${ELASTICSEARCH_DOCS}drop-processor.html`,
+ enrich: `${ELASTICSEARCH_DOCS}ingest-enriching-data.html`,
+ fail: `${ELASTICSEARCH_DOCS}fail-processor.html`,
+ foreach: `${ELASTICSEARCH_DOCS}foreach-processor.html`,
+ geoIp: `${ELASTICSEARCH_DOCS}geoip-processor.html`,
+ grok: `${ELASTICSEARCH_DOCS}grok-processor.html`,
+ gsub: `${ELASTICSEARCH_DOCS}gsub-processor.html`,
+ htmlString: `${ELASTICSEARCH_DOCS}htmlstrip-processor.html`,
+ inference: `${ELASTICSEARCH_DOCS}inference-processor.html`,
+ inferenceClassification: `${ELASTICSEARCH_DOCS}inference-processor.html#inference-processor-classification-opt`,
+ inferenceRegression: `${ELASTICSEARCH_DOCS}inference-processor.html#inference-processor-regression-opt`,
+ join: `${ELASTICSEARCH_DOCS}join-processor.html`,
+ json: `${ELASTICSEARCH_DOCS}json-processor.html`,
+ kv: `${ELASTICSEARCH_DOCS}kv-processor.html`,
+ lowercase: `${ELASTICSEARCH_DOCS}lowercase-processor.html`,
+ pipeline: `${ELASTICSEARCH_DOCS}pipeline-processor.html`,
pipelines: `${ELASTICSEARCH_DOCS}ingest.html`,
pipelineFailure: `${ELASTICSEARCH_DOCS}ingest.html#handling-pipeline-failures`,
processors: `${ELASTICSEARCH_DOCS}processors.html`,
+ remove: `${ELASTICSEARCH_DOCS}remove-processor.html`,
+ rename: `${ELASTICSEARCH_DOCS}rename-processor.html`,
+ script: `${ELASTICSEARCH_DOCS}script-processor.html`,
+ set: `${ELASTICSEARCH_DOCS}set-processor.html`,
+ setSecurityUser: `${ELASTICSEARCH_DOCS}ingest-node-set-security-user-processor.html`,
+ sort: `${ELASTICSEARCH_DOCS}sort-processor.html`,
+ split: `${ELASTICSEARCH_DOCS}split-processor.html`,
+ trim: `${ELASTICSEARCH_DOCS}trim-processor.html`,
+ uppercase: `${ELASTICSEARCH_DOCS}uppercase-processor.html`,
+ uriParts: `${ELASTICSEARCH_DOCS}uri-parts-processor.html`,
+ urlDecode: `${ELASTICSEARCH_DOCS}urldecode-processor.html`,
+ userAgent: `${ELASTICSEARCH_DOCS}user-agent-processor.html`,
},
},
});
@@ -443,6 +484,7 @@ export interface DocLinksStart {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
diff --git a/src/core/public/index.ts b/src/core/public/index.ts
index c7b4c370eb6d7..750f2e27dc950 100644
--- a/src/core/public/index.ts
+++ b/src/core/public/index.ts
@@ -65,6 +65,7 @@ import { UiSettingsState, IUiSettingsClient } from './ui_settings';
import { ApplicationSetup, Capabilities, ApplicationStart } from './application';
import { DocLinksStart } from './doc_links';
import { SavedObjectsStart } from './saved_objects';
+import { DeprecationsServiceStart } from './deprecations';
export type { PackageInfo, EnvironmentMode, IExternalUrlPolicy } from '../server/types';
export type { CoreContext, CoreSystem } from './core_system';
@@ -184,6 +185,8 @@ export type {
ErrorToastOptions,
} from './notifications';
+export type { DeprecationsServiceStart, ResolveDeprecationResponse } from './deprecations';
+
export type { MountPoint, UnmountCallback, PublicUiSettingsParams } from './types';
export { URL_MAX_LENGTH } from './core_app';
@@ -268,6 +271,8 @@ export interface CoreStart {
uiSettings: IUiSettingsClient;
/** {@link FatalErrorsStart} */
fatalErrors: FatalErrorsStart;
+ /** {@link DeprecationsServiceStart} */
+ deprecations: DeprecationsServiceStart;
/**
* exposed temporarily until https://github.com/elastic/kibana/issues/41990 done
* use *only* to retrieve config values. There is no way to set injected values
diff --git a/src/core/public/mocks.ts b/src/core/public/mocks.ts
index e47de84ea12b2..bd7623beba651 100644
--- a/src/core/public/mocks.ts
+++ b/src/core/public/mocks.ts
@@ -24,6 +24,7 @@ import { overlayServiceMock } from './overlays/overlay_service.mock';
import { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
import { savedObjectsServiceMock } from './saved_objects/saved_objects_service.mock';
import { injectedMetadataServiceMock } from './injected_metadata/injected_metadata_service.mock';
+import { deprecationsServiceMock } from './deprecations/deprecations_service.mock';
export { chromeServiceMock } from './chrome/chrome_service.mock';
export { docLinksServiceMock } from './doc_links/doc_links_service.mock';
@@ -37,6 +38,7 @@ export { uiSettingsServiceMock } from './ui_settings/ui_settings_service.mock';
export { savedObjectsServiceMock } from './saved_objects/saved_objects_service.mock';
export { scopedHistoryMock } from './application/scoped_history.mock';
export { applicationServiceMock } from './application/application_service.mock';
+export { deprecationsServiceMock } from './deprecations/deprecations_service.mock';
function createCoreSetupMock({
basePath = '',
@@ -57,6 +59,7 @@ function createCoreSetupMock({
http: httpServiceMock.createSetupContract({ basePath }),
notifications: notificationServiceMock.createSetupContract(),
uiSettings: uiSettingsServiceMock.createSetupContract(),
+ deprecations: deprecationsServiceMock.createSetupContract(),
injectedMetadata: {
getInjectedVar: injectedMetadataServiceMock.createSetupContract().getInjectedVar,
},
@@ -76,6 +79,7 @@ function createCoreStartMock({ basePath = '' } = {}) {
overlays: overlayServiceMock.createStartContract(),
uiSettings: uiSettingsServiceMock.createStartContract(),
savedObjects: savedObjectsServiceMock.createStartContract(),
+ deprecations: deprecationsServiceMock.createStartContract(),
injectedMetadata: {
getInjectedVar: injectedMetadataServiceMock.createStartContract().getInjectedVar,
},
diff --git a/src/core/public/plugins/plugin_context.ts b/src/core/public/plugins/plugin_context.ts
index b59516fa121fb..49c895aa80fc4 100644
--- a/src/core/public/plugins/plugin_context.ts
+++ b/src/core/public/plugins/plugin_context.ts
@@ -139,5 +139,6 @@ export function createPluginStartContext<
getInjectedVar: deps.injectedMetadata.getInjectedVar,
},
fatalErrors: deps.fatalErrors,
+ deprecations: deps.deprecations,
};
}
diff --git a/src/core/public/plugins/plugins_service.test.ts b/src/core/public/plugins/plugins_service.test.ts
index e70b78f237d75..d7114f14e2f00 100644
--- a/src/core/public/plugins/plugins_service.test.ts
+++ b/src/core/public/plugins/plugins_service.test.ts
@@ -34,6 +34,7 @@ import { httpServiceMock } from '../http/http_service.mock';
import { CoreSetup, CoreStart, PluginInitializerContext } from '..';
import { docLinksServiceMock } from '../doc_links/doc_links_service.mock';
import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.mock';
+import { deprecationsServiceMock } from '../deprecations/deprecations_service.mock';
export let mockPluginInitializers: Map<PluginName, MockedPluginInitializer>;
@@ -101,6 +102,7 @@ describe('PluginsService', () => {
uiSettings: uiSettingsServiceMock.createStartContract(),
savedObjects: savedObjectsServiceMock.createStartContract(),
fatalErrors: fatalErrorsServiceMock.createStartContract(),
+ deprecations: deprecationsServiceMock.createStartContract(),
};
mockStartContext = {
...mockStartDeps,
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 5a5ae253bac7f..8327428991e13 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -432,6 +432,8 @@ export interface CoreStart {
// (undocumented)
chrome: ChromeStart;
// (undocumented)
+ deprecations: DeprecationsServiceStart;
+ // (undocumented)
docLinks: DocLinksStart;
// (undocumented)
fatalErrors: FatalErrorsStart;
@@ -472,6 +474,15 @@ export class CoreSystem {
// @internal (undocumented)
export const DEFAULT_APP_CATEGORIES: Record<string, AppCategory>;
+// @public
+export interface DeprecationsServiceStart {
+ // Warning: (ae-forgotten-export) The symbol "DomainDeprecationDetails" needs to be exported by the entry point index.d.ts
+ getAllDeprecations: () => Promise<DomainDeprecationDetails[]>;
+ getDeprecations: (domainId: string) => Promise<DomainDeprecationDetails[]>;
+ isDeprecationResolvable: (details: DomainDeprecationDetails) => boolean;
+ resolveDeprecation: (details: DomainDeprecationDetails) => Promise<ResolveDeprecationResponse>;
+}
+
// @public (undocumented)
export interface DocLinksStart {
// (undocumented)
@@ -616,6 +627,7 @@ export interface DocLinksStart {
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putWatch: string;
+ simulatePipeline: string;
updateTransform: string;
}>;
readonly observability: Record