diff --git a/.backportrc.json b/.backportrc.json
index 8f458343c51af..3f1d639e9a480 100644
--- a/.backportrc.json
+++ b/.backportrc.json
@@ -3,6 +3,7 @@
"targetBranchChoices": [
{ "name": "master", "checked": true },
{ "name": "7.x", "checked": true },
+ "7.9",
"7.8",
"7.7",
"7.6",
@@ -26,7 +27,7 @@
"targetPRLabels": ["backport"],
"branchLabelMapping": {
"^v8.0.0$": "master",
- "^v7.9.0$": "7.x",
+ "^v7.10.0$": "7.x",
"^v(\\d+).(\\d+).\\d+$": "$1.$2"
}
}
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f053c6da9c29b..2ad82ded6cb38 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -24,29 +24,20 @@
/src/plugins/vis_type_xy/ @elastic/kibana-app
/src/plugins/visualize/ @elastic/kibana-app
-# Core UI
-# Exclude tutorials folder for now because they are not owned by Kibana app and most will move out soon
-/src/plugins/home/public @elastic/kibana-core-ui
-/src/plugins/home/server/*.ts @elastic/kibana-core-ui
-/src/plugins/home/server/services/ @elastic/kibana-core-ui
-# Exclude tutorial resources folder for now because they are not owned by Kibana app and most will move out soon
-/src/legacy/core_plugins/kibana/public/home/*.ts @elastic/kibana-core-ui
-/src/legacy/core_plugins/kibana/public/home/*.scss @elastic/kibana-core-ui
-/src/legacy/core_plugins/kibana/public/home/np_ready/ @elastic/kibana-core-ui
-
# App Architecture
+/examples/bfetch_explorer/ @elastic/kibana-app-arch
+/examples/dashboard_embeddable_examples/ @elastic/kibana-app-arch
+/examples/demo_search/ @elastic/kibana-app-arch
/examples/developer_examples/ @elastic/kibana-app-arch
+/examples/embeddable_examples/ @elastic/kibana-app-arch
+/examples/embeddable_explorer/ @elastic/kibana-app-arch
+/examples/state_container_examples/ @elastic/kibana-app-arch
+/examples/ui_actions_examples/ @elastic/kibana-app-arch
+/examples/ui_actions_explorer/ @elastic/kibana-app-arch
/examples/url_generators_examples/ @elastic/kibana-app-arch
/examples/url_generators_explorer/ @elastic/kibana-app-arch
-/packages/kbn-interpreter/ @elastic/kibana-app-arch
/packages/elastic-datemath/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/embeddable_api/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/interpreter/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/kibana_react/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/kibana/public/management/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/kibana/server/routes/api/management/ @elastic/kibana-app-arch
-/src/legacy/core_plugins/visualizations/ @elastic/kibana-app-arch
-/src/legacy/server/index_patterns/ @elastic/kibana-app-arch
+/packages/kbn-interpreter/ @elastic/kibana-app-arch
/src/plugins/advanced_settings/ @elastic/kibana-app-arch
/src/plugins/bfetch/ @elastic/kibana-app-arch
/src/plugins/data/ @elastic/kibana-app-arch
@@ -61,9 +52,10 @@
/src/plugins/share/ @elastic/kibana-app-arch
/src/plugins/ui_actions/ @elastic/kibana-app-arch
/src/plugins/visualizations/ @elastic/kibana-app-arch
-/x-pack/plugins/advanced_ui_actions/ @elastic/kibana-app-arch
+/x-pack/examples/ui_actions_enhanced_examples/ @elastic/kibana-app-arch
/x-pack/plugins/data_enhanced/ @elastic/kibana-app-arch
-/x-pack/plugins/drilldowns/ @elastic/kibana-app-arch
+/x-pack/plugins/embeddable_enhanced/ @elastic/kibana-app-arch
+/x-pack/plugins/ui_actions_enhanced/ @elastic/kibana-app-arch
# APM
/x-pack/plugins/apm/ @elastic/apm-ui
@@ -79,6 +71,16 @@
/x-pack/plugins/canvas/ @elastic/kibana-canvas
/x-pack/test/functional/apps/canvas/ @elastic/kibana-canvas
+# Core UI
+# Exclude tutorials folder for now because they are not owned by Kibana app and most will move out soon
+/src/plugins/home/public @elastic/kibana-core-ui
+/src/plugins/home/server/*.ts @elastic/kibana-core-ui
+/src/plugins/home/server/services/ @elastic/kibana-core-ui
+# Exclude tutorial resources folder for now because they are not owned by Kibana app and most will move out soon
+/src/legacy/core_plugins/kibana/public/home/*.ts @elastic/kibana-core-ui
+/src/legacy/core_plugins/kibana/public/home/*.scss @elastic/kibana-core-ui
+/src/legacy/core_plugins/kibana/public/home/np_ready/ @elastic/kibana-core-ui
+
# Observability UIs
/x-pack/legacy/plugins/infra/ @elastic/logs-metrics-ui
/x-pack/plugins/infra/ @elastic/logs-metrics-ui
diff --git a/docs/developer/advanced/development-basepath.asciidoc b/docs/developer/advanced/development-basepath.asciidoc
index f0b760a21ea0c..cb341b9591174 100644
--- a/docs/developer/advanced/development-basepath.asciidoc
+++ b/docs/developer/advanced/development-basepath.asciidoc
@@ -1,5 +1,5 @@
[[development-basepath]]
-=== Considerations for basepath
+== Considerations for basepath
In dev mode, {kib} by default runs behind a proxy which adds a random path component to its URL.
diff --git a/docs/developer/advanced/development-es-snapshots.asciidoc b/docs/developer/advanced/development-es-snapshots.asciidoc
index 92fae7a241edf..4c801bf750979 100644
--- a/docs/developer/advanced/development-es-snapshots.asciidoc
+++ b/docs/developer/advanced/development-es-snapshots.asciidoc
@@ -1,32 +1,32 @@
[[development-es-snapshots]]
-=== Daily Elasticsearch Snapshots
+== Daily {es} Snapshots
-For local development and CI, {kib}, by default, uses Elasticsearch snapshots that are built daily when running tasks that require Elasticsearch (e.g. functional tests).
+For local development and CI, {kib}, by default, uses {es} snapshots that are built daily when running tasks that require {es} (e.g. functional tests).
-A snapshot is just a group of tarballs, one for each supported distribution/architecture/os of Elasticsearch, and a JSON-based manifest file containing metadata about the distributions.
+A snapshot is just a group of tarballs, one for each supported distribution/architecture/os of {es}, and a JSON-based manifest file containing metadata about the distributions.
-https://ci.kibana.dev/es-snapshots[A dashboard] is available that shows the current status and compatibility of the latest Elasticsearch snapshots.
+https://ci.kibana.dev/es-snapshots[A dashboard] is available that shows the current status and compatibility of the latest {es} snapshots.
-==== Process Overview
+=== Process Overview
-1. Elasticsearch snapshots are built for each current tracked branch of {kib}.
+1. {es} snapshots are built for each current tracked branch of {kib}.
2. Each snapshot is uploaded to a public Google Cloud Storage bucket, `kibana-ci-es-snapshots-daily`.
** At this point, the snapshot is not automatically used in CI or local development. It needs to be tested/verified first.
3. Each snapshot is tested with the latest commit of the corresponding {kib} branch, using the full CI suite.
4. After CI
** If the snapshot passes, it is promoted and automatically used in CI and local development.
-** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between Elasticsearch and {kib}.
+** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between {es} and {kib}.
-==== Using the latest snapshot
+=== Using the latest snapshot
-When developing locally, you may wish to use the most recent Elasticsearch snapshot, even if it's failing CI. To do so, prefix your commands with the follow environment variable:
+When developing locally, you may wish to use the most recent {es} snapshot, even if it's failing CI. To do so, prefix your commands with the following environment variable:
["source","bash"]
-----------
KBN_ES_SNAPSHOT_USE_UNVERIFIED=true
-----------
-You can use this flag with any command that downloads and runs Elasticsearch snapshots, such as `scripts/es` or the FTR.
+You can use this flag with any command that downloads and runs {es} snapshots, such as `scripts/es` or the FTR.
For example, to run functional tests with the latest snapshot:
@@ -35,7 +35,7 @@ For example, to run functional tests with the latest snapshot:
KBN_ES_SNAPSHOT_USE_UNVERIFIED=true node scripts/functional_tests_server
-----------
-===== For Pull Requests
+==== For Pull Requests
Currently, there is not a way to run your pull request with the latest unverified snapshot without a code change. You can, however, do it with a small code change.
@@ -45,9 +45,9 @@ Currently, there is not a way to run your pull request with the latest unverifie
Your pull request should then use the latest snapshot the next time that it runs. Just don't merge the change to `Jenkinsfile`!
-==== Google Cloud Storage buckets
+=== Google Cloud Storage buckets
-===== kibana-ci-es-snapshots-daily
+==== kibana-ci-es-snapshots-daily
This bucket stores snapshots that are created on a daily basis, and is the primary location used by `kbn-es` to download snapshots.
@@ -61,7 +61,7 @@ The file structure for this bucket looks like this:
* `/archives//*.tar.gz.sha512`
* `/archives//manifest.json`
-===== kibana-ci-es-snapshots-permanent
+==== kibana-ci-es-snapshots-permanent
This bucket stores only the most recently promoted snapshot for each version. Old snapshots are only deleted when new ones are uploaded.
@@ -73,18 +73,18 @@ The file structure for this bucket looks like this:
* `/*.tar.gz.sha512`
* `/manifest.json`
-==== How snapshots are built, tested, and promoted
+=== How snapshots are built, tested, and promoted
-Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers Elasticsearch builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of {kib}.
+Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers {es} builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of {kib}.
-===== Build
+==== Build
-https://kibana-ci.elastic.co/job/elasticsearch+snapshots+build/[This Jenkins job] builds the Elasticsearch snapshots and uploads them to GCS.
+https://kibana-ci.elastic.co/job/elasticsearch+snapshots+build/[This Jenkins job] builds the {es} snapshots and uploads them to GCS.
The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_build_es[in the {kib} repo].
-1. Checkout Elasticsearch repo for the given branch/version.
-2. Run `./gradlew -p distribution/archives assemble --parallel` to create all of the Elasticsearch distributions.
+1. Checkout {es} repo for the given branch/version.
+2. Run `./gradlew -p distribution/archives assemble --parallel` to create all of the {es} distributions.
3. Create a tarball for each distribution.
4. Create a manifest JSON file containing info about the distribution, as well as its download URL.
5. Upload the tarballs and manifest to a unique location in the GCS bucket `kibana-ci-es-snapshots-daily`.
@@ -93,9 +93,9 @@ The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/ma
** This allows the `KBN_ES_SNAPSHOT_USE_UNVERIFIED` flag to work.
7. Trigger the verification job, to run the full {kib} CI test suite with this snapshot.
-===== Verification and Promotion
+==== Verification and Promotion
-https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest Elasticsearch snapshot with the full {kib} CI pipeline, and promotes if it there are no test failures.
+https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest {es} snapshot with the full {kib} CI pipeline, and promotes it if there are no test failures.
The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_verify_es[in the {kib} repo].
diff --git a/docs/developer/advanced/index.asciidoc b/docs/developer/advanced/index.asciidoc
index 139940ee42fe2..5c53bedd95e72 100644
--- a/docs/developer/advanced/index.asciidoc
+++ b/docs/developer/advanced/index.asciidoc
@@ -5,8 +5,8 @@
* <>
* <>
-include::development-es-snapshots.asciidoc[]
+include::development-es-snapshots.asciidoc[leveloffset=+1]
-include::running-elasticsearch.asciidoc[]
+include::running-elasticsearch.asciidoc[leveloffset=+1]
-include::development-basepath.asciidoc[]
\ No newline at end of file
+include::development-basepath.asciidoc[leveloffset=+1]
\ No newline at end of file
diff --git a/docs/developer/advanced/running-elasticsearch.asciidoc b/docs/developer/advanced/running-elasticsearch.asciidoc
index b03c231678eee..2361f805c7635 100644
--- a/docs/developer/advanced/running-elasticsearch.asciidoc
+++ b/docs/developer/advanced/running-elasticsearch.asciidoc
@@ -1,13 +1,13 @@
[[running-elasticsearch]]
-=== Running elasticsearch during development
+== Running {es} during development
-There are many ways to run Elasticsearch while you are developing.
+There are many ways to run {es} while you are developing.
-[float]
+[discrete]
-==== By snapshot
+=== By snapshot
-This will run a snapshot of elasticsearch that is usually built nightly. Read more about <>.
+This will run a snapshot of {es} that is usually built nightly. Read more about <<development-es-snapshots>>.
[source,bash]
----
@@ -25,36 +25,36 @@ yarn es snapshot --help
**Keeping data between snapshots**
-If you want to keep the data inside your Elasticsearch between usages of this command, you should use the following command, to keep your data folder outside the downloaded snapshot folder:
+If you want to keep the data inside your {es} between usages of this command, you should use the following command, to keep your data folder outside the downloaded snapshot folder:
[source,bash]
----
yarn es snapshot -E path.data=../data
----
-==== By source
+=== By source
-If you have the Elasticsearch repo checked out locally and wish to run against that, use `source`. By default, it will reference an elasticsearch checkout which is a sibling to the {kib} directory named elasticsearch. If you wish to use a checkout in another location you can provide that by supplying --source-path
+If you have the {es} repo checked out locally and wish to run against that, use `source`. By default, it will reference an {es} checkout which is a sibling to the {kib} directory named elasticsearch. If you wish to use a checkout in another location you can provide that by supplying --source-path
[source,bash]
----
yarn es source
----
-==== From an archive
+=== From an archive
-Use this if you already have a distributable. For released versions, one can be obtained on the Elasticsearch downloads page.
+Use this if you already have a distributable. For released versions, one can be obtained on the {es} downloads page.
[source,bash]
----
yarn es archive
----
-Each of these will run Elasticsearch with a basic license. Additional options are available, pass --help for more information.
+Each of these will run {es} with a basic license. Additional options are available, pass --help for more information.
-==== From a remote host
+=== From a remote host
-You can save some system resources, and the effort of generating sample data, if you have a remote Elasticsearch cluster to connect to. (Elasticians: you do! Check with your team about where to find credentials)
+You can save some system resources, and the effort of generating sample data, if you have a remote {es} cluster to connect to. (Elasticians: you do! Check with your team about where to find credentials)
You'll need to create a kibana.dev.yml (<>) and add the following to it:
@@ -75,7 +75,7 @@ kibana.index: '.{YourGitHubHandle}-kibana'
xpack.task_manager.index: '.{YourGitHubHandle}-task-manager-kibana'
----
-===== Running remote clusters
+==== Running remote clusters
Setup remote clusters for cross cluster search (CCS) and cross cluster replication (CCR).
@@ -95,7 +95,7 @@ yarn es snapshot -E transport.port=9500 -E http.port=9201 -E path.data=../data_p
Once both clusters are running, start {kib}. {kib} will connect to the primary cluster.
-Setup the remote cluster in {kib} from either Management -> Elasticsearch -> Remote Clusters UI or by running the following script in Console.
+Setup the remote cluster in {kib} from either Management -> {es} -> Remote Clusters UI or by running the following script in Console.
[source,bash]
----
diff --git a/docs/developer/architecture/add-data-tutorials.asciidoc b/docs/developer/architecture/add-data-tutorials.asciidoc
index e16b1bc039a10..3891b87a00e64 100644
--- a/docs/developer/architecture/add-data-tutorials.asciidoc
+++ b/docs/developer/architecture/add-data-tutorials.asciidoc
@@ -1,16 +1,16 @@
[[add-data-tutorials]]
-=== Add data tutorials
+== Add data tutorials
`Add Data` in the {kib} Home application contains tutorials for setting up data flows in the Elastic stack.
Each tutorial contains three sets of instructions:
-* `On Premise.` Set up a data flow when both {kib} and Elasticsearch are running on premise.
-* `On Premise Elastic Cloud.` Set up a data flow when {kib} is running on premise and Elasticsearch is running on Elastic Cloud.
-* `Elastic Cloud.` Set up a data flow when both {kib} and Elasticsearch are running on Elastic Cloud.
+* `On Premise.` Set up a data flow when both {kib} and {es} are running on premise.
+* `On Premise Elastic Cloud.` Set up a data flow when {kib} is running on premise and {es} is running on Elastic Cloud.
+* `Elastic Cloud.` Set up a data flow when both {kib} and {es} are running on Elastic Cloud.
-[float]
-==== Creating a new tutorial
+[discrete]
+=== Creating a new tutorial
1. Create a new directory in the link:https://github.com/elastic/kibana/tree/master/src/plugins/home/server/tutorials[tutorials directory].
2. In the new directory, create a file called `index.ts` that exports a function.
The function must return a function object that conforms to the `TutorialSchema` interface link:{kib-repo}tree/{branch}/src/plugins/home/server/services/tutorials/lib/tutorial_schema.ts[tutorial schema].
@@ -23,15 +23,15 @@ The function must return a function object that conforms to the `TutorialSchema`
If you are creating a new plugin and the tutorial is only related to that plugin, you can also place the `TutorialSchema` object into your plugin folder. Add `home` to the `requiredPlugins` list in your `kibana.json` file.
Then register the tutorial object by calling `home.tutorials.registerTutorial(tutorialObject)` in the `setup` lifecycle of your server plugin.
-[float]
-===== Variables
+[discrete]
+==== Variables
String values can contain variables that are substituted when rendered. Variables are specified by `{}`.
For example: `{config.docs.version}` is rendered as `6.2` when running the tutorial in {kib} 6.2.
link:{kib-repo}tree/{branch}/src/legacy/core_plugins/kibana/public/home/np_ready/components/tutorial/replace_template_strings.js#L23[Provided variables]
-[float]
-===== Markdown
+[discrete]
+==== Markdown
String values can contain limited Markdown syntax.
link:{kib-repo}tree/{branch}/src/legacy/core_plugins/kibana/public/home/components/tutorial/content.js#L8[Enabled Markdown grammars]
diff --git a/docs/developer/architecture/development-visualize-index.asciidoc b/docs/developer/architecture/development-visualize-index.asciidoc
index 551c41833fb72..d41ee32c1fb27 100644
--- a/docs/developer/architecture/development-visualize-index.asciidoc
+++ b/docs/developer/architecture/development-visualize-index.asciidoc
@@ -1,5 +1,5 @@
[[development-visualize-index]]
-=== Developing Visualizations
+== Developing Visualizations
[IMPORTANT]
==============================================
diff --git a/docs/developer/architecture/index.asciidoc b/docs/developer/architecture/index.asciidoc
index d726a8bd3642d..b7ca1cb352b0b 100644
--- a/docs/developer/architecture/index.asciidoc
+++ b/docs/developer/architecture/index.asciidoc
@@ -18,8 +18,8 @@ A few notable services are called out below.
* <>
* <>
-include::add-data-tutorials.asciidoc[]
+include::add-data-tutorials.asciidoc[leveloffset=+1]
-include::development-visualize-index.asciidoc[]
+include::development-visualize-index.asciidoc[leveloffset=+1]
-include::security/index.asciidoc[]
+include::security/index.asciidoc[leveloffset=+1]
diff --git a/docs/developer/architecture/security/feature-registration.asciidoc b/docs/developer/architecture/security/feature-registration.asciidoc
index 164f6d1cf9c74..3724624dbb917 100644
--- a/docs/developer/architecture/security/feature-registration.asciidoc
+++ b/docs/developer/architecture/security/feature-registration.asciidoc
@@ -1,13 +1,13 @@
[[development-plugin-feature-registration]]
-==== Plugin feature registration
+== Plugin feature registration
If your plugin will be used with {kib}'s default distribution, then you have the ability to register the features that your plugin provides. Features are typically apps in {kib}; once registered, you can toggle them via Spaces, and secure them via Roles when security is enabled.
-===== UI Capabilities
+=== UI Capabilities
Registering features also gives your plugin access to “UI Capabilities”. These capabilities are boolean flags that you can use to conditionally render your interface, based on the current user's permissions. For example, you can hide or disable a Save button if the current user is not authorized.
-===== Registering a feature
+=== Registering a feature
Feature registration is controlled via the built-in `xpack_main` plugin. To register a feature, call `xpack_main`'s `registerFeature` function from your plugin's `init` function, and provide the appropriate details:
@@ -21,7 +21,7 @@ init(server) {
}
-----------
-===== Feature details
+=== Feature details
Registering a feature consists of the following fields. For more information, consult the {kib-repo}blob/{branch}/x-pack/plugins/features/server/feature_registry.ts[feature registry interface].
@@ -65,12 +65,12 @@ Registering a feature consists of the following fields. For more information, co
|The ID of the navigation link associated with your feature.
|===
-====== Privilege definition
+==== Privilege definition
The `privileges` section of feature registration allows plugins to implement read/write and read-only modes for their applications.
For a full explanation of fields and options, consult the {kib-repo}blob/{branch}/x-pack/plugins/features/server/feature_registry.ts[feature registry interface].
-===== Using UI Capabilities
+=== Using UI Capabilities
UI Capabilities are available to your public (client) plugin code. These capabilities are read-only, and are used to inform the UI. This object is namespaced by feature id. For example, if your feature id is “foo”, then your UI Capabilities are stored at `uiCapabilities.foo`.
To access capabilities, import them from `ui/capabilities`:
@@ -86,7 +86,7 @@ if (canUserSave) {
-----------
[[example-1-canvas]]
-===== Example 1: Canvas Application
+=== Example 1: Canvas Application
["source","javascript"]
-----------
init(server) {
@@ -141,7 +141,7 @@ if (canUserSave) {
Because the `read` privilege does not define the `save` capability, users with read-only access will have their `uiCapabilities.canvas.save` flag set to `false`.
[[example-2-dev-tools]]
-===== Example 2: Dev Tools
+=== Example 2: Dev Tools
["source","javascript"]
-----------
@@ -176,7 +176,7 @@ init(server) {
},
privilegesTooltip: i18n.translate('xpack.features.devToolsPrivilegesTooltip', {
defaultMessage:
- 'User should also be granted the appropriate Elasticsearch cluster and index privileges',
+ 'User should also be granted the appropriate Elasticsearch cluster and index privileges',
}),
});
}
@@ -199,7 +199,7 @@ server.route({
-----------
[[example-3-discover]]
-===== Example 3: Discover
+=== Example 3: Discover
Discover takes advantage of subfeature privileges to allow fine-grained access control. In this example,
a single "Create Short URLs" subfeature privilege is defined, which allows users to grant access to this feature without having to grant the `all` privilege to Discover. In other words, you can grant `read` access to Discover, and also grant the ability to create short URLs.
diff --git a/docs/developer/architecture/security/index.asciidoc b/docs/developer/architecture/security/index.asciidoc
index 55b2450caf7a7..09739142c8f79 100644
--- a/docs/developer/architecture/security/index.asciidoc
+++ b/docs/developer/architecture/security/index.asciidoc
@@ -1,12 +1,14 @@
[[development-security]]
-=== Security
+== Security
-{kib} has generally been able to implement security transparently to core and plugin developers, and this largely remains the case. {kib} on two methods that the elasticsearch `Cluster` provides: `callWithRequest` and `callWithInternalUser`.
+{kib} has generally been able to implement security transparently to core and plugin developers, and this largely remains the case. {kib} relies on two methods that the {es} `Cluster` provides: `callWithRequest` and `callWithInternalUser`.
-`callWithRequest` executes requests against Elasticsearch using the authentication credentials of the {kib} end-user. So, if you log into {kib} with the user of `foo` when `callWithRequest` is used, {kib} execute the request against Elasticsearch as the user `foo`. Historically, `callWithRequest` has been used extensively to perform actions that are initiated at the request of {kib} end-users.
+`callWithRequest` executes requests against {es} using the authentication credentials of the {kib} end-user. So, if you log into {kib} with the user of `foo` when `callWithRequest` is used, {kib} executes the request against {es} as the user `foo`. Historically, `callWithRequest` has been used extensively to perform actions that are initiated at the request of {kib} end-users.
-`callWithInternalUser` executes requests against Elasticsearch using the internal {kib} server user, and has historically been used for performing actions that aren't initiated by {kib} end users; for example, creating the initial `.kibana` index or performing health checks against Elasticsearch.
+`callWithInternalUser` executes requests against {es} using the internal {kib} server user, and has historically been used for performing actions that aren't initiated by {kib} end users; for example, creating the initial `.kibana` index or performing health checks against {es}.
-However, with the changes that role-based access control (RBAC) introduces, this is no longer cut and dry. {kib} now requires all access to the `.kibana` index goes through the `SavedObjectsClient`. This used to be a best practice, as the `SavedObjectsClient` was responsible for translating the documents stored in Elasticsearch to and from Saved Objects, but RBAC is now taking advantage of this abstraction to implement access control and determine when to use `callWithRequest` versus `callWithInternalUser`.
+However, with the changes that role-based access control (RBAC) introduces, this is no longer cut and dry. {kib} now requires all access to the `.kibana` index goes through the `SavedObjectsClient`. This used to be a best practice, as the `SavedObjectsClient` was responsible for translating the documents stored in {es} to and from Saved Objects, but RBAC is now taking advantage of this abstraction to implement access control and determine when to use `callWithRequest` versus `callWithInternalUser`.
-include::rbac.asciidoc[]
+include::rbac.asciidoc[leveloffset=+1]
+
+include::feature-registration.asciidoc[leveloffset=+1]
diff --git a/docs/developer/architecture/security/rbac.asciidoc b/docs/developer/architecture/security/rbac.asciidoc
index ae1979e856e23..7b35a91ca73d0 100644
--- a/docs/developer/architecture/security/rbac.asciidoc
+++ b/docs/developer/architecture/security/rbac.asciidoc
@@ -1,9 +1,9 @@
[[development-security-rbac]]
-==== Role-based access control
+== Role-based access control
Role-based access control (RBAC) in {kib} relies upon the
{ref}/security-privileges.html#application-privileges[application privileges]
-that Elasticsearch exposes. This allows {kib} to define the privileges that
+that {es} exposes. This allows {kib} to define the privileges that
{kib} wishes to grant to users, assign them to the relevant users using roles,
and then authorize the user to perform a specific action. This is handled within
a secured instance of the `SavedObjectsClient` and available transparently to
@@ -11,7 +11,7 @@ consumers when using `request.getSavedObjectsClient()` or
`savedObjects.getScopedSavedObjectsClient()`.
[[development-rbac-privileges]]
-===== {kib} Privileges
+=== {kib} Privileges
When {kib} first starts up, it executes the following `POST` request against {es}. This synchronizes the definition of the privileges with various `actions` which are later used to authorize a user:
@@ -56,7 +56,7 @@ The application is created by concatenating the prefix of `kibana-` with the val
==============================================
[[development-rbac-assigning-privileges]]
-===== Assigning {kib} Privileges
+=== Assigning {kib} Privileges
{kib} privileges are assigned to specific roles using the `applications` element. For example, the following role assigns the <> privilege at `*` `resources` (which will in the future be used to secure spaces) to the default {kib} `application`:
@@ -81,7 +81,7 @@ Roles that grant <> should be managed using the <>
* <>
-include::stability.asciidoc[]
+include::stability.asciidoc[leveloffset=+1]
-include::security.asciidoc[]
+include::security.asciidoc[leveloffset=+1]
diff --git a/docs/developer/best-practices/security.asciidoc b/docs/developer/best-practices/security.asciidoc
index 26fcc73ce2b90..79ecb08295064 100644
--- a/docs/developer/best-practices/security.asciidoc
+++ b/docs/developer/best-practices/security.asciidoc
@@ -1,5 +1,5 @@
[[security-best-practices]]
-=== Security best practices
+== Security best practices
* XSS
** Check for usages of `dangerouslySetInnerHtml`, `Element.innerHTML`,
@@ -44,7 +44,7 @@ sensitive information which end up in the HTTP Response
** Ensure no sensitive cookies are forwarded to external resources.
** Ensure that all user controllable variables that are used in
constructing a URL are escaped properly. This is relevant when using
-`transport.request` with the Elasticsearch client as no automatic
+`transport.request` with the {es} client as no automatic
escaping is performed.
* Reverse tabnabbing -
https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing
diff --git a/docs/developer/best-practices/stability.asciidoc b/docs/developer/best-practices/stability.asciidoc
index 68237a034be52..f4b7ae1229909 100644
--- a/docs/developer/best-practices/stability.asciidoc
+++ b/docs/developer/best-practices/stability.asciidoc
@@ -1,10 +1,10 @@
[[stability]]
-=== Stability
+== Stability
Ensure your feature will work under all possible {kib} scenarios.
-[float]
-==== Environmental configuration scenarios
+[discrete]
+=== Environmental configuration scenarios
* Cloud
** Does the feature work on *cloud environment*?
@@ -32,16 +32,16 @@ non-standard {kib} indices. (create their own custom roles)
sessions. (we have had many discuss/SDH tickets around this)
* If a proxy/loadbalancer is running between ES and {kib}
-[float]
-==== Kibana.yml settings
+[discrete]
+=== Kibana.yml settings
* Using a custom {kib} index alias
* When optional dependencies are disabled
** Ensure all your required dependencies are listed in kibana.json
dependency list!
-[float]
-==== Test coverage
+[discrete]
+=== Test coverage
* Does the feature have sufficient unit test coverage? (does it handle
storeinSessions?)
@@ -49,16 +49,16 @@ storeinSessions?)
* Does the feature have sufficient Rest API coverage test coverage?
* Does the feature have sufficient Integration test coverage?
-[float]
-==== Browser coverage
+[discrete]
+=== Browser coverage
Refer to the list of browsers and OS {kib} supports
https://www.elastic.co/support/matrix
Does the feature work efficiently on the list of supported browsers?
-[float]
-==== Upgrade Scenarios - Migration scenarios-
+[discrete]
+=== Upgrade and migration scenarios
Does the feature affect old
indices, saved objects ? - Has the feature been tested with {kib}
diff --git a/docs/developer/contributing/development-accessibility-tests.asciidoc b/docs/developer/contributing/development-accessibility-tests.asciidoc
index a3ffefb94cd2a..facf7ff14a6c1 100644
--- a/docs/developer/contributing/development-accessibility-tests.asciidoc
+++ b/docs/developer/contributing/development-accessibility-tests.asciidoc
@@ -1,5 +1,5 @@
[[development-accessibility-tests]]
-==== Automated Accessibility Testing
+== Automated Accessibility Testing
To run the tests locally:
diff --git a/docs/developer/contributing/development-documentation.asciidoc b/docs/developer/contributing/development-documentation.asciidoc
index d9fae42eef87e..99e55963f57af 100644
--- a/docs/developer/contributing/development-documentation.asciidoc
+++ b/docs/developer/contributing/development-documentation.asciidoc
@@ -1,18 +1,18 @@
[[development-documentation]]
-=== Documentation during development
+== Documentation during development
Docs should be written during development and accompany PRs when relevant. There are multiple types of documentation, and different places to add each.
-[float]
-==== Developer services documentation
+[discrete]
+=== Developer services documentation
Documentation about specific services a plugin offers should be encapsulated in:
* README.asciidoc at the base of the plugin folder.
* Typescript comments for all public services.
-[float]
-==== End user documentation
+[discrete]
+=== End user documentation
Documentation about user facing features should be written in http://asciidoc.org/[asciidoc] at
{kib-repo}/tree/master/docs[https://github.com/elastic/kibana/tree/master/docs]
@@ -27,8 +27,8 @@ README for getting the docs tooling set up.
node scripts/docs.js --open
```
-[float]
-==== General developer documentation and guidelines
+[discrete]
+=== General developer documentation and guidelines
General developer guildlines and documentation, like this right here, should be written in http://asciidoc.org/[asciidoc]
at {kib-repo}/tree/master/docs/developer[https://github.com/elastic/kibana/tree/master/docs/developer]
diff --git a/docs/developer/contributing/development-functional-tests.asciidoc b/docs/developer/contributing/development-functional-tests.asciidoc
index 442fc1ac755d3..580a5a000f391 100644
--- a/docs/developer/contributing/development-functional-tests.asciidoc
+++ b/docs/developer/contributing/development-functional-tests.asciidoc
@@ -1,10 +1,10 @@
[[development-functional-tests]]
-=== Functional Testing
+== Functional Testing
We use functional tests to make sure the {kib} UI works as expected. It replaces hours of manual testing by automating user interaction. To have better control over our functional test environment, and to make it more accessible to plugin authors, {kib} uses a tool called the `FunctionalTestRunner`.
-[float]
-==== Running functional tests
+[discrete]
+=== Running functional tests
The `FunctionalTestRunner` is very bare bones and gets most of its functionality from its config file, located at {blob}test/functional/config.js[test/functional/config.js]. If you’re writing a plugin outside the {kib} repo, you will have your own config file.
See <> for more info.
@@ -12,27 +12,27 @@ The `FunctionalTestRunner` is very bare bones and gets most of its functionality
There are three ways to run the tests depending on your goals:
1. Easiest option:
-** Description: Starts up {kib} & Elasticsearch servers, followed by running tests. This is much slower when running the tests multiple times because slow startup time for the servers. Recommended for single-runs.
+** Description: Starts up {kib} & {es} servers, followed by running tests. This is much slower when running the tests multiple times because of the slow startup time for the servers. Recommended for single-runs.
** `node scripts/functional_tests`
-*** does everything in a single command, including running Elasticsearch and {kib} locally
+*** does everything in a single command, including running {es} and {kib} locally
*** tears down everything after the tests run
*** exit code reports success/failure of the tests
2. Best for development:
-** Description: Two commands, run in separate terminals, separate the components that are long-running and slow from those that are ephemeral and fast. Tests can be re-run much faster, and this still runs Elasticsearch & {kib} locally.
+** Description: Two commands, run in separate terminals, separate the components that are long-running and slow from those that are ephemeral and fast. Tests can be re-run much faster, and this still runs {es} & {kib} locally.
** `node scripts/functional_tests_server`
-*** starts Elasticsearch and {kib} servers
+*** starts {es} and {kib} servers
*** slow to start
*** can be reused for multiple executions of the tests, thereby saving some time when re-running tests
*** automatically restarts the {kib} server when relevant changes are detected
** `node scripts/functional_test_runner`
-*** runs the tests against {kib} & Elasticsearch servers that were started by `node scripts/functional_tests_server`
+*** runs the tests against {kib} & {es} servers that were started by `node scripts/functional_tests_server`
*** exit code reports success or failure of the tests
3. Custom option:
-** Description: Runs tests against instances of Elasticsearch & {kib} started some other way (like Elastic Cloud, or an instance you are managing in some other way).
+** Description: Runs tests against instances of {es} & {kib} started some other way (like Elastic Cloud, or an instance you are managing in some other way).
** just executes the functional tests
-** url, credentials, etc. for Elasticsearch and {kib} are specified via environment variables
+** url, credentials, etc. for {es} and {kib} are specified via environment variables
** Here's an example that runs against an Elastic Cloud instance. Note that you must run the same branch of tests as the version of {kib} you're testing.
+
["source","shell"]
@@ -91,15 +91,15 @@ export TEST_THROTTLE_NETWORK=1
node scripts/functional_test_runner --exclude-tag skipCloud
----------
-[float]
-===== More about `node scripts/functional_test_runner`
+[discrete]
+==== More about `node scripts/functional_test_runner`
When run without any arguments the `FunctionalTestRunner` automatically loads the configuration in the standard location, but you can override that behavior with the `--config` flag. List configs with multiple --config arguments.
-* `--config test/functional/config.js` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run in Chrome.
-* `--config test/functional/config.firefox.js` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run in Firefox.
-* `--config test/api_integration/config.js` starts Elasticsearch and {kib} servers with the api integration tests configuration.
-* `--config test/accessibility/config.ts` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run an accessibility audit using https://www.deque.com/axe/[axe].
+* `--config test/functional/config.js` starts {es} and {kib} servers with the WebDriver tests configured to run in Chrome.
+* `--config test/functional/config.firefox.js` starts {es} and {kib} servers with the WebDriver tests configured to run in Firefox.
+* `--config test/api_integration/config.js` starts {es} and {kib} servers with the api integration tests configuration.
+* `--config test/accessibility/config.ts` starts {es} and {kib} servers with the WebDriver tests configured to run an accessibility audit using https://www.deque.com/axe/[axe].
There are also command line flags for `--bail` and `--grep`, which behave just like their mocha counterparts. For instance, use `--grep=foo` to run only tests that match a regular expression.
@@ -108,11 +108,11 @@ Logging can also be customized with `--quiet`, `--debug`, or `--verbose` flags.
Use the `--help` flag for more options.
-[float]
-==== Writing functional tests
+[discrete]
+=== Writing functional tests
-[float]
-===== Environment
+[discrete]
+==== Environment
The tests are written in https://mochajs.org[mocha] using https://github.com/elastic/kibana/tree/master/packages/kbn-expect[@kbn/expect] for assertions.
@@ -120,8 +120,8 @@ We use https://www.w3.org/TR/webdriver1/[WebDriver Protocol] to run tests in bot
The `FunctionalTestRunner` automatically transpiles functional tests using babel, so that tests can use the same ECMAScript features that {kib} source code uses. See {blob}style_guides/js_style_guide.md[style_guides/js_style_guide.md].
-[float]
-===== Definitions
+[discrete]
+==== Definitions
**Provider:**
@@ -179,8 +179,8 @@ To run tests on Firefox locally, use `config.firefox.js`:
node scripts/functional_test_runner --config test/functional/config.firefox.js
-----------
-[float]
-===== Using the test_user service
+[discrete]
+==== Using the test_user service
Tests should run at the positive security boundry condition, meaning that they should be run with the mimimum privileges required (and documented) and not as the superuser.
This prevents the type of regression where additional privleges accidentally become required to perform the same action.
@@ -198,8 +198,8 @@ Here we are setting the `test_user` to have the `kibana_user` role and also role
Tests should normally setRoles() in the before() and restoreDefaults() in the after().
-[float]
-===== Anatomy of a test file
+[discrete]
+==== Anatomy of a test file
This annotated example file shows the basic structure every test suite uses. It starts by importing https://github.com/elastic/kibana/tree/master/packages/kbn-expect[`@kbn/expect`] and defining its default export: an anonymous Test Provider. The test provider then destructures the Provider API for the `getService()` and `getPageObjects()` functions. It uses these functions to collect the dependencies of this suite. The rest of the test file will look pretty normal to mocha.js users. `describe()`, `it()`, `before()` and the lot are used to define suites that happen to automate a browser via services and objects of type `PageObject`.
@@ -222,7 +222,7 @@ export default function ({ getService, getPageObject }) {
describe('My Test Suite', () => {
// most suites start with a before hook that navigates to a specific
- // app/page and restores some archives into elasticsearch with esArchiver
+ // app/page and restores some archives into Elasticsearch with esArchiver
before(async () => {
await Promise.all([
// start with an empty .kibana index
@@ -235,7 +235,7 @@ export default function ({ getService, getPageObject }) {
});
// right after the before() hook definition, add the teardown steps
- // that will tidy up elasticsearch for other test suites
+ // that will tidy up Elasticsearch for other test suites
after(async () => {
// we unload the empty_kibana archive but not the makelogs
// archive because we don't make any changes to it, and subsequent
@@ -257,9 +257,9 @@ export default function ({ getService, getPageObject }) {
}
----
-[float]
+[discrete]
[[functional_test_runner_provider_api]]
-==== Provider API
+=== Provider API
The first and only argument to all providers is a Provider API Object. This object can be used to load service/page objects and config/test files.
@@ -280,11 +280,11 @@ Within a test Provider the API is exactly the same as the service providers API
[horizontal]
`loadTestFile(path)`::: Load the test file at path in place. Use this method to nest suites from other files into a higher-level suite
-[float]
-==== Service Index
+[discrete]
+=== Service Index
-[float]
-===== Built-in Services
+[discrete]
+==== Built-in Services
The `FunctionalTestRunner` comes with three built-in services:
@@ -304,8 +304,8 @@ The `FunctionalTestRunner` comes with three built-in services:
* Exposes lifecycle events for basic coordination. Handlers can return a promise and resolve/fail asynchronously
* Phases include: `beforeLoadTests`, `beforeTests`, `beforeEachTest`, `cleanup`
-[float]
-===== {kib} Services
+[discrete]
+==== {kib} Services
The {kib} functional tests define the vast majority of the actual functionality used by tests.
@@ -377,7 +377,7 @@ Full list of services that are used in functional tests can be found here: {blob
**Low-level utilities:**:::
* es
** Source: {blob}test/common/services/es.ts[test/common/services/es.ts]
-** Elasticsearch client
+** {es} client
** Higher level options: `kibanaServer.uiSettings` or `esArchiver`
* remote
** Source: {blob}test/functional/services/remote/remote.ts[test/functional/services/remote/remote.ts]
@@ -387,8 +387,8 @@ Full list of services that are used in functional tests can be found here: {blob
** For searching and manipulating with DOM elements, use `testSubjects` and `find` services
** See the https://seleniumhq.github.io/selenium/docs/api/javascript/[selenium-webdriver docs] for the full API.
-[float]
-===== Custom Services
+[discrete]
+==== Custom Services
Services are intentionally generic. They can be literally anything (even nothing). Some services have helpers for interacting with a specific types of UI elements, like `pointSeriesVis`, and others are more foundational, like `log` or `config`. Whenever you want to provide some functionality in a reusable package, consider making a custom service.
@@ -427,8 +427,8 @@ export default function () {
}
-----------
-[float]
-==== PageObjects
+[discrete]
+=== PageObjects
The purpose for each PageObject is pretty self-explanatory. The visualize PageObject provides helpers for interacting with the visualize app, dashboard is the same for the dashboard app, and so on.
@@ -436,13 +436,13 @@ One exception is the "common" PageObject. A holdover from the intern implementat
Please add new methods to existing or new services rather than further expanding the CommonPage class.
-[float]
-==== Gotchas
+[discrete]
+=== Gotchas
Remember that you can’t run an individual test in the file (`it` block) because the whole `describe` needs to be run in order. There should only be one top level `describe` in a file.
-[float]
-===== Functional Test Timing
+[discrete]
+==== Functional Test Timing
Another important gotcha is writing stable tests by being mindful of timing. All methods on `remote` run asynchronously. It’s better to write interactions that wait for changes on the UI to appear before moving onto the next step.
@@ -480,8 +480,8 @@ class AppPage {
Writing in this way will ensure your test timings are not flaky or based on assumptions about UI updates after interactions.
-[float]
-==== Debugging
+[discrete]
+=== Debugging
From the command line run:
@@ -503,8 +503,8 @@ const log = getService(‘log’);
log.debug(‘done clicking menu’);
-----------
-[float]
-==== MacOS testing performance tip
+[discrete]
+=== MacOS testing performance tip
macOS users on a machine with a discrete graphics card may see significant speedups (up to 2x) when running tests by changing your terminal emulator's GPU settings. In iTerm2:
* Open Preferences (Command + ,)
diff --git a/docs/developer/contributing/development-github.asciidoc b/docs/developer/contributing/development-github.asciidoc
index 027b4e73aa9de..a6d4e29940487 100644
--- a/docs/developer/contributing/development-github.asciidoc
+++ b/docs/developer/contributing/development-github.asciidoc
@@ -1,16 +1,16 @@
[[development-github]]
-=== How we use git and github
+== How we use git and github
-[float]
-==== Forking
+[discrete]
+=== Forking
We follow the https://help.github.com/articles/fork-a-repo/[GitHub
forking model] for collaborating on {kib} code. This model assumes that
you have a remote called `upstream` which points to the official {kib}
repo, which we'll refer to in later code snippets.
-[float]
-==== Branching
+[discrete]
+=== Branching
* All work on the next major release goes into master.
* Past major release branches are named `{majorVersion}.x`. They contain
@@ -24,8 +24,8 @@ if the next patch release is `5.3.1`, work for it should go into the
branches.
* Where appropriate, we'll backport changes into older release branches.
-[float]
-==== Commits and Merging
+[discrete]
+=== Commits and Merging
* Feel free to make as many commits as you want, while working on a
branch.
@@ -38,8 +38,8 @@ explanation of _why_ you made the changes that you did.
feature branch, and force-pushing (see below for instructions).
* When merging, we'll squash your commits into a single commit.
-[float]
-===== Rebasing and fixing merge conflicts
+[discrete]
+==== Rebasing and fixing merge conflicts
Rebasing can be tricky, and fixing merge conflicts can be even trickier
because it involves force pushing. This is all compounded by the fact
@@ -106,7 +106,7 @@ hint: See the 'Note about fast-forwards' in 'git push --help' for details.
Assuming you've successfully rebased and you're happy with the code, you should force push instead.
-[float]
-==== Creating a pull request
+[discrete]
+=== Creating a pull request
See <> for the next steps on getting your code changes merged into {kib}.
\ No newline at end of file
diff --git a/docs/developer/contributing/development-pull-request.asciidoc b/docs/developer/contributing/development-pull-request.asciidoc
index 5d3c30fec7383..070eff449af5b 100644
--- a/docs/developer/contributing/development-pull-request.asciidoc
+++ b/docs/developer/contributing/development-pull-request.asciidoc
@@ -1,16 +1,16 @@
[[development-pull-request]]
-=== Submitting a pull request
+== Submitting a pull request
-[float]
-==== What Goes Into a Pull Request
+[discrete]
+=== What Goes Into a Pull Request
* Please include an explanation of your changes in your PR description.
* Links to relevant issues, external resources, or related PRs are very important and useful.
* Please update any tests that pertain to your code, and add new tests where appropriate.
* Update or add docs when appropriate. Read more about <>.
-[float]
-==== Submitting a Pull Request
+[discrete]
+=== Submitting a Pull Request
1. Push your local changes to your forked copy of the repository and submit a pull request.
2. Describe what your changes do and mention the number of the issue where discussion has taken place, e.g., “Closes #123″.
@@ -22,8 +22,8 @@ Always submit your pull against master unless the bug is only present in an olde
Then sit back and wait. There will probably be discussion about the Pull Request and, if any changes are needed, we'll work with you to get your Pull Request merged into {kib}.
-[float]
-==== What to expect during the pull request review process
+[discrete]
+=== What to expect during the pull request review process
Most PRs go through several iterations of feedback and updates. Depending on the scope and complexity of the PR, the process can take weeks. Please
be patient and understand we hold our code base to a high standard.
diff --git a/docs/developer/contributing/development-tests.asciidoc b/docs/developer/contributing/development-tests.asciidoc
index b470ea61669b2..78a2a90b69ce5 100644
--- a/docs/developer/contributing/development-tests.asciidoc
+++ b/docs/developer/contributing/development-tests.asciidoc
@@ -1,10 +1,10 @@
[[development-tests]]
-=== Testing
+== Testing
To ensure that your changes will not break other functionality, please run the test suite and build (<>) before submitting your Pull Request.
-[float]
-==== Running specific {kib} tests
+[discrete]
+=== Running specific {kib} tests
The following table outlines possible test file locations and how to
invoke them:
@@ -47,8 +47,8 @@ Examples: - Run the entire elasticsearch_service test suite:
string: ``` yarn test:ftr:server –config test/api_integration/config.js
yarn test:ftr:runner –config test/api_integration/config
-[float]
-==== Cross-browser compatibility
+[discrete]
+=== Cross-browser compatibility
**Testing IE on OS X**
@@ -71,8 +71,8 @@ your computer name).
`http://computer.local:5601` to test {kib}.
* Alternatively you can use browserstack
-[float]
-==== Running browser automation tests
+[discrete]
+=== Running browser automation tests
Check out <> to learn more about how you can run
and develop functional tests for {kib} core and plugins.
@@ -80,17 +80,17 @@ and develop functional tests for {kib} core and plugins.
You can also look into the {kib-repo}tree/{branch}/scripts/README.md[Scripts README.md]
to learn more about using the node scripts we provide for building
{kib}, running integration tests, and starting up {kib} and
-Elasticsearch while you develop.
+{es} while you develop.
-[float]
+[discrete]
==== More testing information:
* <>
* <>
* <>
-include::development-functional-tests.asciidoc[]
+include::development-functional-tests.asciidoc[leveloffset=+1]
-include::development-unit-tests.asciidoc[]
+include::development-unit-tests.asciidoc[leveloffset=+1]
-include::development-accessibility-tests.asciidoc[]
\ No newline at end of file
+include::development-accessibility-tests.asciidoc[leveloffset=+1]
\ No newline at end of file
diff --git a/docs/developer/contributing/development-unit-tests.asciidoc b/docs/developer/contributing/development-unit-tests.asciidoc
index 0009533c9a7c4..8b4954150bb5b 100644
--- a/docs/developer/contributing/development-unit-tests.asciidoc
+++ b/docs/developer/contributing/development-unit-tests.asciidoc
@@ -1,11 +1,11 @@
[[development-unit-tests]]
-==== Unit testing frameworks
+== Unit testing frameworks
{kib} is migrating unit testing from `Mocha` to `Jest`. Legacy unit tests
still exist in Mocha but all new unit tests should be written in Jest.
-[float]
-===== Mocha (legacy)
+[discrete]
+=== Mocha (legacy)
Mocha tests are contained in `__tests__` directories.
@@ -16,8 +16,8 @@ Mocha tests are contained in `__tests__` directories.
yarn test:mocha
-----------
-[float]
-==== Jest
+[discrete]
+=== Jest
Jest tests are stored in the same directory as source code files with the `.test.{js,mjs,ts,tsx}` suffix.
*Running Jest Unit Tests*
@@ -27,8 +27,8 @@ Jest tests are stored in the same directory as source code files with the `.test
yarn test:jest
-----------
-[float]
-====== Writing Jest Unit Tests
+[discrete]
+==== Writing Jest Unit Tests
In order to write those tests there are two main things you need to be aware of.
The first one is the different between `jest.mock` and `jest.doMock`
@@ -37,8 +37,8 @@ test files with `babel-jest` both techniques are needed
specially for the tests implemented on Typescript in order to benefit from the
auto-inference types feature.
-[float]
-====== Jest.mock vs Jest.doMock
+[discrete]
+==== Jest.mock vs Jest.doMock
Both methods are essentially the same on their roots however the `jest.mock`
calls will get hoisted to the top of the file and can only reference variables
@@ -47,8 +47,8 @@ reference pretty much any variable we want, however we have to assure those refe
variables are instantiated at the time we need them which lead us to the next
section where we'll talk about our jest mock files pattern.
-[float]
-====== Jest Mock Files Pattern
+[discrete]
+==== Jest Mock Files Pattern
Specially on typescript it is pretty common to have in unit tests
`jest.doMock` calls which reference for example imported types. Any error
@@ -76,9 +76,9 @@ like: `import * as Mocks from './mymodule.test.mocks'`,
or just `import './mymodule.test.mocks'` if there isn't anything
exported to be used.
-[float]
+[discrete]
[[debugging-unit-tests]]
-===== Debugging Unit Tests
+=== Debugging Unit Tests
The standard `yarn test` task runs several sub tasks and can take
several minutes to complete, making debugging failures pretty painful.
@@ -127,8 +127,8 @@ description.
image:http://i.imgur.com/DwHxgfq.png[Browser test debugging]
-[float]
-===== Unit Testing Plugins
+[discrete]
+=== Unit Testing Plugins
This should work super if you’re using the
https://github.com/elastic/kibana/tree/master/packages/kbn-plugin-generator[Kibana
diff --git a/docs/developer/contributing/index.asciidoc b/docs/developer/contributing/index.asciidoc
index 4f987f31cf1f6..99ab83bc2f073 100644
--- a/docs/developer/contributing/index.asciidoc
+++ b/docs/developer/contributing/index.asciidoc
@@ -23,7 +23,7 @@ Read <> to get your environment up and running, the
Please make sure you have signed the [Contributor License Agreement](http://www.elastic.co/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once.
-[float]
+[discrete]
[[kibana-localization]]
=== Localization
@@ -32,7 +32,7 @@ Read <> for details on our localization prac
Note that we cannot support accepting contributions to the translations from any source other than the translators we have engaged to do the work.
We are still to develop a proper process to accept any contributed translations. We certainly appreciate that people care enough about the localization effort to want to help improve the quality. We aim to build out a more comprehensive localization process for the future and will notify you once contributions can be supported, but for the time being, we are not able to incorporate suggestions.
-[float]
+[discrete]
[[kibana-release-notes-process]]
=== Release Notes Process
@@ -43,7 +43,7 @@ access to GitHub labels.
The Release Notes summarize what the PRs accomplish in language that is meaningful to users.
To generate the Release Notes, the team runs a script against this repo to collect the merged PRs against the release.
-[float]
+[discrete]
==== Create the Release Notes text
The text that appears in the Release Notes is pulled directly from your PR title, or a single paragraph of text that you specify in the PR description.
@@ -59,7 +59,7 @@ When you create the Release Notes text, use the following best practices:
* When you create a bug fix PR, start with `Fixes`.
* When you create a deprecation PR, start with `Deprecates`.
-[float]
+[discrete]
==== Add your labels
[arabic]
@@ -72,18 +72,18 @@ When you create the Release Notes text, use the following best practices:
* To **NOT** include your changes in the Release Notes, use `release_note:skip`.
-include::development-github.asciidoc[]
+include::development-github.asciidoc[leveloffset=+1]
-include::development-tests.asciidoc[]
+include::development-tests.asciidoc[leveloffset=+1]
-include::interpreting-ci-failures.asciidoc[]
+include::interpreting-ci-failures.asciidoc[leveloffset=+1]
-include::development-documentation.asciidoc[]
+include::development-documentation.asciidoc[leveloffset=+1]
-include::development-pull-request.asciidoc[]
+include::development-pull-request.asciidoc[leveloffset=+1]
-include::kibana-issue-reporting.asciidoc[]
+include::kibana-issue-reporting.asciidoc[leveloffset=+1]
-include::pr-review.asciidoc[]
+include::pr-review.asciidoc[leveloffset=+1]
-include::linting.asciidoc[]
+include::linting.asciidoc[leveloffset=+1]
diff --git a/docs/developer/contributing/interpreting-ci-failures.asciidoc b/docs/developer/contributing/interpreting-ci-failures.asciidoc
index ba3999a310198..bb623bc7a541c 100644
--- a/docs/developer/contributing/interpreting-ci-failures.asciidoc
+++ b/docs/developer/contributing/interpreting-ci-failures.asciidoc
@@ -1,19 +1,19 @@
[[interpreting-ci-failures]]
-=== Interpreting CI Failures
+== Interpreting CI Failures
{kib} CI uses a Jenkins feature called "Pipelines" to automate testing of the code in pull requests and on tracked branches. Pipelines are defined within the repository via the `Jenkinsfile` at the root of the project.
More information about Jenkins Pipelines can be found link:https://jenkins.io/doc/book/pipeline/[in the Jenkins book].
-[float]
-==== Github Checks
+[discrete]
+=== Github Checks
When a test fails it will be reported to Github via Github Checks. We currently bucket tests into several categories which run in parallel to make CI faster. Groups like `ciGroup{X}` get a single check in Github, and other tests like linting, or type checks, get their own checks.
Clicking the link next to the check in the conversation tab of a pull request will take you to the log output from that section of the tests. If that log output is truncated, or doesn't clearly identify what happened, you can usually get more complete information by visiting Jenkins directly.
-[float]
-==== Viewing Job Executions in Jenkins
+[discrete]
+=== Viewing Job Executions in Jenkins
To view the results of a job execution in Jenkins, either click the link in the comment left by `@elasticmachine` or search for the `kibana-ci` check in the list at the bottom of the PR. This link will take you to the top-level page for the specific job execution that failed.
@@ -24,8 +24,8 @@ image::images/job_view.png[]
3. *Google Cloud Storage (GCS) Upload Report:* Link to the screen which lists out the artifacts uploaded to GCS during this job execution.
4. *Pipeline Steps:*: A breakdown of the pipline that was executed, along with individual log output for each step in the pipeline.
-[float]
-==== Viewing ciGroup/test Logs
+[discrete]
+=== Viewing ciGroup/test Logs
To view the logs for a failed specific ciGroup, jest, mocha, type checkers, linters, etc., click on the *Pipeline Steps* link in from the Job page.
diff --git a/docs/developer/contributing/kibana-issue-reporting.asciidoc b/docs/developer/contributing/kibana-issue-reporting.asciidoc
index 36c50b612d675..63366ae2aa6bb 100644
--- a/docs/developer/contributing/kibana-issue-reporting.asciidoc
+++ b/docs/developer/contributing/kibana-issue-reporting.asciidoc
@@ -1,8 +1,8 @@
[[kibana-issue-reporting]]
-=== Effective issue reporting in {kib}
+== Effective issue reporting in {kib}
-[float]
-==== Voicing the importance of an issue
+[discrete]
+=== Voicing the importance of an issue
We seriously appreciate thoughtful comments. If an issue is important to
you, add a comment with a solid write up of your use case and explain
@@ -17,8 +17,8 @@ https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments[
thumbs up reaction] on the issue itself and on the comment which best
summarizes your thoughts.
-[float]
-==== "`My issue isn’t getting enough attention`"
+[discrete]
+=== "`My issue isn’t getting enough attention`"
First of all, *sorry about that!* We want you to have a great time with
{kib}.
@@ -31,8 +31,8 @@ more pressing issues.
Feel free to bump your issues if you think they’ve been neglected for a
prolonged period.
-[float]
-==== "`I want to help!`"
+[discrete]
+=== "`I want to help!`"
*Now we’re talking*. If you have a bug fix or new feature that you would
like to contribute to {kib}, please *find or open an issue about it
diff --git a/docs/developer/contributing/linting.asciidoc b/docs/developer/contributing/linting.asciidoc
index 234bd90478907..0d05afa504538 100644
--- a/docs/developer/contributing/linting.asciidoc
+++ b/docs/developer/contributing/linting.asciidoc
@@ -1,5 +1,5 @@
[[kibana-linting]]
-=== Linting
+== Linting
A note about linting: We use http://eslint.org[eslint] to check that the
link:STYLEGUIDE.md[styleguide] is being followed. It runs in a
@@ -34,8 +34,8 @@ for your editor, and browse our
https://github.com/elastic/kibana/blob/master/.editorconfig[`.editorconfig`]
file to see what config rules we set up.
-[float]
-==== Setup Guide for VS Code Users
+[discrete]
+=== Setup Guide for VS Code Users
Note that for VSCode, to enable "`live`" linting of TypeScript (and
other) file types, you will need to modify your local settings, as shown
diff --git a/docs/developer/contributing/pr-review.asciidoc b/docs/developer/contributing/pr-review.asciidoc
index ebab3b24aaaee..885725795b0b9 100644
--- a/docs/developer/contributing/pr-review.asciidoc
+++ b/docs/developer/contributing/pr-review.asciidoc
@@ -1,5 +1,5 @@
[[pr-review]]
-=== Pull request review guidelines
+== Pull request review guidelines
Every change made to {kib} must be held to a high standard, and while the responsibility for quality in a pull request ultimately lies with the author, {kib} team members have the responsibility as reviewers to verify during their review process.
@@ -10,24 +10,24 @@ It is not expected nor intended for a PR review to take the shape of this docume
While the review process is always done by Elastic staff members, these guidelines apply to all pull requests regardless of whether they are authored by community members or Elastic staff.
-[float]
-==== Target audience
+[discrete]
+=== Target audience
The target audience for this document are pull request reviewers. For {kib} maintainers, the PR review is the only part of the contributing process in which we have complete control. The author of any given pull request may not be up to speed on the latest expectations we have for pull requests, and they may have never read our guidelines at all. It's our responsibility as reviewers to guide folks through this process, but it's hard to do that consistently without a common set of documented principles.
Pull request authors can benefit from reading this document as well because it'll help establish a common set of expectations between authors and reviewers early.
-[float]
-==== Reject fast
+[discrete]
+=== Reject fast
Every pull request is different, and before reviewing any given PR, reviewers should consider the optimal way to approach the PR review so that if the change is ultimately rejected, it is done so as early in the process as possible.
For example, a reviewer may want to do a product level review as early as possible for a PR that includes a new UI feature. On the other hand, perhaps the author is submitting a new feature that has been rejected in the past due to key architectural decisions, in which case it may be appropriate for the reviewer to focus on the soundness of the architecture before diving into anything else.
-[float]
-==== The big three
+[discrete]
+=== The big three
There are a lot of discrete requirements and guidelines we want to follow in all of our pull requests, but three things in particular stand out as important above all the rest.
@@ -58,20 +58,20 @@ This isn't simply a question of enough test files. The code in the tests themsel
All of our code should have unit tests that verify its behaviors, including not only the "happy path", but also edge cases, error handling, etc. When you change an existing API of a module, then there should always be at least one failing unit test, which in turn means we need to verify that all code consuming that API properly handles the change if necessary. For modules at a high enough level, this will mean we have breaking change in the product, which we'll need to handle accordingly.
-In addition to extensive unit test coverage, PRs should include relevant functional and integration tests. In some cases, we may simply be testing a programmatic interface (e.g. a service) that is integrating with the file system, the network, Elasticsearch, etc. In other cases, we'll be testing REST APIs over HTTP or comparing screenshots/snapshots with prior known acceptable state. In the worst case, we are doing browser-based functional testing on a running instance of {kib} using selenium.
+In addition to extensive unit test coverage, PRs should include relevant functional and integration tests. In some cases, we may simply be testing a programmatic interface (e.g. a service) that is integrating with the file system, the network, {es}, etc. In other cases, we'll be testing REST APIs over HTTP or comparing screenshots/snapshots with prior known acceptable state. In the worst case, we are doing browser-based functional testing on a running instance of {kib} using selenium.
Enhancements are pretty much always going to have extensive unit tests as a base as well as functional and integration testing. Bug fixes should always include regression tests to ensure that same bug does not manifest again in the future.
--
-[float]
-==== Product level review
+[discrete]
+=== Product level review
Reviewers are not simply evaluating the code itself, they are also evaluating the quality of the user-facing change in the product. This generally means they need to check out the branch locally and "play around" with it. In addition to the "do we want this change in the product" details, the reviewer should be looking for bugs and evaluating how approachable and useful the feature is as implemented. Special attention should be given to error scenarios and edge cases to ensure they are all handled well within the product.
-[float]
-==== Consistency, style, readability
+[discrete]
+=== Consistency, style, readability
Having a relatively consistent codebase is an important part of us building a sustainable project. With dozens of active contributors at any given time, we rely on automation to help ensure consistency - we enforce a comprehensive set of linting rules through CI. We're also rolling out prettier to make this even more automatic.
@@ -86,8 +86,8 @@ When in doubt, relying on "prior art" in the codebase, especially in and around
There may also be times when a person is inspired by a particular contribution to introduce a new way to style code that we already have different style guidelines or "prior art" for. It's OK to bring this up in a pull request, but ultimately that discussion should branch off into a separate issue or pull request to update the appropriate guide. If this change is prompted by a reviewer, then the original PR should not be blocked on this. If the change is prompted by the author, then they can either update the PR to be consistent with our existing guidelines (preferred) or they can choose to block the PR entirely on that separate styleguide discussion.
-[float]
-==== Nitpicking
+[discrete]
+=== Nitpicking
Nitpicking is when a reviewer identifies trivial and unimportant details in a pull request and asks the author to change them. This is a completely subjective category that is impossible to define universally, and it's equally impractical to define a blanket policy on nitpicking that everyone will be happy with.
@@ -96,14 +96,14 @@ Reviewers should feel comfortable giving any feedback they have on a pull reques
Often, reviewers have an opinion about whether the feedback they are about to give is a nitpick or not. While not required, it can be really helpful to identify that feedback as such, for example "nit: a newline after this would be helpful". This helps the author understand your intention.
-[float]
-==== Handling disagreements
+[discrete]
+=== Handling disagreements
Conflicting opinions between reviewers and authors happen, and sometimes it is hard to reconcile those opinions. Ideally folks can work together in the spirit of these guidelines toward a consensus, but if that doesn't work out it may be best to bring a third person into the discussion. Our pull requests generally have two reviewers, so an appropriate third person may already be obvious. Otherwise, reach out to the functional area that is most appropriate or to technical leadership if an area isn't obvious.
-[float]
-==== Inappropriate review feedback
+[discrete]
+=== Inappropriate review feedback
Whether or not a bit of feedback is appropriate for a pull request is often dependent on the motivation for giving the feedback in the first place.
@@ -112,8 +112,8 @@ _Demanding_ an author make changes based primarily on the mindset of "how would
Inflammatory feedback such as "this is crap" isn't feedback at all. It's both mean and unhelpful, and it is never appropriate.
-[float]
-==== A checklist
+[discrete]
+=== A checklist
Establishing a comprehensive checklist for all of the things that should happen in all possible pull requests is impractical, but that doesn't mean we lack a concrete set of minimum requirements that we can enumerate. The following items should be double checked for any pull request:
diff --git a/docs/developer/getting-started/building-kibana.asciidoc b/docs/developer/getting-started/building-kibana.asciidoc
index e1f1ca336a5da..72054b1628fc2 100644
--- a/docs/developer/getting-started/building-kibana.asciidoc
+++ b/docs/developer/getting-started/building-kibana.asciidoc
@@ -1,5 +1,5 @@
[[building-kibana]]
-=== Building a {kib} distributable
+== Building a {kib} distributable
The following commands will build a {kib} production distributable.
@@ -15,8 +15,8 @@ You can get all build options using the following command:
yarn build --help
----
-[float]
-==== Building OS packages
+[discrete]
+=== Building OS packages
Packages are built using fpm, dpkg, and rpm. Package building has only been tested on Linux and is not supported on any other platform.
diff --git a/docs/developer/getting-started/debugging.asciidoc b/docs/developer/getting-started/debugging.asciidoc
index b369dcda748af..a3fb12ec1f6a3 100644
--- a/docs/developer/getting-started/debugging.asciidoc
+++ b/docs/developer/getting-started/debugging.asciidoc
@@ -1,15 +1,15 @@
[[kibana-debugging]]
-=== Debugging {kib}
+== Debugging {kib}
For information about how to debug unit tests, refer to <>.
-[float]
-==== Server Code
+[discrete]
+=== Server Code
`yarn debug` will start the server with Node's inspect flag. {kib}'s development mode will start three processes on ports `9229`, `9230`, and `9231`. Chrome's developer tools need to be configured to connect to all three connections. Add `localhost:` for each {kib} process in Chrome's developer tools connection tab.
-[float]
-==== Instrumenting with Elastic APM
+[discrete]
+=== Instrumenting with Elastic APM
{kib} ships with the
https://github.com/elastic/apm-agent-nodejs[Elastic APM Node.js Agent]
@@ -18,7 +18,7 @@ built-in for debugging purposes.
Its default configuration is meant to be used by core {kib} developers
only, but it can easily be re-configured to your needs. In its default
configuration it’s disabled and will, once enabled, send APM data to a
-centrally managed Elasticsearch cluster accessible only to Elastic
+centrally managed {es} cluster accessible only to Elastic
employees.
To change the location where data is sent, use the
diff --git a/docs/developer/getting-started/development-plugin-resources.asciidoc b/docs/developer/getting-started/development-plugin-resources.asciidoc
index dfe8efc4fef57..8f81138b81ed7 100644
--- a/docs/developer/getting-started/development-plugin-resources.asciidoc
+++ b/docs/developer/getting-started/development-plugin-resources.asciidoc
@@ -1,14 +1,14 @@
[[development-plugin-resources]]
-=== Plugin Resources
+== Plugin Resources
Here are some resources that are helpful for getting started with plugin development.
-[float]
-==== Some light reading
+[discrete]
+=== Some light reading
If you haven't already, start with <>. If you are planning to add your plugin to the {kib} repo, read the <> guide, if you are building a plugin externally, read <>. In both cases, read up on our recommended <>.
-[float]
-==== Creating an empty plugin
+[discrete]
+=== Creating an empty plugin
You can use the <> to get a basic structure for a new plugin. Plugins that are not part of the
{kib} repo should be developed inside the `plugins` folder. If you are building a new plugin to check in to the {kib} repo,
@@ -18,15 +18,15 @@ you will choose between a few locations:
- {kib-repo}tree/{branch}/src/plugins[src/plugins] for open source licensed plugins
- {kib-repo}tree/{branch}/examples[examples] for developer example plugins (these will not be included in the distributables)
-[float]
-==== Elastic UI Framework
+[discrete]
+=== Elastic UI Framework
If you're developing a plugin that has a user interface, take a look at our https://elastic.github.io/eui[Elastic UI Framework].
It documents the CSS and React components we use to build {kib}'s user interface.
You're welcome to use these components, but be aware that they are rapidly evolving, and we might introduce breaking changes that will disrupt your plugin's UI.
-[float]
-==== TypeScript Support
+[discrete]
+=== TypeScript Support
We recommend your plugin code is written in http://www.typescriptlang.org/[TypeScript].
To enable TypeScript support, create a `tsconfig.json` file at the root of your plugin that looks something like this:
@@ -48,14 +48,14 @@ TypeScript code is automatically converted into JavaScript during development,
but not in the distributable version of {kib}. If you use the
{kib-repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build your plugin, then your `.ts` and `.tsx` files will be permanently transpiled before your plugin is archived. If you have your own build process, make sure to run the TypeScript compiler on your source files and ship the compilation output so that your plugin will work with the distributable version of {kib}.
-[float]
-==== {kib} platform migration guide
+[discrete]
+=== {kib} platform migration guide
{kib-repo}blob/{branch}/src/core/MIGRATION.md#migrating-legacy-plugins-to-the-new-platform[This guide]
provides an action plan for moving a legacy plugin to the new platform.
-[float]
-==== Externally developed plugins
+[discrete]
+=== Externally developed plugins
If you are building a plugin outside of the {kib} repo, read <>.
diff --git a/docs/developer/getting-started/index.asciidoc b/docs/developer/getting-started/index.asciidoc
index 47c4a52daf303..2ac51b6cf86f8 100644
--- a/docs/developer/getting-started/index.asciidoc
+++ b/docs/developer/getting-started/index.asciidoc
@@ -3,7 +3,7 @@
Get started building your own plugins, or contributing directly to the {kib} repo.
-[float]
+[discrete]
[[get-kibana-code]]
=== Get the code
@@ -15,7 +15,7 @@ git clone https://github.com/[YOUR_USERNAME]/kibana.git kibana
cd kibana
----
-[float]
+[discrete]
=== Install dependencies
Install the version of Node.js listed in the `.node-version` file. This
@@ -67,11 +67,11 @@ corrupted packages in your yarn cache which you can clean with:
yarn cache clean
----
-[float]
+[discrete]
=== Configure environmental settings
[[increase-nodejs-heap-size]]
-[float]
+[discrete]
==== Increase node.js heap size
{kib} is a big project and for some commands it can happen that the
@@ -81,10 +81,10 @@ by setting the `--max_old_space_size` option on the command line. To set
the limit for all commands, simply add the following line to your shell
config: `export NODE_OPTIONS="--max_old_space_size=2048"`.
-[float]
-=== Run Elasticsearch
+[discrete]
+=== Run {es}
-Run the latest Elasticsearch snapshot. Specify an optional license with the `--license` flag.
+Run the latest {es} snapshot. Specify an optional license with the `--license` flag.
[source,bash]
----
@@ -96,7 +96,7 @@ yarn es snapshot --license trial
Read about more options for <>, like connecting to a remote host, running from source,
preserving data inbetween runs, running remote cluster, etc.
-[float]
+[discrete]
=== Run {kib}
In another terminal window, start up {kib}. Include developer examples by adding an optional `--run-examples` flag.
@@ -110,13 +110,13 @@ View all available options by running `yarn start --help`
Read about more advanced options for <>.
-[float]
+[discrete]
=== Code away!
You are now ready to start developing. Changes to your files should be picked up automatically. Server side changes will
cause the {kib} server to reboot.
-[float]
+[discrete]
=== More information
* <>
@@ -129,12 +129,12 @@ cause the {kib} server to reboot.
* <>
-include::running-kibana-advanced.asciidoc[]
+include::running-kibana-advanced.asciidoc[leveloffset=+1]
-include::sample-data.asciidoc[]
+include::sample-data.asciidoc[leveloffset=+1]
-include::debugging.asciidoc[]
+include::debugging.asciidoc[leveloffset=+1]
-include::building-kibana.asciidoc[]
+include::building-kibana.asciidoc[leveloffset=+1]
-include::development-plugin-resources.asciidoc[]
\ No newline at end of file
+include::development-plugin-resources.asciidoc[leveloffset=+1]
\ No newline at end of file
diff --git a/docs/developer/getting-started/running-kibana-advanced.asciidoc b/docs/developer/getting-started/running-kibana-advanced.asciidoc
index e36f38de1b366..c3b7847b0f8ba 100644
--- a/docs/developer/getting-started/running-kibana-advanced.asciidoc
+++ b/docs/developer/getting-started/running-kibana-advanced.asciidoc
@@ -1,5 +1,5 @@
[[running-kibana-advanced]]
-=== Running {kib}
+== Running {kib}
Change to your local {kib} directory. Start the development server.
@@ -23,8 +23,8 @@ By default, you can log in with username `elastic` and password
`changeme`. See the `--help` options on `yarn es ` if
you’d like to configure a different password.
-[float]
-==== Running {kib} in Open-Source mode
+[discrete]
+=== Running {kib} in Open-Source mode
If you’re looking to only work with the open-source software, supply the
license type to `yarn es`:
@@ -41,8 +41,8 @@ And start {kib} with only open-source code:
yarn start --oss
----
-[float]
-==== Unsupported URL Type
+[discrete]
+=== Unsupported URL Type
If you’re installing dependencies and seeing an error that looks
something like
@@ -56,9 +56,9 @@ need to run `yarn kbn bootstrap`. For more info, see
link:#setting-up-your-development-environment[Setting Up Your
Development Environment] above.
-[float]
+[discrete]
[[customize-kibana-yml]]
-==== Customizing `config/kibana.dev.yml`
+=== Customizing `config/kibana.dev.yml`
The `config/kibana.yml` file stores user configuration directives.
Since this file is checked into source control, however, developer
@@ -70,8 +70,8 @@ non-dev version and accepts any of the
https://www.elastic.co/guide/en/kibana/current/settings.html[standard
settings].
-[float]
-==== Potential Optimization Pitfalls
+[discrete]
+=== Potential Optimization Pitfalls
* Webpack is trying to include a file in the bundle that I deleted and
is now complaining about it is missing
@@ -79,9 +79,9 @@ is now complaining about it is missing
directory, but webpack isn’t adapting
* (if you discover other scenarios, please send a PR!)
-[float]
-==== Setting Up SSL
+[discrete]
+=== Setting Up SSL
{kib} includes self-signed certificates that can be used for
development purposes in the browser and for communicating with
-Elasticsearch: `yarn start --ssl` & `yarn es snapshot --ssl`.
\ No newline at end of file
+{es}: `yarn start --ssl` & `yarn es snapshot --ssl`.
\ No newline at end of file
diff --git a/docs/developer/getting-started/sample-data.asciidoc b/docs/developer/getting-started/sample-data.asciidoc
index 376211ceb2634..0d313cbabe64e 100644
--- a/docs/developer/getting-started/sample-data.asciidoc
+++ b/docs/developer/getting-started/sample-data.asciidoc
@@ -1,17 +1,17 @@
[[sample-data]]
-=== Installing sample data
+== Installing sample data
-There are a couple ways to easily get data ingested into Elasticsearch.
+There are a couple ways to easily get data ingested into {es}.
-[float]
-==== Sample data packages available for one click installation
+[discrete]
+=== Sample data packages available for one click installation
The easiest is to install one or more of our vailable sample data packages. If you have no data, you should be
prompted to install when running {kib} for the first time. You can also access and install the sample data packages
by going to the home page and clicking "add sample data".
-[float]
-==== makelogs script
+[discrete]
+=== makelogs script
The provided `makelogs` script will generate sample data.
@@ -22,10 +22,10 @@ node scripts/makelogs --auth :
The default username and password combination are `elastic:changeme`
-Make sure to execute `node scripts/makelogs` *after* elasticsearch is up and running!
+Make sure to execute `node scripts/makelogs` *after* {es} is up and running!
-[float]
-==== CSV upload
+[discrete]
+=== CSV upload
If running with a platinum or trial license, you can also use the CSV uploader provided inside the Machine learning app.
Navigate to the Data visualizer to upload your data from a file.
\ No newline at end of file
diff --git a/docs/developer/plugin/external-plugin-functional-tests.asciidoc b/docs/developer/plugin/external-plugin-functional-tests.asciidoc
index 44f636d627011..706bf6af8ed9b 100644
--- a/docs/developer/plugin/external-plugin-functional-tests.asciidoc
+++ b/docs/developer/plugin/external-plugin-functional-tests.asciidoc
@@ -1,10 +1,10 @@
[[external-plugin-functional-tests]]
-=== Functional Tests for Plugins outside the {kib} repo
+== Functional Tests for Plugins outside the {kib} repo
Plugins use the `FunctionalTestRunner` by running it out of the {kib} repo. Ensure that your {kib} Development Environment is setup properly before continuing.
-[float]
-==== Writing your own configuration
+[discrete]
+=== Writing your own configuration
Every project or plugin should have its own `FunctionalTestRunner` config file. Just like {kib}'s, this config file will define all of the test files to load, providers for Services and PageObjects, as well as configuration options for certain services.
@@ -82,8 +82,8 @@ From the root of your repo you should now be able to run the `FunctionalTestRunn
node ../../kibana/scripts/functional_test_runner
-----------
-[float]
-==== Using esArchiver
+[discrete]
+=== Using esArchiver
We're working on documentation for this, but for now the best place to look is the original {kib-repo}/issues/10359[pull request].
diff --git a/docs/developer/plugin/external-plugin-localization.asciidoc b/docs/developer/plugin/external-plugin-localization.asciidoc
index c151832ab53fa..d30dec1a8f46b 100644
--- a/docs/developer/plugin/external-plugin-localization.asciidoc
+++ b/docs/developer/plugin/external-plugin-localization.asciidoc
@@ -1,10 +1,10 @@
[[external-plugin-localization]]
-=== Localization for plugins outside the {kib} repo
+== Localization for plugins outside the {kib} repo
To introduce localization for your plugin, use our i18n tool to create IDs and default messages. You can then extract these IDs with respective default messages into localization JSON files for {kib} to use when running your plugin.
-[float]
-==== Adding localization to your plugin
+[discrete]
+=== Adding localization to your plugin
You must add a `translations` directory at the root of your plugin. This directory will contain the translation files that {kib} uses.
@@ -19,8 +19,8 @@ You must add a `translations` directory at the root of your plugin. This directo
-----------
-[float]
-==== Using {kib} i18n tooling
+[discrete]
+=== Using {kib} i18n tooling
To simplify the localization process, {kib} provides tools for the following functions:
* Verify all translations have translatable strings and extract default messages from templates
@@ -51,8 +51,8 @@ An example {kib} `.i18nrc.json` is {blob}.i18nrc.json[here].
Full documentation about i18n tooling is {blob}src/dev/i18n/README.md[here].
-[float]
-==== Extracting default messages
+[discrete]
+=== Extracting default messages
To extract the default messages from your plugin, run the following command:
["source","shell"]
@@ -62,8 +62,8 @@ node scripts/i18n_extract --output-dir ./translations --include-config ../kibana
This outputs a `en.json` file inside the `translations` directory. To localize other languages, clone the file and translate each string.
-[float]
-==== Checking i18n messages
+[discrete]
+=== Checking i18n messages
Checking i18n does the following:
@@ -80,8 +80,8 @@ node scripts/i18n_check --fix --include-config ../kibana-extra/myPlugin/.i18nrc.
-----------
-[float]
-==== Implementing i18n in the UI
+[discrete]
+=== Implementing i18n in the UI
{kib} relies on several UI frameworks (ReactJS and AngularJS) and
requires localization in different environments (browser and NodeJS).
@@ -97,8 +97,8 @@ so both React and AngularJS frameworks use the same engine and the same
message syntax.
-[float]
-===== i18n for vanilla JavaScript
+[discrete]
+==== i18n for vanilla JavaScript
["source","js"]
-----------
@@ -111,8 +111,8 @@ export const HELLO_WORLD = i18n.translate('hello.wonderful.world', {
Full details are {kib-repo}tree/master/packages/kbn-i18n#vanilla-js[here].
-[float]
-===== i18n for React
+[discrete]
+==== i18n for React
To localize strings in React, use either `FormattedMessage` or `i18n.translate`.
@@ -137,8 +137,8 @@ Full details are {kib-repo}tree/master/packages/kbn-i18n#react[here].
-[float]
-===== i18n for Angular
+[discrete]
+==== i18n for Angular
You are encouraged to use `i18n.translate()` by statically importing `i18n` from `@kbn/i18n` wherever possible in your Angular code. Angular wrappers use the translation `service` with the i18n engine under the hood.
@@ -156,8 +156,8 @@ The translation directive has the following syntax:
Full details are {kib-repo}tree/master/packages/kbn-i18n#angularjs[here].
-[float]
-==== Resources
+[discrete]
+=== Resources
To learn more about i18n tooling, see {blob}src/dev/i18n/README.md[i18n dev tooling].
diff --git a/docs/developer/plugin/index.asciidoc b/docs/developer/plugin/index.asciidoc
index 73f1d2c908fa7..dd83cf234dea4 100644
--- a/docs/developer/plugin/index.asciidoc
+++ b/docs/developer/plugin/index.asciidoc
@@ -9,9 +9,9 @@ The {kib} plugin interfaces are in a state of constant development. We cannot p
Most developers who contribute code directly to the {kib} repo are writing code inside plugins, so our <> docs are the best place to
start. However, there are a few differences when developing plugins outside the {kib} repo. These differences are covered here.
-[float]
+[discrete]
[[automatic-plugin-generator]]
-==== Automatic plugin generator
+=== Automatic plugin generator
We recommend that you kick-start your plugin by generating it with the {kib-repo}tree/{branch}/packages/kbn-plugin-generator[Kibana Plugin Generator]. Run the following in the {kib} repo, and you will be asked a couple questions, see some progress bars, and have a freshly generated plugin ready for you to play with in {kib}'s `plugins` folder.
@@ -20,7 +20,7 @@ We recommend that you kick-start your plugin by generating it with the {kib-repo
node scripts/generate_plugin my_plugin_name # replace "my_plugin_name" with your desired plugin name
-----------
-[float]
+[discrete]
=== Plugin location
The {kib} directory must be named `kibana`, and your plugin directory should be located in the root of `kibana` in a `plugins` directory, for example:
@@ -37,6 +37,6 @@ The {kib} directory must be named `kibana`, and your plugin directory should be
* <>
* <>
-include::external-plugin-functional-tests.asciidoc[]
+include::external-plugin-functional-tests.asciidoc[leveloffset=+1]
-include::external-plugin-localization.asciidoc[]
+include::external-plugin-localization.asciidoc[leveloffset=+1]
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.embeddable.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.embeddable.md
new file mode 100644
index 0000000000000..027ae4209b77f
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.embeddable.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ApplyGlobalFilterActionContext](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md) > [embeddable](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.embeddable.md)
+
+## ApplyGlobalFilterActionContext.embeddable property
+
+Signature:
+
+```typescript
+embeddable?: IEmbeddable;
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.filters.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.filters.md
new file mode 100644
index 0000000000000..6d1d20580fb19
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.filters.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ApplyGlobalFilterActionContext](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md) > [filters](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.filters.md)
+
+## ApplyGlobalFilterActionContext.filters property
+
+Signature:
+
+```typescript
+filters: Filter[];
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md
new file mode 100644
index 0000000000000..62817cd0a1e33
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md
@@ -0,0 +1,20 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ApplyGlobalFilterActionContext](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md)
+
+## ApplyGlobalFilterActionContext interface
+
+Signature:
+
+```typescript
+export interface ApplyGlobalFilterActionContext
+```
+
+## Properties
+
+| Property | Type | Description |
+| --- | --- | --- |
+| [embeddable](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.embeddable.md) | IEmbeddable
| |
+| [filters](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.filters.md) | Filter[]
| |
+| [timeFieldName](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.timefieldname.md) | string
| |
+
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.timefieldname.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.timefieldname.md
new file mode 100644
index 0000000000000..a5cf58018ec65
--- /dev/null
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.timefieldname.md
@@ -0,0 +1,11 @@
+
+
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [ApplyGlobalFilterActionContext](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md) > [timeFieldName](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.timefieldname.md)
+
+## ApplyGlobalFilterActionContext.timeFieldName property
+
+Signature:
+
+```typescript
+timeFieldName?: string;
+```
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
index 4852ad15781c7..db41936f35cca 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.md
@@ -48,6 +48,7 @@
| Interface | Description |
| --- | --- |
| [AggParamOption](./kibana-plugin-plugins-data-public.aggparamoption.md) | |
+| [ApplyGlobalFilterActionContext](./kibana-plugin-plugins-data-public.applyglobalfilteractioncontext.md) | |
| [DataPublicPluginSetup](./kibana-plugin-plugins-data-public.datapublicpluginsetup.md) | |
| [DataPublicPluginStart](./kibana-plugin-plugins-data-public.datapublicpluginstart.md) | |
| [EsQueryConfig](./kibana-plugin-plugins-data-public.esqueryconfig.md) | |
diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.plugin.setup.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.plugin.setup.md
index 7bae595e75ad0..a0c9b38792825 100644
--- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.plugin.setup.md
+++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.plugin.setup.md
@@ -7,14 +7,14 @@
Signature:
```typescript
-setup(core: CoreSetup, { expressions, uiActions, usageCollection }: DataSetupDependencies): DataPublicPluginSetup;
+setup(core: CoreSetup, { expressions, uiActions, usageCollection }: DataSetupDependencies): DataPublicPluginSetup;
```
## Parameters
| Parameter | Type | Description |
| --- | --- | --- |
-| core | CoreSetup
| |
+| core | CoreSetup<DataStartDependencies, DataPublicPluginStart>
| |
| { expressions, uiActions, usageCollection } | DataSetupDependencies
| |
Returns:
diff --git a/docs/user/alerting/action-types.asciidoc b/docs/user/alerting/action-types.asciidoc
index e8dcf689df8e4..1743edb10f92b 100644
--- a/docs/user/alerting/action-types.asciidoc
+++ b/docs/user/alerting/action-types.asciidoc
@@ -23,6 +23,10 @@ a| <>
| Add a message to a Kibana log.
+a| <>
+
+| Push or update data to a new incident in ServiceNow.
+
a| <>
| Send a message to a Slack channel or user.
@@ -55,3 +59,4 @@ include::action-types/server-log.asciidoc[]
include::action-types/slack.asciidoc[]
include::action-types/webhook.asciidoc[]
include::action-types/pre-configured-connectors.asciidoc[]
+include::action-types/servicenow.asciidoc[]
diff --git a/docs/user/alerting/action-types/servicenow.asciidoc b/docs/user/alerting/action-types/servicenow.asciidoc
new file mode 100644
index 0000000000000..32f828aea2357
--- /dev/null
+++ b/docs/user/alerting/action-types/servicenow.asciidoc
@@ -0,0 +1,72 @@
+[role="xpack"]
+[[servicenow-action-type]]
+=== ServiceNow action
+
+The ServiceNow action type uses the https://developer.servicenow.com/app.do#!/rest_api_doc?v=orlando&id=c_TableAPI[V2 Table API] to create ServiceNow incidents.
+
+[float]
+[[servicenow-connector-configuration]]
+==== Connector configuration
+
+ServiceNow connectors have the following configuration properties:
+
+Name:: The name of the connector. The name is used to identify a connector in the management UI connector listing, or in the connector list when configuring an action.
+URL:: ServiceNow instance URL.
+Username:: Username for HTTP Basic authentication.
+Password:: Password for HTTP Basic authentication.
+
+[float]
+[[Preconfigured-servicenow-configuration]]
+==== Preconfigured action type
+
+[source,text]
+--
+ my-servicenow:
+ name: preconfigured-servicenow-action-type
+ actionTypeId: .servicenow
+ config:
+ apiUrl: https://dev94428.service-now.com/
+ secrets:
+ username: testuser
+ password: passwordkeystorevalue
+--
+
+`config` defines the configuration specific to the action type and contains the following properties:
+
+[cols="2*<"]
+|===
+
+| `apiUrl`
+| An address that corresponds to *URL*.
+
+|===
+
+`secrets` defines sensitive information for the action type:
+
+[cols="2*<"]
+|===
+
+| `username`
+| A string that corresponds to *Username*.
+
+| `password`
+| A string that corresponds to *Password*. Should be stored in the <>.
+
+|===
+
+[[servicenow-action-configuration]]
+==== Action configuration
+
+ServiceNow actions have the following configuration properties:
+
+Urgency:: The extent to which resolution of the incident can be delayed.
+Severity:: The severity of the incident.
+Impact:: The effect an incident has on business. Can be measured by the number of affected users or by how critical it is to the business in question.
+Short description:: A short description of the incident, used for searching the contents of the knowledge base.
+Description:: The details about the incident.
+Additional comments:: Additional information for the client, such as how to troubleshoot the issue.
+
+[[configuring-servicenow]]
+==== Configuring and testing ServiceNow
+
+ServiceNow offers free https://developer.servicenow.com/dev.do#!/guides/madrid/now-platform/pdi-guide/obtaining-a-pdi[Personal Developer Instances], which you can use to test incidents.
diff --git a/docs/user/ml/images/ml-annotations-list.jpg b/docs/user/ml/images/ml-annotations-list.jpg
deleted file mode 100644
index 8b1194dd20c0f..0000000000000
Binary files a/docs/user/ml/images/ml-annotations-list.jpg and /dev/null differ
diff --git a/docs/user/ml/images/ml-annotations-list.png b/docs/user/ml/images/ml-annotations-list.png
new file mode 100644
index 0000000000000..f1a0b66241126
Binary files /dev/null and b/docs/user/ml/images/ml-annotations-list.png differ
diff --git a/docs/user/ml/images/ml-job-management.jpg b/docs/user/ml/images/ml-job-management.jpg
deleted file mode 100644
index efdf7923c0faa..0000000000000
Binary files a/docs/user/ml/images/ml-job-management.jpg and /dev/null differ
diff --git a/docs/user/ml/images/ml-job-management.png b/docs/user/ml/images/ml-job-management.png
new file mode 100644
index 0000000000000..4589c7093a7cf
Binary files /dev/null and b/docs/user/ml/images/ml-job-management.png differ
diff --git a/docs/user/ml/images/ml-settings.jpg b/docs/user/ml/images/ml-settings.jpg
deleted file mode 100644
index 3713be005924d..0000000000000
Binary files a/docs/user/ml/images/ml-settings.jpg and /dev/null differ
diff --git a/docs/user/ml/images/ml-settings.png b/docs/user/ml/images/ml-settings.png
new file mode 100644
index 0000000000000..f5c9fca647389
Binary files /dev/null and b/docs/user/ml/images/ml-settings.png differ
diff --git a/docs/user/ml/images/ml-single-metric-viewer.jpg b/docs/user/ml/images/ml-single-metric-viewer.jpg
deleted file mode 100644
index 2fbb9387d1e29..0000000000000
Binary files a/docs/user/ml/images/ml-single-metric-viewer.jpg and /dev/null differ
diff --git a/docs/user/ml/images/ml-single-metric-viewer.png b/docs/user/ml/images/ml-single-metric-viewer.png
new file mode 100644
index 0000000000000..04c21d9bc533a
Binary files /dev/null and b/docs/user/ml/images/ml-single-metric-viewer.png differ
diff --git a/docs/user/ml/images/outliers.png b/docs/user/ml/images/outliers.png
index 3f4c5f6c6bbf0..874ebbc79201c 100644
Binary files a/docs/user/ml/images/outliers.png and b/docs/user/ml/images/outliers.png differ
diff --git a/docs/user/ml/index.asciidoc b/docs/user/ml/index.asciidoc
index 1bc74ce87de08..214dae2b96e04 100644
--- a/docs/user/ml/index.asciidoc
+++ b/docs/user/ml/index.asciidoc
@@ -47,20 +47,20 @@ create {anomaly-jobs} and manage jobs and {dfeeds} from the *Job Management*
pane:
[role="screenshot"]
-image::user/ml/images/ml-job-management.jpg[Job Management]
+image::user/ml/images/ml-job-management.png[Job Management]
You can use the *Settings* pane to create and edit
{ml-docs}/ml-calendars.html[calendars] and the filters that are used in
{ml-docs}/ml-rules.html[custom rules]:
[role="screenshot"]
-image::user/ml/images/ml-settings.jpg[Calendar Management]
+image::user/ml/images/ml-settings.png[Calendar Management]
The *Anomaly Explorer* and *Single Metric Viewer* display the results of your
{anomaly-jobs}. For example:
[role="screenshot"]
-image::user/ml/images/ml-single-metric-viewer.jpg[Single Metric Viewer]
+image::user/ml/images/ml-single-metric-viewer.png[Single Metric Viewer]
You can optionally add annotations by drag-selecting a period of time in
the *Single Metric Viewer* and adding a description. For example, you can add an
@@ -68,7 +68,7 @@ explanation for anomalies in that time period or provide notes about what is
occurring in your operational environment at that time:
[role="screenshot"]
-image::user/ml/images/ml-annotations-list.jpg[Single Metric Viewer with annotations]
+image::user/ml/images/ml-annotations-list.png[Single Metric Viewer with annotations]
In some circumstances, annotations are also added automatically. For example, if
the {anomaly-job} detects that there is missing data, it annotates the affected
@@ -94,8 +94,8 @@ The Elastic {ml} {dfanalytics} feature enables you to analyze your data using
indices that contain the results alongside your source data.
If you have a license that includes the {ml-features}, you can create
-{dfanalytics-jobs} and view their results on the *Analytics* page
-in {kib}. For example:
+{dfanalytics-jobs} and view their results on the *Data Frame Analytics* page in
+{kib}. For example:
[role="screenshot"]
image::user/ml/images/outliers.png[{oldetection-cap} results in {kib}]
diff --git a/docs/visualize/images/vega_lite_tutorial_1.png b/docs/visualize/images/vega_lite_tutorial_1.png
new file mode 100644
index 0000000000000..4e8d0aba3635b
Binary files /dev/null and b/docs/visualize/images/vega_lite_tutorial_1.png differ
diff --git a/docs/visualize/images/vega_lite_tutorial_2.png b/docs/visualize/images/vega_lite_tutorial_2.png
new file mode 100644
index 0000000000000..523ae91514a11
Binary files /dev/null and b/docs/visualize/images/vega_lite_tutorial_2.png differ
diff --git a/docs/visualize/images/vega_tutorial_3.png b/docs/visualize/images/vega_tutorial_3.png
new file mode 100644
index 0000000000000..e025ecc585807
Binary files /dev/null and b/docs/visualize/images/vega_tutorial_3.png differ
diff --git a/docs/visualize/images/vega_tutorial_4.png b/docs/visualize/images/vega_tutorial_4.png
new file mode 100644
index 0000000000000..c8ee311e9bf5e
Binary files /dev/null and b/docs/visualize/images/vega_tutorial_4.png differ
diff --git a/docs/visualize/vega.asciidoc b/docs/visualize/vega.asciidoc
index 24bd3a44bebba..3a1c57da93f07 100644
--- a/docs/visualize/vega.asciidoc
+++ b/docs/visualize/vega.asciidoc
@@ -3,71 +3,1287 @@
experimental[]
-Build custom visualizations from multiple data sources using Vega
-and Vega-Lite.
+Build custom visualizations using Vega and Vega-Lite, backed by one or more
+data sources including {es}, Elastic Map Service, URL,
+or static data. Use the {kib} extensions to Vega to embed Vega into
+your dashboard, and to add interactivity to the visualizations.
-* *Vega* — A declarative format to create visualizations using JSON.
- Generate interactive displays using D3.
+Vega and Vega-Lite are both declarative formats to create visualizations
+using JSON. Both use a different syntax for declaring visualizations,
+and are not fully interchangeable.
-* *Vega-Lite* — An easier format to use than Vega that enables more rapid
- data analysis. Compiles into Vega.
+[float]
+[[when-to-vega]]
+=== When to use Vega
-For more information about Vega and Vega-Lite, refer to
-<>.
+Vega and Vega-Lite are capable of building most of the visualizations
+that {kib} provides, but with higher complexity. The most common reason
+to use Vega in {kib} is that {kib} is missing support for the query or
+visualization, for example:
-[float]
-[[create-vega-viz]]
-=== Create Vega visualizations
+* Aggregations using the `nested` or `parent/child` mapping
+* Aggregations without a {kib} index pattern
+* Queries using custom time filters
+* Complex calculations
+* Extracting data from _source instead of aggregation
+* Scatter charts
+* Sankey charts
+* Custom maps
+* Using a visual theme that {kib} does not provide
+
+[[vega-lite-tutorial]]
+=== Tutorial: First visualization in Vega-Lite
-You create Vega visualizations by using the text editor, which is
-preconfigured with the options you need.
+In this tutorial, you will learn how to edit Vega-Lite in {kib} to create
+a stacked area chart from an {es} search query. It will give you a starting point
+for a more comprehensive
+https://vega.github.io/vega-lite/tutorials/getting_started.html[introduction to Vega-Lite],
+while only covering the basics.
+
+In this tutorial, you will build a stacked area chart from one of the {kib} sample data
+sets.
[role="screenshot"]
-image::images/vega_lite_default.png[]
+image::visualize/images/vega_lite_tutorial_1.png[]
-[float]
-[[vega-schema]]
-==== Change the Vega version
+Before beginning this tutorial, install the <>
+set.
+
+When you first open the Vega editor in {kib}, you will see a pre-populated
+line chart which shows the total number of documents across all your indices
+within the time range.
-The default visualization uses Vega-Lite version 2. To use Vega version 4, edit
-the `schema`.
+[role="screenshot"]
+image::visualize/images/vega_lite_default.png[]
+
+The text editor contains a Vega-Lite spec written in https://hjson.github.io/[HJSON],
+which is similar to JSON but optimized for human editing. HJSON supports:
-Go to `$schema`, enter `https://vega.github.io/schema/vega/v5.json`, then click
-*Update*.
+* Comments using // or /* syntax
+* Object keys without quotes
+* String values without quotes
+* Optional commas
+* Double or single quotes
+* Multiline strings
[float]
-[[vega-type]]
-==== Change the visualization type
+==== Small steps
-The default visualization is a line chart. To change the visualization type,
-change the `mark` value. The supported visualization types are listed in the
-text editor.
+Always work on Vega in the smallest steps possible, and save your work frequently.
+Small changes can cause unexpected results. Click the "Save" button now.
-Go to `mark`, change the value to a different visualization type, then click
-*Update*.
+The first step is to change the index to one of the <>
+sets. Change
+
+```yaml
+index: _all
+```
+
+to:
+
+```yaml
+index: kibana_sample_data_ecommerce
+```
+
+Click "Update". The result is probably not what you expect. You should see a flat
+line with 0 results.
+
+You've only changed the index, so the difference must be the query is returning
+no results. You can try the <>,
+but intuition may be faster for this particular problem.
+
+In this case, the problem is that you are querying the field `@timestamp`,
+which does not exist in the `kibana_sample_data_ecommerce` data. Find and replace
+`@timestamp` with `order_date`. This fixes the problem, leaving you with this spec:
+
+.Expand Vega-Lite spec
+[%collapsible%closed]
+====
+[source,yaml]
+----
+{
+ $schema: https://vega.github.io/schema/vega-lite/v4.json
+ title: Event counts from ecommerce
+ data: {
+ url: {
+ %context%: true
+ %timefield%: order_date
+ index: kibana_sample_data_ecommerce
+ body: {
+ aggs: {
+ time_buckets: {
+ date_histogram: {
+ field: order_date
+ interval: {%autointerval%: true}
+ extended_bounds: {
+ min: {%timefilter%: "min"}
+ max: {%timefilter%: "max"}
+ }
+ min_doc_count: 0
+ }
+ }
+ }
+ size: 0
+ }
+ }
+ format: {property: "aggregations.time_buckets.buckets" }
+ }
+
+ mark: line
+
+ encoding: {
+ x: {
+ field: key
+ type: temporal
+ axis: { title: null }
+ }
+ y: {
+ field: doc_count
+ type: quantitative
+ axis: { title: "Document count" }
+ }
+ }
+}
+----
+
+====
+
+Now, let's make the visualization more interesting by adding another aggregation
+to create a stacked area chart. To verify that you have constructed the right
+query, it is easiest to use the {kib} Dev Tools in a separate tab from the
+Vega editor. Open the Dev Tools from the Management section of the navigation.
+
+This query is roughly equivalent to the one that is used in the default
+Vega-Lite spec. Copy it into the Dev Tools:
+
+```js
+POST kibana_sample_data_ecommerce/_search
+{
+ "query": {
+ "range": {
+ "order_date": {
+ "gte": "now-7d"
+ }
+ }
+ },
+ "aggs": {
+ "time_buckets": {
+ "date_histogram": {
+ "field": "order_date",
+ "fixed_interval": "1d",
+ "extended_bounds": {
+ "min": "now-7d"
+ },
+ "min_doc_count": 0
+ }
+ }
+ },
+ "size": 0
+}
+```
+
+There's not enough data to create a stacked bar in the original query, so we
+will add a new
+{ref}/search-aggregations-bucket-terms-aggregation.html[terms aggregation]:
+
+```js
+POST kibana_sample_data_ecommerce/_search
+{
+ "query": {
+ "range": {
+ "order_date": {
+ "gte": "now-7d"
+ }
+ }
+ },
+ "aggs": {
+ "categories": {
+ "terms": { "field": "category.keyword" },
+ "aggs": {
+ "time_buckets": {
+ "date_histogram": {
+ "field": "order_date",
+ "fixed_interval": "1d",
+ "extended_bounds": {
+ "min": "now-7d"
+ },
+ "min_doc_count": 0
+ }
+ }
+ }
+ }
+ },
+ "size": 0
+}
+```
+
+You'll see that the response format looks different from the previous query:
+
+```json
+{
+ "aggregations" : {
+ "categories" : {
+ "doc_count_error_upper_bound" : 0,
+ "sum_other_doc_count" : 0,
+ "buckets" : [{
+ "key" : "Men's Clothing",
+ "doc_count" : 1661,
+ "time_buckets" : {
+ "buckets" : [{
+ "key_as_string" : "2020-06-30T00:00:00.000Z",
+ "key" : 1593475200000,
+ "doc_count" : 19
+ }, {
+ "key_as_string" : "2020-07-01T00:00:00.000Z",
+ "key" : 1593561600000,
+ "doc_count" : 71
+ }]
+ }
+ }]
+ }
+ }
+}
+```
+
+Now that we have data that we're happy with, it's time to convert from an
+isolated {es} query into a query with {kib} integration. Looking at the
+<>, you will
+see the full list of special tokens that are used in this query, such
+as `%context: true`. This query has also replaced `"fixed_interval": "1d"`
+with `interval: {%autointerval%: true}`. Copy the final query into
+your spec:
+
+```yaml
+ data: {
+ url: {
+ %context%: true
+ %timefield%: order_date
+ index: kibana_sample_data_ecommerce
+ body: {
+ aggs: {
+ categories: {
+ terms: { field: "category.keyword" }
+ aggs: {
+ time_buckets: {
+ date_histogram: {
+ field: order_date
+ interval: {%autointerval%: true}
+ extended_bounds: {
+ min: {%timefilter%: "min"}
+ max: {%timefilter%: "max"}
+ }
+ min_doc_count: 0
+ }
+ }
+ }
+ }
+ }
+ size: 0
+ }
+ }
+ format: {property: "aggregations.categories.buckets" }
+ }
+```
+
+If you copy and paste that into your Vega-Lite spec, and click "Update",
+you will see a warning saying `Infinite extent for field "key": [Infinity, -Infinity]`.
+Let's use our <> to understand why.
+
+Vega-Lite generates data using the names `source_0` and `data_0`. `source_0` contains
+the results from the {es} query, and `data_0` contains the visually encoded results
+which are shown in the chart. To debug this problem, you need to compare both.
+
+To look at the source, open the browser dev tools console and type
+`VEGA_DEBUG.view.data('source_0')`. You will see:
+
+```js
+[{
+ doc_count: 454
+ key: "Men's Clothing"
+ time_buckets: {buckets: Array(57)}
+ Symbol(vega_id): 12822
+}, ...]
+```
+
+To compare to the visually encoded data, open the browser dev tools console and type
+`VEGA_DEBUG.view.data('data_0')`. You will see:
+
+```js
+[{
+ doc_count: 454
+ key: NaN
+ time_buckets: {buckets: Array(57)}
+ Symbol(vega_id): 13879
+}]
+```
+
+The issue seems to be that the `key` property is not being converted the right way,
+which makes sense because the `key` is now `Men's Clothing` instead of a timestamp.
+
+To fix this, try updating the `encoding` of your Vega-Lite spec to:
+
+```yaml
+ encoding: {
+ x: {
+ field: time_buckets.buckets.key
+ type: temporal
+ axis: { title: null }
+ }
+ y: {
+ field: time_buckets.buckets.doc_count
+ type: quantitative
+ axis: { title: "Document count" }
+ }
+ }
+```
+
+This will show more errors, and you can inspect `VEGA_DEBUG.view.data('data_0')` to
+understand why. This now shows:
+
+```js
+[{
+ doc_count: 454
+ key: "Men's Clothing"
+ time_buckets: {buckets: Array(57)}
+ time_buckets.buckets.doc_count: undefined
+ time_buckets.buckets.key: null
+ Symbol(vega_id): 14094
+}]
+```
+
+It looks like the problem is that the `time_buckets` inner array is not being
+extracted by Vega. The solution is to use a Vega-Lite
+https://vega.github.io/vega-lite/docs/flatten.html[flatten transformation], available in {kib} 7.9 and later.
+If using an older version of Kibana, the flatten transformation is available in Vega
+but not Vega-Lite.
+
+Add this section in between the `data` and `encoding` section:
+
+```yaml
+ transform: [{
+ flatten: ["time_buckets.buckets"]
+ }]
+```
+
+This does not yet produce the results you expect. Inspect the transformed data
+by typing `VEGA_DEBUG.view.data('data_0')` into the console again:
+
+```js
+[{
+ doc_count: 453
+ key: "Men's Clothing"
+ time_bucket.buckets.doc_count: undefined
+ time_buckets: {buckets: Array(57)}
+ time_buckets.buckets: {
+ key_as_string: "2020-06-30T15:00:00.000Z",
+ key: 1593529200000,
+ doc_count: 2
+ }
+ time_buckets.buckets.key: null
+ Symbol(vega_id): 21564
+}]
+```
+
+The debug view shows `undefined` values where you would expect to see numbers, and
+the cause is that there are duplicate names which are confusing Vega-Lite. This can
+be fixed by making this change to the `transform` and `encoding` blocks:
+
+```yaml
+ transform: [{
+ flatten: ["time_buckets.buckets"],
+ as: ["buckets"]
+ }]
+
+ mark: area
+
+ encoding: {
+ x: {
+ field: buckets.key
+ type: temporal
+ axis: { title: null }
+ }
+ y: {
+ field: buckets.doc_count
+ type: quantitative
+ axis: { title: "Document count" }
+ }
+ color: {
+ field: key
+ type: nominal
+ }
+ }
+```
+
+At this point, you have a stacked area chart that shows the top categories,
+but the chart is still missing some common features that we expect from a {kib}
+visualization. Let's add hover states and tooltips next.
+
+Hover states are handled differently in Vega-Lite and Vega. In Vega-Lite this is
+done using a concept called `selection`, which has many permutations that are not
+covered in this tutorial. We will be adding a simple tooltip and hover state.
+
+Because {kib} has enabled the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin],
+tooltips can be defined in several ways:
+
+* Automatic tooltip based on the data, via `{ content: "data" }`
+* Array of fields, like `[{ field: "key", type: "nominal" }]`
+* Defining a custom Javascript object using the `calculate` transform
+
+For the simple tooltip, add this to your encoding:
+
+```yaml
+ encoding: {
+ tooltip: [{
+ field: buckets.key
+ type: temporal
+ title: "Date"
+ }, {
+ field: key
+ type: nominal
+ title: "Category"
+ }, {
+ field: buckets.doc_count
+ type: quantitative
+ title: "Count"
+ }]
+ }
+```
+
+As you hover over the area series in your chart, a multi-line tooltip will
+appear, but it won't indicate the nearest point that it's pointing to. To
+indicate the nearest point, we need to add a second layer.
+
+The first step is to remove the `mark: area` from your visualization.
+Once you've removed the previous mark, add a composite mark at the end of
+the Vega-Lite spec:
+
+```yaml
+ layer: [{
+ mark: area
+ }, {
+ mark: point
+ }]
+```
+
+You'll see that the points are not appearing to line up with the area chart,
+and the reason is that the points are not being stacked. Change your Y encoding
+to this:
+
+```yaml
+ y: {
+ field: buckets.doc_count
+ type: quantitative
+ axis: { title: "Document count" }
+ stack: true
+ }
+```
+
+Now, we will add a `selection` block inside the point mark:
+
+```yaml
+ layer: [{
+ mark: area
+ }, {
+ mark: point
+
+ selection: {
+ pointhover: {
+ type: single
+ on: mouseover
+ clear: mouseout
+ empty: none
+ fields: ["buckets.key", "key"]
+ nearest: true
+ }
+ }
+
+ encoding: {
+ size: {
+ condition: {
+ selection: pointhover
+ value: 100
+ }
+ value: 5
+ }
+ fill: {
+ condition: {
+ selection: pointhover
+ value: white
+ }
+ }
+ }
+ }]
+```
+
+Now that you've enabled a selection, try moving the mouse around the visualization
+and seeing the points respond to the nearest position:
+
+[role="screenshot"]
+image::visualize/images/vega_lite_tutorial_2.png[]
+
+The final result of this tutorial is this spec:
+
+.Expand final Vega-Lite spec
+[%collapsible%closed]
+====
+[source,yaml]
+----
+{
+ $schema: https://vega.github.io/schema/vega-lite/v4.json
+ title: Event counts from ecommerce
+ data: {
+ url: {
+ %context%: true
+ %timefield%: order_date
+ index: kibana_sample_data_ecommerce
+ body: {
+ aggs: {
+ categories: {
+ terms: { field: "category.keyword" }
+ aggs: {
+ time_buckets: {
+ date_histogram: {
+ field: order_date
+ interval: {%autointerval%: true}
+ extended_bounds: {
+ min: {%timefilter%: "min"}
+ max: {%timefilter%: "max"}
+ }
+ min_doc_count: 0
+ }
+ }
+ }
+ }
+ }
+ size: 0
+ }
+ }
+ format: {property: "aggregations.categories.buckets" }
+ }
+
+ transform: [{
+ flatten: ["time_buckets.buckets"]
+ as: ["buckets"]
+ }]
+
+ encoding: {
+ x: {
+ field: buckets.key
+ type: temporal
+ axis: { title: null }
+ }
+ y: {
+ field: buckets.doc_count
+ type: quantitative
+ axis: { title: "Document count" }
+ stack: true
+ }
+ color: {
+ field: key
+ type: nominal
+ title: "Category"
+ }
+ tooltip: [{
+ field: buckets.key
+ type: temporal
+ title: "Date"
+ }, {
+ field: key
+ type: nominal
+ title: "Category"
+ }, {
+ field: buckets.doc_count
+ type: quantitative
+ title: "Count"
+ }]
+ }
+
+ layer: [{
+ mark: area
+ }, {
+ mark: point
+
+ selection: {
+ pointhover: {
+ type: single
+ on: mouseover
+ clear: mouseout
+ empty: none
+ fields: ["buckets.key", "key"]
+ nearest: true
+ }
+ }
+
+ encoding: {
+ size: {
+ condition: {
+ selection: pointhover
+ value: 100
+ }
+ value: 5
+ }
+ fill: {
+ condition: {
+ selection: pointhover
+ value: white
+ }
+ }
+ }
+ }]
+}
+----
+
+====
+
+[[vega-tutorial]]
+=== Tutorial: Updating {kib} filters from Vega
+
+In this tutorial you will build an area chart in Vega using an {es} search query,
+and add a click handler and drag handler to update {kib} filters.
+This tutorial is not a full https://vega.github.io/vega/tutorials/[Vega tutorial],
+but will cover the basics of creating Vega visualizations in {kib}.
+
+First, create an almost-blank Vega chart by pasting this into the editor:
+
+```yaml
+{
+ $schema: "https://vega.github.io/schema/vega/v5.json"
+ data: [{
+ name: source_0
+ }]
+
+ scales: [{
+ name: x
+ type: time
+ range: width
+ }, {
+ name: y
+ type: linear
+ range: height
+ }]
+
+ axes: [{
+ orient: bottom
+ scale: x
+ }, {
+ orient: left
+ scale: y
+ }]
+
+ marks: [
+ {
+ type: area
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ }
+ }
+ }
+ ]
+}
+```
+
+Despite being almost blank, this Vega spec still contains the minimum requirements:
+
+* Data
+* Scales
+* Marks
+* (optional) Axes
+
+Next, add a valid {es} search query in the `data` block:
+
+```yaml
+ data: [
+ {
+ name: source_0
+ url: {
+ %context%: true
+ %timefield%: order_date
+ index: kibana_sample_data_ecommerce
+ body: {
+ aggs: {
+ time_buckets: {
+ date_histogram: {
+ field: order_date
+ fixed_interval: "3h"
+ extended_bounds: {
+ min: {%timefilter%: "min"}
+ max: {%timefilter%: "max"}
+ }
+ min_doc_count: 0
+ }
+ }
+ }
+ size: 0
+ }
+ }
+ format: { property: "aggregations.time_buckets.buckets" }
+ }
+ ]
+```
+
+Click "Update", and nothing will change in the visualization. The first step
+is to change the X and Y scales based on the data:
+
+```yaml
+ scales: [{
+ name: x
+ type: time
+ range: width
+ domain: {
+ data: source_0
+ field: key
+ }
+ }, {
+ name: y
+ type: linear
+ range: height
+ domain: {
+ data: source_0
+ field: doc_count
+ }
+ }]
+```
+
+Click "Update", and you will see that the X and Y axes are now showing labels based
+on the real data.
+
+Next, encode the fields `key` and `doc_count` as the X and Y values:
+
+```yaml
+ marks: [
+ {
+ type: area
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ x: {
+ scale: x
+ field: key
+ }
+ y: {
+ scale: y
+ value: 0
+ }
+ y2: {
+ scale: y
+ field: doc_count
+ }
+ }
+ }
+ }
+ ]
+```
+
+Click "Update" and you will get a basic area chart:
+
+[role="screenshot"]
+image::visualize/images/vega_tutorial_3.png[]
+
+Next, add a new block to the `marks` section. This will show clickable points to filter for a specific
+date:
+
+```yaml
+ {
+ name: point
+ type: symbol
+ style: ["point"]
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ x: {
+ scale: x
+ field: key
+ }
+ y: {
+ scale: y
+ field: doc_count
+ }
+ size: {
+ value: 100
+ }
+ fill: {
+ value: black
+ }
+ }
+ }
+ }
+```
+
+Next, we will create a Vega signal to make the points clickable. You can access
+the clicked `datum` in the expression used to update. In this case, you want
+clicks on points to add a time filter with the 3-hour interval defined above.
+
+```yaml
+ signals: [
+ {
+ name: point_click
+ on: [{
+ events: {
+ source: scope
+ type: click
+ markname: point
+ }
+ update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)'''
+ }]
+ }
+ ]
+```
+
+This event is using the {kib} custom function `kibanaSetTimeFilter` to generate a filter that
+gets applied to the entire dashboard on click.
+
+The mouse cursor does not currently indicate that the chart is interactive. Find the `marks` section,
+and update the mark named `point` by adding `cursor: { value: "pointer" }` to
+the `encoding` section like this:
+
+```yaml
+ {
+ name: point
+ type: symbol
+ style: ["point"]
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ ...
+ cursor: { value: "pointer" }
+ }
+ }
+ }
+```
+
+Next, we will add a drag interaction which will allow the user to narrow into
+a specific time range in the visualization. This will require adding more signals, and
+adding a rectangle overlay:
+
+[role="screenshot"]
+image::visualize/images/vega_tutorial_4.png[]
+
+The first step is to add a new `signal` to track the X position of the cursor:
+
+```yaml
+ {
+ name: currentX
+ value: -1
+ on: [{
+ events: {
+ type: mousemove
+ source: view
+ },
+ update: "clamp(x(), 0, width)"
+ }, {
+ events: {
+ type: mouseout
+ source: view
+ }
+ update: "-1"
+ }]
+ }
+```
+
+Now add a new `mark` to indicate the current cursor position:
+
+```yaml
+ {
+ type: rule
+ interactive: false
+ encode: {
+ update: {
+ y: {value: 0}
+ y2: {signal: "height"}
+ stroke: {value: "gray"}
+ strokeDash: {
+ value: [2, 1]
+ }
+ x: {signal: "max(currentX,0)"}
+ defined: {signal: "currentX > 0"}
+ }
+ }
+ }
+```
+
+Next, add a signal to track the current selected range, which will update
+until the user releases the mouse button or uses the escape key:
+
+
+```yaml
+ {
+ name: selected
+ value: [0, 0]
+ on: [{
+ events: {
+ type: mousedown
+ source: view
+ }
+ update: "[clamp(x(), 0, width), clamp(x(), 0, width)]"
+ }, {
+ events: {
+ type: mousemove
+ source: window
+ consume: true
+ between: [{
+ type: mousedown
+ source: view
+ }, {
+ merge: [{
+ type: mouseup
+ source: window
+ }, {
+ type: keydown
+ source: window
+ filter: "event.key === 'Escape'"
+ }]
+ }]
+ }
+ update: "[selected[0], clamp(x(), 0, width)]"
+ }, {
+ events: {
+ type: keydown
+ source: window
+ filter: "event.key === 'Escape'"
+ }
+ update: "[0, 0]"
+ }]
+ }
+```
+
+Now that there is a signal which tracks the time range from the user, we need to indicate
+the range visually by adding a new mark which only appears conditionally:
+
+```yaml
+ {
+ type: rect
+ name: selectedRect
+ encode: {
+ update: {
+ height: {signal: "height"}
+ fill: {value: "#333"}
+ fillOpacity: {value: 0.2}
+ x: {signal: "selected[0]"}
+ x2: {signal: "selected[1]"}
+ defined: {signal: "selected[0] !== selected[1]"}
+ }
+ }
+ }
+```
+
+Finally, add a new signal which will update the {kib} time filter when the mouse is released while
+dragging:
+
+```yaml
+ {
+ name: applyTimeFilter
+ value: null
+ on: [{
+ events: {
+ type: mouseup
+ source: view
+ }
+ update: '''selected[0] !== selected[1] ? kibanaSetTimeFilter(
+ invert('x',selected[0]),
+ invert('x',selected[1])) : null'''
+ }]
+ }
+```
+
+Putting this all together, your visualization now supports the main features of
+standard visualizations in {kib}, but with the potential to add even more control.
+The final Vega spec for this tutorial is here:
+
+.Expand final Vega spec
+[%collapsible%closed]
+====
+[source,yaml]
+----
+{
+ $schema: "https://vega.github.io/schema/vega/v5.json"
+ data: [
+ {
+ name: source_0
+ url: {
+ %context%: true
+ %timefield%: order_date
+ index: kibana_sample_data_ecommerce
+ body: {
+ aggs: {
+ time_buckets: {
+ date_histogram: {
+ field: order_date
+ fixed_interval: "3h"
+ extended_bounds: {
+ min: {%timefilter%: "min"}
+ max: {%timefilter%: "max"}
+ }
+ min_doc_count: 0
+ }
+ }
+ }
+ size: 0
+ }
+ }
+ format: { property: "aggregations.time_buckets.buckets" }
+ }
+ ]
+
+ scales: [{
+ name: x
+ type: time
+ range: width
+ domain: {
+ data: source_0
+ field: key
+ }
+ }, {
+ name: y
+ type: linear
+ range: height
+ domain: {
+ data: source_0
+ field: doc_count
+ }
+ }]
+
+ axes: [{
+ orient: bottom
+ scale: x
+ }, {
+ orient: left
+ scale: y
+ }]
+
+ marks: [
+ {
+ type: area
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ x: {
+ scale: x
+ field: key
+ }
+ y: {
+ scale: y
+ value: 0
+ }
+ y2: {
+ scale: y
+ field: doc_count
+ }
+ }
+ }
+ },
+ {
+ name: point
+ type: symbol
+ style: ["point"]
+ from: {
+ data: source_0
+ }
+ encode: {
+ update: {
+ x: {
+ scale: x
+ field: key
+ }
+ y: {
+ scale: y
+ field: doc_count
+ }
+ size: {
+ value: 100
+ }
+ fill: {
+ value: black
+ }
+ cursor: { value: "pointer" }
+ }
+ }
+ },
+ {
+ type: rule
+ interactive: false
+ encode: {
+ update: {
+ y: {value: 0}
+ y2: {signal: "height"}
+ stroke: {value: "gray"}
+ strokeDash: {
+ value: [2, 1]
+ }
+ x: {signal: "max(currentX,0)"}
+ defined: {signal: "currentX > 0"}
+ }
+ }
+ },
+ {
+ type: rect
+ name: selectedRect
+ encode: {
+ update: {
+ height: {signal: "height"}
+ fill: {value: "#333"}
+ fillOpacity: {value: 0.2}
+ x: {signal: "selected[0]"}
+ x2: {signal: "selected[1]"}
+ defined: {signal: "selected[0] !== selected[1]"}
+ }
+ }
+ }
+ ]
+
+ signals: [
+ {
+ name: point_click
+ on: [{
+ events: {
+ source: scope
+ type: click
+ markname: point
+ }
+ update: '''kibanaSetTimeFilter(datum.key, datum.key + 3 * 60 * 60 * 1000)'''
+ }]
+ }
+ {
+ name: currentX
+ value: -1
+ on: [{
+ events: {
+ type: mousemove
+ source: view
+ },
+ update: "clamp(x(), 0, width)"
+ }, {
+ events: {
+ type: mouseout
+ source: view
+ }
+ update: "-1"
+ }]
+ }
+ {
+ name: selected
+ value: [0, 0]
+ on: [{
+ events: {
+ type: mousedown
+ source: view
+ }
+ update: "[clamp(x(), 0, width), clamp(x(), 0, width)]"
+ }, {
+ events: {
+ type: mousemove
+ source: window
+ consume: true
+ between: [{
+ type: mousedown
+ source: view
+ }, {
+ merge: [{
+ type: mouseup
+ source: window
+ }, {
+ type: keydown
+ source: window
+ filter: "event.key === 'Escape'"
+ }]
+ }]
+ }
+ update: "[selected[0], clamp(x(), 0, width)]"
+ }, {
+ events: {
+ type: keydown
+ source: window
+ filter: "event.key === 'Escape'"
+ }
+ update: "[0, 0]"
+ }]
+ }
+ {
+ name: applyTimeFilter
+ value: null
+ on: [{
+ events: {
+ type: mouseup
+ source: view
+ }
+ update: '''selected[0] !== selected[1] ? kibanaSetTimeFilter(
+ invert('x',selected[0]),
+ invert('x',selected[1])) : null'''
+ }]
+ }
+ ]
+}
+
+----
+====
+
+[[vega-reference]]
+=== Reference for {kib} extensions
+
+{kib} has extended Vega and Vega-Lite with extensions that support:
+
+* Default height and width
+* Default theme to match {kib}
+* Writing {es} queries using the time range and filters from dashboards
+* Using the Elastic Map Service in Vega maps
+* Additional tooltip styling
+* Advanced setting to enable URL loading from any domain
+* Limited debugging support using the browser dev tools
+* (Vega only) Expression functions which can update the time range and dashboard filters
-[float]
[[vega-sizing-and-positioning]]
-==== Change the layout
+==== Default height and width
By default, Vega visualizations use the `autosize = { type: 'fit', contains: 'padding' }` layout.
`fit` uses all available space, ignores `width` and `height` values,
and respects the padding values. To override this behavior, change the
`autosize` value.
-[[vega-querying-elasticsearch]]
-=== Query {es}
+[[vega-theme]]
+==== Default theme to match {kib}
+
+{kib} registers a default https://vega.github.io/vega/docs/schemes/[Vega color scheme]
+with the id `elastic`, and sets a default color for each `mark` type.
+Override it by providing a different `stroke`, `fill`, or `color` (Vega-Lite) value.
+
+[[vega-queries]]
+==== Writing {es} queries in Vega
+
+experimental[] {kib} extends the Vega https://vega.github.io/vega/docs/data/[data] elements
+with support for direct {es} queries specified as a `url`.
-experimental[] Vega https://vega.github.io/vega/docs/data/[data] elements
-use embedded and external data with a `"url"` parameter. {kib} adds support for
-direct {es} queries by overloading
-the `"url"` value.
+Because of this, {kib} is **unable to support dynamically loaded data**,
+which would otherwise work in Vega. All data is fetched before it's passed to
+the Vega renderer.
-NOTE: With Vega, you dynamically load your data by setting signals as data URLs.
-Since {kib} is unable to support dynamically loaded data, all data is fetched
-before it's passed to the Vega renderer.
+To define an {es} query in Vega, set the `url` to an object. {kib} will parse
+the object looking for special tokens that allow your query to integrate with {kib}.
+These tokens are:
-For example, count the number of documents in all indices:
+* `%context%: true`: Set at the top level, and replaces the `query` section with filters from dashboard
+* `%timefield%: `: Set at the top level, integrates the query with the dashboard time filter
+* `{%timefilter%: true}`: Replaced by an {es} range query with upper and lower bounds
+* `{%timefilter%: "min" | "max"}`: Replaced only by the upper or lower bounds
+* `{%timefilter%: true, shift: -1, unit: 'hour'}`: Generates a time range query one hour in the past
+* `{%autointerval%: true}`: Replaced by the string which contains the automatic {kib} time interval, such as `1h`
+* `{%autointerval%: 10}`: Replaced by a string which is approximately dividing the time into 10 ranges, allowing
+ you to influence the automatic interval
+* `"%dashboard_context-must_clause%"`: String replaced by an object containing filters
+* `"%dashboard_context-filter_clause%"`: String replaced by an object containing filters
+* `"%dashboard_context-must_not_clause%"`: String replaced by an object containing filters
+
+Putting this together, an example query that counts the number of documents in
+a specific index:
[source,yaml]
----
@@ -80,8 +1296,8 @@ url: {
%context%: true
// Which indexes to search
- index: _all
- // The body element may contain "aggs" and "query" subfields
+ index: kibana_sample_data_logs
+ // The body element may contain "aggs" and "query" keys
body: {
aggs: {
time_buckets: {
@@ -183,7 +1399,7 @@ except that the time range is shifted back by 10 minutes:
}
----
-NOTE: When using `"%context%": true` or defining a value for "%timefield%"` the body cannot contain a query. To customize the query within the VEGA specification (e.g. add an additional filter, or shift the timefilter), define your query and use the placeholders as in the example above. The placeholders will be replaced by the actual context of the dashboard or visualization once parsed.
+NOTE: When using `"%context%": true` or defining a value for `"%timefield%"` the body cannot contain a query. To customize the query within the VEGA specification (e.g. add an additional filter, or shift the timefilter), define your query and use the placeholders as in the example above. The placeholders will be replaced by the actual context of the dashboard or visualization once parsed.
The `"%timefilter%"` can also be used to specify a single min or max
value. The date_histogram's `extended_bounds` can be set
@@ -194,6 +1410,7 @@ also supported. The `"interval"` can also be set dynamically, depending
on the currently picked range: `"interval": {"%autointerval%": 10}` will
try to get about 10-15 data points (buckets).
+[float]
[[vega-esmfiles]]
=== Access Elastic Map Service files
@@ -260,21 +1477,44 @@ Additionally, you can use `latitude`, `longitude`, and `zoom` signals.
These signals can be used in the graph, or can be updated to modify the
position of the map.
-Vega visualization ignore the `autosize`, `width`, `height`, and `padding`
-values, using `fit` model with zero padding.
+[float]
+[[vega-tooltip]]
+==== Additional tooltip styling
+
+{kib} has installed the https://vega.github.io/vega-lite/docs/tooltip.html[Vega tooltip plugin],
+so tooltips can be defined in the ways documented there. Beyond that, {kib} also supports
+a configuration option for changing the tooltip position and padding:
+
+```js
+{
+ config: {
+ kibana: {
+ tooltips: {
+ position: 'top',
+ padding: 15
+ }
+ }
+ }
+}
+```
+
+[[vega-url-loading]]
+==== Advanced setting to enable URL loading from any domain
-[[vega-debugging]]
-=== Debugging Vega
+Vega can load data from any URL, but this is disabled by default in {kib}.
+To change this, set `vis_type_vega.enableExternalUrls: true` in `kibana.yml`,
+then restart {kib}.
[[vega-browser-debugging-console]]
==== Browser debugging console
experimental[] Use browser debugging tools (for example, F12 or Ctrl+Shift+J in Chrome) to
inspect the `VEGA_DEBUG` variable:
-+
+
* `view` — Access to the Vega View object. See https://vega.github.io/vega/docs/api/debugging/[Vega Debugging Guide]
-on how to inspect data and signals at runtime. For Vega-Lite, `VEGA_DEBUG.view.data('source_0')` gets the main data set.
-For Vega, it uses the data name as defined in your Vega spec.
+on how to inspect data and signals at runtime. For Vega-Lite,
+`VEGA_DEBUG.view.data('source_0')` gets the pre-transformed data, and `VEGA_DEBUG.view.data('data_0')`
+gets the encoded data. For Vega, it uses the data name as defined in your Vega spec.
* `vega_spec` — Vega JSON graph specification after some modifications by {kib}. In case
of Vega-Lite, this is the output of the Vega-Lite compiler.
@@ -283,7 +1523,7 @@ of Vega-Lite, this is the output of the Vega-Lite compiler.
Vega-Lite compilation.
[[vega-data]]
-==== Data
+==== Debugging data
experimental[] If you are using an {es} query, make sure your resulting data is
what you expected. The easiest way to view it is by using the "networking"
@@ -294,45 +1534,52 @@ https://www.elastic.co/guide/en/kibana/current/console-kibana.html[Dev Tools]. P
`GET /_search`, then add your query as the following lines
(just the value of the `"query"` field).
-If you need to share your graph with someone, copy the
-raw data response to https://gist.github.com/[gist.github.com], possibly
-with a `.json` extension, use the `[raw]` button, and use that url
-directly in your graph.
+[[vega-getting-help]]
+==== Asking for help with a Vega spec
-To restrict Vega from using non-ES data sources, add `vega.enableExternalUrls: false`
-to your kibana.yml file.
+Because of the dynamic nature of the data in {es}, it is hard to help you with
+Vega specs unless you can share a dataset. To do this, use the browser developer
+tools and type:
-[[vega-notes]]
-[[vega-useful-links]]
-=== Resources and examples
+`JSON.stringify(VEGA_DEBUG.vegalite_spec, null, 2)`
-experimental[] To learn more about Vega and Vega-List, refer to the resources and examples.
+Copy the response to https://gist.github.com/[gist.github.com], possibly
+with a `.json` extension, use the `[raw]` button, and share that when
+asking for help.
-==== Vega editor
-The https://vega.github.io/editor/[Vega Editor] includes examples for Vega & Vega-Lite, but does not support any
-{kib}-specific features like {es} requests and interactive base maps.
+[float]
+[[vega-expression-functions]]
+==== (Vega only) Expression functions which can update the time range and dashboard filters
-==== Vega-Lite resources
-* https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials]
-* https://vega.github.io/vega-lite/docs/[Docs]
-* https://vega.github.io/vega-lite/examples/[Examples]
+{kib} has extended the Vega expression language with these functions:
-==== Vega resources
-* https://vega.github.io/vega/tutorials/[Tutorials]
-* https://vega.github.io/vega/docs/[Docs]
-* https://vega.github.io/vega/examples/[Examples]
+```js
+/**
+ * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor
+ * @param {string} [index] as defined in Kibana, or default if missing
+ */
+kibanaAddFilter(query, index)
-TIP: When you use the examples, you may
-need to modify the "data" section to use absolute URL. For example,
-replace `"url": "data/world-110m.json"` with
-`"url": "https://vega.github.io/editor/data/world-110m.json"`.
+/**
+ * @param {object} query Elastic Query DSL snippet, as used in the query DSL editor
+ * @param {string} [index] as defined in Kibana, or default if missing
+ */
+kibanaRemoveFilter(query, index)
+
+kibanaRemoveAllFilters()
+/**
+ * Update dashboard time filter to the new values
+ * @param {number|string|Date} start
+ * @param {number|string|Date} end
+ */
+kibanaSetTimeFilter(start, end)
+```
+
+[float]
[[vega-additional-configuration-options]]
==== Additional configuration options
-These options are specific to the {kib}. link:#vega-with-a-map[Map support] has
-additional configuration options.
-
[source,yaml]
----
{
@@ -343,12 +1590,37 @@ additional configuration options.
controlsLocation: top
// Can be `vertical` or `horizontal` (default).
controlsDirection: vertical
- // If true, hides most of Vega and VegaLite warnings
+ // If true, hides most of Vega and Vega-Lite warnings
hideWarnings: true
// Vega renderer to use: `svg` or `canvas` (default)
renderer: canvas
}
}
- /* the rest of Vega code */
}
----
+
+
+[[vega-notes]]
+[[vega-useful-links]]
+=== Resources and examples
+
+experimental[] To learn more about Vega and Vega-Lite, refer to the resources and examples.
+
+==== Vega editor
+The https://vega.github.io/editor/[Vega Editor] includes examples for Vega & Vega-Lite, but does not support any
+{kib}-specific features like {es} requests and interactive base maps.
+
+==== Vega-Lite resources
+* https://vega.github.io/vega-lite/tutorials/getting_started.html[Tutorials]
+* https://vega.github.io/vega-lite/docs/[Docs]
+* https://vega.github.io/vega-lite/examples/[Examples]
+
+==== Vega resources
+* https://vega.github.io/vega/tutorials/[Tutorials]
+* https://vega.github.io/vega/docs/[Docs]
+* https://vega.github.io/vega/examples/[Examples]
+
+TIP: When you use the examples in {kib}, you may
+need to modify the "data" section to use an absolute URL. For example,
+replace `"url": "data/world-110m.json"` with
+`"url": "https://vega.github.io/editor/data/world-110m.json"`.
diff --git a/examples/routing_example/README.md b/examples/routing_example/README.md
new file mode 100644
index 0000000000000..0a88707bf70bb
--- /dev/null
+++ b/examples/routing_example/README.md
@@ -0,0 +1,9 @@
+Team owner: Platform
+
+A working example of a plugin that registers and uses multiple custom routes.
+
+Read more:
+
+- [IRouter API Docs](../../docs/development/core/server/kibana-plugin-core-server.irouter.md)
+- [HttpHandler (core.http.fetch) API Docs](../../docs/development/core/public/kibana-plugin-core-public.httphandler.md)
+- [Routing Conventions](../../STYLEGUIDE.md#api-endpoints)
\ No newline at end of file
diff --git a/examples/routing_example/common/index.ts b/examples/routing_example/common/index.ts
new file mode 100644
index 0000000000000..5aa47b1f69cdf
--- /dev/null
+++ b/examples/routing_example/common/index.ts
@@ -0,0 +1,27 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const RANDOM_NUMBER_ROUTE_PATH = '/api/random_number';
+
+export const RANDOM_NUMBER_BETWEEN_ROUTE_PATH = '/api/random_number_between';
+
+export const POST_MESSAGE_ROUTE_PATH = '/api/post_message';
+
+// Internal APIs should use the `internal` prefix, instead of the `api` prefix.
+export const INTERNAL_GET_MESSAGE_BY_ID_ROUTE = '/internal/get_message';
diff --git a/examples/routing_example/kibana.json b/examples/routing_example/kibana.json
new file mode 100644
index 0000000000000..37851a0da5a85
--- /dev/null
+++ b/examples/routing_example/kibana.json
@@ -0,0 +1,9 @@
+{
+ "id": "routingExample",
+ "version": "0.0.1",
+ "kibanaVersion": "kibana",
+ "server": true,
+ "ui": true,
+ "requiredPlugins": ["developerExamples"],
+ "optionalPlugins": []
+}
diff --git a/examples/routing_example/public/app.tsx b/examples/routing_example/public/app.tsx
new file mode 100644
index 0000000000000..3b33cb33ccb01
--- /dev/null
+++ b/examples/routing_example/public/app.tsx
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import React from 'react';
+import ReactDOM from 'react-dom';
+import { AppMountParameters } from 'kibana/public';
+import {
+ EuiPage,
+ EuiPageBody,
+ EuiPageContent,
+ EuiText,
+ EuiHorizontalRule,
+ EuiPageContentHeader,
+ EuiListGroup,
+} from '@elastic/eui';
+import { RandomNumberRouteExample } from './random_number_example';
+import { RandomNumberBetweenRouteExample } from './random_number_between_example';
+import { Services } from './services';
+import { PostMessageRouteExample } from './post_message_example';
+import { GetMessageRouteExample } from './get_message_example';
+
+type Props = Services;
+
+function RoutingExplorer({
+ fetchRandomNumber,
+ fetchRandomNumberBetween,
+ addSuccessToast,
+ postMessage,
+ getMessageById,
+}: Props) {
+ return (
+
+
+
+
+
+ Routing examples
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
+
+export const renderApp = (props: Props, element: AppMountParameters['element']) => {
+ ReactDOM.render(, element);
+
+ return () => ReactDOM.unmountComponentAtNode(element);
+};
diff --git a/examples/routing_example/public/get_message_example.tsx b/examples/routing_example/public/get_message_example.tsx
new file mode 100644
index 0000000000000..3c34326564d2b
--- /dev/null
+++ b/examples/routing_example/public/get_message_example.tsx
@@ -0,0 +1,96 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React, { useCallback } from 'react';
+import { useState } from 'react';
+import {
+ EuiText,
+ EuiButton,
+ EuiLoadingSpinner,
+ EuiFieldText,
+ EuiCallOut,
+ EuiFormRow,
+} from '@elastic/eui';
+import { HttpFetchError } from '../../../src/core/public';
+import { isError } from './is_error';
+import { Services } from './services';
+
+interface Props {
+ getMessageById: Services['getMessageById'];
+}
+
+export function GetMessageRouteExample({ getMessageById }: Props) {
+ const [error, setError] = useState();
+ const [isFetching, setIsFetching] = useState(false);
+ const [message, setMessage] = useState('');
+ const [id, setId] = useState('');
+
+ const doFetch = useCallback(async () => {
+ if (isFetching) return;
+ setIsFetching(true);
+ const response = await getMessageById(id);
+
+ if (isError(response)) {
+ setError(response);
+ setMessage('');
+ } else {
+ setError(undefined);
+ setMessage(response);
+ }
+
+ setIsFetching(false);
+ }, [isFetching, getMessageById, setMessage, id]);
+
+ return (
+
+
+ GET example with param
+
+ This examples uses a simple GET route that takes an id as a param in the route path.
+
+ setId(e.target.value)}
+ data-test-subj="routingExampleGetMessageId"
+ />
+
+
+
+ doFetch()}
+ >
+ {isFetching ? : 'Get message'}
+
+
+
+ {error !== undefined ? (
+
+ {error.message}
+
+ ) : null}
+ {message !== '' ? (
+
+ Message is:
{message}
+
+ ) : null}
+
+
+ );
+}
diff --git a/examples/routing_example/public/index.ts b/examples/routing_example/public/index.ts
new file mode 100644
index 0000000000000..2bb703e71cbef
--- /dev/null
+++ b/examples/routing_example/public/index.ts
@@ -0,0 +1,23 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { PluginInitializer } from 'kibana/public';
+import { RoutingExamplePlugin } from './plugin';
+
+export const plugin: PluginInitializer<{}, {}> = () => new RoutingExamplePlugin();
diff --git a/examples/routing_example/public/is_error.ts b/examples/routing_example/public/is_error.ts
new file mode 100644
index 0000000000000..528cca5b50d5d
--- /dev/null
+++ b/examples/routing_example/public/is_error.ts
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { HttpFetchError } from '../../../src/core/public';
+
+export function isError(error: T | HttpFetchError): error is HttpFetchError {
+ return error instanceof HttpFetchError;
+}
diff --git a/examples/routing_example/public/plugin.tsx b/examples/routing_example/public/plugin.tsx
new file mode 100644
index 0000000000000..eabdd2ade05b2
--- /dev/null
+++ b/examples/routing_example/public/plugin.tsx
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import {
+ CoreStart,
+ Plugin,
+ CoreSetup,
+ AppMountParameters,
+ AppNavLinkStatus,
+} from '../../../src/core/public';
+import { DeveloperExamplesSetup } from '../../developer_examples/public';
+import { getServices } from './services';
+
+interface SetupDeps {
+ developerExamples: DeveloperExamplesSetup;
+}
+
+export class RoutingExamplePlugin implements Plugin<{}, {}, SetupDeps, {}> {
+ public setup(core: CoreSetup, { developerExamples }: SetupDeps) {
+ core.application.register({
+ id: 'routingExample',
+ title: 'Routing',
+ navLinkStatus: AppNavLinkStatus.hidden,
+ async mount(params: AppMountParameters) {
+ const [coreStart] = await core.getStartServices();
+ const startServices = getServices(coreStart);
+ const { renderApp } = await import('./app');
+ return renderApp(startServices, params.element);
+ },
+ });
+
+ developerExamples.register({
+ appId: 'routingExample',
+ title: 'Routing',
+ description: `Examples show how to use core routing and fetch services to register and query your own custom routes.`,
+ links: [
+ {
+ label: 'IRouter',
+ href:
+ 'https://github.com/elastic/kibana/blob/master/docs/development/core/server/kibana-plugin-core-server.irouter.md',
+ iconType: 'logoGithub',
+ target: '_blank',
+ size: 's',
+ },
+ {
+ label: 'HttpHandler (core.http.fetch)',
+ href:
+ 'https://github.com/elastic/kibana/blob/master/docs/development/core/public/kibana-plugin-core-public.httphandler.md',
+ iconType: 'logoGithub',
+ target: '_blank',
+ size: 's',
+ },
+ ],
+ });
+ return {};
+ }
+
+ public start(core: CoreStart) {
+ return {};
+ }
+
+ public stop() {}
+}
diff --git a/examples/routing_example/public/post_message_example.tsx b/examples/routing_example/public/post_message_example.tsx
new file mode 100644
index 0000000000000..3004d66c4aa97
--- /dev/null
+++ b/examples/routing_example/public/post_message_example.tsx
@@ -0,0 +1,103 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React, { useCallback } from 'react';
+import { useState } from 'react';
+import {
+ EuiText,
+ EuiButton,
+ EuiLoadingSpinner,
+ EuiFieldText,
+ EuiCallOut,
+ EuiFormRow,
+ EuiTextArea,
+} from '@elastic/eui';
+import { HttpFetchError } from '../../../src/core/public';
+import { isError } from './is_error';
+import { Services } from './services';
+
+interface Props {
+ postMessage: Services['postMessage'];
+ addSuccessToast: Services['addSuccessToast'];
+}
+
+export function PostMessageRouteExample({ postMessage, addSuccessToast }: Props) {
+ const [error, setError] = useState();
+ const [isPosting, setIsPosting] = useState(false);
+ const [message, setMessage] = useState('');
+ const [id, setId] = useState('');
+
+ const doFetch = useCallback(async () => {
+ if (isPosting) return;
+ setIsPosting(true);
+ const response = await postMessage(message, id);
+
+ if (response && isError(response)) {
+ setError(response);
+ } else {
+ setError(undefined);
+ addSuccessToast('Message was added!');
+ setMessage('');
+ setId('');
+ }
+
+ setIsPosting(false);
+ }, [isPosting, postMessage, addSuccessToast, setMessage, message, id]);
+
+ return (
+
+
+ POST example with body
+
+ This examples uses a simple POST route that takes a body parameter and an id as a param in
+ the route path.
+
+
+ setId(e.target.value)}
+ data-test-subj="routingExampleSetMessageId"
+ />
+
+
+ setMessage(e.target.value)}
+ />
+
+
+
+ doFetch()}
+ >
+ {isPosting ? : 'Post message'}
+
+
+
+ {error !== undefined ? (
+
+ {error.message}
+
+ ) : null}
+
+
+ );
+}
diff --git a/examples/routing_example/public/random_number_between_example.tsx b/examples/routing_example/public/random_number_between_example.tsx
new file mode 100644
index 0000000000000..9f75060193114
--- /dev/null
+++ b/examples/routing_example/public/random_number_between_example.tsx
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React, { useCallback } from 'react';
+import { useState } from 'react';
+import {
+ EuiText,
+ EuiButton,
+ EuiLoadingSpinner,
+ EuiFieldText,
+ EuiCallOut,
+ EuiFormRow,
+} from '@elastic/eui';
+import { HttpFetchError } from '../../../src/core/public';
+import { isError } from './is_error';
+import { Services } from './services';
+
+interface Props {
+ fetchRandomNumberBetween: Services['fetchRandomNumberBetween'];
+}
+
+export function RandomNumberBetweenRouteExample({ fetchRandomNumberBetween }: Props) {
+ const [error, setError] = useState();
+ const [randomNumber, setRandomNumber] = useState(0);
+ const [isFetching, setIsFetching] = useState(false);
+ const [maxInput, setMaxInput] = useState('10');
+
+ const doFetch = useCallback(async () => {
+ if (isFetching) return;
+ setIsFetching(true);
+ const response = await fetchRandomNumberBetween(Number.parseInt(maxInput, 10));
+
+ if (isError(response)) {
+ setError(response);
+ } else {
+ setRandomNumber(response);
+ }
+
+ setIsFetching(false);
+ }, [isFetching, maxInput, fetchRandomNumberBetween]);
+
+ return (
+
+
+ GET example with query
+
+ This examples uses a simple GET route that takes a query parameter in the request and
+ returns a single number.
+
+
+ setMaxInput(e.target.value)}
+ isInvalid={isNaN(Number(maxInput))}
+ />
+
+
+
+ doFetch()}
+ >
+ {isFetching ? : 'Generate random number'}
+
+
+
+ {error !== undefined ? (
+
+ {error.message}
+
+ ) : null}
+ {randomNumber > -1 ? (
+
+ Random number is
+
{randomNumber}
+
+ ) : null}
+
+
+ );
+}
diff --git a/examples/routing_example/public/random_number_example.tsx b/examples/routing_example/public/random_number_example.tsx
new file mode 100644
index 0000000000000..6b073826c854f
--- /dev/null
+++ b/examples/routing_example/public/random_number_example.tsx
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import React, { useCallback } from 'react';
+import { useState } from 'react';
+import { EuiText, EuiButton, EuiLoadingSpinner, EuiCallOut } from '@elastic/eui';
+import { HttpFetchError } from '../../../src/core/public';
+import { Services } from './services';
+import { isError } from './is_error';
+
+interface Props {
+ fetchRandomNumber: Services['fetchRandomNumber'];
+}
+
+export function RandomNumberRouteExample({ fetchRandomNumber }: Props) {
+ const [error, setError] = useState(undefined);
+ const [randomNumber, setRandomNumber] = useState(0);
+ const [isFetching, setIsFetching] = useState(false);
+
+ const doFetch = useCallback(async () => {
+ if (isFetching) return;
+ setIsFetching(true);
+ const response = await fetchRandomNumber();
+
+ if (isError(response)) {
+ setError(response);
+ } else {
+ setRandomNumber(response);
+ }
+
+ setIsFetching(false);
+ }, [isFetching, fetchRandomNumber]);
+
+ return (
+
+
+ GET example
+
+ This examples uses a simple GET route that takes no parameters or body in the request and
+ returns a single number.
+
+ doFetch()}
+ >
+ {isFetching ? : 'Generate a random number'}
+
+
+ {error !== undefined ? (
+
+ {error}
+
+ ) : null}
+ {randomNumber > -1 ? (
+
+ Random number is
{randomNumber}
+
+ ) : null}
+
+
+ );
+}
diff --git a/examples/routing_example/public/services.ts b/examples/routing_example/public/services.ts
new file mode 100644
index 0000000000000..08a79270372fd
--- /dev/null
+++ b/examples/routing_example/public/services.ts
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { CoreStart, HttpFetchError } from 'kibana/public';
+import {
+ RANDOM_NUMBER_ROUTE_PATH,
+ RANDOM_NUMBER_BETWEEN_ROUTE_PATH,
+ POST_MESSAGE_ROUTE_PATH,
+ INTERNAL_GET_MESSAGE_BY_ID_ROUTE,
+} from '../common';
+
+export interface Services {
+ fetchRandomNumber: () => Promise;
+ fetchRandomNumberBetween: (max: number) => Promise;
+ postMessage: (message: string, id: string) => Promise;
+ getMessageById: (id: string) => Promise;
+ addSuccessToast: (message: string) => void;
+}
+
+export function getServices(core: CoreStart): Services {
+ return {
+ addSuccessToast: (message: string) => core.notifications.toasts.addSuccess(message),
+ fetchRandomNumber: async () => {
+ try {
+ const response = await core.http.fetch<{ randomNumber: number }>(RANDOM_NUMBER_ROUTE_PATH);
+ return response.randomNumber;
+ } catch (e) {
+ return e;
+ }
+ },
+ fetchRandomNumberBetween: async (max: number) => {
+ try {
+ const response = await core.http.fetch<{ randomNumber: number }>(
+ RANDOM_NUMBER_BETWEEN_ROUTE_PATH,
+ { query: { max } }
+ );
+ return response.randomNumber;
+ } catch (e) {
+ return e;
+ }
+ },
+ postMessage: async (message: string, id: string) => {
+ try {
+ await core.http.post(`${POST_MESSAGE_ROUTE_PATH}/${id}`, {
+ body: JSON.stringify({ message }),
+ });
+ } catch (e) {
+ return e;
+ }
+ },
+ getMessageById: async (id: string) => {
+ try {
+ const response = await core.http.get<{ message: string }>(
+ `${INTERNAL_GET_MESSAGE_BY_ID_ROUTE}/${id}`
+ );
+ return response.message;
+ } catch (e) {
+ return e;
+ }
+ },
+ };
+}
diff --git a/examples/routing_example/server/index.ts b/examples/routing_example/server/index.ts
new file mode 100644
index 0000000000000..77a0d9bb95549
--- /dev/null
+++ b/examples/routing_example/server/index.ts
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { PluginInitializer } from 'kibana/server';
+
+import { RoutingExamplePlugin } from './plugin';
+
+export const plugin: PluginInitializer<{}, {}> = () => new RoutingExamplePlugin();
diff --git a/examples/routing_example/server/plugin.ts b/examples/routing_example/server/plugin.ts
new file mode 100644
index 0000000000000..8e92fafc7b30c
--- /dev/null
+++ b/examples/routing_example/server/plugin.ts
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Plugin, CoreSetup, CoreStart } from 'kibana/server';
+import { registerRoutes } from './routes';
+
+export class RoutingExamplePlugin implements Plugin<{}, {}> {
+ public setup(core: CoreSetup) {
+ const router = core.http.createRouter();
+
+ registerRoutes(router);
+
+ return {};
+ }
+
+ public start(core: CoreStart) {
+ return {};
+ }
+
+ public stop() {}
+}
diff --git a/examples/routing_example/server/routes/index.ts b/examples/routing_example/server/routes/index.ts
new file mode 100644
index 0000000000000..ea575cf371bb7
--- /dev/null
+++ b/examples/routing_example/server/routes/index.ts
@@ -0,0 +1,19 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+export { registerRoutes } from './register_routes';
diff --git a/examples/routing_example/server/routes/message_routes.ts b/examples/routing_example/server/routes/message_routes.ts
new file mode 100644
index 0000000000000..2b4ec5e11fb4a
--- /dev/null
+++ b/examples/routing_example/server/routes/message_routes.ts
@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { schema } from '@kbn/config-schema';
+import { POST_MESSAGE_ROUTE_PATH, INTERNAL_GET_MESSAGE_BY_ID_ROUTE } from '../../common';
+
+import { IRouter } from '../../../../src/core/server';
+
+/**
+ *
+ * NOTE: DON'T USE IN MEMORY DATA STRUCTURES TO STORE DATA!
+ *
+ * That won't work in a system with multiple Kibanas, which is a setup we recommend for
+ * load balancing. I'm only doing so here to simplify the routing example. In real life,
+ * Elasticsearch should be used to persist data that can be shared across multiple Kibana
+ * instances.
+ */
+
+const messages: { [key: string]: string } = {};
+
+/**
+ * @param router Pushes a message with an id onto an in memory map.
+ */
+export function registerPostMessageRoute(router: IRouter) {
+ router.post(
+ {
+ path: `${POST_MESSAGE_ROUTE_PATH}/{id}`,
+ validate: {
+ params: schema.object({
+ // This parameter name matches the one in POST_MESSAGE_ROUTE_PATH: `api/post_message/{id}`.
+ // Params are often used for ids like this.
+ id: schema.string(),
+ }),
+ body: schema.object({
+ message: schema.string({ maxLength: 100 }),
+ }),
+ },
+ },
+ async (context, request, response) => {
+ if (messages[request.params.id]) {
+ return response.badRequest({
+ body: `Message with id ${request.params.id} already exists`,
+ });
+ }
+
+ // See note above. NEVER DO THIS IN REAL CODE! Data should only be persisted in Elasticsearch.
+ messages[request.params.id] = request.body.message;
+
+ return response.ok();
+ }
+ );
+}
+
+/**
+ * @param router Returns the message with the given id from an in memory array.
+ */
+export function registerGetMessageByIdRoute(router: IRouter) {
+ router.get(
+ {
+ path: `${INTERNAL_GET_MESSAGE_BY_ID_ROUTE}/{id}`,
+ validate: {
+ params: schema.object({
+ id: schema.string(),
+ }),
+ },
+ },
+ async (context, request, response) => {
+ if (!messages[request.params.id]) {
+ return response.notFound();
+ }
+ return response.ok({ body: { message: messages[request.params.id] } });
+ }
+ );
+}
diff --git a/examples/routing_example/server/routes/random_number_between_generator.ts b/examples/routing_example/server/routes/random_number_between_generator.ts
new file mode 100644
index 0000000000000..9481578e540fe
--- /dev/null
+++ b/examples/routing_example/server/routes/random_number_between_generator.ts
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { schema } from '@kbn/config-schema';
+import { RANDOM_NUMBER_BETWEEN_ROUTE_PATH } from '../../common';
+
+import { IRouter } from '../../../../src/core/server';
+
+/**
+ *
+ * @param router Registers a get route that returns a random number between one and another number suplied by the user.
+ */
+export function registerGetRandomNumberBetweenRoute(router: IRouter) {
+ router.get(
+ {
+ path: RANDOM_NUMBER_BETWEEN_ROUTE_PATH,
+ validate: {
+ query: schema.object({
+ max: schema.number({ defaultValue: 10 }),
+ }),
+ },
+ },
+ async (context, request, response) => {
+ return response.ok({
+ body: {
+ randomNumber: Math.random() * request.query.max,
+ },
+ });
+ }
+ );
+}
diff --git a/examples/routing_example/server/routes/random_number_generator.ts b/examples/routing_example/server/routes/random_number_generator.ts
new file mode 100644
index 0000000000000..2cfce45b957ae
--- /dev/null
+++ b/examples/routing_example/server/routes/random_number_generator.ts
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { RANDOM_NUMBER_ROUTE_PATH } from '../../common';
+
+import { IRouter } from '../../../../src/core/server';
+
+/**
+ *
+ * @param router Registers a get route that returns a random number between one and ten. It has no input
+ * parameters, and returns a random number in the body.
+ */
+export function registerGetRandomNumberRoute(router: IRouter) {
+ router.get(
+ {
+ path: RANDOM_NUMBER_ROUTE_PATH,
+ validate: {},
+ },
+ async (context, request, response) => {
+ return response.ok({
+ body: {
+ randomNumber: Math.random() * 10,
+ },
+ });
+ }
+ );
+}
diff --git a/examples/routing_example/server/routes/register_routes.ts b/examples/routing_example/server/routes/register_routes.ts
new file mode 100644
index 0000000000000..f556c0ed2c2fd
--- /dev/null
+++ b/examples/routing_example/server/routes/register_routes.ts
@@ -0,0 +1,30 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { IRouter } from 'kibana/server';
+import { registerGetRandomNumberRoute } from './random_number_generator';
+import { registerGetRandomNumberBetweenRoute } from './random_number_between_generator';
+import { registerGetMessageByIdRoute, registerPostMessageRoute } from './message_routes';
+
+export function registerRoutes(router: IRouter) {
+ registerGetRandomNumberRoute(router);
+ registerGetRandomNumberBetweenRoute(router);
+ registerGetMessageByIdRoute(router);
+ registerPostMessageRoute(router);
+}
diff --git a/examples/routing_example/tsconfig.json b/examples/routing_example/tsconfig.json
new file mode 100644
index 0000000000000..9bbd9021b2e0a
--- /dev/null
+++ b/examples/routing_example/tsconfig.json
@@ -0,0 +1,16 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "./target",
+ "skipLibCheck": true
+ },
+ "include": [
+ "index.ts",
+ "public/**/*.ts",
+ "public/**/*.tsx",
+ "server/**/*.ts",
+ "common/**/*.ts",
+ "../../typings/**/*",
+ ],
+ "exclude": []
+}
diff --git a/examples/ui_actions_explorer/public/actions/actions.tsx b/examples/ui_actions_explorer/public/actions/actions.tsx
index 4ef8d5bf4d9c6..6d83362e998bc 100644
--- a/examples/ui_actions_explorer/public/actions/actions.tsx
+++ b/examples/ui_actions_explorer/public/actions/actions.tsx
@@ -31,7 +31,7 @@ export const ACTION_VIEW_IN_MAPS = 'ACTION_VIEW_IN_MAPS';
export const ACTION_TRAVEL_GUIDE = 'ACTION_TRAVEL_GUIDE';
export const ACTION_CALL_PHONE_NUMBER = 'ACTION_CALL_PHONE_NUMBER';
export const ACTION_EDIT_USER = 'ACTION_EDIT_USER';
-export const ACTION_PHONE_USER = 'ACTION_PHONE_USER';
+export const ACTION_TRIGGER_PHONE_USER = 'ACTION_TRIGGER_PHONE_USER';
export const ACTION_SHOWCASE_PLUGGABILITY = 'ACTION_SHOWCASE_PLUGGABILITY';
export const showcasePluggability = createAction({
@@ -120,19 +120,13 @@ export interface UserContext {
update: (user: User) => void;
}
-export const createPhoneUserAction = (getUiActionsApi: () => Promise) =>
- createAction({
- type: ACTION_PHONE_USER,
+export const createTriggerPhoneTriggerAction = (getUiActionsApi: () => Promise) =>
+ createAction({
+ type: ACTION_TRIGGER_PHONE_USER,
getDisplayName: () => 'Call phone number',
+ shouldAutoExecute: async () => true,
isCompatible: async ({ user }) => user.phone !== undefined,
execute: async ({ user }) => {
- // One option - execute the more specific action directly.
- // makePhoneCallAction.execute({ phone: user.phone });
-
- // Another option - emit the trigger and automatically get *all* the actions attached
- // to the phone number trigger.
- // TODO: we need to figure out the best way to handle these nested actions however, since
- // we don't want multiple context menu's to pop up.
if (user.phone !== undefined) {
(await getUiActionsApi()).executeTriggerActions(PHONE_TRIGGER, { phone: user.phone });
}
diff --git a/examples/ui_actions_explorer/public/plugin.tsx b/examples/ui_actions_explorer/public/plugin.tsx
index 670138b43b9c4..b28e5e7a9f692 100644
--- a/examples/ui_actions_explorer/public/plugin.tsx
+++ b/examples/ui_actions_explorer/public/plugin.tsx
@@ -23,7 +23,6 @@ import {
PHONE_TRIGGER,
USER_TRIGGER,
COUNTRY_TRIGGER,
- createPhoneUserAction,
lookUpWeatherAction,
viewInMapsAction,
createEditUserAction,
@@ -37,7 +36,8 @@ import {
ACTION_CALL_PHONE_NUMBER,
ACTION_TRAVEL_GUIDE,
ACTION_VIEW_IN_MAPS,
- ACTION_PHONE_USER,
+ ACTION_TRIGGER_PHONE_USER,
+ createTriggerPhoneTriggerAction,
} from './actions/actions';
import { DeveloperExamplesSetup } from '../../developer_examples/public';
import image from './ui_actions.png';
@@ -64,7 +64,7 @@ declare module '../../../src/plugins/ui_actions/public' {
[ACTION_CALL_PHONE_NUMBER]: PhoneContext;
[ACTION_TRAVEL_GUIDE]: CountryContext;
[ACTION_VIEW_IN_MAPS]: CountryContext;
- [ACTION_PHONE_USER]: UserContext;
+ [ACTION_TRIGGER_PHONE_USER]: UserContext;
}
}
@@ -84,7 +84,7 @@ export class UiActionsExplorerPlugin implements Plugin (await startServices)[1].uiActions)
+ createTriggerPhoneTriggerAction(async () => (await startServices)[1].uiActions)
);
deps.uiActions.addTriggerAction(
USER_TRIGGER,
diff --git a/package.json b/package.json
index 53aa6b25f190b..541fcc46a3d6e 100644
--- a/package.json
+++ b/package.json
@@ -237,7 +237,7 @@
"pug": "^2.0.4",
"query-string": "5.1.1",
"raw-loader": "3.1.0",
- "re2": "1.14.0",
+ "re2": "^1.15.4",
"react": "^16.12.0",
"react-color": "^2.13.8",
"react-dom": "^16.12.0",
diff --git a/packages/kbn-optimizer/src/report_optimizer_stats.ts b/packages/kbn-optimizer/src/report_optimizer_stats.ts
index 2f92f3d648ab7..5057c717efcc3 100644
--- a/packages/kbn-optimizer/src/report_optimizer_stats.ts
+++ b/packages/kbn-optimizer/src/report_optimizer_stats.ts
@@ -35,6 +35,8 @@ interface Entry {
stats: Fs.Stats;
}
+const IGNORED_EXTNAME = ['.map', '.br', '.gz'];
+
const getFiles = (dir: string, parent?: string) =>
flatten(
Fs.readdirSync(dir).map((name): Entry | Entry[] => {
@@ -51,7 +53,19 @@ const getFiles = (dir: string, parent?: string) =>
stats,
};
})
- );
+ ).filter((file) => {
+ const filename = Path.basename(file.relPath);
+ if (filename.startsWith('.')) {
+ return false;
+ }
+
+ const ext = Path.extname(filename);
+ if (IGNORED_EXTNAME.includes(ext)) {
+ return false;
+ }
+
+ return true;
+ });
export function reportOptimizerStats(reporter: CiStatsReporter, config: OptimizerConfig) {
return pipeClosure((update$: OptimizerUpdate$) => {
@@ -70,10 +84,7 @@ export function reportOptimizerStats(reporter: CiStatsReporter, config: Optimize
// make the cache read from the cache file since it was likely updated by the worker
bundle.cache.refresh();
- const outputFiles = getFiles(bundle.outputDir).filter(
- (file) => !(file.relPath.startsWith('.') || file.relPath.endsWith('.map'))
- );
-
+ const outputFiles = getFiles(bundle.outputDir);
const entryName = `${bundle.id}.${bundle.type}.js`;
const entry = outputFiles.find((f) => f.relPath === entryName);
if (!entry) {
diff --git a/renovate.json5 b/renovate.json5
index 1ba6dc0ff7e1b..6424894622c9f 100644
--- a/renovate.json5
+++ b/renovate.json5
@@ -25,7 +25,7 @@
'Team:Operations',
'renovate',
'v8.0.0',
- 'v7.9.0',
+ 'v7.10.0',
],
major: {
labels: [
@@ -33,7 +33,7 @@
'Team:Operations',
'renovate',
'v8.0.0',
- 'v7.9.0',
+ 'v7.10.0',
'renovate:major',
],
},
@@ -254,7 +254,7 @@
'Team:Operations',
'renovate',
'v8.0.0',
- 'v7.9.0',
+ 'v7.10.0',
':ml',
],
},
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index 0662586797164..70b25cb78787a 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -111,8 +111,8 @@ export class DocLinksService {
},
kibana: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/index.html`,
siem: {
- guide: `${ELASTIC_WEBSITE_URL}guide/en/siem/guide/${DOC_LINK_VERSION}/index.html`,
- gettingStarted: `${ELASTIC_WEBSITE_URL}guide/en/siem/guide/${DOC_LINK_VERSION}/install-siem.html`,
+ guide: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`,
+ gettingStarted: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/install-siem.html`,
},
query: {
luceneQuerySyntax: `${ELASTICSEARCH_DOCS}query-dsl-query-string-query.html#query-string-syntax`,
diff --git a/src/core/server/elasticsearch/elasticsearch_service.test.ts b/src/core/server/elasticsearch/elasticsearch_service.test.ts
index b36af2a7e4671..4375f09f1ce0b 100644
--- a/src/core/server/elasticsearch/elasticsearch_service.test.ts
+++ b/src/core/server/elasticsearch/elasticsearch_service.test.ts
@@ -17,11 +17,9 @@
* under the License.
*/
-import { first } from 'rxjs/operators';
-
import { MockLegacyClusterClient, MockClusterClient } from './elasticsearch_service.test.mocks';
-
import { BehaviorSubject } from 'rxjs';
+import { first } from 'rxjs/operators';
import { Env } from '../config';
import { getEnvOptions } from '../config/__mocks__/env';
import { CoreContext } from '../core_context';
@@ -227,28 +225,34 @@ describe('#setup', () => {
});
it('esNodeVersionCompatibility$ only starts polling when subscribed to', async (done) => {
- mockLegacyClusterClientInstance.callAsInternalUser.mockRejectedValue(new Error());
+ const mockedClient = mockClusterClientInstance.asInternalUser;
+ mockedClient.nodes.info.mockImplementation(() =>
+ elasticsearchClientMock.createClientError(new Error())
+ );
const setupContract = await elasticsearchService.setup(setupDeps);
await delay(10);
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(0);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(0);
setupContract.esNodesCompatibility$.subscribe(() => {
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(1);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(1);
done();
});
});
it('esNodeVersionCompatibility$ stops polling when unsubscribed from', async (done) => {
- mockLegacyClusterClientInstance.callAsInternalUser.mockRejectedValue(new Error());
+ const mockedClient = mockClusterClientInstance.asInternalUser;
+ mockedClient.nodes.info.mockImplementation(() =>
+ elasticsearchClientMock.createClientError(new Error())
+ );
const setupContract = await elasticsearchService.setup(setupDeps);
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(0);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(0);
const sub = setupContract.esNodesCompatibility$.subscribe(async () => {
sub.unsubscribe();
await delay(100);
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(1);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(1);
done();
});
});
@@ -353,16 +357,19 @@ describe('#stop', () => {
it('stops pollEsNodeVersions even if there are active subscriptions', async (done) => {
expect.assertions(2);
- mockLegacyClusterClientInstance.callAsInternalUser.mockRejectedValue(new Error());
+ const mockedClient = mockClusterClientInstance.asInternalUser;
+ mockedClient.nodes.info.mockImplementation(() =>
+ elasticsearchClientMock.createClientError(new Error())
+ );
const setupContract = await elasticsearchService.setup(setupDeps);
setupContract.esNodesCompatibility$.subscribe(async () => {
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(1);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(1);
await elasticsearchService.stop();
await delay(100);
- expect(mockLegacyClusterClientInstance.callAsInternalUser).toHaveBeenCalledTimes(1);
+ expect(mockedClient.nodes.info).toHaveBeenCalledTimes(1);
done();
});
});
diff --git a/src/core/server/elasticsearch/elasticsearch_service.ts b/src/core/server/elasticsearch/elasticsearch_service.ts
index 9b05fb9887a3b..69bf593dd5862 100644
--- a/src/core/server/elasticsearch/elasticsearch_service.ts
+++ b/src/core/server/elasticsearch/elasticsearch_service.ts
@@ -78,9 +78,10 @@ export class ElasticsearchService
this.getAuthHeaders = deps.http.getAuthHeaders;
this.legacyClient = this.createLegacyClusterClient('data', config);
+ this.client = this.createClusterClient('data', config);
const esNodesCompatibility$ = pollEsNodesVersion({
- callWithInternalUser: this.legacyClient.callAsInternalUser,
+ internalClient: this.client.asInternalUser,
log: this.log,
ignoreVersionMismatch: config.ignoreVersionMismatch,
esVersionCheckInterval: config.healthCheckDelay.asMilliseconds(),
@@ -109,7 +110,6 @@ export class ElasticsearchService
}
const config = await this.config$.pipe(first()).toPromise();
- this.client = this.createClusterClient('data', config);
const createClient = (
type: string,
@@ -120,7 +120,7 @@ export class ElasticsearchService
};
return {
- client: this.client,
+ client: this.client!,
createClient,
legacy: {
client: this.legacyClient,
@@ -133,7 +133,7 @@ export class ElasticsearchService
this.log.debug('Stopping elasticsearch service');
this.stop$.next();
if (this.client) {
- this.client.close();
+ await this.client.close();
}
if (this.legacyClient) {
this.legacyClient.close();
diff --git a/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts b/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts
index 3d1218d4a8e8b..21adac081acf7 100644
--- a/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts
+++ b/src/core/server/elasticsearch/version_check/ensure_es_version.test.ts
@@ -18,6 +18,7 @@
*/
import { mapNodesVersionCompatibility, pollEsNodesVersion, NodesInfo } from './ensure_es_version';
import { loggingSystemMock } from '../../logging/logging_system.mock';
+import { elasticsearchClientMock } from '../client/mocks';
import { take, delay } from 'rxjs/operators';
import { TestScheduler } from 'rxjs/testing';
import { of } from 'rxjs';
@@ -27,6 +28,9 @@ const mockLogger = mockLoggerFactory.get('mock logger');
const KIBANA_VERSION = '5.1.0';
+const createEsSuccess = elasticsearchClientMock.createClientResponse;
+const createEsError = elasticsearchClientMock.createClientError;
+
function createNodes(...versions: string[]): NodesInfo {
const nodes = {} as any;
versions
@@ -111,25 +115,34 @@ describe('mapNodesVersionCompatibility', () => {
});
describe('pollEsNodesVersion', () => {
- const callWithInternalUser = jest.fn();
+ let internalClient: ReturnType;
const getTestScheduler = () =>
new TestScheduler((actual, expected) => {
expect(actual).toEqual(expected);
});
beforeEach(() => {
- callWithInternalUser.mockReset();
+ internalClient = elasticsearchClientMock.createInternalClient();
});
+ const nodeInfosSuccessOnce = (infos: NodesInfo) => {
+ internalClient.nodes.info.mockImplementationOnce(() => createEsSuccess(infos));
+ };
+ const nodeInfosErrorOnce = (error: any) => {
+ internalClient.nodes.info.mockImplementationOnce(() => createEsError(error));
+ };
+
it('returns iscCompatible=false and keeps polling when a poll request throws', (done) => {
expect.assertions(3);
const expectedCompatibilityResults = [false, false, true];
jest.clearAllMocks();
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.0', '5.2.0', '5.0.0'));
- callWithInternalUser.mockRejectedValueOnce(new Error('mock request error'));
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.0', '5.2.0', '5.1.1-Beta1'));
+
+ nodeInfosSuccessOnce(createNodes('5.1.0', '5.2.0', '5.0.0'));
+ nodeInfosErrorOnce('mock request error');
+ nodeInfosSuccessOnce(createNodes('5.1.0', '5.2.0', '5.1.1-Beta1'));
+
pollEsNodesVersion({
- callWithInternalUser,
+ internalClient,
esVersionCheckInterval: 1,
ignoreVersionMismatch: false,
kibanaVersion: KIBANA_VERSION,
@@ -148,9 +161,11 @@ describe('pollEsNodesVersion', () => {
it('returns compatibility results', (done) => {
expect.assertions(1);
const nodes = createNodes('5.1.0', '5.2.0', '5.0.0');
- callWithInternalUser.mockResolvedValueOnce(nodes);
+
+ nodeInfosSuccessOnce(nodes);
+
pollEsNodesVersion({
- callWithInternalUser,
+ internalClient,
esVersionCheckInterval: 1,
ignoreVersionMismatch: false,
kibanaVersion: KIBANA_VERSION,
@@ -168,15 +183,15 @@ describe('pollEsNodesVersion', () => {
it('only emits if the node versions changed since the previous poll', (done) => {
expect.assertions(4);
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.0', '5.2.0', '5.0.0')); // emit
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.0.0', '5.1.0', '5.2.0')); // ignore, same versions, different ordering
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.1', '5.2.0', '5.0.0')); // emit
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.1', '5.1.2', '5.1.3')); // emit
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.1.1', '5.1.2', '5.1.3')); // ignore
- callWithInternalUser.mockResolvedValueOnce(createNodes('5.0.0', '5.1.0', '5.2.0')); // emit, different from previous version
+ nodeInfosSuccessOnce(createNodes('5.1.0', '5.2.0', '5.0.0')); // emit
+ nodeInfosSuccessOnce(createNodes('5.0.0', '5.1.0', '5.2.0')); // ignore, same versions, different ordering
+ nodeInfosSuccessOnce(createNodes('5.1.1', '5.2.0', '5.0.0')); // emit
+ nodeInfosSuccessOnce(createNodes('5.1.1', '5.1.2', '5.1.3')); // emit
+ nodeInfosSuccessOnce(createNodes('5.1.1', '5.1.2', '5.1.3')); // ignore
+ nodeInfosSuccessOnce(createNodes('5.0.0', '5.1.0', '5.2.0')); // emit, different from previous version
pollEsNodesVersion({
- callWithInternalUser,
+ internalClient,
esVersionCheckInterval: 1,
ignoreVersionMismatch: false,
kibanaVersion: KIBANA_VERSION,
@@ -192,14 +207,21 @@ describe('pollEsNodesVersion', () => {
it('starts polling immediately and then every esVersionCheckInterval', () => {
expect.assertions(1);
- callWithInternalUser.mockReturnValueOnce([createNodes('5.1.0', '5.2.0', '5.0.0')]);
- callWithInternalUser.mockReturnValueOnce([createNodes('5.1.1', '5.2.0', '5.0.0')]);
+
+ // @ts-expect-error we need to return an incompatible type to use the testScheduler here
+ internalClient.nodes.info.mockReturnValueOnce([
+ { body: createNodes('5.1.0', '5.2.0', '5.0.0') },
+ ]);
+ // @ts-expect-error we need to return an incompatible type to use the testScheduler here
+ internalClient.nodes.info.mockReturnValueOnce([
+ { body: createNodes('5.1.1', '5.2.0', '5.0.0') },
+ ]);
getTestScheduler().run(({ expectObservable }) => {
const expected = 'a 99ms (b|)';
const esNodesCompatibility$ = pollEsNodesVersion({
- callWithInternalUser,
+ internalClient,
esVersionCheckInterval: 100,
ignoreVersionMismatch: false,
kibanaVersion: KIBANA_VERSION,
@@ -227,15 +249,17 @@ describe('pollEsNodesVersion', () => {
getTestScheduler().run(({ expectObservable }) => {
const expected = '100ms a 99ms (b|)';
- callWithInternalUser.mockReturnValueOnce(
- of(createNodes('5.1.0', '5.2.0', '5.0.0')).pipe(delay(100))
+ internalClient.nodes.info.mockReturnValueOnce(
+ // @ts-expect-error we need to return an incompatible type to use the testScheduler here
+ of({ body: createNodes('5.1.0', '5.2.0', '5.0.0') }).pipe(delay(100))
);
- callWithInternalUser.mockReturnValueOnce(
- of(createNodes('5.1.1', '5.2.0', '5.0.0')).pipe(delay(100))
+ internalClient.nodes.info.mockReturnValueOnce(
+ // @ts-expect-error we need to return an incompatible type to use the testScheduler here
+ of({ body: createNodes('5.1.1', '5.2.0', '5.0.0') }).pipe(delay(100))
);
const esNodesCompatibility$ = pollEsNodesVersion({
- callWithInternalUser,
+ internalClient,
esVersionCheckInterval: 10,
ignoreVersionMismatch: false,
kibanaVersion: KIBANA_VERSION,
@@ -256,6 +280,6 @@ describe('pollEsNodesVersion', () => {
});
});
- expect(callWithInternalUser).toHaveBeenCalledTimes(2);
+ expect(internalClient.nodes.info).toHaveBeenCalledTimes(2);
});
});
diff --git a/src/core/server/elasticsearch/version_check/ensure_es_version.ts b/src/core/server/elasticsearch/version_check/ensure_es_version.ts
index dc56d982d7b4a..5f926215d167f 100644
--- a/src/core/server/elasticsearch/version_check/ensure_es_version.ts
+++ b/src/core/server/elasticsearch/version_check/ensure_es_version.ts
@@ -29,10 +29,10 @@ import {
esVersionEqualsKibana,
} from './es_kibana_version_compatability';
import { Logger } from '../../logging';
-import { LegacyAPICaller } from '../legacy';
+import type { ElasticsearchClient } from '../client';
export interface PollEsNodesVersionOptions {
- callWithInternalUser: LegacyAPICaller;
+ internalClient: ElasticsearchClient;
log: Logger;
kibanaVersion: string;
ignoreVersionMismatch: boolean;
@@ -137,7 +137,7 @@ function compareNodes(prev: NodesVersionCompatibility, curr: NodesVersionCompati
}
export const pollEsNodesVersion = ({
- callWithInternalUser,
+ internalClient,
log,
kibanaVersion,
ignoreVersionMismatch,
@@ -147,10 +147,11 @@ export const pollEsNodesVersion = ({
return timer(0, healthCheckInterval).pipe(
exhaustMap(() => {
return from(
- callWithInternalUser('nodes.info', {
- filterPath: ['nodes.*.version', 'nodes.*.http.publish_address', 'nodes.*.ip'],
+ internalClient.nodes.info({
+ filter_path: ['nodes.*.version', 'nodes.*.http.publish_address', 'nodes.*.ip'],
})
).pipe(
+ map(({ body }) => body),
catchError((_err) => {
return of({ nodes: {} });
})
diff --git a/src/dev/build/tasks/patch_native_modules_task.js b/src/dev/build/tasks/patch_native_modules_task.js
index a10010ed5255f..c30d1fd774b55 100644
--- a/src/dev/build/tasks/patch_native_modules_task.js
+++ b/src/dev/build/tasks/patch_native_modules_task.js
@@ -26,21 +26,21 @@ const DOWNLOAD_DIRECTORY = '.native_modules';
const packages = [
{
name: 're2',
- version: '1.14.0',
+ version: '1.15.4',
destinationPath: 'node_modules/re2/build/Release/re2.node',
extractMethod: 'gunzip',
archives: {
darwin: {
- url: 'https://github.com/uhop/node-re2/releases/download/1.14.0/darwin-x64-64.gz',
- sha256: '54c8386cb7cd53895cf379522114bfe82378e300e127e58d392ddd40a77e396f',
+ url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/darwin-x64-64.gz',
+ sha256: '595c6653d796493ddb288fc0732a0d1df8560099796f55a1dd242357d96bb8d6',
},
linux: {
- url: 'https://github.com/uhop/node-re2/releases/download/1.14.0/linux-x64-64.gz',
- sha256: 'f54f059035e71a7ccb3fa201080e260c41d228d13a8247974b4bb157691b6757',
+ url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/linux-x64-64.gz',
+ sha256: 'e743587bc96314edf10c3e659c03168bc374a5cd9a6623ee99d989251e331f28',
},
win32: {
- url: 'https://github.com/uhop/node-re2/releases/download/1.14.0/win32-x64-64.gz',
- sha256: 'de708446a8b802f4634c2cfef097c2625a2811fdcd8133dfd7b7c485f966caa9',
+ url: 'https://github.com/uhop/node-re2/releases/download/1.15.4/win32-x64-64.gz',
+ sha256: 'b33de62cda24fb02dc80a19fb79977d686468ac746e97cd211059d2d4c75d529',
},
},
},
diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh
index 343ff47199375..86927b694679a 100644
--- a/src/dev/ci_setup/setup_env.sh
+++ b/src/dev/ci_setup/setup_env.sh
@@ -126,6 +126,7 @@ export PATH="$PATH:$yarnGlobalDir"
# use a proxy to fetch chromedriver/geckodriver asset
export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
+export RE2_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache"
export CYPRESS_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/cypress"
export CHECKS_REPORTER_ACTIVE=false
diff --git a/src/dev/renovate/config.ts b/src/dev/renovate/config.ts
index d868f0a89b98c..c9688fc0ae0bd 100644
--- a/src/dev/renovate/config.ts
+++ b/src/dev/renovate/config.ts
@@ -21,7 +21,7 @@ import { RENOVATE_PACKAGE_GROUPS } from './package_groups';
import { PACKAGE_GLOBS } from './package_globs';
import { wordRegExp, maybeFlatMap, maybeMap, getTypePackageName } from './utils';
-const DEFAULT_LABELS = ['release_note:skip', 'Team:Operations', 'renovate', 'v8.0.0', 'v7.9.0'];
+const DEFAULT_LABELS = ['release_note:skip', 'Team:Operations', 'renovate', 'v8.0.0', 'v7.10.0'];
export const RENOVATE_CONFIG = {
extends: ['config:base'],
diff --git a/src/es_archiver/lib/indices/create_index_stream.ts b/src/es_archiver/lib/indices/create_index_stream.ts
index df9d3bb623ad6..5629f95c7c9c6 100644
--- a/src/es_archiver/lib/indices/create_index_stream.ts
+++ b/src/es_archiver/lib/indices/create_index_stream.ts
@@ -18,6 +18,8 @@
*/
import { Transform, Readable } from 'stream';
+import { inspect } from 'util';
+
import { get, once } from 'lodash';
import { Client } from 'elasticsearch';
import { ToolingLog } from '@kbn/dev-utils';
@@ -84,6 +86,18 @@ export function createCreateIndexStream({
stats.createdIndex(index, { settings });
} catch (err) {
+ if (
+ err?.body?.error?.reason?.includes('index exists with the same name as the alias') &&
+ attemptNumber < 3
+ ) {
+ const aliasStr = inspect(aliases);
+ log.info(
+ `failed to create aliases [${aliasStr}] because ES indicated an index/alias already exists, trying again`
+ );
+ await attemptToCreate(attemptNumber + 1);
+ return;
+ }
+
if (
get(err, 'body.error.type') !== 'resource_already_exists_exception' ||
attemptNumber >= 3
diff --git a/src/plugins/data/common/field_formats/converters/url.test.ts b/src/plugins/data/common/field_formats/converters/url.test.ts
index 5ee195f8c7752..771bde85626d0 100644
--- a/src/plugins/data/common/field_formats/converters/url.test.ts
+++ b/src/plugins/data/common/field_formats/converters/url.test.ts
@@ -167,8 +167,8 @@ describe('UrlFormat', () => {
});
});
- describe('whitelist', () => {
- test('should assume a relative url if the value is not in the whitelist without a base path', () => {
+ describe('allow-list', () => {
+ test('should assume a relative url if the value is not in the allow-list without a base path', () => {
const parsedUrl = {
origin: 'http://kibana',
basePath: '',
@@ -193,7 +193,7 @@ describe('UrlFormat', () => {
);
});
- test('should assume a relative url if the value is not in the whitelist with a basepath', () => {
+ test('should assume a relative url if the value is not in the allow-list with a basepath', () => {
const parsedUrl = {
origin: 'http://kibana',
basePath: '/xyz',
diff --git a/src/plugins/data/common/field_formats/converters/url.ts b/src/plugins/data/common/field_formats/converters/url.ts
index b797159b53486..2630c97b0821b 100644
--- a/src/plugins/data/common/field_formats/converters/url.ts
+++ b/src/plugins/data/common/field_formats/converters/url.ts
@@ -161,8 +161,8 @@ export class UrlFormat extends FieldFormat {
return this.generateImgHtml(url, imageLabel);
default:
- const inWhitelist = allowedUrlSchemes.some((scheme) => url.indexOf(scheme) === 0);
- if (!inWhitelist && !parsedUrl) {
+ const allowed = allowedUrlSchemes.some((scheme) => url.indexOf(scheme) === 0);
+ if (!allowed && !parsedUrl) {
return url;
}
@@ -178,7 +178,7 @@ export class UrlFormat extends FieldFormat {
* UNSUPPORTED
* - app/kibana
*/
- if (!inWhitelist) {
+ if (!allowed) {
// Handles urls like: `#/discover`
if (url[0] === '#') {
prefix = `${origin}${pathname}`;
diff --git a/src/plugins/data/public/actions/apply_filter_action.ts b/src/plugins/data/public/actions/apply_filter_action.ts
index 7e8ed5ec8fb22..a2621e6ce8802 100644
--- a/src/plugins/data/public/actions/apply_filter_action.ts
+++ b/src/plugins/data/public/actions/apply_filter_action.ts
@@ -22,6 +22,7 @@ import { toMountPoint } from '../../../kibana_react/public';
import { ActionByType, createAction, IncompatibleActionError } from '../../../ui_actions/public';
import { getOverlays, getIndexPatterns } from '../services';
import { applyFiltersPopover } from '../ui/apply_filters';
+import type { IEmbeddable } from '../../../embeddable/public';
import { Filter, FilterManager, TimefilterContract, esFilters } from '..';
export const ACTION_GLOBAL_APPLY_FILTER = 'ACTION_GLOBAL_APPLY_FILTER';
@@ -29,6 +30,7 @@ export const ACTION_GLOBAL_APPLY_FILTER = 'ACTION_GLOBAL_APPLY_FILTER';
export interface ApplyGlobalFilterActionContext {
filters: Filter[];
timeFieldName?: string;
+ embeddable?: IEmbeddable;
}
async function isCompatible(context: ApplyGlobalFilterActionContext) {
diff --git a/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts b/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
index a0eb49d773f3d..d9aa1b8ec8048 100644
--- a/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
+++ b/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
@@ -22,7 +22,7 @@ import moment from 'moment';
import { esFilters, IFieldType, RangeFilterParams } from '../../../public';
import { getIndexPatterns } from '../../../public/services';
import { deserializeAggConfig } from '../../search/expressions/utils';
-import { RangeSelectContext } from '../../../../embeddable/public';
+import type { RangeSelectContext } from '../../../../embeddable/public';
export async function createFiltersFromRangeSelectAction(event: RangeSelectContext['data']) {
const column: Record = event.table.columns[event.column];
diff --git a/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts b/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
index 1974b9f776748..9429df91f693c 100644
--- a/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
+++ b/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
@@ -21,7 +21,7 @@ import { KibanaDatatable } from '../../../../../plugins/expressions/public';
import { deserializeAggConfig } from '../../search/expressions';
import { esFilters, Filter } from '../../../public';
import { getIndexPatterns } from '../../../public/services';
-import { ValueClickContext } from '../../../../embeddable/public';
+import type { ValueClickContext } from '../../../../embeddable/public';
/**
* For terms aggregations on `__other__` buckets, this assembles a list of applicable filter
diff --git a/src/plugins/data/public/actions/index.ts b/src/plugins/data/public/actions/index.ts
index ef9014aafe82d..692996cf6fd19 100644
--- a/src/plugins/data/public/actions/index.ts
+++ b/src/plugins/data/public/actions/index.ts
@@ -17,8 +17,12 @@
* under the License.
*/
-export { ACTION_GLOBAL_APPLY_FILTER, createFilterAction } from './apply_filter_action';
+export {
+ ACTION_GLOBAL_APPLY_FILTER,
+ createFilterAction,
+ ApplyGlobalFilterActionContext,
+} from './apply_filter_action';
export { createFiltersFromValueClickAction } from './filters/create_filters_from_value_click';
export { createFiltersFromRangeSelectAction } from './filters/create_filters_from_range_select';
-export { selectRangeAction } from './select_range_action';
-export { valueClickAction } from './value_click_action';
+export * from './select_range_action';
+export * from './value_click_action';
diff --git a/src/plugins/data/public/actions/select_range_action.ts b/src/plugins/data/public/actions/select_range_action.ts
index 49766143b5588..1781da980dc30 100644
--- a/src/plugins/data/public/actions/select_range_action.ts
+++ b/src/plugins/data/public/actions/select_range_action.ts
@@ -17,60 +17,39 @@
* under the License.
*/
-import { i18n } from '@kbn/i18n';
import {
- createAction,
- IncompatibleActionError,
ActionByType,
+ APPLY_FILTER_TRIGGER,
+ createAction,
+ UiActionsStart,
} from '../../../../plugins/ui_actions/public';
import { createFiltersFromRangeSelectAction } from './filters/create_filters_from_range_select';
-import { RangeSelectContext } from '../../../embeddable/public';
-import { FilterManager, TimefilterContract, esFilters } from '..';
-
-export const ACTION_SELECT_RANGE = 'ACTION_SELECT_RANGE';
+import type { RangeSelectContext } from '../../../embeddable/public';
export type SelectRangeActionContext = RangeSelectContext;
-async function isCompatible(context: SelectRangeActionContext) {
- try {
- return Boolean(await createFiltersFromRangeSelectAction(context.data));
- } catch {
- return false;
- }
-}
+export const ACTION_SELECT_RANGE = 'ACTION_SELECT_RANGE';
-export function selectRangeAction(
- filterManager: FilterManager,
- timeFilter: TimefilterContract
+export function createSelectRangeAction(
+ getStartServices: () => { uiActions: UiActionsStart }
): ActionByType {
return createAction({
type: ACTION_SELECT_RANGE,
id: ACTION_SELECT_RANGE,
- getIconType: () => 'filter',
- getDisplayName: () => {
- return i18n.translate('data.filter.applyFilterActionTitle', {
- defaultMessage: 'Apply filter to current view',
- });
- },
- isCompatible,
- execute: async ({ data }: SelectRangeActionContext) => {
- if (!(await isCompatible({ data }))) {
- throw new IncompatibleActionError();
- }
-
- const selectedFilters = await createFiltersFromRangeSelectAction(data);
-
- if (data.timeFieldName) {
- const { timeRangeFilter, restOfFilters } = esFilters.extractTimeFilter(
- data.timeFieldName,
- selectedFilters
- );
- filterManager.addFilters(restOfFilters);
- if (timeRangeFilter) {
- esFilters.changeTimeFilter(timeFilter, timeRangeFilter);
+ shouldAutoExecute: async () => true,
+ execute: async (context: SelectRangeActionContext) => {
+ try {
+ const filters = await createFiltersFromRangeSelectAction(context.data);
+ if (filters.length > 0) {
+ await getStartServices().uiActions.getTrigger(APPLY_FILTER_TRIGGER).exec({
+ filters,
+ embeddable: context.embeddable,
+ timeFieldName: context.data.timeFieldName,
+ });
}
- } else {
- filterManager.addFilters(selectedFilters);
+ } catch (e) {
+ // eslint-disable-next-line no-console
+ console.warn(`Error [ACTION_SELECT_RANGE]: can\'t extract filters from action context`);
}
},
});
diff --git a/src/plugins/data/public/actions/value_click_action.ts b/src/plugins/data/public/actions/value_click_action.ts
index dd74a7ee507f3..81e62380eacfb 100644
--- a/src/plugins/data/public/actions/value_click_action.ts
+++ b/src/plugins/data/public/actions/value_click_action.ts
@@ -17,98 +17,41 @@
* under the License.
*/
-import { i18n } from '@kbn/i18n';
-import { toMountPoint } from '../../../../plugins/kibana_react/public';
import {
ActionByType,
+ APPLY_FILTER_TRIGGER,
createAction,
- IncompatibleActionError,
+ UiActionsStart,
} from '../../../../plugins/ui_actions/public';
-import { getOverlays, getIndexPatterns } from '../services';
-import { applyFiltersPopover } from '../ui/apply_filters';
import { createFiltersFromValueClickAction } from './filters/create_filters_from_value_click';
-import { ValueClickContext } from '../../../embeddable/public';
-import { Filter, FilterManager, TimefilterContract, esFilters } from '..';
-
-export const ACTION_VALUE_CLICK = 'ACTION_VALUE_CLICK';
+import type { Filter } from '../../common/es_query/filters';
+import type { ValueClickContext } from '../../../embeddable/public';
export type ValueClickActionContext = ValueClickContext;
+export const ACTION_VALUE_CLICK = 'ACTION_VALUE_CLICK';
-async function isCompatible(context: ValueClickActionContext) {
- try {
- const filters: Filter[] = await createFiltersFromValueClickAction(context.data);
- return filters.length > 0;
- } catch {
- return false;
- }
-}
-
-export function valueClickAction(
- filterManager: FilterManager,
- timeFilter: TimefilterContract
+export function createValueClickAction(
+ getStartServices: () => { uiActions: UiActionsStart }
): ActionByType {
return createAction({
type: ACTION_VALUE_CLICK,
id: ACTION_VALUE_CLICK,
- getIconType: () => 'filter',
- getDisplayName: () => {
- return i18n.translate('data.filter.applyFilterActionTitle', {
- defaultMessage: 'Apply filter to current view',
- });
- },
- isCompatible,
- execute: async ({ data }: ValueClickActionContext) => {
- if (!(await isCompatible({ data }))) {
- throw new IncompatibleActionError();
- }
-
- const filters: Filter[] = await createFiltersFromValueClickAction(data);
-
- let selectedFilters = filters;
-
- if (filters.length > 1) {
- const indexPatterns = await Promise.all(
- filters.map((filter) => {
- return getIndexPatterns().get(filter.meta.index!);
- })
- );
-
- const filterSelectionPromise: Promise = new Promise((resolve) => {
- const overlay = getOverlays().openModal(
- toMountPoint(
- applyFiltersPopover(
- filters,
- indexPatterns,
- () => {
- overlay.close();
- resolve([]);
- },
- (filterSelection: Filter[]) => {
- overlay.close();
- resolve(filterSelection);
- }
- )
- ),
- {
- 'data-test-subj': 'selectFilterOverlay',
- }
- );
- });
-
- selectedFilters = await filterSelectionPromise;
- }
-
- if (data.timeFieldName) {
- const { timeRangeFilter, restOfFilters } = esFilters.extractTimeFilter(
- data.timeFieldName,
- selectedFilters
- );
- filterManager.addFilters(restOfFilters);
- if (timeRangeFilter) {
- esFilters.changeTimeFilter(timeFilter, timeRangeFilter);
+ shouldAutoExecute: async () => true,
+ execute: async (context: ValueClickActionContext) => {
+ try {
+ const filters: Filter[] = await createFiltersFromValueClickAction(context.data);
+ if (filters.length > 0) {
+ await getStartServices().uiActions.getTrigger(APPLY_FILTER_TRIGGER).exec({
+ filters,
+ embeddable: context.embeddable,
+ timeFieldName: context.data.timeFieldName,
+ });
}
- } else {
- filterManager.addFilters(selectedFilters);
+ } catch (e) {
+ // eslint-disable-next-line no-console
+ console.warn(
+ `Error [ACTION_VALUE_CLICK]: can\'t extract filters from action context`
+ );
}
},
});
diff --git a/src/plugins/data/public/index.ts b/src/plugins/data/public/index.ts
index 6328e694193c9..846471420327f 100644
--- a/src/plugins/data/public/index.ts
+++ b/src/plugins/data/public/index.ts
@@ -438,6 +438,8 @@ export {
export { isTimeRange, isQuery, isFilter, isFilters } from '../common';
+export { ApplyGlobalFilterActionContext } from './actions';
+
export * from '../common/field_mapping';
/*
diff --git a/src/plugins/data/public/plugin.ts b/src/plugins/data/public/plugin.ts
index 323a32ea362ac..68c0f506f121d 100644
--- a/src/plugins/data/public/plugin.ts
+++ b/src/plugins/data/public/plugin.ts
@@ -69,18 +69,15 @@ import {
createFilterAction,
createFiltersFromValueClickAction,
createFiltersFromRangeSelectAction,
-} from './actions';
-import { ApplyGlobalFilterActionContext } from './actions/apply_filter_action';
-import {
- selectRangeAction,
- SelectRangeActionContext,
+ ApplyGlobalFilterActionContext,
ACTION_SELECT_RANGE,
-} from './actions/select_range_action';
-import {
- valueClickAction,
ACTION_VALUE_CLICK,
+ SelectRangeActionContext,
ValueClickActionContext,
-} from './actions/value_click_action';
+ createValueClickAction,
+ createSelectRangeAction,
+} from './actions';
+
import { SavedObjectsClientPublicToCommon } from './index_patterns';
import { indexPatternLoad } from './index_patterns/expressions/load_index_pattern';
@@ -92,7 +89,14 @@ declare module '../../ui_actions/public' {
}
}
-export class DataPublicPlugin implements Plugin {
+export class DataPublicPlugin
+ implements
+ Plugin<
+ DataPublicPluginSetup,
+ DataPublicPluginStart,
+ DataSetupDependencies,
+ DataStartDependencies
+ > {
private readonly autocomplete: AutocompleteService;
private readonly searchService: SearchService;
private readonly fieldFormatsService: FieldFormatsService;
@@ -110,13 +114,13 @@ export class DataPublicPlugin implements Plugin,
{ expressions, uiActions, usageCollection }: DataSetupDependencies
): DataPublicPluginSetup {
const startServices = createStartServicesGetter(core.getStartServices);
const getInternalStartServices = (): InternalStartServices => {
- const { core: coreStart, self }: any = startServices();
+ const { core: coreStart, self } = startServices();
return {
fieldFormats: self.fieldFormats,
notifications: coreStart.notifications,
@@ -140,12 +144,16 @@ export class DataPublicPlugin implements Plugin ({
+ uiActions: startServices().plugins.uiActions,
+ }))
);
uiActions.addTriggerAction(
VALUE_CLICK_TRIGGER,
- valueClickAction(queryService.filterManager, queryService.timefilter.timefilter)
+ createValueClickAction(() => ({
+ uiActions: startServices().plugins.uiActions,
+ }))
);
return {
diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md
index f8b8cb43b2297..38e0416233e25 100644
--- a/src/plugins/data/public/public.api.md
+++ b/src/plugins/data/public/public.api.md
@@ -250,6 +250,20 @@ export class AggParamType extends Ba
makeAgg: (agg: TAggConfig, state?: AggConfigSerialized) => TAggConfig;
}
+// Warning: (ae-missing-release-tag) "ApplyGlobalFilterActionContext" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
+//
+// @public (undocumented)
+export interface ApplyGlobalFilterActionContext {
+ // Warning: (ae-forgotten-export) The symbol "IEmbeddable" needs to be exported by the entry point index.d.ts
+ //
+ // (undocumented)
+ embeddable?: IEmbeddable;
+ // (undocumented)
+ filters: Filter[];
+ // (undocumented)
+ timeFieldName?: string;
+}
+
// Warning: (ae-forgotten-export) The symbol "DateNanosFormat" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "DateFormat" needs to be exported by the entry point index.d.ts
// Warning: (ae-missing-release-tag) "baseFormattersPublic" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
@@ -1443,18 +1457,16 @@ export type PhrasesFilter = Filter & {
meta: PhrasesFilterMeta;
};
+// Warning: (ae-forgotten-export) The symbol "DataSetupDependencies" needs to be exported by the entry point index.d.ts
+// Warning: (ae-forgotten-export) The symbol "DataStartDependencies" needs to be exported by the entry point index.d.ts
// Warning: (ae-missing-release-tag) "DataPublicPlugin" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
//
// @public (undocumented)
-export class Plugin implements Plugin_2 {
+export class Plugin implements Plugin_2 {
// Warning: (ae-forgotten-export) The symbol "ConfigSchema" needs to be exported by the entry point index.d.ts
constructor(initializerContext: PluginInitializerContext_2);
- // Warning: (ae-forgotten-export) The symbol "DataSetupDependencies" needs to be exported by the entry point index.d.ts
- //
// (undocumented)
- setup(core: CoreSetup, { expressions, uiActions, usageCollection }: DataSetupDependencies): DataPublicPluginSetup;
- // Warning: (ae-forgotten-export) The symbol "DataStartDependencies" needs to be exported by the entry point index.d.ts
- //
+ setup(core: CoreSetup, { expressions, uiActions, usageCollection }: DataSetupDependencies): DataPublicPluginSetup;
// (undocumented)
start(core: CoreStart_2, { uiActions }: DataStartDependencies): DataPublicPluginStart;
// (undocumented)
diff --git a/src/plugins/dev_tools/public/plugin.ts b/src/plugins/dev_tools/public/plugin.ts
index 3ee44aaa0816e..45fa3634bc87e 100644
--- a/src/plugins/dev_tools/public/plugin.ts
+++ b/src/plugins/dev_tools/public/plugin.ts
@@ -61,7 +61,7 @@ export class DevToolsPlugin implements Plugin {
}),
updater$: this.appStateUpdater,
euiIconType: 'devToolsApp',
- order: 9001,
+ order: 9010,
category: DEFAULT_APP_CATEGORIES.management,
mount: async (params: AppMountParameters) => {
const { element, history } = params;
diff --git a/src/plugins/discover/public/application/components/sidebar/discover_field.tsx b/src/plugins/discover/public/application/components/sidebar/discover_field.tsx
index 5f40c55e30e7e..724908281146d 100644
--- a/src/plugins/discover/public/application/components/sidebar/discover_field.tsx
+++ b/src/plugins/discover/public/application/components/sidebar/discover_field.tsx
@@ -17,7 +17,7 @@
* under the License.
*/
import React from 'react';
-import { EuiButton, EuiText } from '@elastic/eui';
+import { EuiButton } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { DiscoverFieldDetails } from './discover_field_details';
import { FieldIcon } from '../../../../../kibana_react/public';
@@ -108,6 +108,13 @@ export function DiscoverField({
}
};
+ function wrapOnDot(str?: string) {
+ // u200B is a non-width white-space character, which allows
+ // the browser to efficiently word-wrap right after the dot
+ // without us having to draw a lot of extra DOM elements, etc
+ return str ? str.replace(/\./g, '.\u200B') : '';
+ }
+
return (
<>
-
-
- {useShortDots ? shortenDottedString(field.name) : field.displayName}
-
+
+ {useShortDots ? wrapOnDot(shortenDottedString(field.name)) : wrapOnDot(field.displayName)}
{field.name !== '_source' && !selected && (
diff --git a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.scss b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.scss
index ae7e915f09773..07efd64752c84 100644
--- a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.scss
+++ b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.scss
@@ -23,13 +23,6 @@
margin-bottom: 0;
}
-.dscFieldList--selected,
-.dscFieldList--unpopular,
-.dscFieldList--popular {
- padding-left: $euiSizeS;
- padding-right: $euiSizeS;
-}
-
.dscFieldListHeader {
padding: $euiSizeS $euiSizeS 0 $euiSizeS;
background-color: lightOrDarkTheme(tint($euiColorPrimary, 90%), $euiColorLightShade);
@@ -40,8 +33,7 @@
}
.dscFieldChooser {
- padding-left: $euiSizeS !important;
- padding-right: $euiSizeS !important;
+ padding-left: $euiSize;
}
.dscFieldChooser__toggle {
@@ -55,12 +47,12 @@
display: flex;
align-items: center;
justify-content: space-between;
- padding: 0 2px;
cursor: pointer;
font-size: $euiFontSizeXS;
border-top: solid 1px transparent;
border-bottom: solid 1px transparent;
line-height: normal;
+ margin-bottom: $euiSizeXS * 0.5;
&:hover,
&:focus {
@@ -72,28 +64,25 @@
.dscSidebarItem--active {
border-top: 1px solid $euiColorLightShade;
- background: shade($euiColorLightestShade, 5%);
color: $euiColorFullShade;
- .euiText {
- font-weight: bold;
- }
}
.dscSidebarField {
- padding: $euiSizeXS 0;
+ padding: $euiSizeXS;
display: flex;
- align-items: flex-start;
+ align-items: center;
max-width: 100%;
- margin: 0;
width: 100%;
border: none;
- border-radius: 0;
+ border-radius: $euiBorderRadius - 1px;
text-align: left;
}
.dscSidebarField__name {
margin-left: $euiSizeS;
flex-grow: 1;
+ word-break: break-word;
+ padding-right: 1px;
}
.dscSidebarField__fieldIcon {
diff --git a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx
index 96e04c13d70e9..e8ed8b80da3bb 100644
--- a/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx
+++ b/src/plugins/discover/public/application/components/sidebar/discover_sidebar.tsx
@@ -19,7 +19,7 @@
import './discover_sidebar.scss';
import React, { useCallback, useEffect, useState, useMemo } from 'react';
import { i18n } from '@kbn/i18n';
-import { EuiButtonIcon, EuiTitle } from '@elastic/eui';
+import { EuiButtonIcon, EuiTitle, EuiSpacer } from '@elastic/eui';
import { sortBy } from 'lodash';
import { FormattedMessage, I18nProvider } from '@kbn/i18n/react';
import { DiscoverField } from './discover_field';
@@ -199,6 +199,7 @@ export function DiscoverSidebar({
/>
+
{
const sortedActions = [...regularActions, ...extraActions].sort(sortByOrderField);
return await buildContextMenuForActions({
- actions: sortedActions,
- actionContext: { embeddable: this.props.embeddable },
+ actions: sortedActions.map((action) => [action, { embeddable: this.props.embeddable }]),
closeMenu: this.closeMyContextMenuPanel,
});
};
diff --git a/src/plugins/es_ui_shared/public/forms/multi_content/multi_content_context.tsx b/src/plugins/es_ui_shared/public/forms/multi_content/multi_content_context.tsx
index 210b0cedccd06..c5659745f229a 100644
--- a/src/plugins/es_ui_shared/public/forms/multi_content/multi_content_context.tsx
+++ b/src/plugins/es_ui_shared/public/forms/multi_content/multi_content_context.tsx
@@ -17,7 +17,7 @@
* under the License.
*/
-import React, { useEffect, useCallback, createContext, useContext } from 'react';
+import React, { useEffect, useCallback, createContext, useContext, useRef } from 'react';
import { useMultiContent, HookProps, Content, MultiContent } from './use_multi_content';
@@ -55,7 +55,14 @@ export function useMultiContentContext(contentId: K) {
- const { updateContentAt, saveSnapshotAndRemoveContent, getData } = useMultiContentContext();
+ const isMounted = useRef(false);
+ const defaultValue = useRef(undefined);
+ const {
+ updateContentAt,
+ saveSnapshotAndRemoveContent,
+ getData,
+ getSingleContentData,
+ } = useMultiContentContext();
const updateContent = useCallback(
(content: Content) => {
@@ -71,12 +78,22 @@ export function useContent(contentId: K) {
};
}, [contentId, saveSnapshotAndRemoveContent]);
- const data = getData();
- const defaultValue = data[contentId];
+ useEffect(() => {
+ if (isMounted.current === false) {
+ isMounted.current = true;
+ }
+ }, []);
+
+ if (isMounted.current === false) {
+ // Only read the default value once, on component mount to avoid re-rendering the
+ // consumer each time the multi-content validity ("isValid") changes.
+ defaultValue.current = getSingleContentData(contentId);
+ }
return {
- defaultValue,
+ defaultValue: defaultValue.current!,
updateContent,
getData,
+ getSingleContentData,
};
}
diff --git a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
index adc68a39a4a5b..8d470f6454b0e 100644
--- a/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
+++ b/src/plugins/es_ui_shared/public/forms/multi_content/use_multi_content.ts
@@ -45,6 +45,7 @@ export interface MultiContent {
updateContentAt: (id: keyof T, content: Content) => void;
saveSnapshotAndRemoveContent: (id: keyof T) => void;
getData: () => T;
+ getSingleContentData: (contentId: K) => T[K];
validate: () => Promise;
validation: Validation;
}
@@ -109,9 +110,22 @@ export function useMultiContent({
};
}, [stateData, validation]);
+ /**
+ * Read a single content data.
+ */
+ const getSingleContentData = useCallback(
+ (contentId: K): T[K] => {
+ if (contents.current[contentId]) {
+ return contents.current[contentId].getData();
+ }
+ return stateData[contentId];
+ },
+ [stateData]
+ );
+
const updateContentValidity = useCallback(
(updatedData: { [key in keyof T]?: boolean | undefined }): boolean | undefined => {
- let allContentValidity: boolean | undefined;
+ let isAllContentValid: boolean | undefined = validation.isValid;
setValidation((prev) => {
if (
@@ -120,7 +134,7 @@ export function useMultiContent({
)
) {
// No change in validation, nothing to update
- allContentValidity = prev.isValid;
+ isAllContentValid = prev.isValid;
return prev;
}
@@ -129,21 +143,21 @@ export function useMultiContent({
...updatedData,
};
- allContentValidity = Object.values(nextContentsValidityState).some(
+ isAllContentValid = Object.values(nextContentsValidityState).some(
(_isValid) => _isValid === undefined
)
? undefined
: Object.values(nextContentsValidityState).every(Boolean);
return {
- isValid: allContentValidity,
+ isValid: isAllContentValid,
contents: nextContentsValidityState,
};
});
- return allContentValidity;
+ return isAllContentValid;
},
- []
+ [validation.isValid]
);
/**
@@ -163,7 +177,7 @@ export function useMultiContent({
}
return Boolean(updateContentValidity(updatedValidation));
- }, [updateContentValidity]);
+ }, [validation.isValid, updateContentValidity]);
/**
* Update a content. It replaces the content in our "contents" map and update
@@ -186,7 +200,7 @@ export function useMultiContent({
});
}
},
- [updateContentValidity, onChange]
+ [updateContentValidity, onChange, getData, validate]
);
/**
@@ -211,6 +225,7 @@ export function useMultiContent({
return {
getData,
+ getSingleContentData,
validate,
validation,
updateContentAt,
diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts
index 4c4a7f0642022..4c8e91b13b1b7 100644
--- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts
+++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/form_data_provider.ts
@@ -29,6 +29,7 @@ interface Props {
export const FormDataProvider = React.memo(({ children, pathsToWatch }: Props) => {
const form = useFormContext();
+ const { subscribe } = form;
const previousRawData = useRef(form.__getFormData$().value);
const [formData, setFormData] = useState(previousRawData.current);
@@ -54,9 +55,9 @@ export const FormDataProvider = React.memo(({ children, pathsToWatch }: Props) =
);
useEffect(() => {
- const subscription = form.subscribe(onFormData);
+ const subscription = subscribe(onFormData);
return subscription.unsubscribe;
- }, [form.subscribe, onFormData]);
+ }, [subscribe, onFormData]);
return children(formData);
});
diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts
index 1605c09f575f6..3688421964d2e 100644
--- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts
+++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_array.ts
@@ -17,7 +17,7 @@
* under the License.
*/
-import { useState, useEffect, useRef } from 'react';
+import { useState, useEffect, useRef, useCallback } from 'react';
import { useFormContext } from '../form_context';
@@ -83,14 +83,18 @@ export const UseArray = ({
const [items, setItems] = useState(initialState);
- const updatePaths = (_rows: ArrayItem[]) =>
- _rows.map(
- (row, index) =>
- ({
- ...row,
- path: `${path}[${index}]`,
- } as ArrayItem)
- );
+ const updatePaths = useCallback(
+ (_rows: ArrayItem[]) => {
+ return _rows.map(
+ (row, index) =>
+ ({
+ ...row,
+ path: `${path}[${index}]`,
+ } as ArrayItem)
+ );
+ },
+ [path]
+ );
const addItem = () => {
setItems((previousItems) => {
@@ -108,11 +112,13 @@ export const UseArray = ({
useEffect(() => {
if (didMountRef.current) {
- setItems(updatePaths(items));
+ setItems((prev) => {
+ return updatePaths(prev);
+ });
} else {
didMountRef.current = true;
}
- }, [path]);
+ }, [path, updatePaths]);
return children({ items, addItem, removeItem });
};
diff --git a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx
index 7ad32cb0bc3f0..f00beb470a9fc 100644
--- a/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx
+++ b/src/plugins/es_ui_shared/static/forms/hook_form_lib/components/use_field.test.tsx
@@ -30,8 +30,9 @@ describe('', () => {
const TestComp = ({ onData }: { onData: OnUpdateHandler }) => {
const { form } = useForm();
+ const { subscribe } = form;
- useEffect(() => form.subscribe(onData).unsubscribe, [form]);
+ useEffect(() => subscribe(onData).unsubscribe, [subscribe, onData]);
return (