diff --git a/.eslintrc.js b/.eslintrc.js index 7dd0860aac04e..03a674993ab50 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -189,13 +189,6 @@ module.exports = { 'react-hooks/exhaustive-deps': 'off', }, }, - { - files: ['x-pack/legacy/plugins/uptime/**/*.{js,ts,tsx}'], - rules: { - 'react-hooks/exhaustive-deps': 'off', - 'react-hooks/rules-of-hooks': 'off', - }, - }, /** * Files that require Apache 2.0 headers, settings diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 53270e4517192..1137fb99f81a7 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,6 +8,16 @@ /src/plugins/share/ @elastic/kibana-app /src/legacy/server/url_shortening/ @elastic/kibana-app /src/legacy/server/sample_data/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/dashboard/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/discover/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/visualize/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/local_application_service/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/home/ @elastic/kibana-app +/src/legacy/core_plugins/kibana/public/dev_tools/ @elastic/kibana-app +/src/plugins/home/ @elastic/kibana-app +/src/plugins/kibana_legacy/ @elastic/kibana-app +/src/plugins/timelion/ @elastic/kibana-app +/src/plugins/dev_tools/ @elastic/kibana-app # App Architecture /src/plugins/data/ @elastic/kibana-app-arch @@ -57,6 +67,13 @@ /x-pack/test/functional/services/transform_ui/ @elastic/ml-ui /x-pack/test/functional/services/transform.ts @elastic/ml-ui +# Maps +/x-pack/legacy/plugins/maps/ @elastic/kibana-gis +/x-pack/test/api_integration/apis/maps/ @elastic/kibana-gis +/x-pack/test/functional/apps/maps/ @elastic/kibana-gis +/x-pack/test/functional/es_archives/maps/ @elastic/kibana-gis +/x-pack/test/visual_regression/tests/maps/index.js @elastic/kibana-gis + # Operations /src/dev/ @elastic/kibana-operations /src/setup_node_env/ @elastic/kibana-operations diff --git a/.github/workflows/pr-project-assigner.yml b/.github/workflows/pr-project-assigner.yml index aea8a9cad6b1f..59123731dce66 100644 --- a/.github/workflows/pr-project-assigner.yml +++ b/.github/workflows/pr-project-assigner.yml @@ -11,5 +11,11 @@ jobs: uses: elastic/github-actions/project-assigner@v1.0.0 id: project_assigner with: - issue-mappings: '[{"label": "Team:AppAch", "projectName": "kibana-app-arch", "columnId": 6173897}]' - ghToken: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + issue-mappings: | + [ + { "label": "Team:AppArch", "projectName": "kibana-app-arch", "columnId": 6173897 }, + { "label": "Feature:Lens", "projectName": "Lens", "columnId": 6219362 }, + { "label": "Team:Platform", "projectName": "kibana-platform", "columnId": 5514360 }, + {"label": "Team:Canvas", "projectName": "canvas", "columnId": 6187580} + ] + ghToken: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/project-assigner.yml b/.github/workflows/project-assigner.yml index c7f17993249eb..aec3bf88f0ee2 100644 --- a/.github/workflows/project-assigner.yml +++ b/.github/workflows/project-assigner.yml @@ -11,7 +11,7 @@ jobs: uses: elastic/github-actions/project-assigner@v1.0.0 id: project_assigner with: - issue-mappings: '[{"label": "Team:AppArch", "projectName": "kibana-app-arch", "columnId": 6173895}]' + issue-mappings: '[{"label": "Team:AppArch", "projectName": "kibana-app-arch", "columnId": 6173895}, {"label": "Feature:Lens", "projectName": "Lens", "columnId": 6219363}, {"label": "Team:Canvas", "projectName": "canvas", "columnId": 6187593}]' ghToken: ${{ 
secrets.GITHUB_TOKEN }} - + diff --git a/.gitignore b/.gitignore index e7391a5c292d0..02b20da297fc6 100644 --- a/.gitignore +++ b/.gitignore @@ -29,7 +29,6 @@ disabledPlugins webpackstats.json /config/* !/config/kibana.yml -!/config/apm.js coverage selenium .babel_register_cache.json diff --git a/.i18nrc.json b/.i18nrc.json index 23f3d6ee33829..4bc0f773ee8b5 100644 --- a/.i18nrc.json +++ b/.i18nrc.json @@ -11,6 +11,7 @@ "embeddableApi": "src/plugins/embeddable", "embeddableExamples": "examples/embeddable_examples", "share": "src/plugins/share", + "home": "src/plugins/home", "esUi": "src/plugins/es_ui_shared", "devTools": "src/plugins/dev_tools", "expressions": "src/plugins/expressions", diff --git a/.node-version b/.node-version index 95abd2ac49910..06c9b9d306348 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -10.15.2 +10.18.0 diff --git a/.nvmrc b/.nvmrc index 95abd2ac49910..06c9b9d306348 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -10.15.2 +10.18.0 diff --git a/NOTICE.txt b/NOTICE.txt index 230e511746022..955c3127fa955 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Kibana source code with Kibana X-Pack source code -Copyright 2012-2019 Elasticsearch B.V. +Copyright 2012-2020 Elasticsearch B.V. --- Pretty handling of logarithmic axes. diff --git a/config/apm.js b/config/apm.js deleted file mode 100644 index 0cfcd759f163b..0000000000000 --- a/config/apm.js +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * DO NOT EDIT THIS FILE! - * - * This file contains the configuration for the Elastic APM instrumentaion of - * Kibana itself and is only intented to be used during development of Kibana. - * - * Instrumentation is turned off by default. Once activated it will send APM - * data to an Elasticsearch cluster accessible by Elastic employees. - * - * To modify the configuration, either use environment variables, or create a - * file named `config/apm.dev.js`, which exports a config object as described - * in the docs. - * - * For an overview over the available configuration files, see: - * https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html - * - * For general information about Elastic APM, see: - * https://www.elastic.co/guide/en/apm/get-started/current/index.html - */ - -const { readFileSync } = require('fs'); -const { join } = require('path'); -const { execSync } = require('child_process'); -const merge = require('lodash.merge'); - -module.exports = merge( - { - active: false, - serverUrl: 'https://f1542b814f674090afd914960583265f.apm.us-central1.gcp.cloud.es.io:443', - // The secretToken below is intended to be hardcoded in this file even though - // it makes it public. This is not a security/privacy issue. 
Normally we'd - // instead disable the need for a secretToken in the APM Server config where - // the data is transmitted to, but due to how it's being hosted, it's easier, - // for now, to simply leave it in. - secretToken: 'R0Gjg46pE9K9wGestd', - globalLabels: {}, - centralConfig: false, - logUncaughtExceptions: true, - }, - devConfig() -); - -const rev = gitRev(); -if (rev !== null) module.exports.globalLabels.git_rev = rev; - -try { - const filename = join(__dirname, '..', 'data', 'uuid'); - module.exports.globalLabels.kibana_uuid = readFileSync(filename, 'utf-8'); -} catch (e) {} // eslint-disable-line no-empty - -function gitRev() { - try { - return execSync('git rev-parse --short HEAD', { - encoding: 'utf-8', - stdio: ['ignore', 'pipe', 'ignore'], - }).trim(); - } catch (e) { - return null; - } -} - -function devConfig() { - try { - return require('./apm.dev'); // eslint-disable-line import/no-unresolved - } catch (e) { - return {}; - } -} diff --git a/docs/apm/advanced-queries.asciidoc b/docs/apm/advanced-queries.asciidoc index 1f064c1cad3fd..942882f8c4dfb 100644 --- a/docs/apm/advanced-queries.asciidoc +++ b/docs/apm/advanced-queries.asciidoc @@ -1,9 +1,7 @@ [[advanced-queries]] === Advanced queries -When querying, you're simply searching and selecting data from fields in Elasticsearch documents. -It may be helpful to view some of your documents in {kibana-ref}/discover.html[Discover] to better understand how APM data is stored in Elasticsearch. - +When querying in the APM app, you're simply searching and selecting data from fields in Elasticsearch documents. Queries entered into the query bar are also added as parameters to the URL, so it's easy to share a specific query or view with others. @@ -13,11 +11,48 @@ In the screenshot below, you can begin to see some of the transaction fields ava image::apm/images/apm-query-bar.png[Example of the Kibana Query bar in APM app in Kibana] [float] -==== Example queries +==== Example APM app queries * Exclude response times slower than 2000 ms: `transaction.duration.us > 2000000` * Filter by response status code: `context.response.status_code >= 400` * Filter by single user ID: `context.user.id : 12` -* View _all_ transactions for an endpoint, instead of just a sample - `processor.event: "transaction" AND transaction.name: ""` TIP: Read the {kibana-ref}/kuery-query.html[Kibana Query Language Enhancements] documentation to learn more about the capabilities of the {kib} query language. + +[float] +[[discover-advanced-queries]] +=== Querying in the Discover app + +It may also be helpful to view your APM data in the {kibana-ref}/discover.html[Discover app]. +Querying documents in Discover works the same way as querying in the APM app, +and all of the example queries listed above can also be used in the Discover app. + +[float] +==== Example Discover app query + +One example where you may want to make use of the Discover app, +is for viewing _all_ transactions for an endpoint, instead of just a sample. + +TIP: Starting in v7.6, you can view 10 samples per bucket in the APM app, instead of just one. + +Use the APM app to find a transaction name and time bucket that you're interested in learning more about. 
+Then, switch to the Discover app and make a search: + +["source","sh"] +----- +processor.event: "transaction" AND transaction.name: "" and transaction.duration.us > 13000 and transaction.duration.us < 14000` +----- + +In this example, we're interested in viewing all of the `APIRestController#customers` transactions +that took between 13 and 14 milliseconds. Here's what Discover returns: + +[role="screenshot"] +image::apm/images/advanced-discover.png[View all transactions in bucket] + +You can now explore the data until you find a specific transaction that you're interested in. +Copy that transaction's `transaction.id`, and paste it into the APM app to view the data in the context of the APM app: + +[role="screenshot"] +image::apm/images/specific-transaction-search.png[View specific transaction in apm app] +[role="screenshot"] +image::apm/images/specific-transaction.png[View specific transaction in apm app] diff --git a/docs/apm/images/advanced-discover.png b/docs/apm/images/advanced-discover.png new file mode 100644 index 0000000000000..56ba58b2c1d41 Binary files /dev/null and b/docs/apm/images/advanced-discover.png differ diff --git a/docs/apm/images/specific-transaction-search.png b/docs/apm/images/specific-transaction-search.png new file mode 100644 index 0000000000000..4ed548f015713 Binary files /dev/null and b/docs/apm/images/specific-transaction-search.png differ diff --git a/docs/apm/images/specific-transaction.png b/docs/apm/images/specific-transaction.png new file mode 100644 index 0000000000000..9911dbd879f41 Binary files /dev/null and b/docs/apm/images/specific-transaction.png differ diff --git a/docs/canvas/canvas-elements.asciidoc b/docs/canvas/canvas-elements.asciidoc index c5c6f116ee34e..dc605a47de383 100644 --- a/docs/canvas/canvas-elements.asciidoc +++ b/docs/canvas/canvas-elements.asciidoc @@ -20,24 +20,24 @@ When you add elements to your workpad, you can: [[add-canvas-element]] === Add elements to your workpad -Choose the elements to display on your workpad, then familiarize yourself with the element using the preconfigured demo data. +Choose the elements to display on your workpad, then familiarize yourself with the element using the preconfigured demo data. By default, every element you add to a workpad uses demo data until you change the data source. The demo data includes a small sample data set that you can use to experiment with your element. . Click *Add element*. -. In the *Elements* window, select the element you want to use. +. In the *Elements* window, select the element you want to use. + [role="screenshot"] image::images/canvas-element-select.gif[Canvas elements] -. Play around with the default settings and see what the element can do. +. Play around with the default settings and see what the element can do. -TIP: Want to use a different element? You can delete the element by selecting it, clicking the *Element options* icon in the top right corner, then selecting *Delete*. +TIP: Want to use a different element? You can delete the element by selecting it, clicking the *Element options* icon in the top right, then selecting *Delete*. [float] [[connect-element-data]] === Connect the element to your data -When you are ready to move on from the demo data, connect the element to your own data. +When you have finished using the demo data, connect the element to a data source. . Make sure that the element is selected, then select *Data*. 
@@ -45,55 +45,51 @@ When you are ready to move on from the demo data, connect the element to your ow [float] [[elasticsearch-sql-data-source]] -==== Connect to Elasticsearch SQL +==== Connect to {es} SQL -Access your data in Elasticsearch using the Elasticsearch SQL syntax. +Access your data in {es} using SQL syntax. For information about SQL syntax, refer to {ref}/sql-spec.html[SQL language]. -Unfamiliar with writing Elasticsearch SQL queries? For more information, refer to {ref}/sql-spec.html[SQL language]. +. Click *{es} SQL*. -. Click *Elasticsearch SQL*. +. In the *{es} SQL query* box, enter your query, then *Preview* it. -. In the *Elasticearch SQL query* box, enter your query, then *Preview* it. - -. If everything looks correct, *Save* it. +. If everything looks correct, *Save* it. [float] [[elasticsearch-raw-doc-data-source]] -==== Connect to Elasticsearch raw data +==== Connect to {es} raw data -Use the Lucene query syntax to use your raw data in Elasticsearch. +Access your raw data in {es} without the use of aggregations. Use {es} raw data when you have low volume datasets, or to plot exact, non-aggregated values. -For for more information about the Lucene query string sytax, refer to <>. +To use targeted queries, you can enter a query using the <>. -. Click *Elasticsearch raw documents*. +. Click *{es} raw documents*. -. In the *Index* field, enter the index pattern that you want to display. +. In the *Index* field, enter the index pattern that you want to display. . From the *Fields* dropdown, select the associated fields you want to display. . To sort the data, select an option from the *Sort Field* and *Sort Order* dropdowns. -. For more targeted queries, enter a *Query* using the Lucene query string syntax. +. For more targeted queries, enter a *Query* using the Lucene query string syntax. -. *Preview* the query. +. *Preview* the query. -. If your query looks correct, *Save* it. +. If your query looks correct, *Save* it. [float] [[timelion-data-source]] ==== Connect to Timelion -Use <> queries to use your time series data. +Access your time series data using <> queries. To use Timelion queries, you can enter a query using the <>. . Click *Timelion*. -. Enter a *Query* using the Lucene query string syntax. -+ -For for more information about the Lucene query string syntax, refer to <>. +. Enter a *Query* using the Lucene query string syntax. . Enter the *Interval*, then *Preview* the query. -. If your query looks correct, *Save* it. +. If your query looks correct, *Save* it. [float] [[configure-display-options]] @@ -109,7 +105,7 @@ When you connect your element to a data source, the element often appears as a w . Click *Display* -. Change the display options for the element. +. Change the display options for the element. [float] [[element-display-container]] @@ -122,7 +118,7 @@ Further define the appearance of the element container and border. . Expand *Container style*. . Change the *Appearance* and *Border* options. - + [float] [[apply-element-styles]] ==== Apply a set of styles @@ -155,7 +151,7 @@ Increase or decrease how often your data refreshes on your workpad. [role="screenshot"] image::images/canvas-refresh-interval.png[Element data refresh interval] -TIP: To manually refresh the data, click the *Refresh data* icon. +TIP: To manually refresh the data, click the *Refresh data* icon. [float] [[organize-element]] @@ -223,7 +219,7 @@ Change the order of how the elements are displayed on your workpad. . Select an element. -. 
In the top right corder, click the *Element options* icon. +. In the top right corder, click the *Element options* icon. . Select *Order*, then select the order that you want the element to appear. @@ -262,7 +258,7 @@ When you have run out of room on your workpad page, add more pages. . Click *Page 1*, then click *+*. -. On the *Page* editor panel on the right, select the page transition from the *Transition* dropdown. +. On the *Page* editor panel on the right, select the page transition from the *Transition* dropdown. + [role="screenshot"] image::images/canvas-add-pages.gif[Add pages] diff --git a/docs/canvas/canvas-expression-lifecycle.asciidoc b/docs/canvas/canvas-expression-lifecycle.asciidoc new file mode 100644 index 0000000000000..895c1382c4d36 --- /dev/null +++ b/docs/canvas/canvas-expression-lifecycle.asciidoc @@ -0,0 +1,261 @@ +[role="xpack"] +[[canvas-expression-lifecycle]] +== Canvas expression lifecycle + +Elements in Canvas are all created using an *expression language* that defines how to retrieve, manipulate, and ultimately visualize data. The goal is to allow you to do most of what you need without understanding the *expression language*, but learning how it works unlocks a lot of Canvas's power. + + +[[canvas-expressions-always-start-with-a-function]] +=== Expressions always start with a function + +Expressions simply execute <> in a specific order, which produce some output value. That output can then be inserted into another function, and another after that, until it produces the output you need. + +To use demo dataset available in Canvas to produce a table, run the following expression: + +[source,text] +---- +filters +| demodata +| table +| render +---- + +This expression starts out with the <> function, which provides the value of any time filters or dropdown filters in the workpad. This is then inserted into <>, a function that returns exactly what you expect, demo data. Because the <> function receives the filter information from the <> function before it, it applies those filters to reduce the set of data it returns. We call the output from the previous function _context_. + +The filtered <> becomes the _context_ of the next function, <>, which creates a table visualization from this data set. The <> function isn’t strictly required, but by being explicit, you have the option of providing arguments to control things like the font used in the table. The output of the <> function becomes the _context_ of the <> function. Like the <>, the <> function isn’t required either, but it allows access to other arguments, such as styling the border of the element or injecting custom CSS. + + +[[canvas-function-arguments]] +=== Function arguments + +Let’s look at another expression, which uses the same <> function, but instead produces a pie chart. + +image::images/canvas-functions-can-take-arguments-pie-chart.png[Pie Chart, height=400] +[source,text] +---- +filters +| demodata +| pointseries color="state" size="max(price)" +| pie +| render +---- + +To produce a filtered set of random data, the expression uses the <> and <> functions. This time, however, the output becomes the context for the <> function, which is a way to aggregate your data, similar to how Elasticsearch works, but more generalized. In this case, the data is split up using the `color` and `size` dimensions, using arguments on the <> function. Each unique value in the state column will have an associated size value, which in this case, will be the maximum value of the price column. 
+ +If the expression stopped there, it would produce a `pointseries` data type as the output of this expression. But instead of looking at the raw values, the result is inserted into the <> function, which will produce an output that will render a pie visualization. And just like before, this is inserted into the <> function, which is useful for its arguments. + +The end result is a simple pie chart that uses the default color palette, but the <> function can take additional arguments that control how it gets rendered. For example, you can provide a `hole` argument to turn your pie chart into a donut chart by changing the expression to: + + +image::images/canvas-functions-can-take-arguments-donut-chart.png[Donut Chart, height=400] +[source,text] +---- +filters +| demodata +| pointseries color="state" size="max(price)" +| pie hole=50 +| render +---- + + +[[canvas-aliases-and-unnamed-arguments]] +=== Aliases and unnamed arguments + +Argument definitions have one canonical name, which is always provided in the underlying code. When argument definitions are used in an expression, they often include aliases that make them easier or faster to type. + +For example, the <> function has 2 arguments: + +* `expression` - Produces a calculated value. +* `name` - The name of column. + +The `expression` argument includes some aliases, namely `exp`, `fn`, and `function`. That means that you can use any of those four options to provide that argument’s value. + +So `mapColumn name=newColumn fn={string example}` is equal to `mapColumn name=newColumn expression={string example}`. + +There’s also a special type of alias which allows you to leave off the argument’s name entirely. The alias for this is an underscore, which indicates that the argument is an _unnamed_ argument and can be provided without explicitly naming it in the expression. The `name` argument here uses the _unnamed_ alias, which means that you can further simplify our example to `mapColumn newColumn fn={string example}`. + +NOTE: There can only be one _unnamed_ argument for each function. + + +[[canvas-change-your-expression-change-your-output]] +=== Change your expression, change your output +You can substitute one function for another to change the output. For example, you could change the visualization by swapping out the <> function for another renderer, a function that returns a `render` data type. + +Let’s change that last pie chart into a bubble chart by replacing the <> function with the <> function. This is possible because both functions can accept a `pointseries` data type as their _context_. Switching the functions will work, but it won’t produce a useful visualization on its own since you don’t have the x-axis and y-axis defined. You will also need to modify the <> function to change its output. In this case, you can change the `size` argument to `y`, so the maximum price values are plotted on the y-axis, and add an `x` argument using the `@timestamp` field in the data to plot those values over time. This leaves you with the following expression and produces a bubble chart showing the max price of each state over time: + +image::images/canvas-change-your-expression-chart.png[Bubble Chart, height=400] +[source,text] +---- +filters +| demodata +| pointseries color="state" y="max(price)" x="@timestamp" +| plot +| render +---- + +Similar to the <> function, the <> function takes arguments that control the design elements of the visualization. 
As one example, passing a `legend` argument with a value of `false` to the function will hide the legend on the chart. + +image::images/canvas-change-your-expression-chart-no-legend.png[Bubble Chart Without Legend, height=400] +[source,text,subs=+quotes] +---- +filters +| demodata +| pointseries color="state" y="max(price)" x="@timestamp" +| plot *legend=false* +| render +---- + + +[[canvas-fetch-and-manipulate-data]] +=== Fetch and manipulate data +So far, you have only seen expressions as a way to produce visualizations, but that’s not really what’s happening. Expressions only produce data, which is then used to create something, which in the case of Canvas, means rendering an element. An element can be a visualization, driven by data, but it can also be something much simpler, like a static image. Either way, an expression is used to produce an output that is used to render the desired result. For example, here’s an expression that shows an image: + +[source,text] +---- +image dataurl=https://placekitten.com/160/160 mode="cover" +---- + +But as mentioned, this doesn’t actually _render that image_, but instead it _produces some output that can be used to render that image_. That’s an important distinction, and you can see the actual output by adding in the render function and telling it to produce debug output. For example: + +[source,text] +---- +image dataurl=https://placekitten.com/160/160 mode="cover" +| render as=debug +---- + +The follow appears as JSON output: + +[source,JSON] +---- +{ + "type": "image", + "mode": "cover", + "dataurl": "https://placekitten.com/160/160" +} +---- + +NOTE: You may need to expand the element’s size to see the whole output. + +Canvas uses this output’s data type to map to a specific renderer and passes the entire output into it. It’s up to the image render function to produce an image on the workpad’s page. In this case, the expression produces some JSON output, but expressions can also produce other, simpler data, like a string or a number. Typically, useful results use JSON. + +Canvas uses the output to render an element, but other applications can use expressions to do pretty much anything. As stated previously, expressions simply execute functions, and the functions are all written in Javascript. That means if you can do something in Javascript, you can do it with an expression. + +This can include: + +* Sending emails +* Sending notifications +* Reading from a file +* Writing to a file +* Controlling devices with WebUSB or Web Bluetooth +* Consuming external APIs + +If your Javascript works in the environment where the code will run, such as in Node.js or in a browser, you can do it with an expression. + +[[canvas-expressions-compose-functions-with-subexpressions]] +=== Compose functions with sub-expressions + +You may have noticed another syntax in examples from other sections, namely expressions inside of curly brackets. These are called sub-expressions, and they can be used to provide a calculated value to another expression, instead of just a static one. + +A simple example of this is when you upload your own images to a Canvas workpad. That upload becomes an asset, and that asset can be retrieved using the `asset` function. Usually you’ll just do this from the UI, adding an image element to the page and uploading your image from the control in the sidebar, or picking an existing asset from there as well. 
In both cases, the system will consume that asset via the `asset` function, and you’ll end up with an expression similar to this: + +[source,text] +---- +image dataurl={asset 3cb3ec3a-84d7-48fa-8709-274ad5cc9e0b} +---- + +Sub-expressions are executed before the function that uses them is executed. In this case, `asset` will be run first, it will produce a value, the base64-encoded value of the image and that value will be used as the value for the `dataurl` argument in the <> function. After the asset function executes, you will get the following output: + +[source,text] +---- +image dataurl="data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0" +---- + +Since all of the sub-expressions are now resolved into actual values, the <> function can be executed to produce its JSON output, just as it’s explained previously. In the case of images, the ability to nest sub-expressions is particularly useful to show one of several images conditionally. For example, you could swap between two images based on some calculated value by mixing in the <> function, like in this example expression: + +[source,text] +---- +demodata +| image dataurl={ + if condition={getCell price | gte 100} + then={asset 3cb3ec3a-84d7-48fa-8709-274ad5cc9e0b} + else={asset cbc11a1f-8f25-4163-94b4-2c3a060192e7} +} +---- + +NOTE: The examples in this section can’t be copy and pasted directly, since the values used throughout will not exist in your workpad. + +Here, the expression to use for the value of the `condition` argument, `getCell price | gte 100`, runs first since it is nested deeper. + +The expression does the following: + +* Retrieves the value from the *price* column in the first row of the `demodata` data table +* Inputs the value to the `gte` function +* Compares the value to `100` +* Returns `true` if the value is 100 or greater, and `false` if the value is 100 or less + +That boolean value becomes the value for the `condition` argument. The output from the `then` expression is used as the output when `condition` is `true`. The output from the `else` expression is used when `condition` is false. In both cases, a base64-encoded image will be returned, and one of the two images will be displayed. + +You might be wondering how the <> function in the sub-expression accessed the data from the <> function, even though <> was not being directly inserted into <>. The answer is simple, but important to understand. When nested sub-expressions are executed, they automatically receive the same _context_, or output of the previous function that its parent function receives. In this specific expression, demodata’s data table is automatically provided to the nested expression’s `getCell` function, which allows that expression to pull out a value and compare it to another value. + +The passing of the _context_ is automatic, and it happens no matter how deeply you nest your sub-expressions. To demonstrate this, let’s modify the expression slightly to compare the value of the price against multiple conditions using the <> function. + +[source,text] +---- +demodata +| image dataurl={ + if condition={getCell price | all {gte 100} {neq 105}} + then={asset 3cb3ec3a-84d7-48fa-8709-274ad5cc9e0b} + else={asset cbc11a1f-8f25-4163-94b4-2c3a060192e7} +} +---- + +This time, `getCell price` is run, and the result is passed into the next function as the context. Then, each sub-expression of the <> function is run, with the context given to their parent, which in this case is the result of `getCell price`. 
If `all` of these sub-expressions evaluate to `true`, then the `if` condition argument will be true. + +Sub-expressions can seem a little foreign, especially if you aren’t a developer, but they’re worth getting familiar with, since they provide a ton of power and flexibility. Since you can nest any expression you want, you can also use this behavior to mix data from multiple indices, or even data from multiple sources. As an example, you could query an API for a value to use as part of the query provided to <>. + +This whole section is really just scratching the surface, but hopefully after reading it, you at least understand how to read expressions and make sense of what they are doing. With a little practice, you’ll get the hang of mixing _context_ and sub-expressions together to turn any input into your desired output. + +[[canvas-handling-context-and-argument-types]] +=== Handling context and argument types +If you look through the <>, you may notice that all of them define what a function accepts and what it returns. Additionally, every argument includes a type property that specifies the kind of data that can be used. These two types of values are actually the same, and can be used as a guide for how to deal with piping to other functions and using subexpressions for argument values. + +To explain how this works, consider the following expression from the previous section: + +[source,text] +---- +image dataurl={asset 3cb3ec3a-84d7-48fa-8709-274ad5cc9e0b} +---- + +If you <> for the `image` function, you’ll see that it accepts the `null` data type and returns an `image` data type. Accepting `null` effectively means that it does not use context at all, so if you insert anything to `image`, the value that was produced previously will be ignored. When the function executes, it will produce an `image` output, which is simply an object of type `image` that contains the information required to render an image. + +NOTE: The function does not render an image itself. + +As explained in the "<>" section, the output of an expression is just data. So the `image` type here is just a specific shape of data, not an actual image. + +Next, let’s take a look at the `asset` function. Like `image`, it accepts `null`, but it returns something different, a `string` in this case. Because `asset` will produce a string, its output can be used as the input for any function or argument that accepts a string. + +<> for the `dataurl` argument, its type is `string`, meaning it will accept any kind of string. There are some rules about the value of the string that the function itself enforces, but as far as the interpreter is concerned, that expression is valid because the argument accepts a string and the output of `asset` is a string. + +The interpreter also attempts to cast some input types into others, which allows you to use a string input even when the function or argument calls for a number. Keep in mind that it’s not able to convert any string value, but if the string is a number, it can easily be cast into a `number` type. Take the following expression for example: + +[source,text] +---- +string "0.4" +| revealImage image={asset asset-06511b39-ec44-408a-a5f3-abe2da44a426} +---- + +If you <> for the `revealImage` function, you’ll see that it accepts a `number` but the `string` function returns a `string` type. In this case, because the string value is a number, it can be converted into a `number` type and used without you having to do anything else. 
+ +Most `primitive` types can be converted automatically, as you might expect. You just saw that a `string` can be cast into a `number`, but you can also pretty easily cast things into `boolean` too, and you can cast anything to `null`. + +There are other useful type casting options available. For example, something of type `datatable` can be cast to a type `pointseries` simply by only preserving specific columns from the data (namely x, y, size, color, and text). This allows you to treat your source data, which is generally of type `datatable`, like a `pointseries` type simply by convention. + +You can fetch data from Elasticsearch using `essql`, which allows you to aggregate the data, provide a custom name for the value, and insert that data directly to another function that only accepts `pointseries` even though `essql` will output a `datatable` type. This makes the following example expression valid: + +[source,text] +---- +essql "SELECT user AS x, sum(cost) AS y FROM index GROUP BY user" +| plot +---- + +In the docs you can see that `essql` returns a `datatable` type, but `plot` expects a `pointseries` context. This works because the `datatable` output will have the columns `x` and `y` as a result of using `AS` in the sql statement to name them. Because the data follows the convention of the `pointseries` data type, casting it into `pointseries` is possible, and it can be passed directly to `plot` as a result. diff --git a/docs/development/core/public/kibana-plugin-public.app.approute.md b/docs/development/core/public/kibana-plugin-public.app.approute.md new file mode 100644 index 0000000000000..7f35f4346b6b3 --- /dev/null +++ b/docs/development/core/public/kibana-plugin-public.app.approute.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [App](./kibana-plugin-public.app.md) > [appRoute](./kibana-plugin-public.app.approute.md) + +## App.appRoute property + +Override the application's routing path from `/app/${id}`. Must be unique across registered applications. Should not include the base path from HTTP. + +Signature: + +```typescript +appRoute?: string; +``` diff --git a/docs/development/core/public/kibana-plugin-public.app.md b/docs/development/core/public/kibana-plugin-public.app.md index edab4f88497f6..acf07cbf62e91 100644 --- a/docs/development/core/public/kibana-plugin-public.app.md +++ b/docs/development/core/public/kibana-plugin-public.app.md @@ -16,6 +16,7 @@ export interface App extends AppBase | Property | Type | Description | | --- | --- | --- | +| [appRoute](./kibana-plugin-public.app.approute.md) | string | Override the application's routing path from /app/${id}. Must be unique across registered applications. Should not include the base path from HTTP. | | [chromeless](./kibana-plugin-public.app.chromeless.md) | boolean | Hide the UI chrome when the application is mounted. Defaults to false. Takes precedence over chrome service visibility settings. | | [mount](./kibana-plugin-public.app.mount.md) | AppMount | AppMountDeprecated | A mount function called when the user navigates to this app's route. May have signature of [AppMount](./kibana-plugin-public.appmount.md) or [AppMountDeprecated](./kibana-plugin-public.appmountdeprecated.md). 
| diff --git a/docs/development/core/public/kibana-plugin-public.appmountparameters.appbasepath.md b/docs/development/core/public/kibana-plugin-public.appmountparameters.appbasepath.md index a1544373ee698..7cd709d615729 100644 --- a/docs/development/core/public/kibana-plugin-public.appmountparameters.appbasepath.md +++ b/docs/development/core/public/kibana-plugin-public.appmountparameters.appbasepath.md @@ -4,7 +4,7 @@ ## AppMountParameters.appBasePath property -The base path for configuring the application's router. +The route path for configuring navigation to the application. This string should not include the base path from HTTP. Signature: @@ -22,6 +22,7 @@ export class MyPlugin implements Plugin { setup({ application }) { application.register({ id: 'my-app', + appRoute: '/my-app', async mount(params) { const { renderApp } = await import('./application'); return renderApp(params); diff --git a/docs/development/core/public/kibana-plugin-public.appmountparameters.md b/docs/development/core/public/kibana-plugin-public.appmountparameters.md index 8733f9cd4915d..aa5ca93ed8ff0 100644 --- a/docs/development/core/public/kibana-plugin-public.appmountparameters.md +++ b/docs/development/core/public/kibana-plugin-public.appmountparameters.md @@ -15,6 +15,6 @@ export interface AppMountParameters | Property | Type | Description | | --- | --- | --- | -| [appBasePath](./kibana-plugin-public.appmountparameters.appbasepath.md) | string | The base path for configuring the application's router. | +| [appBasePath](./kibana-plugin-public.appmountparameters.appbasepath.md) | string | The route path for configuring navigation to the application. This string should not include the base path from HTTP. | | [element](./kibana-plugin-public.appmountparameters.element.md) | HTMLElement | The container element to render the application into. | diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.md b/docs/development/core/public/kibana-plugin-public.httpservicebase.md deleted file mode 100644 index 9ea77c95b343e..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.md +++ /dev/null @@ -1,37 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) - -## HttpServiceBase interface - - -Signature: - -```typescript -export interface HttpServiceBase -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [anonymousPaths](./kibana-plugin-public.httpservicebase.anonymouspaths.md) | IAnonymousPaths | APIs for denoting certain paths for not requiring authentication | -| [basePath](./kibana-plugin-public.httpservicebase.basepath.md) | IBasePath | APIs for manipulating the basePath on URL segments. | -| [delete](./kibana-plugin-public.httpservicebase.delete.md) | HttpHandler | Makes an HTTP request with the DELETE method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [fetch](./kibana-plugin-public.httpservicebase.fetch.md) | HttpHandler | Makes an HTTP request. Defaults to a GET request unless overriden. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [get](./kibana-plugin-public.httpservicebase.get.md) | HttpHandler | Makes an HTTP request with the GET method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [head](./kibana-plugin-public.httpservicebase.head.md) | HttpHandler | Makes an HTTP request with the HEAD method. 
See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [options](./kibana-plugin-public.httpservicebase.options.md) | HttpHandler | Makes an HTTP request with the OPTIONS method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [patch](./kibana-plugin-public.httpservicebase.patch.md) | HttpHandler | Makes an HTTP request with the PATCH method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [post](./kibana-plugin-public.httpservicebase.post.md) | HttpHandler | Makes an HTTP request with the POST method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | -| [put](./kibana-plugin-public.httpservicebase.put.md) | HttpHandler | Makes an HTTP request with the PUT method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | - -## Methods - -| Method | Description | -| --- | --- | -| [addLoadingCount(countSource$)](./kibana-plugin-public.httpservicebase.addloadingcount.md) | Adds a new source of loading counts. Used to show the global loading indicator when sum of all observed counts are more than 0. | -| [getLoadingCount$()](./kibana-plugin-public.httpservicebase.getloadingcount_.md) | Get the sum of all loading count sources as a single Observable. | -| [intercept(interceptor)](./kibana-plugin-public.httpservicebase.intercept.md) | Adds a new [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md) to the global HTTP client. | -| [removeAllInterceptors()](./kibana-plugin-public.httpservicebase.removeallinterceptors.md) | Removes all configured interceptors. | - diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.removeallinterceptors.md b/docs/development/core/public/kibana-plugin-public.httpservicebase.removeallinterceptors.md deleted file mode 100644 index 0432ec29a22b6..0000000000000 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.removeallinterceptors.md +++ /dev/null @@ -1,17 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [removeAllInterceptors](./kibana-plugin-public.httpservicebase.removeallinterceptors.md) - -## HttpServiceBase.removeAllInterceptors() method - -Removes all configured interceptors. 
- -Signature: - -```typescript -removeAllInterceptors(): void; -``` -Returns: - -`void` - diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.addloadingcount.md b/docs/development/core/public/kibana-plugin-public.httpsetup.addloadingcountsource.md similarity index 62% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.addloadingcount.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.addloadingcountsource.md index e984fea48625d..a2fe66bb55c77 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.addloadingcount.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.addloadingcountsource.md @@ -1,15 +1,15 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [addLoadingCount](./kibana-plugin-public.httpservicebase.addloadingcount.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [addLoadingCountSource](./kibana-plugin-public.httpsetup.addloadingcountsource.md) -## HttpServiceBase.addLoadingCount() method +## HttpSetup.addLoadingCountSource() method Adds a new source of loading counts. Used to show the global loading indicator when sum of all observed counts are more than 0. Signature: ```typescript -addLoadingCount(countSource$: Observable): void; +addLoadingCountSource(countSource$: Observable): void; ``` ## Parameters diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.anonymouspaths.md b/docs/development/core/public/kibana-plugin-public.httpsetup.anonymouspaths.md similarity index 57% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.anonymouspaths.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.anonymouspaths.md index e94757c5eb031..a9268ca1d8ed6 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.anonymouspaths.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.anonymouspaths.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [anonymousPaths](./kibana-plugin-public.httpservicebase.anonymouspaths.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [anonymousPaths](./kibana-plugin-public.httpsetup.anonymouspaths.md) -## HttpServiceBase.anonymousPaths property +## HttpSetup.anonymousPaths property APIs for denoting certain paths for not requiring authentication diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.basepath.md b/docs/development/core/public/kibana-plugin-public.httpsetup.basepath.md similarity index 57% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.basepath.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.basepath.md index 6c5f690a5c607..6b0726dc8ef2b 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.basepath.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.basepath.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [basePath](./kibana-plugin-public.httpservicebase.basepath.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > 
[HttpSetup](./kibana-plugin-public.httpsetup.md) > [basePath](./kibana-plugin-public.httpsetup.basepath.md) -## HttpServiceBase.basePath property +## HttpSetup.basePath property APIs for manipulating the basePath on URL segments. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.delete.md b/docs/development/core/public/kibana-plugin-public.httpsetup.delete.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.delete.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.delete.md index 73022ef4f2946..565f0eb336d4f 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.delete.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.delete.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [delete](./kibana-plugin-public.httpservicebase.delete.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [delete](./kibana-plugin-public.httpsetup.delete.md) -## HttpServiceBase.delete property +## HttpSetup.delete property Makes an HTTP request with the DELETE method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.fetch.md b/docs/development/core/public/kibana-plugin-public.httpsetup.fetch.md similarity index 64% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.fetch.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.fetch.md index 3a1ae4892a3dd..2d6447363fa9b 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.fetch.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.fetch.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [fetch](./kibana-plugin-public.httpservicebase.fetch.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [fetch](./kibana-plugin-public.httpsetup.fetch.md) -## HttpServiceBase.fetch property +## HttpSetup.fetch property Makes an HTTP request. Defaults to a GET request unless overriden. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.get.md b/docs/development/core/public/kibana-plugin-public.httpsetup.get.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.get.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.get.md index a61b3dd140e50..0c484e33e9b58 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.get.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.get.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [get](./kibana-plugin-public.httpservicebase.get.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [get](./kibana-plugin-public.httpsetup.get.md) -## HttpServiceBase.get property +## HttpSetup.get property Makes an HTTP request with the GET method. 
See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.getloadingcount_.md b/docs/development/core/public/kibana-plugin-public.httpsetup.getloadingcount_.md similarity index 59% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.getloadingcount_.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.getloadingcount_.md index 0b2129330cd01..628b62b2ffc27 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.getloadingcount_.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.getloadingcount_.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [getLoadingCount$](./kibana-plugin-public.httpservicebase.getloadingcount_.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [getLoadingCount$](./kibana-plugin-public.httpsetup.getloadingcount_.md) -## HttpServiceBase.getLoadingCount$() method +## HttpSetup.getLoadingCount$() method Get the sum of all loading count sources as a single Observable. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.head.md b/docs/development/core/public/kibana-plugin-public.httpsetup.head.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.head.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.head.md index 4624d95f03bf3..e4d49c843e572 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.head.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.head.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [head](./kibana-plugin-public.httpservicebase.head.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [head](./kibana-plugin-public.httpsetup.head.md) -## HttpServiceBase.head property +## HttpSetup.head property Makes an HTTP request with the HEAD method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.intercept.md b/docs/development/core/public/kibana-plugin-public.httpsetup.intercept.md similarity index 72% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.intercept.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.intercept.md index 8cf5bf813df09..1bda0c6166e65 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.intercept.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.intercept.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [intercept](./kibana-plugin-public.httpservicebase.intercept.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [intercept](./kibana-plugin-public.httpsetup.intercept.md) -## HttpServiceBase.intercept() method +## HttpSetup.intercept() method Adds a new [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md) to the global HTTP client. 
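
Editorial note, not part of this diff: the renamed `HttpSetup` pages above document `intercept()` and `addLoadingCountSource()` but show no usage. The sketch below illustrates how a new-platform plugin might wire both up during `setup`, using the renamed methods. The `kibana/public` import path, the `rxjs` subject, and the `response` hook name on the interceptor are assumptions for illustration; check the `HttpInterceptor` docs for the exact lifecycle hooks.

```typescript
// Illustrative sketch only — not part of this PR. Assumes the renamed HttpSetup
// API documented above (intercept, addLoadingCountSource); the `response` hook
// name is an assumption and should be checked against the HttpInterceptor docs.
import { BehaviorSubject } from 'rxjs';
import { CoreSetup, Plugin } from 'kibana/public';

export class MyPlugin implements Plugin {
  public setup(core: CoreSetup) {
    // Drive the global loading indicator from this plugin's in-flight request count.
    const loadingCount$ = new BehaviorSubject(0);
    core.http.addLoadingCountSource(loadingCount$); // formerly HttpServiceBase.addLoadingCount

    // Register a global interceptor for requests made through core.http.
    core.http.intercept({
      response(httpResponse) {
        // Inspect responses here (e.g. for logging) before callers receive them.
      },
    });
  }

  public start() {}
  public stop() {}
}
```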
diff --git a/docs/development/core/public/kibana-plugin-public.httpsetup.md b/docs/development/core/public/kibana-plugin-public.httpsetup.md index 7ef037ea7abd1..8a14d26c57ca3 100644 --- a/docs/development/core/public/kibana-plugin-public.httpsetup.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.md @@ -2,12 +2,35 @@ [Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) -## HttpSetup type +## HttpSetup interface -See [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) Signature: ```typescript -export declare type HttpSetup = HttpServiceBase; +export interface HttpSetup ``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [anonymousPaths](./kibana-plugin-public.httpsetup.anonymouspaths.md) | IAnonymousPaths | APIs for denoting certain paths for not requiring authentication | +| [basePath](./kibana-plugin-public.httpsetup.basepath.md) | IBasePath | APIs for manipulating the basePath on URL segments. | +| [delete](./kibana-plugin-public.httpsetup.delete.md) | HttpHandler | Makes an HTTP request with the DELETE method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [fetch](./kibana-plugin-public.httpsetup.fetch.md) | HttpHandler | Makes an HTTP request. Defaults to a GET request unless overriden. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [get](./kibana-plugin-public.httpsetup.get.md) | HttpHandler | Makes an HTTP request with the GET method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [head](./kibana-plugin-public.httpsetup.head.md) | HttpHandler | Makes an HTTP request with the HEAD method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [options](./kibana-plugin-public.httpsetup.options.md) | HttpHandler | Makes an HTTP request with the OPTIONS method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [patch](./kibana-plugin-public.httpsetup.patch.md) | HttpHandler | Makes an HTTP request with the PATCH method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [post](./kibana-plugin-public.httpsetup.post.md) | HttpHandler | Makes an HTTP request with the POST method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | +| [put](./kibana-plugin-public.httpsetup.put.md) | HttpHandler | Makes an HTTP request with the PUT method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. | + +## Methods + +| Method | Description | +| --- | --- | +| [addLoadingCountSource(countSource$)](./kibana-plugin-public.httpsetup.addloadingcountsource.md) | Adds a new source of loading counts. Used to show the global loading indicator when sum of all observed counts are more than 0. | +| [getLoadingCount$()](./kibana-plugin-public.httpsetup.getloadingcount_.md) | Get the sum of all loading count sources as a single Observable. | +| [intercept(interceptor)](./kibana-plugin-public.httpsetup.intercept.md) | Adds a new [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md) to the global HTTP client. 
| + diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.options.md b/docs/development/core/public/kibana-plugin-public.httpsetup.options.md similarity index 62% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.options.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.options.md index 0820beb2752f2..4ea5be8826bff 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.options.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.options.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [options](./kibana-plugin-public.httpservicebase.options.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [options](./kibana-plugin-public.httpsetup.options.md) -## HttpServiceBase.options property +## HttpSetup.options property Makes an HTTP request with the OPTIONS method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.patch.md b/docs/development/core/public/kibana-plugin-public.httpsetup.patch.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.patch.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.patch.md index 00e1ffc0e16bf..ef1d50005b012 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.patch.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.patch.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [patch](./kibana-plugin-public.httpservicebase.patch.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [patch](./kibana-plugin-public.httpsetup.patch.md) -## HttpServiceBase.patch property +## HttpSetup.patch property Makes an HTTP request with the PATCH method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.post.md b/docs/development/core/public/kibana-plugin-public.httpsetup.post.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.post.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.post.md index 3771a7c910895..1c19c35ac3038 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.post.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.post.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [post](./kibana-plugin-public.httpservicebase.post.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [post](./kibana-plugin-public.httpsetup.post.md) -## HttpServiceBase.post property +## HttpSetup.post property Makes an HTTP request with the POST method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. 
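Each of the `HttpSetup` properties listed above is an `HttpHandler`, so typical usage is simply calling it with a path and optional fetch options. A minimal sketch, assuming a hypothetical `/api/my_plugin/greeting` endpoint (not a real Kibana route) and the `kibana/public` import path:

```ts
import { CoreStart } from 'kibana/public';

// Read data with the GET handler; `query` is one of the standard fetch options.
export async function fetchGreeting(http: CoreStart['http']) {
  const response = await http.get('/api/my_plugin/greeting', {
    query: { name: 'kibana' },
  });
  return response.message;
}

// Write data with the POST handler; the body is passed as serialized JSON.
export function saveGreeting(http: CoreStart['http'], message: string) {
  return http.post('/api/my_plugin/greeting', {
    body: JSON.stringify({ message }),
  });
}
```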
diff --git a/docs/development/core/public/kibana-plugin-public.httpservicebase.put.md b/docs/development/core/public/kibana-plugin-public.httpsetup.put.md similarity index 63% rename from docs/development/core/public/kibana-plugin-public.httpservicebase.put.md rename to docs/development/core/public/kibana-plugin-public.httpsetup.put.md index 6e43aafa916bc..e5243d8c80dae 100644 --- a/docs/development/core/public/kibana-plugin-public.httpservicebase.put.md +++ b/docs/development/core/public/kibana-plugin-public.httpsetup.put.md @@ -1,8 +1,8 @@ -[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) > [put](./kibana-plugin-public.httpservicebase.put.md) +[Home](./index.md) > [kibana-plugin-public](./kibana-plugin-public.md) > [HttpSetup](./kibana-plugin-public.httpsetup.md) > [put](./kibana-plugin-public.httpsetup.put.md) -## HttpServiceBase.put property +## HttpSetup.put property Makes an HTTP request with the PUT method. See [HttpHandler](./kibana-plugin-public.httphandler.md) for options. diff --git a/docs/development/core/public/kibana-plugin-public.httpstart.md b/docs/development/core/public/kibana-plugin-public.httpstart.md index bb9247c63897a..9abf319acf00d 100644 --- a/docs/development/core/public/kibana-plugin-public.httpstart.md +++ b/docs/development/core/public/kibana-plugin-public.httpstart.md @@ -4,10 +4,10 @@ ## HttpStart type -See [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) +See [HttpSetup](./kibana-plugin-public.httpsetup.md) Signature: ```typescript -export declare type HttpStart = HttpServiceBase; +export declare type HttpStart = HttpSetup; ``` diff --git a/docs/development/core/public/kibana-plugin-public.md b/docs/development/core/public/kibana-plugin-public.md index 2c43f36ede09e..e2c2866b57b6b 100644 --- a/docs/development/core/public/kibana-plugin-public.md +++ b/docs/development/core/public/kibana-plugin-public.md @@ -56,7 +56,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [HttpHeadersInit](./kibana-plugin-public.httpheadersinit.md) | | | [HttpInterceptor](./kibana-plugin-public.httpinterceptor.md) | An object that may define global interceptor functions for different parts of the request and response lifecycle. See [IHttpInterceptController](./kibana-plugin-public.ihttpinterceptcontroller.md). | | [HttpRequestInit](./kibana-plugin-public.httprequestinit.md) | Fetch API options available to [HttpHandler](./kibana-plugin-public.httphandler.md)s. | -| [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) | | +| [HttpSetup](./kibana-plugin-public.httpsetup.md) | | | [I18nStart](./kibana-plugin-public.i18nstart.md) | I18nStart.Context is required by any localizable React component from @kbn/i18n and @elastic/eui packages and is supposed to be used as the topmost component for any i18n-compatible React tree. | | [IAnonymousPaths](./kibana-plugin-public.ianonymouspaths.md) | APIs for denoting paths as not requiring authentication | | [IBasePath](./kibana-plugin-public.ibasepath.md) | APIs for manipulating the basePath on URL segments. | @@ -118,8 +118,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [HandlerContextType](./kibana-plugin-public.handlercontexttype.md) | Extracts the type of the first argument of a [HandlerFunction](./kibana-plugin-public.handlerfunction.md) to represent the type of the context. 
| | [HandlerFunction](./kibana-plugin-public.handlerfunction.md) | A function that accepts a context object and an optional number of additional arguments. Used for the generic types in [IContextContainer](./kibana-plugin-public.icontextcontainer.md) | | [HandlerParameters](./kibana-plugin-public.handlerparameters.md) | Extracts the types of the additional arguments of a [HandlerFunction](./kibana-plugin-public.handlerfunction.md), excluding the [HandlerContextType](./kibana-plugin-public.handlercontexttype.md). | -| [HttpSetup](./kibana-plugin-public.httpsetup.md) | See [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) | -| [HttpStart](./kibana-plugin-public.httpstart.md) | See [HttpServiceBase](./kibana-plugin-public.httpservicebase.md) | +| [HttpStart](./kibana-plugin-public.httpstart.md) | See [HttpSetup](./kibana-plugin-public.httpsetup.md) | | [IContextProvider](./kibana-plugin-public.icontextprovider.md) | A function that returns a context value for a specific key of given context type. | | [IToasts](./kibana-plugin-public.itoasts.md) | Methods for adding and removing global toast messages. See [ToastsApi](./kibana-plugin-public.toastsapi.md). | | [MountPoint](./kibana-plugin-public.mountpoint.md) | A function that should mount DOM content inside the provided container element and return a handler to unmount it. | diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.find.md b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.find.md index a4fa3f17d0d94..1ce18834f5319 100644 --- a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.find.md +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.find.md @@ -9,5 +9,5 @@ Search for objects Signature: ```typescript -find: (options: Pick) => Promise>; +find: (options: Pick) => Promise>; ``` diff --git a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md index 3c4e33db4af91..6033c667c1866 100644 --- a/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md +++ b/docs/development/core/public/kibana-plugin-public.savedobjectsclient.md @@ -20,7 +20,7 @@ export declare class SavedObjectsClient | [bulkGet](./kibana-plugin-public.savedobjectsclient.bulkget.md) | | (objects?: {
id: string;
type: string;
}[]) => Promise<SavedObjectsBatchResponse<SavedObjectAttributes>> | Returns an array of objects by id | | [create](./kibana-plugin-public.savedobjectsclient.create.md) | | <T extends SavedObjectAttributes>(type: string, attributes: T, options?: SavedObjectsCreateOptions) => Promise<SimpleSavedObject<T>> | Persists an object | | [delete](./kibana-plugin-public.savedobjectsclient.delete.md) | | (type: string, id: string) => Promise<{}> | Deletes an object | -| [find](./kibana-plugin-public.savedobjectsclient.find.md) | | <T extends SavedObjectAttributes>(options: Pick<SavedObjectFindOptionsServer, "search" | "filter" | "type" | "page" | "perPage" | "sortField" | "fields" | "searchFields" | "hasReference" | "defaultSearchOperator">) => Promise<SavedObjectsFindResponsePublic<T>> | Search for objects | +| [find](./kibana-plugin-public.savedobjectsclient.find.md) | | <T extends SavedObjectAttributes>(options: Pick<SavedObjectFindOptionsServer, "search" | "filter" | "type" | "page" | "fields" | "searchFields" | "defaultSearchOperator" | "hasReference" | "sortField" | "perPage">) => Promise<SavedObjectsFindResponsePublic<T>> | Search for objects | | [get](./kibana-plugin-public.savedobjectsclient.get.md) | | <T extends SavedObjectAttributes>(type: string, id: string) => Promise<SimpleSavedObject<T>> | Fetches a single object | ## Methods diff --git a/docs/development/core/server/kibana-plugin-server.corestart.md b/docs/development/core/server/kibana-plugin-server.corestart.md index e523717a37ac8..167c69d5fe329 100644 --- a/docs/development/core/server/kibana-plugin-server.corestart.md +++ b/docs/development/core/server/kibana-plugin-server.corestart.md @@ -18,4 +18,5 @@ export interface CoreStart | --- | --- | --- | | [capabilities](./kibana-plugin-server.corestart.capabilities.md) | CapabilitiesStart | [CapabilitiesStart](./kibana-plugin-server.capabilitiesstart.md) | | [savedObjects](./kibana-plugin-server.corestart.savedobjects.md) | SavedObjectsServiceStart | [SavedObjectsServiceStart](./kibana-plugin-server.savedobjectsservicestart.md) | +| [uiSettings](./kibana-plugin-server.corestart.uisettings.md) | UiSettingsServiceStart | [UiSettingsServiceStart](./kibana-plugin-server.uisettingsservicestart.md) | diff --git a/docs/development/core/server/kibana-plugin-server.corestart.uisettings.md b/docs/development/core/server/kibana-plugin-server.corestart.uisettings.md new file mode 100644 index 0000000000000..323e929f2918e --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.corestart.uisettings.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [CoreStart](./kibana-plugin-server.corestart.md) > [uiSettings](./kibana-plugin-server.corestart.uisettings.md) + +## CoreStart.uiSettings property + +[UiSettingsServiceStart](./kibana-plugin-server.uisettingsservicestart.md) + +Signature: + +```typescript +uiSettings: UiSettingsServiceStart; +``` diff --git a/docs/development/core/server/kibana-plugin-server.irenderoptions.includeusersettings.md b/docs/development/core/server/kibana-plugin-server.irenderoptions.includeusersettings.md new file mode 100644 index 0000000000000..cedf3d27d0887 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.irenderoptions.includeusersettings.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [IRenderOptions](./kibana-plugin-server.irenderoptions.md) > [includeUserSettings](./kibana-plugin-server.irenderoptions.includeusersettings.md) + +## 
IRenderOptions.includeUserSettings property + +Set whether to output user settings in the page metadata. `true` by default. + +Signature: + +```typescript +includeUserSettings?: boolean; +``` diff --git a/docs/development/core/server/kibana-plugin-server.irenderoptions.md b/docs/development/core/server/kibana-plugin-server.irenderoptions.md new file mode 100644 index 0000000000000..34bed8b5e078c --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.irenderoptions.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [IRenderOptions](./kibana-plugin-server.irenderoptions.md) + +## IRenderOptions interface + + +Signature: + +```typescript +export interface IRenderOptions +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [includeUserSettings](./kibana-plugin-server.irenderoptions.includeusersettings.md) | boolean | Set whether to output user settings in the page metadata. true by default. | + diff --git a/docs/development/core/server/kibana-plugin-server.irouter.handlelegacyerrors.md b/docs/development/core/server/kibana-plugin-server.irouter.handlelegacyerrors.md index 2367420068064..ff71f13466cf8 100644 --- a/docs/development/core/server/kibana-plugin-server.irouter.handlelegacyerrors.md +++ b/docs/development/core/server/kibana-plugin-server.irouter.handlelegacyerrors.md @@ -9,5 +9,5 @@ Wrap a router handler to catch and converts legacy boom errors to proper custom Signature: ```typescript -handleLegacyErrors:

(handler: RequestHandler) => RequestHandler; +handleLegacyErrors: (handler: RequestHandler) => RequestHandler; ``` diff --git a/docs/development/core/server/kibana-plugin-server.irouter.md b/docs/development/core/server/kibana-plugin-server.irouter.md index 73e96191e02e7..a6536d2ed6763 100644 --- a/docs/development/core/server/kibana-plugin-server.irouter.md +++ b/docs/development/core/server/kibana-plugin-server.irouter.md @@ -18,7 +18,7 @@ export interface IRouter | --- | --- | --- | | [delete](./kibana-plugin-server.irouter.delete.md) | RouteRegistrar<'delete'> | Register a route handler for DELETE request. | | [get](./kibana-plugin-server.irouter.get.md) | RouteRegistrar<'get'> | Register a route handler for GET request. | -| [handleLegacyErrors](./kibana-plugin-server.irouter.handlelegacyerrors.md) | <P extends ObjectType, Q extends ObjectType, B extends ObjectType>(handler: RequestHandler<P, Q, B>) => RequestHandler<P, Q, B> | Wrap a router handler to catch and converts legacy boom errors to proper custom errors. | +| [handleLegacyErrors](./kibana-plugin-server.irouter.handlelegacyerrors.md) | <P, Q, B>(handler: RequestHandler<P, Q, B>) => RequestHandler<P, Q, B> | Wrap a router handler to catch and converts legacy boom errors to proper custom errors. | | [patch](./kibana-plugin-server.irouter.patch.md) | RouteRegistrar<'patch'> | Register a route handler for PATCH request. | | [post](./kibana-plugin-server.irouter.post.md) | RouteRegistrar<'post'> | Register a route handler for POST request. | | [put](./kibana-plugin-server.irouter.put.md) | RouteRegistrar<'put'> | Register a route handler for PUT request. | diff --git a/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.md b/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.md new file mode 100644 index 0000000000000..2e6daa58db25f --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [IScopedRenderingClient](./kibana-plugin-server.iscopedrenderingclient.md) + +## IScopedRenderingClient interface + + +Signature: + +```typescript +export interface IScopedRenderingClient +``` + +## Methods + +| Method | Description | +| --- | --- | +| [render(options)](./kibana-plugin-server.iscopedrenderingclient.render.md) | Generate a KibanaResponse which renders an HTML page bootstrapped with the core bundle. Intended as a response body for HTTP route handlers. | + diff --git a/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.render.md b/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.render.md new file mode 100644 index 0000000000000..1bc78dd84571d --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.iscopedrenderingclient.render.md @@ -0,0 +1,41 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [IScopedRenderingClient](./kibana-plugin-server.iscopedrenderingclient.md) > [render](./kibana-plugin-server.iscopedrenderingclient.render.md) + +## IScopedRenderingClient.render() method + +Generate a `KibanaResponse` which renders an HTML page bootstrapped with the `core` bundle. Intended as a response body for HTTP route handlers. 
+ +Signature: + +```typescript +render(options?: IRenderOptions): Promise<string>; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| options | IRenderOptions | | + +Returns: + +`Promise<string>` + +## Example + + +```ts +router.get( + { path: '/', validate: false }, + async (context, request, response) => + response.ok({ + body: await context.core.rendering.render(), + headers: { + 'content-security-policy': context.core.http.csp.header, + }, + }) +); + +``` + diff --git a/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.core.md b/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.core.md index 09ebf1170715b..c4c043a903d06 100644 --- a/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.core.md +++ b/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.core.md @@ -7,7 +7,5 @@ Signature: ```typescript -core: InternalCoreSetup & { - plugins: PluginsServiceSetup; - }; +core: LegacyCoreSetup; ``` diff --git a/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.md b/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.md index 4475318522dfa..7961cedd2c054 100644 --- a/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.md +++ b/docs/development/core/server/kibana-plugin-server.legacyservicesetupdeps.md @@ -18,6 +18,6 @@ export interface LegacyServiceSetupDeps | Property | Type | Description | | --- | --- | --- | -| [core](./kibana-plugin-server.legacyservicesetupdeps.core.md) | InternalCoreSetup & {
plugins: PluginsServiceSetup;
} | | +| [core](./kibana-plugin-server.legacyservicesetupdeps.core.md) | LegacyCoreSetup | | | [plugins](./kibana-plugin-server.legacyservicesetupdeps.plugins.md) | Record<string, unknown> | | diff --git a/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.core.md b/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.core.md index c5cf473aaa01a..47018f4594967 100644 --- a/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.core.md +++ b/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.core.md @@ -7,7 +7,5 @@ Signature: ```typescript -core: InternalCoreStart & { - plugins: PluginsServiceStart; - }; +core: LegacyCoreStart; ``` diff --git a/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.md b/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.md index 801138b64e46a..602fe5356d525 100644 --- a/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.md +++ b/docs/development/core/server/kibana-plugin-server.legacyservicestartdeps.md @@ -18,6 +18,6 @@ export interface LegacyServiceStartDeps | Property | Type | Description | | --- | --- | --- | -| [core](./kibana-plugin-server.legacyservicestartdeps.core.md) | InternalCoreStart & {
plugins: PluginsServiceStart;
} | | +| [core](./kibana-plugin-server.legacyservicestartdeps.core.md) | LegacyCoreStart | | | [plugins](./kibana-plugin-server.legacyservicestartdeps.plugins.md) | Record<string, unknown> | | diff --git a/docs/development/core/server/kibana-plugin-server.logger.get.md b/docs/development/core/server/kibana-plugin-server.logger.get.md new file mode 100644 index 0000000000000..b4a2d8a124260 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.logger.get.md @@ -0,0 +1,33 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [Logger](./kibana-plugin-server.logger.md) > [get](./kibana-plugin-server.logger.get.md) + +## Logger.get() method + +Returns a new [Logger](./kibana-plugin-server.logger.md) instance extending the current logger context. + +Signature: + +```typescript +get(...childContextPaths: string[]): Logger; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| childContextPaths | string[] | | + +Returns: + +`Logger` + +## Example + + +```typescript +const logger = loggerFactory.get('plugin', 'service'); // 'plugin.service' context +const subLogger = logger.get('feature'); // 'plugin.service.feature' context + +``` + diff --git a/docs/development/core/server/kibana-plugin-server.md b/docs/development/core/server/kibana-plugin-server.md index ea5ca6502b076..5e7f84c55244d 100644 --- a/docs/development/core/server/kibana-plugin-server.md +++ b/docs/development/core/server/kibana-plugin-server.md @@ -21,6 +21,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [CspConfig](./kibana-plugin-server.cspconfig.md) | CSP configuration for use in Kibana. | | [ElasticsearchErrorHelpers](./kibana-plugin-server.elasticsearcherrorhelpers.md) | Helpers for working with errors returned from the Elasticsearch service.Since the internal data of errors are subject to change, consumers of the Elasticsearch service should always use these helpers to classify errors instead of checking error internals such as body.error.header[WWW-Authenticate] | | [KibanaRequest](./kibana-plugin-server.kibanarequest.md) | Kibana specific abstraction for an incoming request. | +| [RouteValidationError](./kibana-plugin-server.routevalidationerror.md) | Error to return when the validation is not successful. | | [SavedObjectsClient](./kibana-plugin-server.savedobjectsclient.md) | | | [SavedObjectsErrorHelpers](./kibana-plugin-server.savedobjectserrorhelpers.md) | | | [SavedObjectsRepository](./kibana-plugin-server.savedobjectsrepository.md) | | @@ -69,7 +70,9 @@ The plugin integrates with the core system via lifecycle events: `setup` | [IKibanaResponse](./kibana-plugin-server.ikibanaresponse.md) | A response data object, expected to returned as a result of [RequestHandler](./kibana-plugin-server.requesthandler.md) execution | | [IKibanaSocket](./kibana-plugin-server.ikibanasocket.md) | A tiny abstraction for TCP socket. | | [IndexSettingsDeprecationInfo](./kibana-plugin-server.indexsettingsdeprecationinfo.md) | | +| [IRenderOptions](./kibana-plugin-server.irenderoptions.md) | | | [IRouter](./kibana-plugin-server.irouter.md) | Registers route handlers for specified resource path and method. See [RouteConfig](./kibana-plugin-server.routeconfig.md) and [RequestHandler](./kibana-plugin-server.requesthandler.md) for more information about arguments to route registrations. 
| +| [IScopedRenderingClient](./kibana-plugin-server.iscopedrenderingclient.md) | | | [IUiSettingsClient](./kibana-plugin-server.iuisettingsclient.md) | Server-side client that provides access to the advanced settings stored in elasticsearch. The settings provide control over the behavior of the Kibana application. For example, a user can specify how to display numeric or date fields. Users can adjust the settings via Management UI. | | [KibanaRequestRoute](./kibana-plugin-server.kibanarequestroute.md) | Request specific route information exposed to a handler. | | [LegacyRequest](./kibana-plugin-server.legacyrequest.md) | | @@ -90,11 +93,13 @@ The plugin integrates with the core system via lifecycle events: `setup` | [PluginManifest](./kibana-plugin-server.pluginmanifest.md) | Describes the set of required and optional properties plugin can define in its mandatory JSON manifest file. | | [PluginsServiceSetup](./kibana-plugin-server.pluginsservicesetup.md) | | | [PluginsServiceStart](./kibana-plugin-server.pluginsservicestart.md) | | -| [RequestHandlerContext](./kibana-plugin-server.requesthandlercontext.md) | Plugin specific context passed to a route handler.Provides the following clients: - [savedObjects.client](./kibana-plugin-server.savedobjectsclient.md) - Saved Objects client which uses the credentials of the incoming request - [elasticsearch.dataClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch data client which uses the credentials of the incoming request - [elasticsearch.adminClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch admin client which uses the credentials of the incoming request | +| [RequestHandlerContext](./kibana-plugin-server.requesthandlercontext.md) | Plugin specific context passed to a route handler.Provides the following clients: - [rendering](./kibana-plugin-server.iscopedrenderingclient.md) - Rendering client which uses the data of the incoming request - [savedObjects.client](./kibana-plugin-server.savedobjectsclient.md) - Saved Objects client which uses the credentials of the incoming request - [elasticsearch.dataClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch data client which uses the credentials of the incoming request - [elasticsearch.adminClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch admin client which uses the credentials of the incoming request - [uiSettings.client](./kibana-plugin-server.iuisettingsclient.md) - uiSettings client which uses the credentials of the incoming request | | [RouteConfig](./kibana-plugin-server.routeconfig.md) | Route specific configuration. | | [RouteConfigOptions](./kibana-plugin-server.routeconfigoptions.md) | Additional route options. | | [RouteConfigOptionsBody](./kibana-plugin-server.routeconfigoptionsbody.md) | Additional body options for a route | -| [RouteSchemas](./kibana-plugin-server.routeschemas.md) | RouteSchemas contains the schemas for validating the different parts of a request. | +| [RouteValidationResultFactory](./kibana-plugin-server.routevalidationresultfactory.md) | Validation result factory to be used in the custom validation function to return the valid data or validation errorsSee [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md). | +| [RouteValidatorConfig](./kibana-plugin-server.routevalidatorconfig.md) | The configuration object to the RouteValidator class. Set params, query and/or body to specify the validation logic to follow for that property. 
| +| [RouteValidatorOptions](./kibana-plugin-server.routevalidatoroptions.md) | Additional options for the RouteValidator class to modify its default behaviour. | | [SavedObject](./kibana-plugin-server.savedobject.md) | | | [SavedObjectAttributes](./kibana-plugin-server.savedobjectattributes.md) | The data for a Saved Object is stored as an object in the attributes property. | | [SavedObjectReference](./kibana-plugin-server.savedobjectreference.md) | A reference to another saved object. | @@ -137,6 +142,7 @@ The plugin integrates with the core system via lifecycle events: `setup` | [SessionStorageFactory](./kibana-plugin-server.sessionstoragefactory.md) | SessionStorage factory to bind one to an incoming request | | [UiSettingsParams](./kibana-plugin-server.uisettingsparams.md) | UiSettings parameters defined by the plugins. | | [UiSettingsServiceSetup](./kibana-plugin-server.uisettingsservicesetup.md) | | +| [UiSettingsServiceStart](./kibana-plugin-server.uisettingsservicestart.md) | | | [UserProvidedValues](./kibana-plugin-server.userprovidedvalues.md) | Describes the values explicitly set by user. | | [UuidServiceSetup](./kibana-plugin-server.uuidservicesetup.md) | APIs to access the application's instance uuid. | @@ -199,6 +205,9 @@ The plugin integrates with the core system via lifecycle events: `setup` | [RouteContentType](./kibana-plugin-server.routecontenttype.md) | The set of supported parseable Content-Types | | [RouteMethod](./kibana-plugin-server.routemethod.md) | The set of common HTTP methods supported by Kibana routing. | | [RouteRegistrar](./kibana-plugin-server.routeregistrar.md) | Route handler common definition | +| [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md) | The custom validation function if @kbn/config-schema is not a valid solution for your specific plugin requirements. | +| [RouteValidationSpec](./kibana-plugin-server.routevalidationspec.md) | Allowed property validation options: either @kbn/config-schema validations or custom validation functionsSee [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md) for custom validation. | +| [RouteValidatorFullConfig](./kibana-plugin-server.routevalidatorfullconfig.md) | Route validations config and options merged into one object | | [SavedObjectAttribute](./kibana-plugin-server.savedobjectattribute.md) | Type definition for a Saved Object attribute value | | [SavedObjectAttributeSingle](./kibana-plugin-server.savedobjectattributesingle.md) | Don't use this type, it's simply a helper type for [SavedObjectAttribute](./kibana-plugin-server.savedobjectattribute.md) | | [SavedObjectsClientContract](./kibana-plugin-server.savedobjectsclientcontract.md) | Saved Objects is Kibana's data persisentence mechanism allowing plugins to use Elasticsearch for storing plugin state.\#\# SavedObjectsClient errorsSince the SavedObjectsClient has its hands in everything we are a little paranoid about the way we present errors back to to application code. Ideally, all errors will be either:1. Caused by bad implementation (ie. undefined is not a function) and as such unpredictable 2. 
An error that has been classified and decorated appropriately by the decorators in [SavedObjectsErrorHelpers](./kibana-plugin-server.savedobjectserrorhelpers.md)Type 1 errors are inevitable, but since all expected/handle-able errors should be Type 2 the isXYZError() helpers exposed at SavedObjectsErrorHelpers should be used to understand and manage error responses from the SavedObjectsClient.Type 2 errors are decorated versions of the source error, so if the elasticsearch client threw an error it will be decorated based on its type. That means that rather than looking for error.body.error.type or doing substring checks on error.body.error.reason, just use the helpers to understand the meaning of the error:\`\`\`js if (SavedObjectsErrorHelpers.isNotFoundError(error)) { // handle 404 }if (SavedObjectsErrorHelpers.isNotAuthorizedError(error)) { // 401 handling should be automatic, but in case you wanted to know }// always rethrow the error unless you handle it throw error; \`\`\`\#\#\# 404s from missing indexFrom the perspective of application code and APIs the SavedObjectsClient is a black box that persists objects. One of the internal details that users have no control over is that we use an elasticsearch index for persistance and that index might be missing.At the time of writing we are in the process of transitioning away from the operating assumption that the SavedObjects index is always available. Part of this transition is handling errors resulting from an index missing. These used to trigger a 500 error in most cases, and in others cause 404s with different error messages.From my (Spencer) perspective, a 404 from the SavedObjectsApi is a 404; The object the request/call was targeting could not be found. This is why \#14141 takes special care to ensure that 404 errors are generic and don't distinguish between index missing or document missing.\#\#\# 503s from missing indexUnlike all other methods, create requests are supposed to succeed even when the Kibana index does not exist because it will be automatically created by elasticsearch. When that is not the case it is because Elasticsearch's action.auto_create_index setting prevents it from being created automatically so we throw a special 503 with the intention of informing the user that their Elasticsearch settings need to be updated.See [SavedObjectsClient](./kibana-plugin-server.savedobjectsclient.md) See [SavedObjectsErrorHelpers](./kibana-plugin-server.savedobjectserrorhelpers.md) | diff --git a/docs/development/core/server/kibana-plugin-server.requesthandler.md b/docs/development/core/server/kibana-plugin-server.requesthandler.md index 79abfd4293e9f..9fc183ffc334b 100644 --- a/docs/development/core/server/kibana-plugin-server.requesthandler.md +++ b/docs/development/core/server/kibana-plugin-server.requesthandler.md @@ -9,7 +9,7 @@ A function executed when route path matched requested resource path. Request han Signature: ```typescript -export declare type RequestHandler

| Type, Method extends RouteMethod = any> = (context: RequestHandlerContext, request: KibanaRequest, TypeOf, TypeOf, Method>, response: KibanaResponseFactory) => IKibanaResponse | Promise>; +export declare type RequestHandler

= (context: RequestHandlerContext, request: KibanaRequest, response: KibanaResponseFactory) => IKibanaResponse | Promise>; ``` ## Example diff --git a/docs/development/core/server/kibana-plugin-server.requesthandlercontext.core.md b/docs/development/core/server/kibana-plugin-server.requesthandlercontext.core.md index 2d8b27ecb6c67..d1760dafd5bb6 100644 --- a/docs/development/core/server/kibana-plugin-server.requesthandlercontext.core.md +++ b/docs/development/core/server/kibana-plugin-server.requesthandlercontext.core.md @@ -8,6 +8,7 @@ ```typescript core: { + rendering: IScopedRenderingClient; savedObjects: { client: SavedObjectsClientContract; }; diff --git a/docs/development/core/server/kibana-plugin-server.requesthandlercontext.md b/docs/development/core/server/kibana-plugin-server.requesthandlercontext.md index c9fc80596efa9..7c8625a5824ee 100644 --- a/docs/development/core/server/kibana-plugin-server.requesthandlercontext.md +++ b/docs/development/core/server/kibana-plugin-server.requesthandlercontext.md @@ -6,7 +6,7 @@ Plugin specific context passed to a route handler. -Provides the following clients: - [savedObjects.client](./kibana-plugin-server.savedobjectsclient.md) - Saved Objects client which uses the credentials of the incoming request - [elasticsearch.dataClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch data client which uses the credentials of the incoming request - [elasticsearch.adminClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch admin client which uses the credentials of the incoming request +Provides the following clients: - [rendering](./kibana-plugin-server.iscopedrenderingclient.md) - Rendering client which uses the data of the incoming request - [savedObjects.client](./kibana-plugin-server.savedobjectsclient.md) - Saved Objects client which uses the credentials of the incoming request - [elasticsearch.dataClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch data client which uses the credentials of the incoming request - [elasticsearch.adminClient](./kibana-plugin-server.scopedclusterclient.md) - Elasticsearch admin client which uses the credentials of the incoming request - [uiSettings.client](./kibana-plugin-server.iuisettingsclient.md) - uiSettings client which uses the credentials of the incoming request Signature: @@ -18,5 +18,5 @@ export interface RequestHandlerContext | Property | Type | Description | | --- | --- | --- | -| [core](./kibana-plugin-server.requesthandlercontext.core.md) | {
savedObjects: {
client: SavedObjectsClientContract;
};
elasticsearch: {
dataClient: IScopedClusterClient;
adminClient: IScopedClusterClient;
};
uiSettings: {
client: IUiSettingsClient;
};
} | | +| [core](./kibana-plugin-server.requesthandlercontext.core.md) | {
rendering: IScopedRenderingClient;
savedObjects: {
client: SavedObjectsClientContract;
};
elasticsearch: {
dataClient: IScopedClusterClient;
adminClient: IScopedClusterClient;
};
uiSettings: {
client: IUiSettingsClient;
};
} | | diff --git a/docs/development/core/server/kibana-plugin-server.routeconfig.md b/docs/development/core/server/kibana-plugin-server.routeconfig.md index 1970b23c7ec09..4beb12f0d056e 100644 --- a/docs/development/core/server/kibana-plugin-server.routeconfig.md +++ b/docs/development/core/server/kibana-plugin-server.routeconfig.md @@ -9,7 +9,7 @@ Route specific configuration. Signature: ```typescript -export interface RouteConfig

| Type, Method extends RouteMethod> +export interface RouteConfig ``` ## Properties @@ -18,5 +18,5 @@ export interface RouteConfig

RouteConfigOptions<Method> | Additional route options [RouteConfigOptions](./kibana-plugin-server.routeconfigoptions.md). | | [path](./kibana-plugin-server.routeconfig.path.md) | string | The endpoint \_within\_ the router path to register the route. | -| [validate](./kibana-plugin-server.routeconfig.validate.md) | RouteSchemas<P, Q, B> | false | A schema created with @kbn/config-schema that every request will be validated against. | +| [validate](./kibana-plugin-server.routeconfig.validate.md) | RouteValidatorFullConfig<P, Q, B> | false | A schema created with @kbn/config-schema that every request will be validated against. | diff --git a/docs/development/core/server/kibana-plugin-server.routeconfig.validate.md b/docs/development/core/server/kibana-plugin-server.routeconfig.validate.md index e1ec743ae71cc..23a72fc3c68b3 100644 --- a/docs/development/core/server/kibana-plugin-server.routeconfig.validate.md +++ b/docs/development/core/server/kibana-plugin-server.routeconfig.validate.md @@ -9,7 +9,7 @@ A schema created with `@kbn/config-schema` that every request will be validated Signature: ```typescript -validate: RouteSchemas | false; +validate: RouteValidatorFullConfig | false; ``` ## Remarks diff --git a/docs/development/core/server/kibana-plugin-server.routeregistrar.md b/docs/development/core/server/kibana-plugin-server.routeregistrar.md index 0f5f49636fdd5..901d260fee21d 100644 --- a/docs/development/core/server/kibana-plugin-server.routeregistrar.md +++ b/docs/development/core/server/kibana-plugin-server.routeregistrar.md @@ -9,5 +9,5 @@ Route handler common definition Signature: ```typescript -export declare type RouteRegistrar =

| Type>(route: RouteConfig, handler: RequestHandler) => void; +export declare type RouteRegistrar = (route: RouteConfig, handler: RequestHandler) => void; ``` diff --git a/docs/development/core/server/kibana-plugin-server.routeschemas.body.md b/docs/development/core/server/kibana-plugin-server.routeschemas.body.md deleted file mode 100644 index 78a9d25c25d9d..0000000000000 --- a/docs/development/core/server/kibana-plugin-server.routeschemas.body.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteSchemas](./kibana-plugin-server.routeschemas.md) > [body](./kibana-plugin-server.routeschemas.body.md) - -## RouteSchemas.body property - -Signature: - -```typescript -body?: B; -``` diff --git a/docs/development/core/server/kibana-plugin-server.routeschemas.md b/docs/development/core/server/kibana-plugin-server.routeschemas.md deleted file mode 100644 index 77b980551a8ff..0000000000000 --- a/docs/development/core/server/kibana-plugin-server.routeschemas.md +++ /dev/null @@ -1,22 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteSchemas](./kibana-plugin-server.routeschemas.md) - -## RouteSchemas interface - -RouteSchemas contains the schemas for validating the different parts of a request. - -Signature: - -```typescript -export interface RouteSchemas

| Type> -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [body](./kibana-plugin-server.routeschemas.body.md) | B | | -| [params](./kibana-plugin-server.routeschemas.params.md) | P | | -| [query](./kibana-plugin-server.routeschemas.query.md) | Q | | - diff --git a/docs/development/core/server/kibana-plugin-server.routeschemas.params.md b/docs/development/core/server/kibana-plugin-server.routeschemas.params.md deleted file mode 100644 index 3dbf9fed94dc0..0000000000000 --- a/docs/development/core/server/kibana-plugin-server.routeschemas.params.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteSchemas](./kibana-plugin-server.routeschemas.md) > [params](./kibana-plugin-server.routeschemas.params.md) - -## RouteSchemas.params property - -Signature: - -```typescript -params?: P; -``` diff --git a/docs/development/core/server/kibana-plugin-server.routeschemas.query.md b/docs/development/core/server/kibana-plugin-server.routeschemas.query.md deleted file mode 100644 index 5be5830cb4bc8..0000000000000 --- a/docs/development/core/server/kibana-plugin-server.routeschemas.query.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteSchemas](./kibana-plugin-server.routeschemas.md) > [query](./kibana-plugin-server.routeschemas.query.md) - -## RouteSchemas.query property - -Signature: - -```typescript -query?: Q; -``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationerror._constructor_.md b/docs/development/core/server/kibana-plugin-server.routevalidationerror._constructor_.md new file mode 100644 index 0000000000000..551e13faaf154 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationerror._constructor_.md @@ -0,0 +1,21 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationError](./kibana-plugin-server.routevalidationerror.md) > [(constructor)](./kibana-plugin-server.routevalidationerror._constructor_.md) + +## RouteValidationError.(constructor) + +Constructs a new instance of the `RouteValidationError` class + +Signature: + +```typescript +constructor(error: Error | string, path?: string[]); +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| error | Error | string | | +| path | string[] | | + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationerror.md b/docs/development/core/server/kibana-plugin-server.routevalidationerror.md new file mode 100644 index 0000000000000..71bd72dca2eab --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationerror.md @@ -0,0 +1,20 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationError](./kibana-plugin-server.routevalidationerror.md) + +## RouteValidationError class + +Error to return when the validation is not successful. 
+ +Signature: + +```typescript +export declare class RouteValidationError extends SchemaTypeError +``` + +## Constructors + +| Constructor | Modifiers | Description | +| --- | --- | --- | +| [(constructor)(error, path)](./kibana-plugin-server.routevalidationerror._constructor_.md) | | Constructs a new instance of the RouteValidationError class | + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationfunction.md b/docs/development/core/server/kibana-plugin-server.routevalidationfunction.md new file mode 100644 index 0000000000000..34fa096aaae78 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationfunction.md @@ -0,0 +1,42 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md) + +## RouteValidationFunction type + +The custom validation function if @kbn/config-schema is not a valid solution for your specific plugin requirements. + +Signature: + +```typescript +export declare type RouteValidationFunction = (data: any, validationResult: RouteValidationResultFactory) => { + value: T; + error?: never; +} | { + value?: never; + error: RouteValidationError; +}; +``` + +## Example + +The validation should look something like: + +```typescript +interface MyExpectedBody { + bar: string; + baz: number; +} + +const myBodyValidation: RouteValidationFunction = (data, validationResult) => { + const { ok, badRequest } = validationResult; + const { bar, baz } = data || {}; + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } +} + +``` + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.badrequest.md b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.badrequest.md new file mode 100644 index 0000000000000..36ea6103fb352 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.badrequest.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationResultFactory](./kibana-plugin-server.routevalidationresultfactory.md) > [badRequest](./kibana-plugin-server.routevalidationresultfactory.badrequest.md) + +## RouteValidationResultFactory.badRequest property + +Signature: + +```typescript +badRequest: (error: Error | string, path?: string[]) => { + error: RouteValidationError; + }; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.md b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.md new file mode 100644 index 0000000000000..5f44b490e9a17 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.md @@ -0,0 +1,23 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationResultFactory](./kibana-plugin-server.routevalidationresultfactory.md) + +## RouteValidationResultFactory interface + +Validation result factory to be used in the custom validation function to return the valid data or validation errors + +See [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md). 
+ +Signature: + +```typescript +export interface RouteValidationResultFactory +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [badRequest](./kibana-plugin-server.routevalidationresultfactory.badrequest.md) | (error: Error | string, path?: string[]) => {
error: RouteValidationError;
} | | +| [ok](./kibana-plugin-server.routevalidationresultfactory.ok.md) | <T>(value: T) => {
value: T;
} | | + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.ok.md b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.ok.md new file mode 100644 index 0000000000000..eca6a31bd547f --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationresultfactory.ok.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationResultFactory](./kibana-plugin-server.routevalidationresultfactory.md) > [ok](./kibana-plugin-server.routevalidationresultfactory.ok.md) + +## RouteValidationResultFactory.ok property + +Signature: + +```typescript +ok: (value: T) => { + value: T; + }; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidationspec.md b/docs/development/core/server/kibana-plugin-server.routevalidationspec.md new file mode 100644 index 0000000000000..f5fc06544043f --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidationspec.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidationSpec](./kibana-plugin-server.routevalidationspec.md) + +## RouteValidationSpec type + +Allowed property validation options: either @kbn/config-schema validations or custom validation functions + +See [RouteValidationFunction](./kibana-plugin-server.routevalidationfunction.md) for custom validation. + +Signature: + +```typescript +export declare type RouteValidationSpec = ObjectType | Type | RouteValidationFunction; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.body.md b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.body.md new file mode 100644 index 0000000000000..8b5d2c0413087 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.body.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorConfig](./kibana-plugin-server.routevalidatorconfig.md) > [body](./kibana-plugin-server.routevalidatorconfig.body.md) + +## RouteValidatorConfig.body property + +Validation logic for the body payload + +Signature: + +```typescript +body?: RouteValidationSpec; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.md b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.md new file mode 100644 index 0000000000000..4637da7741d80 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.md @@ -0,0 +1,22 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorConfig](./kibana-plugin-server.routevalidatorconfig.md) + +## RouteValidatorConfig interface + +The configuration object to the RouteValidator class. Set `params`, `query` and/or `body` to specify the validation logic to follow for that property. 
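A minimal sketch of how a `RouteValidatorConfig` plugs into a route registration, mixing `@kbn/config-schema` specs with a custom validation function; the route path, payload shape, and the `kibana/server` import path are assumptions made for illustration:

```ts
import { schema } from '@kbn/config-schema';
import { IRouter } from 'kibana/server';

export function registerItemRoute(router: IRouter) {
  router.post(
    {
      path: '/api/my_plugin/items/{id}',
      validate: {
        // params and query are validated with @kbn/config-schema...
        params: schema.object({ id: schema.string() }),
        query: schema.object({ dryRun: schema.boolean({ defaultValue: false }) }),
        // ...while the body uses a custom function and the result factory.
        body: (data, { ok, badRequest }) => {
          if (data && typeof data.name === 'string') {
            return ok({ name: data.name });
          }
          return badRequest('Expected a "name" string in the payload', ['body']);
        },
      },
    },
    async (context, request, response) => {
      // request.params, request.query and request.body are typed by the validators above.
      const { id } = request.params;
      return response.ok({ body: { id, name: request.body.name, dryRun: request.query.dryRun } });
    }
  );
}
```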
+ +Signature: + +```typescript +export interface RouteValidatorConfig +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [body](./kibana-plugin-server.routevalidatorconfig.body.md) | RouteValidationSpec<B> | Validation logic for the body payload | +| [params](./kibana-plugin-server.routevalidatorconfig.params.md) | RouteValidationSpec<P> | Validation logic for the URL params | +| [query](./kibana-plugin-server.routevalidatorconfig.query.md) | RouteValidationSpec<Q> | Validation logic for the Query params | + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.params.md b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.params.md new file mode 100644 index 0000000000000..11de25ff3b19f --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.params.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorConfig](./kibana-plugin-server.routevalidatorconfig.md) > [params](./kibana-plugin-server.routevalidatorconfig.params.md) + +## RouteValidatorConfig.params property + +Validation logic for the URL params + +Signature: + +```typescript +params?: RouteValidationSpec

; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.query.md b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.query.md new file mode 100644 index 0000000000000..510325c2dfff7 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatorconfig.query.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorConfig](./kibana-plugin-server.routevalidatorconfig.md) > [query](./kibana-plugin-server.routevalidatorconfig.query.md) + +## RouteValidatorConfig.query property + +Validation logic for the Query params + +Signature: + +```typescript +query?: RouteValidationSpec; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatorfullconfig.md b/docs/development/core/server/kibana-plugin-server.routevalidatorfullconfig.md new file mode 100644 index 0000000000000..0f3785b954a3a --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatorfullconfig.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorFullConfig](./kibana-plugin-server.routevalidatorfullconfig.md) + +## RouteValidatorFullConfig type + +Route validations config and options merged into one object + +Signature: + +```typescript +export declare type RouteValidatorFullConfig = RouteValidatorConfig & RouteValidatorOptions; +``` diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.md b/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.md new file mode 100644 index 0000000000000..00b029d9928e3 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.md @@ -0,0 +1,20 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorOptions](./kibana-plugin-server.routevalidatoroptions.md) + +## RouteValidatorOptions interface + +Additional options for the RouteValidator class to modify its default behaviour. + +Signature: + +```typescript +export interface RouteValidatorOptions +``` + +## Properties + +| Property | Type | Description | +| --- | --- | --- | +| [unsafe](./kibana-plugin-server.routevalidatoroptions.unsafe.md) | {
params?: boolean;
query?: boolean;
body?: boolean;
} | Set the unsafe config to avoid running some additional internal \*safe\* validations on top of your custom validation | + diff --git a/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.unsafe.md b/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.unsafe.md new file mode 100644 index 0000000000000..0406a372c4e9d --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.routevalidatoroptions.unsafe.md @@ -0,0 +1,17 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [RouteValidatorOptions](./kibana-plugin-server.routevalidatoroptions.md) > [unsafe](./kibana-plugin-server.routevalidatoroptions.unsafe.md) + +## RouteValidatorOptions.unsafe property + +Set the `unsafe` config to avoid running some additional internal \*safe\* validations on top of your custom validation + +Signature: + +```typescript +unsafe?: { + params?: boolean; + query?: boolean; + body?: boolean; + }; +``` diff --git a/docs/development/core/server/kibana-plugin-server.uisettingsservicesetup.register.md b/docs/development/core/server/kibana-plugin-server.uisettingsservicesetup.register.md index 8091a7cec44aa..0047b5275408e 100644 --- a/docs/development/core/server/kibana-plugin-server.uisettingsservicesetup.register.md +++ b/docs/development/core/server/kibana-plugin-server.uisettingsservicesetup.register.md @@ -24,5 +24,17 @@ register(settings: Record): void; ## Example -setup(core: CoreSetup){ core.uiSettings.register(\[{ foo: { name: i18n.translate('my foo settings'), value: true, description: 'add some awesomeness', }, }\]); } + +```ts +setup(core: CoreSetup){ + core.uiSettings.register([{ + foo: { + name: i18n.translate('my foo settings'), + value: true, + description: 'add some awesomeness', + }, + }]); +} + +``` diff --git a/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.asscopedtoclient.md b/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.asscopedtoclient.md new file mode 100644 index 0000000000000..072dd39faa084 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.asscopedtoclient.md @@ -0,0 +1,37 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [UiSettingsServiceStart](./kibana-plugin-server.uisettingsservicestart.md) > [asScopedToClient](./kibana-plugin-server.uisettingsservicestart.asscopedtoclient.md) + +## UiSettingsServiceStart.asScopedToClient() method + +Creates a [IUiSettingsClient](./kibana-plugin-server.iuisettingsclient.md) with provided \*scoped\* saved objects client. + +This should only be used in the specific case where the client needs to be accessed from outside of the scope of a [RequestHandler](./kibana-plugin-server.requesthandler.md). 
+ +Signature: + +```typescript +asScopedToClient(savedObjectsClient: SavedObjectsClientContract): IUiSettingsClient; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| savedObjectsClient | SavedObjectsClientContract | | + +Returns: + +`IUiSettingsClient` + +## Example + + +```ts +start(core: CoreStart) { + const soClient = core.savedObjects.getScopedClient(arbitraryRequest); + const uiSettingsClient = core.uiSettings.asScopedToClient(soClient); +} + +``` + diff --git a/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.md b/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.md new file mode 100644 index 0000000000000..ee3563552275a --- /dev/null +++ b/docs/development/core/server/kibana-plugin-server.uisettingsservicestart.md @@ -0,0 +1,19 @@ + + +[Home](./index.md) > [kibana-plugin-server](./kibana-plugin-server.md) > [UiSettingsServiceStart](./kibana-plugin-server.uisettingsservicestart.md) + +## UiSettingsServiceStart interface + + +Signature: + +```typescript +export interface UiSettingsServiceStart +``` + +## Methods + +| Method | Description | +| --- | --- | +| [asScopedToClient(savedObjectsClient)](./kibana-plugin-server.uisettingsservicestart.asscopedtoclient.md) | Creates a [IUiSettingsClient](./kibana-plugin-server.iuisettingsclient.md) with provided \*scoped\* saved objects client.This should only be used in the specific case where the client needs to be accessed from outside of the scope of a [RequestHandler](./kibana-plugin-server.requesthandler.md). | + diff --git a/docs/images/canvas-change-your-expression-chart-no-legend.png b/docs/images/canvas-change-your-expression-chart-no-legend.png new file mode 100644 index 0000000000000..f54437c1eba3c Binary files /dev/null and b/docs/images/canvas-change-your-expression-chart-no-legend.png differ diff --git a/docs/images/canvas-change-your-expression-chart.png b/docs/images/canvas-change-your-expression-chart.png new file mode 100755 index 0000000000000..4400ce4dfb2c3 Binary files /dev/null and b/docs/images/canvas-change-your-expression-chart.png differ diff --git a/docs/images/canvas-functions-can-take-arguments-donut-chart.png b/docs/images/canvas-functions-can-take-arguments-donut-chart.png new file mode 100644 index 0000000000000..d126830c4fdc9 Binary files /dev/null and b/docs/images/canvas-functions-can-take-arguments-donut-chart.png differ diff --git a/docs/images/canvas-functions-can-take-arguments-pie-chart.png b/docs/images/canvas-functions-can-take-arguments-pie-chart.png new file mode 100644 index 0000000000000..be923675bea80 Binary files /dev/null and b/docs/images/canvas-functions-can-take-arguments-pie-chart.png differ diff --git a/docs/limitations.asciidoc b/docs/limitations.asciidoc index 0b26a3cdcf71a..9bcba3b65d660 100644 --- a/docs/limitations.asciidoc +++ b/docs/limitations.asciidoc @@ -12,7 +12,7 @@ These {stack} features also have limitations that affect {kib}: * {ref}/watcher-limitations.html[Alerting] -* {stack-ov}/ml-limitations.html[Machine learning] +* {ml-docs}/ml-limitations.html[Machine learning] * {ref}/security-limitations.html[Security] -- diff --git a/docs/maps/maps-aggregations.asciidoc b/docs/maps/maps-aggregations.asciidoc index 98aa21f6a07a3..627fd49dafa51 100644 --- a/docs/maps/maps-aggregations.asciidoc +++ b/docs/maps/maps-aggregations.asciidoc @@ -47,6 +47,7 @@ To enable top hits: . Set *Entity* to the field that identifies entities in your documents. 
This field will be used in the terms aggregation to group your documents into entity buckets. . Set *Documents per entity* to configure the maximum number of documents accumulated per entity. +This setting is limited to the `index.max_inner_result_window` index setting, which defaults to 100. [role="screenshot"] image::maps/images/top_hits.png[] diff --git a/docs/maps/vector-layer.asciidoc b/docs/maps/vector-layer.asciidoc index 1b9d0e6556f54..1d4ba9912529a 100644 --- a/docs/maps/vector-layer.asciidoc +++ b/docs/maps/vector-layer.asciidoc @@ -15,7 +15,7 @@ See map.regionmap.* in <> for details. *Documents*:: Vector data from a Kibana index pattern. The index must contain at least one field mapped as {ref}/geo-point.html[geo_point] or {ref}/geo-shape.html[geo_shape]. -NOTE: Document results are limited to the first 10000 matching documents. +NOTE: Document results are limited to the `index.max_result_window` index setting, which defaults to 10000. Use <> to plot large data sets. *Grid aggregation*:: Geospatial data grouped in grids with metrics for each gridded cell. diff --git a/docs/maps/vector-style-properties.asciidoc b/docs/maps/vector-style-properties.asciidoc index f51632218add1..5656a7f04d0e3 100644 --- a/docs/maps/vector-style-properties.asciidoc +++ b/docs/maps/vector-style-properties.asciidoc @@ -8,32 +8,52 @@ Point, polygon, and line features support different styling properties. [[point-style-properties]] ==== Point style properties +You can add text labels to your Point features by configuring label style properties. + +[cols="2*"] +|=== +|*Label* +|Specifies label content. +|*Label color* +|The text color. +|*Label size* +|The size of the text font, in pixels. +|=== + You can symbolize Point features as *Circle markers* or *Icons*. Use *Circle marker* to symbolize Points as circles. -*Fill color*:: The fill color of the point features. - -*Border color*:: The border color of the point features. - -*Border width*:: The border width of the point features. - -*Symbol size*:: The radius of the symbol size, in pixels. +[cols="2*"] +|=== +|*Border color* +|The border color of the point features. +|*Border width* +|The border width of the point features. +|*Fill color* +|The fill color of the point features. +|*Symbol size* +|The radius of the symbol size, in pixels. +|=== Use *Icon* to symbolize Points as icons. -*Fill color*:: The fill color of the point features. - -*Border color*:: The border color of the point features. - -*Border width*:: The border width of the point features. +[cols="2*"] +|=== +|*Border color* +|The border color of the point features. +|*Border width* +|The border width of the point features. +|*Fill color* +|The fill color of the point features. +|*Symbol orientation* +|The symbol orientation rotating the icon clockwise. +|*Symbol size* +|The radius of the symbol size, in pixels. +|=== -*Symbol orientation*:: The symbol orientation rotating the icon clockwise. - -*Symbol size*:: The radius of the symbol size, in pixels. -+ Available icons -+ + [role="screenshot"] image::maps/images/maki-icons.png[] @@ -42,17 +62,25 @@ image::maps/images/maki-icons.png[] [[polygon-style-properties]] ==== Polygon style properties -*Fill color*:: The fill color of the polygon features. - -*Border color*:: The border color of the polygon features. - -*Border width*:: The border width of the polygon features. +[cols="2*"] +|=== +|*Border color* +|The border color of the polygon features. +|*Border width* +|The border width of the polygon features. 
+|*Fill color* +|The fill color of the polygon features. +|=== [float] [[line-style-properties]] ==== Line style properties -*Border color*:: The color of the line features. - -*Border width*:: The width of the line features. +[cols="2*"] +|=== +|*Border color* +|The color of the line features. +|*Border width* +|The width of the line features. +|=== diff --git a/docs/user/canvas.asciidoc b/docs/user/canvas.asciidoc index c58635ba97769..5c5f5c2f80bf9 100644 --- a/docs/user/canvas.asciidoc +++ b/docs/user/canvas.asciidoc @@ -37,6 +37,8 @@ include::{kib-repo-dir}/canvas/canvas-present-workpad.asciidoc[] include::{kib-repo-dir}/canvas/canvas-share-workpad.asciidoc[] +include::{kib-repo-dir}/canvas/canvas-expression-lifecycle.asciidoc[] + include::{kib-repo-dir}/canvas/canvas-function-reference.asciidoc[] -include::{kib-repo-dir}/canvas/canvas-tinymath-functions.asciidoc[] +include::{kib-repo-dir}/canvas/canvas-tinymath-functions.asciidoc[] \ No newline at end of file diff --git a/docs/user/ml/index.asciidoc b/docs/user/ml/index.asciidoc index a2c23aad98d5b..cca0dc5e4530f 100644 --- a/docs/user/ml/index.asciidoc +++ b/docs/user/ml/index.asciidoc @@ -50,8 +50,8 @@ pane: image::user/ml/images/ml-job-management.jpg[Job Management] You can use the *Settings* pane to create and edit -{stack-ov}/ml-calendars.html[calendars] and the filters that are used in -{stack-ov}/ml-rules.html[custom rules]: +{ml-docs}/ml-calendars.html[calendars] and the filters that are used in +{ml-docs}/ml-rules.html[custom rules]: [role="screenshot"] image::user/ml/images/ml-settings.jpg[Calendar Management] @@ -73,7 +73,7 @@ image::user/ml/images/ml-annotations-list.jpg[Single Metric Viewer with annotati In some circumstances, annotations are also added automatically. For example, if the {anomaly-job} detects that there is missing data, it annotates the affected time period. For more information, see -{stack-ov}/ml-delayed-data-detection.html[Handling delayed data]. The +{ml-docs}/ml-delayed-data-detection.html[Handling delayed data]. The *Job Management* pane shows the full list of annotations for each job. NOTE: The {kib} {ml-features} use pop-ups. You must configure your web @@ -82,7 +82,7 @@ browser so that it does not block pop-up windows or create an exception for your For more information about the {anomaly-detect} feature, see https://www.elastic.co/what-is/elastic-stack-machine-learning[{ml-cap} in the {stack}] -and {stack-ov}/xpack-ml.html[{ml-cap} {anomaly-detect}]. +and {ml-docs}/xpack-ml.html[{ml-cap} {anomaly-detect}]. [[xpack-ml-dfanalytics]] == {dfanalytics-cap} @@ -99,4 +99,4 @@ in {kib}. For example: image::user/ml/images/outliers.jpg[{oldetection-cap} results in {kib}] For more information about the {dfanalytics} feature, see -{stack-ov}/ml-dfanalytics.html[{ml-cap} {dfanalytics}]. \ No newline at end of file +{ml-docs}/ml-dfanalytics.html[{ml-cap} {dfanalytics}]. \ No newline at end of file diff --git a/docs/user/reporting/reporting-troubleshooting.asciidoc b/docs/user/reporting/reporting-troubleshooting.asciidoc index 92464c24b45ea..ca7fa6abcc9d9 100644 --- a/docs/user/reporting/reporting-troubleshooting.asciidoc +++ b/docs/user/reporting/reporting-troubleshooting.asciidoc @@ -17,6 +17,7 @@ dependencies for Chromium. Make sure Kibana server OS has the appropriate packages installed for the distribution. 
If you are using CentOS/RHEL systems, install the following packages: + * `ipa-gothic-fonts` * `xorg-x11-fonts-100dpi` * `xorg-x11-fonts-75dpi` @@ -28,6 +29,7 @@ If you are using CentOS/RHEL systems, install the following packages: * `freetype` If you are using Ubuntu/Debian systems, install the following packages: + * `fonts-liberation` * `libfontconfig1` @@ -105,9 +107,10 @@ has its own command-line method to generate its own debug logs, which can someti caused by Kibana or Chromium. See more at https://github.com/GoogleChrome/puppeteer/blob/v1.19.0/README.md#debugging-tips Using Puppeteer's debug method when launching Kibana would look like: -> Enable verbose logging - internal DevTools protocol traffic will be logged via the debug module under the puppeteer namespace. -> ``` -> env DEBUG="puppeteer:*" ./bin/kibana -> ``` +``` +env DEBUG="puppeteer:*" ./bin/kibana +``` +The internal DevTools protocol traffic will be logged via the `debug` module under the `puppeteer` namespace. + The Puppeteer logs are very verbose and could possibly contain sensitive information. Handle the generated output with care. diff --git a/docs/user/reporting/watch-example.asciidoc b/docs/user/reporting/watch-example.asciidoc index 4c769c85975c4..627e31017230c 100644 --- a/docs/user/reporting/watch-example.asciidoc +++ b/docs/user/reporting/watch-example.asciidoc @@ -56,7 +56,16 @@ report from the Kibana UI. //For more information, see <>. //<>. -NOTE: Reporting is integrated with Watcher only as an email attachment type. +[NOTE] +==== +Reporting is integrated with Watcher only as an email attachment type. + +The report Generation URL might contain date-math expressions +that cause the watch to fail with a `parse_exception`. +Remove curly braces `{` `}` from date-math expressions and +URL-encode characters to avoid this. +For example: `...(range:(%27@timestamp%27:(gte:now-15m%2Fd,lte:now%2Fd))))...` For more information about configuring watches, see {ref}/how-watcher-works.html[How Watcher works]. 
+==== diff --git a/package.json b/package.json index 7ba3cb5435b68..a6201c0f074fe 100644 --- a/package.json +++ b/package.json @@ -112,10 +112,11 @@ "dependencies": { "@babel/core": "^7.5.5", "@babel/register": "^7.7.0", + "@elastic/apm-rum": "^4.6.0", "@elastic/charts": "^14.0.0", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "1.0.5", - "@elastic/eui": "17.0.0", + "@elastic/eui": "17.3.1", "@elastic/filesaver": "1.1.2", "@elastic/good": "8.1.1-kibana2", "@elastic/numeral": "2.3.3", @@ -233,7 +234,7 @@ "react-resize-detector": "^4.2.0", "react-router-dom": "^5.1.2", "react-sizeme": "^2.3.6", - "react-use": "^13.10.2", + "react-use": "^13.13.0", "reactcss": "1.2.3", "redux": "4.0.0", "redux-actions": "2.6.5", @@ -364,8 +365,8 @@ "@types/uuid": "^3.4.4", "@types/vinyl-fs": "^2.4.11", "@types/zen-observable": "^0.8.0", - "@typescript-eslint/eslint-plugin": "^2.10.0", - "@typescript-eslint/parser": "^2.10.0", + "@typescript-eslint/eslint-plugin": "^2.12.0", + "@typescript-eslint/parser": "^2.12.0", "angular-mocks": "^1.7.8", "archiver": "^3.1.1", "axe-core": "^3.3.2", @@ -467,7 +468,7 @@ "zlib": "^1.0.5" }, "engines": { - "node": "10.15.2", + "node": "10.18.0", "yarn": "^1.21.1" } } diff --git a/packages/eslint-config-kibana/package.json b/packages/eslint-config-kibana/package.json index 7917297883b03..04602d196a7f3 100644 --- a/packages/eslint-config-kibana/package.json +++ b/packages/eslint-config-kibana/package.json @@ -15,8 +15,8 @@ }, "homepage": "https://github.com/elastic/eslint-config-kibana#readme", "peerDependencies": { - "@typescript-eslint/eslint-plugin": "^2.10.0", - "@typescript-eslint/parser": "^2.10.0", + "@typescript-eslint/eslint-plugin": "^2.12.0", + "@typescript-eslint/parser": "^2.12.0", "babel-eslint": "^10.0.3", "eslint": "^6.5.1", "eslint-plugin-babel": "^5.3.0", diff --git a/packages/kbn-analytics/scripts/build.js b/packages/kbn-analytics/scripts/build.js index b7fbe629246ec..bb28c1460c9c2 100644 --- a/packages/kbn-analytics/scripts/build.js +++ b/packages/kbn-analytics/scripts/build.js @@ -55,7 +55,9 @@ run( '--extensions', '.ts,.js,.tsx', ...(flags.watch ? ['--watch'] : ['--quiet']), - ...(flags['source-maps'] ? ['--source-maps', 'inline'] : []), + ...(!flags['source-maps'] || !!process.env.CODE_COVERAGE + ? [] + : ['--source-maps', 'inline']), ], wait: true, env: { diff --git a/packages/kbn-config-schema/src/index.ts b/packages/kbn-config-schema/src/index.ts index 56b3096433c24..fc3e3c541846a 100644 --- a/packages/kbn-config-schema/src/index.ts +++ b/packages/kbn-config-schema/src/index.ts @@ -59,6 +59,7 @@ import { export { ObjectType, TypeOf, Type }; export { ByteSizeValue } from './byte_size_value'; +export { SchemaTypeError, ValidationError } from './errors'; function any(options?: TypeOptions) { return new AnyType(options); diff --git a/packages/kbn-i18n/scripts/build.js b/packages/kbn-i18n/scripts/build.js index ccdddc87dbc18..0764451c74575 100644 --- a/packages/kbn-i18n/scripts/build.js +++ b/packages/kbn-i18n/scripts/build.js @@ -55,7 +55,9 @@ run( '--extensions', '.ts,.js,.tsx', ...(flags.watch ? ['--watch'] : ['--quiet']), - ...(flags['source-maps'] ? ['--source-maps', 'inline'] : []), + ...(!flags['source-maps'] || !!process.env.CODE_COVERAGE + ? 
[] + : ['--source-maps', 'inline']), ], wait: true, env: { diff --git a/packages/kbn-utility-types/index.ts b/packages/kbn-utility-types/index.ts index 495b5fb374b43..36bbc8cc82873 100644 --- a/packages/kbn-utility-types/index.ts +++ b/packages/kbn-utility-types/index.ts @@ -18,6 +18,7 @@ */ import { PromiseType } from 'utility-types'; +export { $Values, Required, Optional, Class } from 'utility-types'; /** * Returns wrapped type of a promise. diff --git a/packages/kbn-utility-types/package.json b/packages/kbn-utility-types/package.json index a79d08677020b..a999eb41eb781 100644 --- a/packages/kbn-utility-types/package.json +++ b/packages/kbn-utility-types/package.json @@ -13,7 +13,7 @@ "clean": "del target" }, "dependencies": { - "utility-types": "^3.7.0" + "utility-types": "^3.10.0" }, "devDependencies": { "del-cli": "^3.0.0", diff --git a/rfcs/text/0007_lifecycle_unblocked.md b/rfcs/text/0007_lifecycle_unblocked.md new file mode 100644 index 0000000000000..cb978d3dcd7ba --- /dev/null +++ b/rfcs/text/0007_lifecycle_unblocked.md @@ -0,0 +1,374 @@ +- Start Date: 2019-09-11 +- RFC PR: (leave this empty) +- Kibana Issue: (leave this empty) + +## Table of contents +- [Summary](#summary) +- [Motivation](#motivation) +- [Detailed design](#detailed-design) + - [

1. Synchronous lifecycle methods](#ollisynchronous-lifecycle-methodsliol)
+  - [2. Synchronous Context Provider functions](#ol-start2lisynchronous-context-provider-functionsliol)
+  - [3. Core should not expose API's as observables](#ol-start3licore-should-not-expose-apis-as-observablesliol)
+  - [4. Complete example code](#ol-start4licomplete-example-codeliol)
+  - [5. Core should expose a status signal for Core services & plugins](#ol-start5licore-should-expose-a-status-signal-for-core-services-amp-pluginsliol)
+- [Drawbacks](#drawbacks)
+- [Alternatives](#alternatives)
+  - [1. Introduce a lifecycle/context provider timeout](#olliintroduce-a-lifecyclecontext-provider-timeoutliol)
+  - [2. Treat anything that blocks Kibana from starting up as a bug
](#ol-start2litreat-anything-that-blocks-kibana-from-starting-up-as-a-bugliol) +- [Adoption strategy](#adoption-strategy) +- [How we teach this](#how-we-teach-this) +- [Unresolved questions](#unresolved-questions) +- [Footnotes](#footnotes) + +# Summary + +Prevent plugin lifecycle methods from blocking Kibana startup by making the +following changes: +1. Synchronous lifecycle methods +2. Synchronous context provider functions +3. Core should not expose API's as observables + +# Motivation +Plugin lifecycle methods and context provider functions are async +(promise-returning) functions. Core runs these functions in series and waits +for each plugin's lifecycle/context provider function to resolve before +calling the next. This allows plugins to depend on the API's returned from +other plugins. + +With the current design, a single lifecycle method that blocks will block all +of Kibana from starting up. Similarly, a blocking context provider will block +all the handlers that depend on that context. Plugins (including legacy +plugins) rely heavily on this blocking behaviour to ensure that all conditions +required for their plugin's operation are met before their plugin is started +and exposes it's API's. This means a single plugin with a network error that +isn't retried or a dependency on an external host that is down, could block +all of Kibana from starting up. + +We should make it impossible for a single plugin lifecycle function to stall +all of kibana. + +# Detailed design + +### 1. Synchronous lifecycle methods +Lifecycle methods are synchronous functions, they can perform async operations +but Core doesn't wait for these to complete. This guarantees that no plugin +lifecycle function can block other plugins or core from starting up [1]. + +Core will still expose special API's that are able block the setup lifecycle +such as registering Saved Object migrations, but this will be limited to +operations where the risk of blocking all of kibana starting up is limited. + +### 2. Synchronous Context Provider functions +Making context provider functions synchronous guarantees that a context +handler will never be blocked by registered context providers. They can expose +async API's which could potentially have blocking behaviour. + +```ts +export type IContextProvider< + THandler extends HandlerFunction, + TContextName extends keyof HandlerContextType +> = ( + context: Partial>, + ...rest: HandlerParameters +) => + | HandlerContextType[TContextName]; +``` + +### 3. Core should not expose API's as observables +All Core API's should be reactive: when internal state changes, their behaviour +should change accordingly. But, exposing these internal state changes as part +of the API contract leaks internal implementation details consumers can't do +anything useful with and don't care about. + +For example: Core currently exposes `core.elasticsearch.adminClient$`, an +Observable which emits a pre-configured elasticsearch client every time there's +a configuration change. This includes changes to the logging configuration and +might in the future include updating the authentication headers sent to +elasticsearch https://github.com/elastic/kibana/issues/19829. As a plugin +author who wants to make search requests against elasticsearch I shouldn't +have to care about, react to, or keep track of, how many times the underlying +configuration has changed. I want to use the `callAsInternalUser` method and I +expect Core to use the most up to date configuration to send this request. 
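To make the ergonomic difference concrete, here is a minimal sketch of the same `ping` call from a consumer's point of view, before and after the proposed change. The helper function names and the `kibana/server` import path are illustrative and not taken from the RFC text:

```ts
import { first } from 'rxjs/operators';
import { CoreSetup } from 'kibana/server';

// Before: every consumer has to unwrap the observable to obtain the
// "current" client before it can issue a request.
async function pingBefore(core: CoreSetup) {
  const adminClient = await core.elasticsearch.adminClient$.pipe(first()).toPromise();
  return adminClient.callAsInternalUser('ping');
}

// After (proposed): Core tracks configuration changes internally and always
// routes the call through the most recent client.
async function pingAfter(core: CoreSetup) {
  return core.elasticsearch.adminClient.callAsInternalUser('ping');
}
```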
+ +> Note: It would not be desirable for Core to dynamically load all +> configuration changes. Changing the Elasticsearch `hosts` could mean Kibana +> is pointing to a completely new Elasticsearch cluster. Since this is a risky +> change to make and would likely require core and almost all plugins to +> completely re-initialize, it's safer to require a complete Kibana restart. + +This does not mean we should remove all observables from Core's API's. When an +API consumer is interested in the *state changes itself* it absolutely makes +sense to expose this as an Observable. Good examples of this is exposing +plugin config as this is state that changes over time to which a plugin should +directly react to. + +This is important in the context of synchronous lifecycle methods and context +handlers since exposing convenient API's become very ugly: + +*(3.1): exposing Observable-based API's through the route handler context:* +```ts +// Before: Using an async context provider +coreSetup.http.registerRouteHandlerContext(coreId, 'core', async (context, req) => { + const adminClient = await coreSetup.elasticsearch.adminClient$.pipe(take(1)).toPromise(); + const dataClient = await coreSetup.elasticsearch.dataClient$.pipe(take(1)).toPromise(); + return { + elasticsearch: { + adminClient: adminClient.asScoped(req), + dataClient: dataClient.asScoped(req), + }, + }; +}); + +// After: Using a synchronous context provider +coreSetup.http.registerRouteHandlerContext(coreId, 'core', async (context, req) => { + return { + elasticsearch: { + // (3.1.1) We can expose a convenient API by doing a lot of work + adminClient: () => { + callAsInternalUser: async (...args) => { + const adminClient = await coreSetup.elasticsearch.adminClient$.pipe(take(1)).toPromise(); + return adminClient.asScoped(req).callAsinternalUser(args); + }, + callAsCurrentUser: async (...args) => { + adminClient = await coreSetup.elasticsearch.adminClient$.pipe(take(1)).toPromise(); + return adminClient.asScoped(req).callAsCurrentUser(args); + } + }, + // (3.1.2) Or a lazy approach which perpetuates the problem to consumers: + dataClient: async () => { + const dataClient = await coreSetup.elasticsearch.dataClient$.pipe(take(1)).toPromise(); + return dataClient.asScoped(req); + }, + }, + }; +}); +``` + +### 4. Complete example code +*(4.1) Doing async operations in a plugin's setup lifecycle* +```ts +export class Plugin { + public setup(core: CoreSetup) { + // Async setup is possible and any operations involving async API's + // will still block until these API's are ready, (savedObjects find only + // resolves once the elasticsearch client has established a connection to + // the cluster). The difference is that these details are now internal to + // the API. + (async () => { + const docs = await core.savedObjects.client.find({...}); + ... + await core.savedObjects.client.update(...); + })(); + } +} +``` + +*(4.2) Exposing an API from a plugin's setup lifecycle* +```ts +export class Plugin { + constructor(private readonly initializerContext: PluginInitializerContext) {} + private async initSavedConfig(core: CoreSetup) { + // Note: pulling a config value here means our code isn't reactive to + // changes, but this is equivalent to doing it in an async setup lifecycle. 
+ const config = await this.initializerContext.config + .create>() + .pipe(first()) + .toPromise(); + try { + const savedConfig = await core.savedObjects.internalRepository.get({...}); + return Object.assign({}, config, savedConfig); + } catch (e) { + if (SavedObjectErrorHelpers.isNotFoundError(e)) { + return await core.savedObjects.internalRepository.create(config, {...}); + } + } + } + public setup(core: CoreSetup) { + // savedConfigPromise resolves with the same kind of "setup state" that a + // plugin would have constructed in an async setup lifecycle. + const savedConfigPromise = initSavedConfig(core); + return { + ping: async () => { + const savedConfig = await savedConfigPromise; + if (config.allowPing === false || savedConfig.allowPing === false) { + throw new Error('ping() has been disabled'); + } + // Note: the elasticsearch client no longer exposes an adminClient$ + // observable, improving the ergonomics of consuming the API. + return await core.elasticsearch.adminClient.callAsInternalUser('ping', ...); + } + }; + } +} +``` + +*(4.3) Exposing an observable free Elasticsearch API from the route context* +```ts +coreSetup.http.registerRouteHandlerContext(coreId, 'core', async (context, req) => { + return { + elasticsearch: { + adminClient: coreSetup.elasticsearch.adminClient.asScoped(req), + dataClient: coreSetup.elasticsearch.adminClient.asScoped(req), + }, + }; +}); +``` + +### 5. Core should expose a status signal for Core services & plugins +Core should expose a global mechanism for core services and plugins to signal +their status. This is equivalent to the legacy status API +`kibana.Plugin.status` which allowed plugins to set their status to e.g. 'red' +or 'green'. The exact design of this API is outside of the scope of this RFC. + +What is important, is that there is a global mechanism to signal status +changes which Core then makes visible to system administrators in the Kibana +logs and the `/status` HTTP API. Plugins should be able to inspect and +subscribe to status changes from any of their dependencies. + +This will provide an obvious mechanism for plugins to signal that the +conditions which are required for this plugin to operate are not currently +present and manual intervention might be required. Status changes can happen +in both setup and start lifecycles e.g.: + - [setup] a required remote host is down + - [start] a remote host which was up during setup, started returning + connection timeout errors. + +# Drawbacks +Not being able to block on a lifecycle method means plugins can no longer be +certain that all setup is "complete" before they expose their API's or reach +the start lifecycle. + +A plugin might want to poll an external host to ensure that the host is up in +its setup lifecycle before making network requests to this host in it's start +lifecycle. + +Even if Kibana was using a valid, but incorrect configuration for the remote +host, with synchronous lifecycles Kibana would still start up. Although the +status API and logs would indicate a problem, these might not be monitored +leading to the error only being discovered once someone tries to use it's +functionality. This is an acceptable drawback because it buys us isolation. +Some problems might go unnoticed, but no single plugin should affect the +availability of all other plugins. + +In effect, the plugin is polling the world to construct a snapshot +of state which drives future behaviour. 
Modeling this with lifecycle functions +is insufficient since it assumes that any state constructed in the setup +lifecycle is static and won't and can't be changed in the future. + +For example: a plugin's setup lifecycle might poll for the existence of a +custom Elasticsearch index and if it doesn't exist, create it. Should there be +an Elasticsearch restore which deletes the index, the plugin wouldn't be able +to gracefully recover by simply running it's setup lifecycle a second time. + +The once-off nature of lifecycle methods are incompatible with the real-world +dynamic conditions under which plugins run. Not being able to block a +lifecycle method is, therefore, only a drawback when plugins are authored under +the false illusion of stability. + +# Alternatives +## 1. Introduce a lifecycle/context provider timeout +Lifecycle methods and context providers would timeout after X seconds and any +API's they expose would not be available if the timeout had been reached. + +Drawbacks: +1. A blocking setup lifecycle makes it easy for plugin authors to fall into + the trap of assuming that their plugin's behaviour can continue to operate + based on the snapshot of conditions present during setup. + +2. For lifecycle methods: there would be no way to recover from a timeout, + once a timeout had been reached the API will remain unavailable. + + Context providers have the benefit of being re-created for each handler + call, so a single timeout would not permanently disable the API. + +3. Plugins have less control over their behaviour. When an upstream server + becomes unavailable, a plugin might prefer to keep retrying the request + indefinitely or only timeout after more than X seconds. It also isn't able + to expose detailed error information to downstream consumers such as + specifying which host or service is unavailable. + +4. (minor) Introduces an additional failure condition that needs to be handled. + Consumers should handle the API not being available in setup, as well as, + error responses from the API itself. Since remote hosts like Elasticsearch + could go down even after a successful setup, this effectively means API + consumers have to handle the same error condition in two places. + +## 2. Treat anything that blocks Kibana from starting up as a bug +Keep the existing New Platform blocking behaviour, but through strong +conventions and developer awareness minimize the risk of plugins blocking +Kibana's startup indefinetely. By logging detailed diagnostic info on any +plugins that appear to be blocking startup, we can aid system administrators +to recover a blocked Kibana. + +A parallel can be drawn between Kibana's async plugin initialization and the TC39 +proposal for [top-level await](https://github.com/tc39/proposal-top-level-await). +> enables modules to act as big async functions: With top-level await, +> ECMAScript Modules (ESM) can await resources, causing other modules who +> import them to wait before they start evaluating their body + +They believe the benefits outweigh the risk of modules blocking loading since: + - [developer education should result in correct usage](https://github.com/tc39/proposal-top-level-await#will-top-level-await-cause-developers-to-make-their-code-block-longer-than-it-should) + - [there are existing unavoidable ways in which modules could block loading such as infinite loops or recursion](https://github.com/tc39/proposal-top-level-await#does-top-level-await-increase-the-risk-of-deadlocks) + + +Drawbacks: +1. 
A blocking setup lifecycle makes it easy for plugin authors to fall into + the trap of assuming that their plugin's behaviour can continue to operate + based on the snapshot of conditions present during setup. +2. This opens up the potential for a bug in Elastic or third-party plugins to + effectively "break" kibana. Instead of a single plugin being disabled all + of kibana would be down requiring manual intervention by a system + administrator. + +# Adoption strategy +Although the eventual goal is to have sync-only lifecycles / providers, we +will start by deprecating async behaviour and implementing a 30s timeout as +per alternative (1). This will immediately lower the impact of plugin bugs +while at the same time enabling a more incremental rollout and the flexibility +to discover use cases that would require adopting Core API's to support sync +lifecycles / providers. + +Adoption and implementation should be handled as follows: + - Adopt Core API’s to make sync lifecycles easier (3) + - Update migration guide and other documentation examples. + - Deprecate async lifecycles / context providers with a warning. Add a + timeout of 30s after which a plugin and it's dependencies will be disabled. + - Refactor existing plugin lifecycles which are easily converted to sync + - Future: remove async timeout lifecycles / context providers + +The following New Platform plugins or shims currently rely on async lifecycle +functions and will be impacted: +1. [region_map](https://github.com/elastic/kibana/blob/6039709929caf0090a4130b8235f3a53bd04ed84/src/legacy/core_plugins/region_map/public/plugin.ts#L68) +2. [tile_map](https://github.com/elastic/kibana/blob/6039709929caf0090a4130b8235f3a53bd04ed84/src/legacy/core_plugins/tile_map/public/plugin.ts#L62) +3. [vis_type_table](https://github.com/elastic/kibana/blob/6039709929caf0090a4130b8235f3a53bd04ed84/src/legacy/core_plugins/vis_type_table/public/plugin.ts#L61) +4. [vis_type_vega](https://github.com/elastic/kibana/blob/6039709929caf0090a4130b8235f3a53bd04ed84/src/legacy/core_plugins/vis_type_vega/public/plugin.ts#L59) +5. [timelion](https://github.com/elastic/kibana/blob/9d69b72a5f200e58220231035b19da852fc6b0a5/src/plugins/timelion/server/plugin.ts#L40) +6. [code](https://github.com/elastic/kibana/blob/5049b460b47d4ae3432e1d9219263bb4be441392/x-pack/legacy/plugins/code/server/plugin.ts#L129-L149) +7. [spaces](https://github.com/elastic/kibana/blob/096c7ee51136327f778845c636d7c4f1188e5db2/x-pack/legacy/plugins/spaces/server/new_platform/plugin.ts#L95) +8. [licensing](https://github.com/elastic/kibana/blob/4667c46caef26f8f47714504879197708debae32/x-pack/plugins/licensing/server/plugin.ts) +9. [security](https://github.com/elastic/kibana/blob/0f2324e44566ce2cf083d89082841e57d2db6ef6/x-pack/plugins/security/server/plugin.ts#L96) + +# How we teach this + +Async Plugin lifecycle methods and async context provider functions have been +deprecated. In the future all lifecycle methods will by sync only. Plugins +should treat the setup lifecycle as a place in time to register functionality +with core or other plugins' API's and not as a mechanism to kick off and wait +for any initialization that's required for the plugin to be able to run. + +# Unresolved questions +1. ~~Are the drawbacks worth the benefits or can we live with Kibana potentially +being blocked for the sake of convenient async lifecycle stages?~~ + +2. 
Should core provide conventions or patterns for plugins to construct a + snapshot of state and reactively updating this state and the behaviour it + drives as the state of the world changes? + +3. Do plugins ever need to read config values and pass these as parameters to + Core API’s? If so we would have to expose synchronous config values to + support sync lifecycles. + +# Footnotes +[1] Synchronous lifecycles can still be blocked by e.g. an infine for loop, +but this would always be unintentional behaviour in contrast to intentional +async behaviour like blocking until an external service becomes available. diff --git a/src/apm.js b/src/apm.js index cea6f8fc072aa..e3f4d84d9b523 100644 --- a/src/apm.js +++ b/src/apm.js @@ -17,21 +17,81 @@ * under the License. */ -const { existsSync } = require('fs'); const { join } = require('path'); -const { name, version } = require('../package.json'); +const { readFileSync } = require('fs'); +const { execSync } = require('child_process'); +const merge = require('lodash.merge'); +const { name, version, build } = require('../package.json'); -module.exports = function(serviceName = name) { - if (process.env.kbnWorkerType === 'optmzr') return; +const ROOT_DIR = join(__dirname, '..'); + +function gitRev() { + try { + return execSync('git rev-parse --short HEAD', { + encoding: 'utf-8', + stdio: ['ignore', 'pipe', 'ignore'], + }).trim(); + } catch (e) { + return null; + } +} + +function devConfig() { + try { + const apmDevConfigPath = join(ROOT_DIR, 'config', 'apm.dev.js'); + return require(apmDevConfigPath); // eslint-disable-line import/no-dynamic-require + } catch (e) { + return {}; + } +} + +const apmConfig = merge( + { + active: false, + serverUrl: 'https://f1542b814f674090afd914960583265f.apm.us-central1.gcp.cloud.es.io:443', + // The secretToken below is intended to be hardcoded in this file even though + // it makes it public. This is not a security/privacy issue. Normally we'd + // instead disable the need for a secretToken in the APM Server config where + // the data is transmitted to, but due to how it's being hosted, it's easier, + // for now, to simply leave it in. 
+ secretToken: 'R0Gjg46pE9K9wGestd', + globalLabels: {}, + breakdownMetrics: true, + centralConfig: false, + logUncaughtExceptions: true, + }, + devConfig() +); + +try { + const filename = join(ROOT_DIR, 'data', 'uuid'); + apmConfig.globalLabels.kibana_uuid = readFileSync(filename, 'utf-8'); +} catch (e) {} // eslint-disable-line no-empty - const conf = { - serviceName: `${serviceName}-${version.replace(/\./g, '_')}`, +const rev = gitRev(); +if (rev !== null) apmConfig.globalLabels.git_rev = rev; + +function getConfig(serviceName) { + return { + ...apmConfig, + ...{ + serviceName: `${serviceName}-${version.replace(/\./g, '_')}`, + }, }; +} + +/** + * Flag to disable APM RUM support on all kibana builds by default + */ +const isKibanaDistributable = Boolean(build && build.distributable === true); - const configFile = join(__dirname, '..', 'config', 'apm.js'); +module.exports = function(serviceName = name) { + if (process.env.kbnWorkerType === 'optmzr') return; - if (existsSync(configFile)) conf.configFile = configFile; - else conf.active = false; + const conf = getConfig(serviceName); require('elastic-apm-node').start(conf); }; + +module.exports.getConfig = getConfig; +module.exports.isKibanaDistributable = isKibanaDistributable; diff --git a/src/cli/cluster/__mocks__/cluster.js b/src/cli/cluster/cluster.mock.ts similarity index 85% rename from src/cli/cluster/__mocks__/cluster.js rename to src/cli/cluster/cluster.mock.ts index d653771136ae6..332f8aad53ba1 100644 --- a/src/cli/cluster/__mocks__/cluster.js +++ b/src/cli/cluster/cluster.mock.ts @@ -18,12 +18,15 @@ */ /* eslint-env jest */ +// eslint-disable-next-line max-classes-per-file import EventEmitter from 'events'; import { assign, random } from 'lodash'; import { delay } from 'bluebird'; class MockClusterFork extends EventEmitter { - constructor(cluster) { + public exitCode = 0; + + constructor(cluster: MockCluster) { super(); let dead = true; @@ -49,9 +52,9 @@ class MockClusterFork extends EventEmitter { send: jest.fn(), }); - jest.spyOn(this, 'on'); - jest.spyOn(this, 'off'); - jest.spyOn(this, 'emit'); + jest.spyOn(this as EventEmitter, 'on'); + jest.spyOn(this as EventEmitter, 'off'); + jest.spyOn(this as EventEmitter, 'emit'); (async () => { await wait(); @@ -61,11 +64,7 @@ class MockClusterFork extends EventEmitter { } } -class MockCluster extends EventEmitter { +export class MockCluster extends EventEmitter { fork = jest.fn(() => new MockClusterFork(this)); setupMaster = jest.fn(); } - -export function mockCluster() { - return new MockCluster(); -} diff --git a/src/legacy/core_plugins/kibana/server/routes/api/scripts/index.js b/src/cli/cluster/cluster_manager.test.mocks.ts similarity index 85% rename from src/legacy/core_plugins/kibana/server/routes/api/scripts/index.js rename to src/cli/cluster/cluster_manager.test.mocks.ts index 441963b02f14f..53984fd12cbf1 100644 --- a/src/legacy/core_plugins/kibana/server/routes/api/scripts/index.js +++ b/src/cli/cluster/cluster_manager.test.mocks.ts @@ -17,8 +17,6 @@ * under the License. 
*/ -import { registerLanguages } from './register_languages'; - -export function scriptsApi(server) { - registerLanguages(server); -} +import { MockCluster } from './cluster.mock'; +export const mockCluster = new MockCluster(); +jest.mock('cluster', () => mockCluster); diff --git a/src/cli/cluster/cluster_manager.test.js b/src/cli/cluster/cluster_manager.test.ts similarity index 84% rename from src/cli/cluster/cluster_manager.test.js rename to src/cli/cluster/cluster_manager.test.ts index be8a096db9a66..bd37e854e1691 100644 --- a/src/cli/cluster/cluster_manager.test.js +++ b/src/cli/cluster/cluster_manager.test.ts @@ -17,8 +17,7 @@ * under the License. */ -import { mockCluster } from './__mocks__/cluster'; -jest.mock('cluster', () => mockCluster()); +import { mockCluster } from './cluster_manager.test.mocks'; jest.mock('readline', () => ({ createInterface: jest.fn(() => ({ on: jest.fn(), @@ -27,15 +26,14 @@ jest.mock('readline', () => ({ })), })); -import cluster from 'cluster'; import { sample } from 'lodash'; -import ClusterManager from './cluster_manager'; -import Worker from './worker'; +import { ClusterManager } from './cluster_manager'; +import { Worker } from './worker'; describe('CLI cluster manager', () => { beforeEach(() => { - cluster.fork.mockImplementation(() => { + mockCluster.fork.mockImplementation(() => { return { process: { kill: jest.fn(), @@ -44,16 +42,16 @@ describe('CLI cluster manager', () => { off: jest.fn(), on: jest.fn(), send: jest.fn(), - }; + } as any; }); }); afterEach(() => { - cluster.fork.mockReset(); + mockCluster.fork.mockReset(); }); test('has two workers', () => { - const manager = ClusterManager.create({}); + const manager = new ClusterManager({}, {} as any); expect(manager.workers).toHaveLength(2); for (const worker of manager.workers) expect(worker).toBeInstanceOf(Worker); @@ -63,7 +61,7 @@ describe('CLI cluster manager', () => { }); test('delivers broadcast messages to other workers', () => { - const manager = ClusterManager.create({}); + const manager = new ClusterManager({}, {} as any); for (const worker of manager.workers) { Worker.prototype.start.call(worker); // bypass the debounced start method @@ -76,10 +74,10 @@ describe('CLI cluster manager', () => { messenger.emit('broadcast', football); for (const worker of manager.workers) { if (worker === messenger) { - expect(worker.fork.send).not.toHaveBeenCalled(); + expect(worker.fork!.send).not.toHaveBeenCalled(); } else { - expect(worker.fork.send).toHaveBeenCalledTimes(1); - expect(worker.fork.send).toHaveBeenCalledWith(football); + expect(worker.fork!.send).toHaveBeenCalledTimes(1); + expect(worker.fork!.send).toHaveBeenCalledWith(football); } } }); @@ -88,7 +86,7 @@ describe('CLI cluster manager', () => { test('correctly configures `BasePathProxy`.', async () => { const basePathProxyMock = { start: jest.fn() }; - ClusterManager.create({}, {}, basePathProxyMock); + new ClusterManager({}, {} as any, basePathProxyMock as any); expect(basePathProxyMock.start).toHaveBeenCalledWith({ shouldRedirectFromOldBasePath: expect.any(Function), @@ -97,13 +95,13 @@ describe('CLI cluster manager', () => { }); describe('proxy is configured with the correct `shouldRedirectFromOldBasePath` and `blockUntil` functions.', () => { - let clusterManager; - let shouldRedirectFromOldBasePath; - let blockUntil; + let clusterManager: ClusterManager; + let shouldRedirectFromOldBasePath: (path: string) => boolean; + let blockUntil: () => Promise; beforeEach(async () => { const basePathProxyMock = { start: jest.fn() }; - 
clusterManager = ClusterManager.create({}, {}, basePathProxyMock); + clusterManager = new ClusterManager({}, {} as any, basePathProxyMock as any); jest.spyOn(clusterManager.server, 'on'); jest.spyOn(clusterManager.server, 'off'); @@ -146,7 +144,7 @@ describe('CLI cluster manager', () => { expect(clusterManager.server.on).toHaveBeenCalledTimes(2); expect(clusterManager.server.on).toHaveBeenCalledWith('crashed', expect.any(Function)); - const [, [eventName, onCrashed]] = clusterManager.server.on.mock.calls; + const [, [eventName, onCrashed]] = (clusterManager.server.on as jest.Mock).mock.calls; // Check event name to make sure we call the right callback, // in Jest 23 we could use `toHaveBeenNthCalledWith` instead. expect(eventName).toBe('crashed'); @@ -164,7 +162,7 @@ describe('CLI cluster manager', () => { expect(clusterManager.server.on).toHaveBeenCalledTimes(2); expect(clusterManager.server.on).toHaveBeenCalledWith('listening', expect.any(Function)); - const [[eventName, onListening]] = clusterManager.server.on.mock.calls; + const [[eventName, onListening]] = (clusterManager.server.on as jest.Mock).mock.calls; // Check event name to make sure we call the right callback, // in Jest 23 we could use `toHaveBeenNthCalledWith` instead. expect(eventName).toBe('listening'); diff --git a/src/cli/cluster/cluster_manager.js b/src/cli/cluster/cluster_manager.ts similarity index 83% rename from src/cli/cluster/cluster_manager.js rename to src/cli/cluster/cluster_manager.ts index cd1b3a0dadfc6..3fa4bdcbc5fa5 100644 --- a/src/cli/cluster/cluster_manager.js +++ b/src/cli/cluster/cluster_manager.ts @@ -20,26 +20,38 @@ import { resolve } from 'path'; import { format as formatUrl } from 'url'; import opn from 'opn'; - import { debounce, invoke, bindAll, once, uniq } from 'lodash'; import * as Rx from 'rxjs'; import { first, mapTo, filter, map, take } from 'rxjs/operators'; import { REPO_ROOT } from '@kbn/dev-utils'; +import { FSWatcher } from 'chokidar'; + +import { LegacyConfig } from '../../core/server/legacy'; +import { BasePathProxyServer } from '../../core/server/http'; +// @ts-ignore import Log from '../log'; -import Worker from './worker'; -import { Config } from '../../legacy/server/config/config'; +import { Worker } from './worker'; process.env.kbnWorkerType = 'managr'; -export default class ClusterManager { - static create(opts, settings = {}, basePathProxy) { - return new ClusterManager(opts, Config.withDefaultSchema(settings), basePathProxy); - } - - constructor(opts, config, basePathProxy) { +export class ClusterManager { + public optimizer: Worker; + public server: Worker; + public workers: Worker[]; + + private watcher: FSWatcher | null = null; + private basePathProxy: BasePathProxyServer | undefined; + private log: any; + private addedCount = 0; + private inReplMode: boolean; + + constructor( + opts: Record, + config: LegacyConfig, + basePathProxy?: BasePathProxyServer + ) { this.log = new Log(opts.quiet, opts.silent); - this.addedCount = 0; this.inReplMode = !!opts.repl; this.basePathProxy = basePathProxy; @@ -79,7 +91,7 @@ export default class ClusterManager { worker.on('broadcast', msg => { this.workers.forEach(to => { if (to !== worker && to.online) { - to.fork.send(msg); + to.fork!.send(msg); } }); }); @@ -90,10 +102,10 @@ export default class ClusterManager { // and all workers. 
This is only used by LogRotator service // when the cluster mode is enabled this.server.on('reloadLoggingConfigFromServerWorker', () => { - process.emit('message', { reloadLoggingConfig: true }); + process.emit('message' as any, { reloadLoggingConfig: true } as any); this.workers.forEach(worker => { - worker.fork.send({ reloadLoggingConfig: true }); + worker.fork!.send({ reloadLoggingConfig: true }); }); }); @@ -111,9 +123,9 @@ export default class ClusterManager { } if (opts.watch) { - const pluginPaths = config.get('plugins.paths'); + const pluginPaths = config.get('plugins.paths'); const scanDirs = [ - ...config.get('plugins.scanDirs'), + ...config.get('plugins.scanDirs'), resolve(REPO_ROOT, 'src/plugins'), resolve(REPO_ROOT, 'x-pack/plugins'), ]; @@ -131,7 +143,7 @@ export default class ClusterManager { resolve(path, 'scripts'), resolve(path, 'docs') ), - [] + [] as string[] ); this.setupWatching(extraPaths, pluginInternalDirsIgnore); @@ -149,7 +161,7 @@ export default class ClusterManager { } } - setupOpen(openUrl) { + setupOpen(openUrl: string) { const serverListening$ = Rx.merge( Rx.fromEvent(this.server, 'listening').pipe(mapTo(true)), Rx.fromEvent(this.server, 'fork:exit').pipe(mapTo(false)), @@ -157,7 +169,7 @@ export default class ClusterManager { ); const optimizeSuccess$ = Rx.fromEvent(this.optimizer, 'optimizeStatus').pipe( - map(msg => !!msg.success) + map((msg: any) => !!msg.success) ); Rx.combineLatest(serverListening$, optimizeSuccess$) @@ -169,8 +181,10 @@ export default class ClusterManager { .then(() => opn(openUrl)); } - setupWatching(extraPaths, pluginInternalDirsIgnore) { + setupWatching(extraPaths: string[], pluginInternalDirsIgnore: string[]) { + // eslint-disable-next-line @typescript-eslint/no-var-requires const chokidar = require('chokidar'); + // eslint-disable-next-line @typescript-eslint/no-var-requires const { fromRoot } = require('../../core/server/utils'); const watchPaths = [ @@ -204,7 +218,7 @@ export default class ClusterManager { ...ignorePaths, 'plugins/java_languageserver', ], - }); + }) as FSWatcher; this.watcher.on('add', this.onWatcherAdd); this.watcher.on('error', this.onWatcherError); @@ -213,8 +227,8 @@ export default class ClusterManager { 'ready', once(() => { // start sending changes to workers - this.watcher.removeListener('add', this.onWatcherAdd); - this.watcher.on('all', this.onWatcherChange); + this.watcher!.removeListener('add', this.onWatcherAdd); + this.watcher!.on('all', this.onWatcherChange); this.log.good('watching for changes', `(${this.addedCount} files)`); this.startCluster(); @@ -229,6 +243,7 @@ export default class ClusterManager { if (this.inReplMode) { return; } + // eslint-disable-next-line @typescript-eslint/no-var-requires const readline = require('readline'); const rl = readline.createInterface(process.stdin, process.stdout); @@ -263,16 +278,16 @@ export default class ClusterManager { this.addedCount += 1; } - onWatcherChange(e, path) { + onWatcherChange(e: any, path: string) { invoke(this.workers, 'onChange', path); } - onWatcherError(err) { + onWatcherError(err: any) { this.log.bad('failed to watch files!\n', err.stack); process.exit(1); // eslint-disable-line no-process-exit } - shouldRedirectFromOldBasePath(path) { + shouldRedirectFromOldBasePath(path: string) { // strip `s/{id}` prefix when checking for need to redirect if (path.startsWith('s/')) { path = path diff --git a/src/cli/cluster/worker.test.js b/src/cli/cluster/worker.test.ts similarity index 80% rename from src/cli/cluster/worker.test.js rename to 
src/cli/cluster/worker.test.ts index b43cc123abcbb..4f9337681e083 100644 --- a/src/cli/cluster/worker.test.js +++ b/src/cli/cluster/worker.test.ts @@ -17,22 +17,20 @@ * under the License. */ -import { mockCluster } from './__mocks__/cluster'; -jest.mock('cluster', () => mockCluster()); +import { mockCluster } from './cluster_manager.test.mocks'; -import cluster from 'cluster'; - -import Worker from './worker'; +import { Worker, ClusterWorker } from './worker'; +// @ts-ignore import Log from '../log'; -const workersToShutdown = []; +const workersToShutdown: Worker[] = []; -function assertListenerAdded(emitter, event) { +function assertListenerAdded(emitter: NodeJS.EventEmitter, event: any) { expect(emitter.on).toHaveBeenCalledWith(event, expect.any(Function)); } -function assertListenerRemoved(emitter, event) { - const [, onEventListener] = emitter.on.mock.calls.find(([eventName]) => { +function assertListenerRemoved(emitter: NodeJS.EventEmitter, event: any) { + const [, onEventListener] = (emitter.on as jest.Mock).mock.calls.find(([eventName]) => { return eventName === event; }); @@ -44,6 +42,7 @@ function setup(opts = {}) { log: new Log(false, true), ...opts, baseArgv: [], + type: 'test', }); workersToShutdown.push(worker); @@ -53,7 +52,7 @@ function setup(opts = {}) { describe('CLI cluster manager', () => { afterEach(async () => { while (workersToShutdown.length > 0) { - const worker = workersToShutdown.pop(); + const worker = workersToShutdown.pop() as Worker; // If `fork` exists we should set `exitCode` to the non-zero value to // prevent worker from auto restart. if (worker.fork) { @@ -63,14 +62,14 @@ describe('CLI cluster manager', () => { await worker.shutdown(); } - cluster.fork.mockClear(); + mockCluster.fork.mockClear(); }); describe('#onChange', () => { describe('opts.watch = true', () => { test('restarts the fork', () => { const worker = setup({ watch: true }); - jest.spyOn(worker, 'start').mockImplementation(() => {}); + jest.spyOn(worker, 'start').mockResolvedValue(); worker.onChange('/some/path'); expect(worker.changes).toEqual(['/some/path']); expect(worker.start).toHaveBeenCalledTimes(1); @@ -80,7 +79,7 @@ describe('CLI cluster manager', () => { describe('opts.watch = false', () => { test('does not restart the fork', () => { const worker = setup({ watch: false }); - jest.spyOn(worker, 'start').mockImplementation(() => {}); + jest.spyOn(worker, 'start').mockResolvedValue(); worker.onChange('/some/path'); expect(worker.changes).toEqual([]); expect(worker.start).not.toHaveBeenCalled(); @@ -94,13 +93,13 @@ describe('CLI cluster manager', () => { const worker = setup(); await worker.start(); expect(worker).toHaveProperty('online', true); - const fork = worker.fork; - expect(fork.process.kill).not.toHaveBeenCalled(); + const fork = worker.fork as ClusterWorker; + expect(fork!.process.kill).not.toHaveBeenCalled(); assertListenerAdded(fork, 'message'); assertListenerAdded(fork, 'online'); assertListenerAdded(fork, 'disconnect'); await worker.shutdown(); - expect(fork.process.kill).toHaveBeenCalledTimes(1); + expect(fork!.process.kill).toHaveBeenCalledTimes(1); assertListenerRemoved(fork, 'message'); assertListenerRemoved(fork, 'online'); assertListenerRemoved(fork, 'disconnect'); @@ -120,7 +119,7 @@ describe('CLI cluster manager', () => { test(`is bound to fork's message event`, async () => { const worker = setup(); await worker.start(); - expect(worker.fork.on).toHaveBeenCalledWith('message', expect.any(Function)); + expect(worker.fork!.on).toHaveBeenCalledWith('message', 
expect.any(Function)); }); }); @@ -138,8 +137,8 @@ describe('CLI cluster manager', () => { test('calls #onMessage with message parts', () => { const worker = setup(); jest.spyOn(worker, 'onMessage').mockImplementation(() => {}); - worker.parseIncomingMessage([10, 100, 1000, 10000]); - expect(worker.onMessage).toHaveBeenCalledWith(10, 100, 1000, 10000); + worker.parseIncomingMessage(['event', 'some-data']); + expect(worker.onMessage).toHaveBeenCalledWith('event', 'some-data'); }); }); }); @@ -149,7 +148,7 @@ describe('CLI cluster manager', () => { test('emits the data to be broadcasted', () => { const worker = setup(); const data = {}; - jest.spyOn(worker, 'emit').mockImplementation(() => {}); + jest.spyOn(worker, 'emit').mockImplementation(() => true); worker.onMessage('WORKER_BROADCAST', data); expect(worker.emit).toHaveBeenCalledWith('broadcast', data); }); @@ -158,7 +157,7 @@ describe('CLI cluster manager', () => { describe('when sent WORKER_LISTENING message', () => { test('sets the listening flag and emits the listening event', () => { const worker = setup(); - jest.spyOn(worker, 'emit').mockImplementation(() => {}); + jest.spyOn(worker, 'emit').mockImplementation(() => true); expect(worker).toHaveProperty('listening', false); worker.onMessage('WORKER_LISTENING'); expect(worker).toHaveProperty('listening', true); @@ -170,8 +169,6 @@ describe('CLI cluster manager', () => { test('does nothing', () => { const worker = setup(); worker.onMessage('asdlfkajsdfahsdfiohuasdofihsdoif'); - worker.onMessage({}); - worker.onMessage(23049283094); }); }); }); @@ -185,7 +182,7 @@ describe('CLI cluster manager', () => { await worker.start(); - expect(cluster.fork).toHaveBeenCalledTimes(1); + expect(mockCluster.fork).toHaveBeenCalledTimes(1); expect(worker.on).toHaveBeenCalledWith('fork:online', expect.any(Function)); }); @@ -193,12 +190,12 @@ describe('CLI cluster manager', () => { const worker = setup(); jest.spyOn(process, 'on'); - jest.spyOn(cluster, 'on'); + jest.spyOn(mockCluster, 'on'); await worker.start(); - expect(cluster.on).toHaveBeenCalledTimes(1); - expect(cluster.on).toHaveBeenCalledWith('exit', expect.any(Function)); + expect(mockCluster.on).toHaveBeenCalledTimes(1); + expect(mockCluster.on).toHaveBeenCalledWith('exit', expect.any(Function)); expect(process.on).toHaveBeenCalledTimes(1); expect(process.on).toHaveBeenCalledWith('exit', expect.any(Function)); }); diff --git a/src/cli/cluster/worker.js b/src/cli/cluster/worker.ts similarity index 75% rename from src/cli/cluster/worker.js rename to src/cli/cluster/worker.ts index 2250075f20a60..fb87f1a87654c 100644 --- a/src/cli/cluster/worker.js +++ b/src/cli/cluster/worker.ts @@ -21,25 +21,57 @@ import _ from 'lodash'; import cluster from 'cluster'; import { EventEmitter } from 'events'; -import { BinderFor } from '../../legacy/utils'; +import { BinderFor } from '../../legacy/utils/binder_for'; import { fromRoot } from '../../core/server/utils'; const cliPath = fromRoot('src/cli'); const baseArgs = _.difference(process.argv.slice(2), ['--no-watch']); const baseArgv = [process.execPath, cliPath].concat(baseArgs); +export type ClusterWorker = cluster.Worker & { + killed: boolean; + exitCode?: number; +}; + cluster.setupMaster({ exec: cliPath, silent: false, }); -const dead = fork => { +const dead = (fork: ClusterWorker) => { return fork.isDead() || fork.killed; }; -export default class Worker extends EventEmitter { - constructor(opts) { - opts = opts || {}; +interface WorkerOptions { + type: string; + log: any; // src/cli/log.js + argv?: 
string[]; + title?: string; + watch?: boolean; + baseArgv?: string[]; +} + +export class Worker extends EventEmitter { + private readonly clusterBinder: BinderFor; + private readonly processBinder: BinderFor; + + private type: string; + private title: string; + private log: any; + private forkBinder: BinderFor | null = null; + private startCount: number; + private watch: boolean; + private env: Record; + + public fork: ClusterWorker | null = null; + public changes: string[]; + + // status flags + public online = false; // the fork can accept messages + public listening = false; // the fork is listening for connections + public crashed = false; // the fork crashed + + constructor(opts: WorkerOptions) { super(); this.log = opts.log; @@ -48,15 +80,9 @@ export default class Worker extends EventEmitter { this.watch = opts.watch !== false; this.startCount = 0; - // status flags - this.online = false; // the fork can accept messages - this.listening = false; // the fork is listening for connections - this.crashed = false; // the fork crashed - this.changes = []; - this.forkBinder = null; // defined when the fork is - this.clusterBinder = new BinderFor(cluster); + this.clusterBinder = new BinderFor(cluster as any); // lack the 'off' method this.processBinder = new BinderFor(process); this.env = { @@ -66,7 +92,7 @@ export default class Worker extends EventEmitter { }; } - onExit(fork, code) { + onExit(fork: ClusterWorker, code: number) { if (this.fork !== fork) return; // we have our fork's exit, so stop listening for others @@ -91,7 +117,7 @@ export default class Worker extends EventEmitter { } } - onChange(path) { + onChange(path: string) { if (!this.watch) return; this.changes.push(path); this.start(); @@ -104,7 +130,7 @@ export default class Worker extends EventEmitter { this.fork.killed = true; // stop listening to the fork, it's just going to die - this.forkBinder.destroy(); + this.forkBinder!.destroy(); // we don't need to react to process.exit anymore this.processBinder.destroy(); @@ -114,12 +140,14 @@ export default class Worker extends EventEmitter { } } - parseIncomingMessage(msg) { - if (!Array.isArray(msg)) return; - this.onMessage(...msg); + parseIncomingMessage(msg: any) { + if (!Array.isArray(msg)) { + return; + } + this.onMessage(msg[0], msg[1]); } - onMessage(type, data) { + onMessage(type: string, data?: any) { switch (type) { case 'WORKER_BROADCAST': this.emit('broadcast', data); @@ -170,16 +198,16 @@ export default class Worker extends EventEmitter { this.log.warn(`restarting ${this.title}...`); } - this.fork = cluster.fork(this.env); + this.fork = cluster.fork(this.env) as ClusterWorker; this.forkBinder = new BinderFor(this.fork); // when the fork sends a message, comes online, or loses its connection, then react - this.forkBinder.on('message', msg => this.parseIncomingMessage(msg)); + this.forkBinder.on('message', (msg: any) => this.parseIncomingMessage(msg)); this.forkBinder.on('online', () => this.onOnline()); this.forkBinder.on('disconnect', () => this.onDisconnect()); // when the cluster says a fork has exited, check if it is ours - this.clusterBinder.on('exit', (fork, code) => this.onExit(fork, code)); + this.clusterBinder.on('exit', (fork: ClusterWorker, code: number) => this.onExit(fork, code)); // when the process exits, make sure we kill our workers this.processBinder.on('exit', () => this.shutdown()); diff --git a/src/core/MIGRATION.md b/src/core/MIGRATION.md index 5bb22579d123e..1c78de966c46f 100644 --- a/src/core/MIGRATION.md +++ b/src/core/MIGRATION.md @@ -46,6 
+46,8 @@ - [How to](#how-to) - [Configure plugin](#configure-plugin) - [Handle plugin configuration deprecations](#handle-plugin-config-deprecations) + - [Use scoped services](#use-scoped-services) + - [Declare a custom scoped service](#declare-a-custom-scoped-service) - [Mock new platform services in tests](#mock-new-platform-services-in-tests) - [Writing mocks for your plugin](#writing-mocks-for-your-plugin) - [Using mocks in your tests](#using-mocks-in-your-tests) @@ -1190,22 +1192,23 @@ In server code, `core` can be accessed from either `server.newPlatform` or `kbnS | `server.config()` | [`initializerContext.config.create()`](/docs/development/core/server/kibana-plugin-server.plugininitializercontext.config.md) | Must also define schema. See _[how to configure plugin](#configure-plugin)_ | | `server.route` | [`core.http.createRouter`](/docs/development/core/server/kibana-plugin-server.httpservicesetup.createrouter.md) | [Examples](./MIGRATION_EXAMPLES.md#route-registration) | | `request.getBasePath()` | [`core.http.basePath.get`](/docs/development/core/server/kibana-plugin-server.httpservicesetup.basepath.md) | | -| `server.plugins.elasticsearch.getCluster('data')` | [`core.elasticsearch.dataClient$`](/docs/development/core/server/kibana-plugin-server.elasticsearchservicesetup.dataclient_.md) | Handlers will also include a pre-configured client | -| `server.plugins.elasticsearch.getCluster('admin')` | [`core.elasticsearch.adminClient$`](/docs/development/core/server/kibana-plugin-server.elasticsearchservicesetup.adminclient_.md) | Handlers will also include a pre-configured client | -| `xpackMainPlugin.info.feature(pluginID).registerLicenseCheckResultsGenerator` | [`x-pack licensing plugin`](/x-pack/plugins/licensing/README.md) | | +| `server.plugins.elasticsearch.getCluster('data')` | [`context.elasticsearch.dataClient`](/docs/development/core/server/kibana-plugin-server.iscopedclusterclient.md) | | +| `server.plugins.elasticsearch.getCluster('admin')` | [`context.elasticsearch.adminClient`](/docs/development/core/server/kibana-plugin-server.iscopedclusterclient.md) | | | `server.savedObjects.setScopedSavedObjectsClientFactory` | [`core.savedObjects.setClientFactory`](/docs/development/core/server/kibana-plugin-server.savedobjectsservicesetup.setclientfactory.md) | | | `server.savedObjects.addScopedSavedObjectsClientWrapperFactory` | [`core.savedObjects.addClientWrapper`](/docs/development/core/server/kibana-plugin-server.savedobjectsservicesetup.addclientwrapper.md) | | | `server.savedObjects.getSavedObjectsRepository` | [`core.savedObjects.createInternalRepository`](/docs/development/core/server/kibana-plugin-server.savedobjectsservicesetup.createinternalrepository.md) [`core.savedObjects.createScopedRepository`](/docs/development/core/server/kibana-plugin-server.savedobjectsservicesetup.createscopedrepository.md) | | | `server.savedObjects.getScopedSavedObjectsClient` | [`core.savedObjects.getScopedClient`](/docs/development/core/server/kibana-plugin-server.savedobjectsservicestart.getscopedclient.md) | | | `request.getSavedObjectsClient` | [`context.core.savedObjects.client`](/docs/development/core/server/kibana-plugin-server.requesthandlercontext.core.md) | | +| `request.getUiSettingsService` | [`context.uiSettings.client`](/docs/development/core/server/kibana-plugin-server.iuisettingsclient.md) | | | `kibana.Plugin.deprecations` | [Handle plugin configuration deprecations](#handle-plugin-config-deprecations) and 
[`PluginConfigDescriptor.deprecations`](docs/development/core/server/kibana-plugin-server.pluginconfigdescriptor.md) | Deprecations from New Platform are not applied to legacy configuration |

_See also: [Server's CoreSetup API Docs](/docs/development/core/server/kibana-plugin-server.coresetup.md)_

##### Plugin services

-| Legacy Platform                              | New Platform                                                                    | Notes |
-| -------------------------------------------- | ------------------------------------------------------------------------------- | ----- |
-| `server.plugins.xpack_main.registerFeature`  | [`plugins.features.registerFeature`](x-pack/plugins/features/server/plugin.ts)   |       |
+| Legacy Platform                                                                      | New Platform                                                                    | Notes |
+| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------- | ----- |
+| `server.plugins.xpack_main.registerFeature`                                           | [`plugins.features.registerFeature`](x-pack/plugins/features/server/plugin.ts)   |       |
+| `server.plugins.xpack_main.feature(pluginID).registerLicenseCheckResultsGenerator`    | [`x-pack licensing plugin`](/x-pack/plugins/licensing/README.md)                  |       |

#### UI Exports

@@ -1399,7 +1402,7 @@ export const config: PluginConfigDescriptor = {
   deprecations: ({ rename, unused }) => [
     rename('oldProperty', 'newProperty'),
     unused('someUnusedProperty'),
-  ]
+  ]
 };
 ```

@@ -1413,7 +1416,7 @@ export const config: PluginConfigDescriptor = {
   deprecations: ({ renameFromRoot, unusedFromRoot }) => [
     renameFromRoot('oldplugin.property', 'myplugin.property'),
     unusedFromRoot('oldplugin.deprecated'),
-  ]
+  ]
 };
 ```

@@ -1421,6 +1424,68 @@ Note that deprecations registered in new platform's plugins are not applied to t

 During migration, if you still need the deprecations to be effective in the legacy plugin, you need to declare them in both plugin definitions.

+### Use scoped services
+Whenever Kibana needs to access data saved in Elasticsearch, it should first check that the end-user is allowed to access that data.
+In the legacy platform, Kibana has to bind the Elasticsearch-related API to the incoming request in order to call the Elasticsearch service on behalf of that user.
+```js
+  async function handler(req, res) {
+    const dataCluster = server.plugins.elasticsearch.getCluster('data');
+    const data = await dataCluster.callWithRequest(req, 'ping');
+  }
+```
+
+The new platform introduced [a handler interface](/rfcs/text/0003_handler_interface.md) on the server-side to perform that association internally.
+Core services that need to impersonate the user of an incoming request are exposed via the `context` argument of [the request handler interface](/docs/development/core/server/kibana-plugin-server.requesthandler.md).
+In the new platform, the example above becomes:
+```js
+  async function handler(context, req, res) {
+    const data = await context.core.elasticsearch.adminClient.callAsInternalUser('ping')
+  }
+```
+
+The [request handler context](/docs/development/core/server/kibana-plugin-server.requesthandlercontext.md) exposes the following scoped **core** services:
+
+| Legacy Platform                                      | New Platform                                                                                                        |
+| ---------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------ |
+| `request.getSavedObjectsClient`                      | [`context.savedObjects.client`](/docs/development/core/server/kibana-plugin-server.savedobjectsclient.md)           |
+| `server.plugins.elasticsearch.getCluster('admin')`   | [`context.elasticsearch.adminClient`](/docs/development/core/server/kibana-plugin-server.iscopedclusterclient.md)   |
+| `server.plugins.elasticsearch.getCluster('data')`    | [`context.elasticsearch.dataClient`](/docs/development/core/server/kibana-plugin-server.iscopedclusterclient.md)    |
+| `request.getUiSettingsService`                       | [`context.uiSettings.client`](/docs/development/core/server/kibana-plugin-server.iuisettingsclient.md)              |
+
+#### Declare a custom scoped service
+Plugins can extend the handler context with a custom API that will be available to the plugin itself and to all dependent plugins.
+For example, a plugin that creates a custom Elasticsearch client and wants to expose it via the request handler context:
+
+```ts
+import { CoreSetup, IScopedClusterClient } from 'kibana/server';
+
+export interface MyPluginContext {
+  client: IScopedClusterClient;
+}
+
+// extends RequestHandlerContext for any plugin that imports MyPluginContext from this file
+declare module 'src/core/server' {
+  interface RequestHandlerContext {
+    myPlugin?: MyPluginContext;
+  }
+}
+
+class Plugin {
+  setup(core: CoreSetup) {
+    const client = core.elasticsearch.createClient('myClient');
+    core.http.registerRouteHandlerContext('myPlugin', (context, req, res) => {
+      return { client: client.asScoped(req) };
+    });
+
+    const router = core.http.createRouter();
+    router.get(
+      { path: '/api/my-plugin/', validate },
+      async (context, req, res) => {
+        const data = await context.myPlugin.client.callAsCurrentUser('endpoint');
+        ...
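+        // Note: because the context was registered via registerRouteHandlerContext above,
+        // dependent plugins can read the same request-scoped client from
+        // `context.myPlugin.client` inside their own route handlers as well.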
+ } + ); + } +``` + ### Mock new platform services in tests #### Writing mocks for your plugin diff --git a/src/core/public/application/application_service.mock.ts b/src/core/public/application/application_service.mock.ts index a2db755224636..b2e2161c92cc8 100644 --- a/src/core/public/application/application_service.mock.ts +++ b/src/core/public/application/application_service.mock.ts @@ -20,15 +20,13 @@ import { Subject } from 'rxjs'; import { capabilitiesServiceMock } from './capabilities/capabilities_service.mock'; -import { ApplicationService } from './application_service'; import { ApplicationSetup, InternalApplicationStart, ApplicationStart, InternalApplicationSetup, } from './types'; - -type ApplicationServiceContract = PublicMethodsOf; +import { ApplicationServiceContract } from './test_types'; const createSetupContractMock = (): jest.Mocked => ({ register: jest.fn(), @@ -41,23 +39,27 @@ const createInternalSetupContractMock = (): jest.Mocked => ({ +const createStartContractMock = (): jest.Mocked => ({ capabilities: capabilitiesServiceMock.createStartContract().capabilities, navigateToApp: jest.fn(), getUrlForApp: jest.fn(), registerMountContext: jest.fn(), }); -const createInternalStartContractMock = (): jest.Mocked => ({ - availableApps: new Map(), - availableLegacyApps: new Map(), - capabilities: capabilitiesServiceMock.createStartContract().capabilities, - navigateToApp: jest.fn(), - getUrlForApp: jest.fn(), - registerMountContext: jest.fn(), - currentAppId$: new Subject(), - getComponent: jest.fn(), -}); +const createInternalStartContractMock = (): jest.Mocked => { + const currentAppId$ = new Subject(); + + return { + availableApps: new Map(), + availableLegacyApps: new Map(), + capabilities: capabilitiesServiceMock.createStartContract().capabilities, + currentAppId$: currentAppId$.asObservable(), + getComponent: jest.fn(), + getUrlForApp: jest.fn(), + navigateToApp: jest.fn().mockImplementation(appId => currentAppId$.next(appId)), + registerMountContext: jest.fn(), + }; +}; const createMock = (): jest.Mocked => ({ setup: jest.fn().mockReturnValue(createInternalSetupContractMock()), @@ -69,7 +71,6 @@ export const applicationServiceMock = { create: createMock, createSetupContract: createSetupContractMock, createStartContract: createStartContractMock, - createInternalSetupContract: createInternalSetupContractMock, createInternalStartContract: createInternalStartContractMock, }; diff --git a/src/core/public/application/application_service.test.ts b/src/core/public/application/application_service.test.ts new file mode 100644 index 0000000000000..d064b17ace142 --- /dev/null +++ b/src/core/public/application/application_service.test.ts @@ -0,0 +1,441 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { createElement } from 'react'; +import { Subject } from 'rxjs'; +import { bufferCount, skip, takeUntil } from 'rxjs/operators'; +import { shallow } from 'enzyme'; + +import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; +import { contextServiceMock } from '../context/context_service.mock'; +import { httpServiceMock } from '../http/http_service.mock'; +import { MockCapabilitiesService, MockHistory } from './application_service.test.mocks'; +import { MockLifecycle } from './test_types'; +import { ApplicationService } from './application_service'; + +function mount() {} + +describe('#setup()', () => { + let setupDeps: MockLifecycle<'setup'>; + let startDeps: MockLifecycle<'start'>; + let service: ApplicationService; + + beforeEach(() => { + const http = httpServiceMock.createSetupContract({ basePath: '/test' }); + setupDeps = { + http, + context: contextServiceMock.createSetupContract(), + injectedMetadata: injectedMetadataServiceMock.createSetupContract(), + }; + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); + startDeps = { http, injectedMetadata: setupDeps.injectedMetadata }; + service = new ApplicationService(); + }); + + describe('register', () => { + it('throws an error if two apps with the same id are registered', () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + expect(() => + register(Symbol(), { id: 'app1', mount } as any) + ).toThrowErrorMatchingInlineSnapshot( + `"An application is already registered with the id \\"app1\\""` + ); + }); + + it('throws error if additional apps are registered after setup', async () => { + const { register } = service.setup(setupDeps); + + await service.start(startDeps); + expect(() => + register(Symbol(), { id: 'app1', mount } as any) + ).toThrowErrorMatchingInlineSnapshot(`"Applications cannot be registered after \\"setup\\""`); + }); + + it('throws an error if an App with the same appRoute is registered', () => { + const { register, registerLegacyApp } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + + expect(() => + register(Symbol(), { id: 'app2', mount, appRoute: '/app/app1' } as any) + ).toThrowErrorMatchingInlineSnapshot( + `"An application is already registered with the appRoute \\"/app/app1\\""` + ); + expect(() => registerLegacyApp({ id: 'app1' } as any)).not.toThrow(); + + register(Symbol(), { id: 'app-next', mount, appRoute: '/app/app3' } as any); + + expect(() => + register(Symbol(), { id: 'app2', mount, appRoute: '/app/app3' } as any) + ).toThrowErrorMatchingInlineSnapshot( + `"An application is already registered with the appRoute \\"/app/app3\\""` + ); + expect(() => registerLegacyApp({ id: 'app3' } as any)).not.toThrow(); + }); + + it('throws an error if an App starts with the HTTP base path', () => { + const { register } = service.setup(setupDeps); + + expect(() => + register(Symbol(), { id: 'app2', mount, appRoute: '/test/app2' } as any) + ).toThrowErrorMatchingInlineSnapshot( + `"Cannot register an application route that includes HTTP base path"` + ); + }); + }); + + describe('registerLegacyApp', () => { + it('throws an error if two apps with the same id are registered', () => { + const { registerLegacyApp } = service.setup(setupDeps); + + registerLegacyApp({ id: 'app2' } as any); + expect(() => registerLegacyApp({ id: 'app2' } as any)).toThrowErrorMatchingInlineSnapshot( + `"A legacy application is already registered with the id \\"app2\\""` + ); + }); + + 
it('throws error if additional apps are registered after setup', async () => { + const { registerLegacyApp } = service.setup(setupDeps); + + await service.start(startDeps); + expect(() => registerLegacyApp({ id: 'app2' } as any)).toThrowErrorMatchingInlineSnapshot( + `"Applications cannot be registered after \\"setup\\""` + ); + }); + + it('throws an error if a LegacyApp with the same appRoute is registered', () => { + const { register, registerLegacyApp } = service.setup(setupDeps); + + registerLegacyApp({ id: 'app1' } as any); + + expect(() => + register(Symbol(), { id: 'app2', mount, appRoute: '/app/app1' } as any) + ).toThrowErrorMatchingInlineSnapshot( + `"An application is already registered with the appRoute \\"/app/app1\\""` + ); + expect(() => registerLegacyApp({ id: 'app1:other' } as any)).not.toThrow(); + }); + }); + + it("`registerMountContext` calls context container's registerContext", () => { + const { registerMountContext } = service.setup(setupDeps); + const container = setupDeps.context.createContextContainer.mock.results[0].value; + const pluginId = Symbol(); + + registerMountContext(pluginId, 'test' as any, mount as any); + expect(container.registerContext).toHaveBeenCalledWith(pluginId, 'test', mount); + }); +}); + +describe('#start()', () => { + let setupDeps: MockLifecycle<'setup'>; + let startDeps: MockLifecycle<'start'>; + let service: ApplicationService; + + beforeEach(() => { + MockHistory.push.mockReset(); + const http = httpServiceMock.createSetupContract({ basePath: '/test' }); + setupDeps = { + http, + context: contextServiceMock.createSetupContract(), + injectedMetadata: injectedMetadataServiceMock.createSetupContract(), + }; + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(false); + startDeps = { http, injectedMetadata: setupDeps.injectedMetadata }; + service = new ApplicationService(); + }); + + it('rejects if called prior to #setup()', async () => { + await expect(service.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot( + `"ApplicationService#setup() must be invoked before start."` + ); + }); + + it('exposes available apps', async () => { + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); + const { register, registerLegacyApp } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + registerLegacyApp({ id: 'app2' } as any); + + const { availableApps, availableLegacyApps } = await service.start(startDeps); + + expect(availableApps).toMatchInlineSnapshot(` + Map { + "app1" => Object { + "appRoute": "/app/app1", + "id": "app1", + "mount": [Function], + }, + } + `); + expect(availableLegacyApps).toMatchInlineSnapshot(` + Map { + "app2" => Object { + "id": "app2", + }, + } + `); + }); + + it('passes appIds to capabilities', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + register(Symbol(), { id: 'app2', mount } as any); + register(Symbol(), { id: 'app3', mount } as any); + await service.start(startDeps); + + expect(MockCapabilitiesService.start).toHaveBeenCalledWith({ + appIds: ['app1', 'app2', 'app3'], + http: setupDeps.http, + }); + }); + + it('filters available applications based on capabilities', async () => { + MockCapabilitiesService.start.mockResolvedValueOnce({ + capabilities: { + navLinks: { + app1: true, + app2: false, + legacyApp1: true, + legacyApp2: false, + }, + }, + } as any); + + const { register, registerLegacyApp } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + 
registerLegacyApp({ id: 'legacyApp1' } as any); + register(Symbol(), { id: 'app2', mount } as any); + registerLegacyApp({ id: 'legacyApp2' } as any); + + const { availableApps, availableLegacyApps } = await service.start(startDeps); + + expect(availableApps).toMatchInlineSnapshot(` + Map { + "app1" => Object { + "appRoute": "/app/app1", + "id": "app1", + "mount": [Function], + }, + } + `); + expect(availableLegacyApps).toMatchInlineSnapshot(` + Map { + "legacyApp1" => Object { + "id": "legacyApp1", + }, + } + `); + }); + + describe('getComponent', () => { + it('returns renderable JSX tree', async () => { + service.setup(setupDeps); + + const { getComponent } = await service.start(startDeps); + + expect(() => shallow(createElement(getComponent))).not.toThrow(); + expect(getComponent()).toMatchInlineSnapshot(` + + `); + }); + + it('renders null when in legacy mode', async () => { + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); + service.setup(setupDeps); + + const { getComponent } = await service.start(startDeps); + + expect(() => shallow(createElement(getComponent))).not.toThrow(); + expect(getComponent()).toBe(null); + }); + }); + + describe('getUrlForApp', () => { + it('creates URL for unregistered appId', async () => { + service.setup(setupDeps); + + const { getUrlForApp } = await service.start(startDeps); + + expect(getUrlForApp('app1')).toBe('/app/app1'); + }); + + it('creates URL for registered appId', async () => { + const { register, registerLegacyApp } = service.setup(setupDeps); + + register(Symbol(), { id: 'app1', mount } as any); + registerLegacyApp({ id: 'legacyApp1' } as any); + register(Symbol(), { id: 'app2', mount, appRoute: '/custom/path' } as any); + + const { getUrlForApp } = await service.start(startDeps); + + expect(getUrlForApp('app1')).toBe('/app/app1'); + expect(getUrlForApp('legacyApp1')).toBe('/app/legacyApp1'); + expect(getUrlForApp('app2')).toBe('/custom/path'); + }); + + it('creates URLs with path parameter', async () => { + service.setup(setupDeps); + + const { getUrlForApp } = await service.start(startDeps); + + expect(getUrlForApp('app1', { path: 'deep/link' })).toBe('/app/app1/deep/link'); + expect(getUrlForApp('app1', { path: '/deep//link/' })).toBe('/app/app1/deep/link'); + expect(getUrlForApp('app1', { path: '//deep/link//' })).toBe('/app/app1/deep/link'); + expect(getUrlForApp('app1', { path: 'deep/link///' })).toBe('/app/app1/deep/link'); + }); + }); + + describe('navigateToApp', () => { + it('changes the browser history to /app/:appId', async () => { + service.setup(setupDeps); + + const { navigateToApp } = await service.start(startDeps); + + navigateToApp('myTestApp'); + expect(MockHistory.push).toHaveBeenCalledWith('/app/myTestApp', undefined); + + navigateToApp('myOtherApp'); + expect(MockHistory.push).toHaveBeenCalledWith('/app/myOtherApp', undefined); + }); + + it('changes the browser history for custom appRoutes', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { id: 'app2', mount, appRoute: '/custom/path' } as any); + + const { navigateToApp } = await service.start(startDeps); + + navigateToApp('myTestApp'); + expect(MockHistory.push).toHaveBeenCalledWith('/app/myTestApp', undefined); + + navigateToApp('app2'); + expect(MockHistory.push).toHaveBeenCalledWith('/custom/path', undefined); + }); + + it('appends a path if specified', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { id: 'app2', mount, appRoute: '/custom/path' } as any); + + const { 
navigateToApp } = await service.start(startDeps); + + navigateToApp('myTestApp', { path: 'deep/link/to/location/2' }); + expect(MockHistory.push).toHaveBeenCalledWith( + '/app/myTestApp/deep/link/to/location/2', + undefined + ); + + navigateToApp('app2', { path: 'deep/link/to/location/2' }); + expect(MockHistory.push).toHaveBeenCalledWith( + '/custom/path/deep/link/to/location/2', + undefined + ); + }); + + it('includes state if specified', async () => { + const { register } = service.setup(setupDeps); + + register(Symbol(), { id: 'app2', mount, appRoute: '/custom/path' } as any); + + const { navigateToApp } = await service.start(startDeps); + + navigateToApp('myTestApp', { state: 'my-state' }); + expect(MockHistory.push).toHaveBeenCalledWith('/app/myTestApp', 'my-state'); + + navigateToApp('app2', { state: 'my-state' }); + expect(MockHistory.push).toHaveBeenCalledWith('/custom/path', 'my-state'); + }); + + it('redirects when in legacyMode', async () => { + setupDeps.redirectTo = jest.fn(); + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); + service.setup(setupDeps); + + const { navigateToApp } = await service.start(startDeps); + + navigateToApp('myTestApp'); + expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/myTestApp'); + }); + + it('updates currentApp$ after mounting', async () => { + service.setup(setupDeps); + + const { currentAppId$, navigateToApp } = await service.start(startDeps); + const stop$ = new Subject(); + const promise = currentAppId$.pipe(skip(1), bufferCount(4), takeUntil(stop$)).toPromise(); + + await navigateToApp('alpha'); + await navigateToApp('beta'); + await navigateToApp('gamma'); + await navigateToApp('delta'); + stop$.next(); + + const appIds = await promise; + + expect(appIds).toMatchInlineSnapshot(` + Array [ + "alpha", + "beta", + "gamma", + "delta", + ] + `); + }); + + it('sets window.location.href when navigating to legacy apps', async () => { + setupDeps.http = httpServiceMock.createSetupContract({ basePath: '/test' }); + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); + setupDeps.redirectTo = jest.fn(); + service.setup(setupDeps); + + const { navigateToApp } = await service.start(startDeps); + + await navigateToApp('alpha'); + expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/alpha'); + }); + + it('handles legacy apps with subapps', async () => { + setupDeps.http = httpServiceMock.createSetupContract({ basePath: '/test' }); + setupDeps.injectedMetadata.getLegacyMode.mockReturnValue(true); + setupDeps.redirectTo = jest.fn(); + + const { registerLegacyApp } = service.setup(setupDeps); + + registerLegacyApp({ id: 'baseApp:legacyApp1' } as any); + + const { navigateToApp } = await service.start(startDeps); + + await navigateToApp('baseApp:legacyApp1'); + expect(setupDeps.redirectTo).toHaveBeenCalledWith('/test/app/baseApp'); + }); + }); +}); diff --git a/src/core/public/application/application_service.test.tsx b/src/core/public/application/application_service.test.tsx deleted file mode 100644 index 32634572466a6..0000000000000 --- a/src/core/public/application/application_service.test.tsx +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { shallow } from 'enzyme'; -import React from 'react'; - -import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; -import { MockCapabilitiesService, MockHistory } from './application_service.test.mocks'; -import { ApplicationService } from './application_service'; -import { contextServiceMock } from '../context/context_service.mock'; -import { httpServiceMock } from '../http/http_service.mock'; - -describe('#setup()', () => { - describe('register', () => { - it('throws an error if two apps with the same id are registered', () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - setup.register(Symbol(), { id: 'app1', mount: jest.fn() } as any); - expect(() => - setup.register(Symbol(), { id: 'app1', mount: jest.fn() } as any) - ).toThrowErrorMatchingInlineSnapshot( - `"An application is already registered with the id \\"app1\\""` - ); - }); - - it('throws error if additional apps are registered after setup', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - await service.start({ http, injectedMetadata }); - expect(() => - setup.register(Symbol(), { id: 'app1' } as any) - ).toThrowErrorMatchingInlineSnapshot(`"Applications cannot be registered after \\"setup\\""`); - }); - - it('logs a warning when registering a deprecated app mount', async () => { - const consoleWarnSpy = jest.spyOn(console, 'warn'); - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - setup.register(Symbol(), { id: 'app1', mount: (ctx: any, params: any) => {} } as any); - expect(consoleWarnSpy).toHaveBeenCalledWith( - `App [app1] is using deprecated mount context. 
Use core.getStartServices() instead.` - ); - consoleWarnSpy.mockRestore(); - }); - }); - - describe('registerLegacyApp', () => { - it('throws an error if two apps with the same id are registered', () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - setup.registerLegacyApp({ id: 'app2' } as any); - expect(() => - setup.registerLegacyApp({ id: 'app2' } as any) - ).toThrowErrorMatchingInlineSnapshot( - `"A legacy application is already registered with the id \\"app2\\""` - ); - }); - - it('throws error if additional apps are registered after setup', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - await service.start({ http, injectedMetadata }); - expect(() => - setup.registerLegacyApp({ id: 'app2' } as any) - ).toThrowErrorMatchingInlineSnapshot(`"Applications cannot be registered after \\"setup\\""`); - }); - }); - - it("`registerMountContext` calls context container's registerContext", () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - const container = context.createContextContainer.mock.results[0].value; - const pluginId = Symbol(); - const noop = () => {}; - setup.registerMountContext(pluginId, 'test' as any, noop as any); - expect(container.registerContext).toHaveBeenCalledWith(pluginId, 'test', noop); - }); -}); - -describe('#start()', () => { - beforeEach(() => { - MockHistory.push.mockReset(); - }); - - it('exposes available apps from capabilities', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - setup.register(Symbol(), { id: 'app1', mount: jest.fn() } as any); - setup.registerLegacyApp({ id: 'app2' } as any); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - const startContract = await service.start({ http, injectedMetadata }); - - expect(startContract.availableApps).toMatchInlineSnapshot(` - Map { - "app1" => Object { - "id": "app1", - "mount": [MockFunction], - }, - } - `); - expect(startContract.availableLegacyApps).toMatchInlineSnapshot(` - Map { - "app2" => Object { - "id": "app2", - }, - } - `); - }); - - it('passes registered applications to capabilities', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - const app1 = { id: 'app1', mount: jest.fn() }; - setup.register(Symbol(), app1 as any); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - await service.start({ http, injectedMetadata }); - - expect(MockCapabilitiesService.start).toHaveBeenCalledWith({ - apps: new Map([['app1', app1]]), - legacyApps: new Map(), - http, - }); - }); - - it('passes registered legacy applications to capabilities', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - const setup = service.setup({ context }); - setup.registerLegacyApp({ id: 'legacyApp1' } as any); - - const http = 
httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - await service.start({ http, injectedMetadata }); - - expect(MockCapabilitiesService.start).toHaveBeenCalledWith({ - apps: new Map(), - legacyApps: new Map([['legacyApp1', { id: 'legacyApp1' }]]), - http, - }); - }); - - it('returns renderable JSX tree', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - service.setup({ context }); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - injectedMetadata.getLegacyMode.mockReturnValue(false); - const start = await service.start({ http, injectedMetadata }); - - expect(() => shallow(React.createElement(() => start.getComponent()))).not.toThrow(); - }); - - describe('navigateToApp', () => { - it('changes the browser history to /app/:appId', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - service.setup({ context }); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - injectedMetadata.getLegacyMode.mockReturnValue(false); - const start = await service.start({ http, injectedMetadata }); - - start.navigateToApp('myTestApp'); - expect(MockHistory.push).toHaveBeenCalledWith('/app/myTestApp', undefined); - start.navigateToApp('myOtherApp'); - expect(MockHistory.push).toHaveBeenCalledWith('/app/myOtherApp', undefined); - }); - - it('appends a path if specified', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - service.setup({ context }); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - injectedMetadata.getLegacyMode.mockReturnValue(false); - const start = await service.start({ http, injectedMetadata }); - - start.navigateToApp('myTestApp', { path: 'deep/link/to/location/2' }); - expect(MockHistory.push).toHaveBeenCalledWith( - '/app/myTestApp/deep/link/to/location/2', - undefined - ); - }); - - it('includes state if specified', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - service.setup({ context }); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - injectedMetadata.getLegacyMode.mockReturnValue(false); - const start = await service.start({ http, injectedMetadata }); - - start.navigateToApp('myTestApp', { state: 'my-state' }); - expect(MockHistory.push).toHaveBeenCalledWith('/app/myTestApp', 'my-state'); - }); - - it('redirects when in legacyMode', async () => { - const service = new ApplicationService(); - const context = contextServiceMock.createSetupContract(); - service.setup({ context }); - - const http = httpServiceMock.createStartContract(); - const injectedMetadata = injectedMetadataServiceMock.createStartContract(); - injectedMetadata.getLegacyMode.mockReturnValue(true); - const redirectTo = jest.fn(); - const start = await service.start({ http, injectedMetadata, redirectTo }); - start.navigateToApp('myTestApp'); - expect(redirectTo).toHaveBeenCalledWith('/app/myTestApp'); - }); - }); -}); diff --git a/src/core/public/application/application_service.tsx b/src/core/public/application/application_service.tsx index 
df00c84028e6f..a96b9dea9b9c7 100644 --- a/src/core/public/application/application_service.tsx +++ b/src/core/public/application/application_service.tsx @@ -17,31 +17,32 @@ * under the License. */ -import { createBrowserHistory } from 'history'; -import { BehaviorSubject } from 'rxjs'; import React from 'react'; +import { BehaviorSubject, Subject } from 'rxjs'; +import { takeUntil } from 'rxjs/operators'; +import { createBrowserHistory, History } from 'history'; -import { InjectedMetadataStart } from '../injected_metadata'; -import { CapabilitiesService } from './capabilities'; -import { AppRouter } from './ui'; -import { HttpStart } from '../http'; +import { InjectedMetadataSetup, InjectedMetadataStart } from '../injected_metadata'; +import { HttpSetup, HttpStart } from '../http'; import { ContextSetup, IContextContainer } from '../context'; +import { AppRouter } from './ui'; +import { CapabilitiesService, Capabilities } from './capabilities'; import { App, LegacyApp, AppMount, AppMountDeprecated, + AppMounter, + LegacyAppMounter, + Mounter, InternalApplicationSetup, InternalApplicationStart, } from './types'; interface SetupDeps { context: ContextSetup; -} - -interface StartDeps { - http: HttpStart; - injectedMetadata: InjectedMetadataStart; + http: HttpSetup; + injectedMetadata: InjectedMetadataSetup; /** * Only necessary for redirecting to legacy apps * @deprecated @@ -49,144 +50,158 @@ interface StartDeps { redirectTo?: (path: string) => void; } -interface AppBox { - app: App; - mount: AppMount; +interface StartDeps { + injectedMetadata: InjectedMetadataStart; + http: HttpStart; } +// Mount functions with two arguments are assumed to expect deprecated `context` object. +const isAppMountDeprecated = (mount: (...args: any[]) => any): mount is AppMountDeprecated => + mount.length === 2; +const filterAvailable = (map: Map, capabilities: Capabilities) => + new Map( + [...map].filter( + ([id]) => capabilities.navLinks[id] === undefined || capabilities.navLinks[id] === true + ) + ); +const findMounter = (mounters: Map, appRoute?: string) => + [...mounters].find(([, mounter]) => mounter.appRoute === appRoute); +const getAppUrl = (mounters: Map, appId: string, path: string = '') => + `/${mounters.get(appId)?.appRoute ?? `/app/${appId}`}/${path}` + .replace(/\/{2,}/g, '/') // Remove duplicate slashes + .replace(/\/$/, ''); // Remove trailing slash + /** * Service that is responsible for registering new applications. 
* @internal */ export class ApplicationService { - private readonly apps$ = new BehaviorSubject>(new Map()); - private readonly legacyApps$ = new BehaviorSubject>(new Map()); + private readonly apps = new Map(); + private readonly legacyApps = new Map(); + private readonly mounters = new Map(); private readonly capabilities = new CapabilitiesService(); + private currentAppId$ = new BehaviorSubject(undefined); + private stop$ = new Subject(); + private registrationClosed = false; + private history?: History; private mountContext?: IContextContainer; + private navigate?: (url: string, state: any) => void; - public setup({ context }: SetupDeps): InternalApplicationSetup { + public setup({ + context, + http: { basePath }, + injectedMetadata, + redirectTo = (path: string) => (window.location.href = path), + }: SetupDeps): InternalApplicationSetup { + const basename = basePath.get(); + // Only setup history if we're not in legacy mode + if (!injectedMetadata.getLegacyMode()) { + this.history = createBrowserHistory({ basename }); + } + + // If we do not have history available, use redirectTo to do a full page refresh. + this.navigate = (url, state) => + // basePath not needed here because `history` is configured with basename + this.history ? this.history.push(url, state) : redirectTo(basePath.prepend(url)); this.mountContext = context.createContextContainer(); return { - register: (plugin: symbol, app: App) => { - if (this.apps$.value.has(app.id)) { - throw new Error(`An application is already registered with the id "${app.id}"`); - } - if (this.apps$.isStopped) { + registerMountContext: this.mountContext!.registerContext, + register: (plugin, app) => { + app = { appRoute: `/app/${app.id}`, ...app }; + + if (this.registrationClosed) { throw new Error(`Applications cannot be registered after "setup"`); + } else if (this.apps.has(app.id)) { + throw new Error(`An application is already registered with the id "${app.id}"`); + } else if (findMounter(this.mounters, app.appRoute)) { + throw new Error( + `An application is already registered with the appRoute "${app.appRoute}"` + ); + } else if (basename && app.appRoute!.startsWith(basename)) { + throw new Error('Cannot register an application route that includes HTTP base path'); } - let appBox: AppBox; + let handler: AppMount; + if (isAppMountDeprecated(app.mount)) { + handler = this.mountContext!.createHandler(plugin, app.mount); // eslint-disable-next-line no-console console.warn( `App [${app.id}] is using deprecated mount context. 
Use core.getStartServices() instead.` ); - - appBox = { - app, - mount: this.mountContext!.createHandler(plugin, app.mount), - }; } else { - appBox = { app, mount: app.mount }; + handler = app.mount; } - this.apps$.next(new Map([...this.apps$.value.entries(), [app.id, appBox]])); + const mount: AppMounter = async params => { + const unmount = await handler(params); + this.currentAppId$.next(app.id); + return unmount; + }; + this.apps.set(app.id, app); + this.mounters.set(app.id, { + appRoute: app.appRoute!, + appBasePath: basePath.prepend(app.appRoute!), + mount, + unmountBeforeMounting: false, + }); }, - registerLegacyApp: (app: LegacyApp) => { - if (this.legacyApps$.value.has(app.id)) { + registerLegacyApp: app => { + const appRoute = `/app/${app.id.split(':')[0]}`; + + if (this.registrationClosed) { + throw new Error('Applications cannot be registered after "setup"'); + } else if (this.legacyApps.has(app.id)) { throw new Error(`A legacy application is already registered with the id "${app.id}"`); - } - if (this.legacyApps$.isStopped) { - throw new Error(`Applications cannot be registered after "setup"`); + } else if (basename && appRoute!.startsWith(basename)) { + throw new Error('Cannot register an application route that includes HTTP base path'); } - this.legacyApps$.next(new Map([...this.legacyApps$.value.entries(), [app.id, app]])); + const appBasePath = basePath.prepend(appRoute); + const mount: LegacyAppMounter = () => redirectTo(appBasePath); + this.legacyApps.set(app.id, app); + this.mounters.set(app.id, { + appRoute, + appBasePath, + mount, + unmountBeforeMounting: true, + }); }, - registerMountContext: this.mountContext!.registerContext, }; } - public async start({ - http, - injectedMetadata, - redirectTo = (path: string) => (window.location.href = path), - }: StartDeps): Promise { + public async start({ injectedMetadata, http }: StartDeps): Promise { if (!this.mountContext) { - throw new Error(`ApplicationService#setup() must be invoked before start.`); + throw new Error('ApplicationService#setup() must be invoked before start.'); } - // Disable registration of new applications - this.apps$.complete(); - this.legacyApps$.complete(); - - const legacyMode = injectedMetadata.getLegacyMode(); - const currentAppId$ = new BehaviorSubject(undefined); - const { availableApps, availableLegacyApps, capabilities } = await this.capabilities.start({ + this.registrationClosed = true; + const { capabilities } = await this.capabilities.start({ + appIds: [...this.mounters.keys()], http, - apps: new Map([...this.apps$.value].map(([id, { app }]) => [id, app])), - legacyApps: this.legacyApps$.value, }); - - // Only setup history if we're not in legacy mode - const history = legacyMode ? 
null : createBrowserHistory({ basename: http.basePath.get() }); + const availableMounters = filterAvailable(this.mounters, capabilities); return { - availableApps, - availableLegacyApps, + availableApps: filterAvailable(this.apps, capabilities), + availableLegacyApps: filterAvailable(this.legacyApps, capabilities), capabilities, + currentAppId$: this.currentAppId$.pipe(takeUntil(this.stop$)), registerMountContext: this.mountContext.registerContext, - currentAppId$, - - getUrlForApp: (appId, options: { path?: string } = {}) => { - return http.basePath.prepend(appPath(appId, options)); - }, - + getUrlForApp: (appId, { path }: { path?: string } = {}) => + getAppUrl(availableMounters, appId, path), navigateToApp: (appId, { path, state }: { path?: string; state?: any } = {}) => { - if (legacyMode) { - // If we're in legacy mode, do a full page refresh to load the NP app. - redirectTo(http.basePath.prepend(appPath(appId, { path }))); - } else { - // basePath not needed here because `history` is configured with basename - history!.push(appPath(appId, { path }), state); - } - }, - - getComponent: () => { - if (legacyMode) { - return null; - } - - // Filter only available apps and map to just the mount function. - const appMounts = new Map( - [...this.apps$.value] - .filter(([id]) => availableApps.has(id)) - .map(([id, { mount }]) => [id, mount]) - ); - - return ( - - ); + this.navigate!(getAppUrl(availableMounters, appId, path), state); + this.currentAppId$.next(appId); }, + getComponent: () => + this.history ? : null, }; } - public stop() {} -} - -const appPath = (appId: string, { path }: { path?: string } = {}): string => - path - ? `/app/${appId}/${path.replace(/^\//, '')}` // Remove preceding slash from path if present - : `/app/${appId}`; - -function isAppMountDeprecated(mount: (...args: any[]) => any): mount is AppMountDeprecated { - // Mount functions with two arguments are assumed to expect deprecated `context` object. - return mount.length === 2; + public stop() { + this.stop$.next(); + this.currentAppId$.complete(); + } } diff --git a/src/core/public/application/capabilities/capabilities_service.mock.ts b/src/core/public/application/capabilities/capabilities_service.mock.ts index 29c3275f0e3b2..54aaa31e08859 100644 --- a/src/core/public/application/capabilities/capabilities_service.mock.ts +++ b/src/core/public/application/capabilities/capabilities_service.mock.ts @@ -17,15 +17,9 @@ * under the License. 
*/ import { CapabilitiesService, CapabilitiesStart } from './capabilities_service'; -import { deepFreeze } from '../../../utils/'; -import { App, LegacyApp } from '../types'; +import { deepFreeze } from '../../../utils'; -const createStartContractMock = ( - apps: ReadonlyMap = new Map(), - legacyApps: ReadonlyMap = new Map() -): jest.Mocked => ({ - availableApps: apps, - availableLegacyApps: legacyApps, +const createStartContractMock = (): jest.Mocked => ({ capabilities: deepFreeze({ catalogue: {}, management: {}, @@ -33,11 +27,8 @@ const createStartContractMock = ( }), }); -type CapabilitiesServiceContract = PublicMethodsOf; -const createMock = (): jest.Mocked => ({ - start: jest - .fn() - .mockImplementation(({ apps, legacyApps }) => createStartContractMock(apps, legacyApps)), +const createMock = (): jest.Mocked> => ({ + start: jest.fn().mockImplementation(createStartContractMock), }); export const capabilitiesServiceMock = { diff --git a/src/core/public/application/capabilities/capabilities_service.test.ts b/src/core/public/application/capabilities/capabilities_service.test.ts index 3245be8dd502d..dfbb449b4d58e 100644 --- a/src/core/public/application/capabilities/capabilities_service.test.ts +++ b/src/core/public/application/capabilities/capabilities_service.test.ts @@ -19,7 +19,6 @@ import { httpServiceMock, HttpSetupMock } from '../../http/http_service.mock'; import { CapabilitiesService } from './capabilities_service'; -import { LegacyApp, App } from '../types'; const mockedCapabilities = { catalogue: {}, @@ -42,36 +41,22 @@ describe('#start', () => { http.post.mockReturnValue(Promise.resolve(mockedCapabilities)); }); - const apps = new Map([ - ['app1', { id: 'app1' }], - ['app2', { id: 'app2', capabilities: { app2: { feature: true } } }], - ['appMissingInCapabilities', { id: 'appMissingInCapabilities' }], - ] as Array<[string, App]>); - const legacyApps = new Map([ - ['legacyApp1', { id: 'legacyApp1' }], - ['legacyApp2', { id: 'legacyApp2', capabilities: { app2: { feature: true } } }], - ] as Array<[string, LegacyApp]>); - - it('filters available apps based on returned navLinks', async () => { + it('only returns capabilities for given appIds', async () => { const service = new CapabilitiesService(); - const startContract = await service.start({ apps, legacyApps, http }); - expect(startContract.availableApps).toEqual( - new Map([ - ['app1', { id: 'app1' }], - ['appMissingInCapabilities', { id: 'appMissingInCapabilities' }], - ]) - ); - expect(startContract.availableLegacyApps).toEqual( - new Map([['legacyApp1', { id: 'legacyApp1' }]]) - ); + const { capabilities } = await service.start({ + http, + appIds: ['app1', 'app2', 'legacyApp1', 'legacyApp2'], + }); + + // @ts-ignore TypeScript knows this shouldn't be possible + expect(() => (capabilities.foo = 'foo')).toThrowError(); }); it('does not allow Capabilities to be modified', async () => { const service = new CapabilitiesService(); const { capabilities } = await service.start({ - apps, - legacyApps, http, + appIds: ['app1', 'app2', 'legacyApp1', 'legacyApp2'], }); // @ts-ignore TypeScript knows this shouldn't be possible diff --git a/src/core/public/application/capabilities/capabilities_service.tsx b/src/core/public/application/capabilities/capabilities_service.tsx index 24d9765953c44..05d718e1073df 100644 --- a/src/core/public/application/capabilities/capabilities_service.tsx +++ b/src/core/public/application/capabilities/capabilities_service.tsx @@ -19,22 +19,16 @@ import { Capabilities } from '../../../types/capabilities'; 
import { deepFreeze, RecursiveReadonly } from '../../../utils'; -import { LegacyApp, App } from '../types'; import { HttpStart } from '../../http'; interface StartDeps { - apps: ReadonlyMap; - legacyApps: ReadonlyMap; + appIds: string[]; http: HttpStart; } -export { Capabilities }; - /** @internal */ export interface CapabilitiesStart { capabilities: RecursiveReadonly; - availableApps: ReadonlyMap; - availableLegacyApps: ReadonlyMap; } /** @@ -42,41 +36,14 @@ export interface CapabilitiesStart { * @internal */ export class CapabilitiesService { - public async start({ apps, legacyApps, http }: StartDeps): Promise { - const capabilities = await this.fetchCapabilities(http, [...apps.keys(), ...legacyApps.keys()]); - - const availableApps = new Map( - [...apps].filter( - ([appId]) => - capabilities.navLinks[appId] === undefined || capabilities.navLinks[appId] === true - ) - ); - - const availableLegacyApps = new Map( - [...legacyApps].filter( - ([appId]) => - capabilities.navLinks[appId] === undefined || capabilities.navLinks[appId] === true - ) - ); + public async start({ appIds, http }: StartDeps): Promise { + const route = http.anonymousPaths.isAnonymous(window.location.pathname) ? '/defaults' : ''; + const capabilities = await http.post(`/api/core/capabilities${route}`, { + body: JSON.stringify({ applications: appIds }), + }); return { - availableApps, - availableLegacyApps, - capabilities, + capabilities: deepFreeze(capabilities), }; } - - private async fetchCapabilities(http: HttpStart, appIds: string[]): Promise { - const payload = JSON.stringify({ - applications: appIds, - }); - - const url = http.anonymousPaths.isAnonymous(window.location.pathname) - ? '/api/core/capabilities/defaults' - : '/api/core/capabilities'; - const capabilities = await http.post(url, { - body: payload, - }); - return deepFreeze(capabilities); - } } diff --git a/src/core/public/application/capabilities/index.ts b/src/core/public/application/capabilities/index.ts index 9d8bec955eb97..e4112a55ef6bd 100644 --- a/src/core/public/application/capabilities/index.ts +++ b/src/core/public/application/capabilities/index.ts @@ -17,4 +17,5 @@ * under the License. */ -export { Capabilities, CapabilitiesService } from './capabilities_service'; +export { Capabilities } from '../../../types/capabilities'; +export { CapabilitiesService } from './capabilities_service'; diff --git a/src/core/public/application/integration_tests/router.test.tsx b/src/core/public/application/integration_tests/router.test.tsx index 81aef5204c7e2..10544c348afb0 100644 --- a/src/core/public/application/integration_tests/router.test.tsx +++ b/src/core/public/application/integration_tests/router.test.tsx @@ -18,107 +18,161 @@ */ import React from 'react'; -import { mount, ReactWrapper } from 'enzyme'; -import { createMemoryHistory, History } from 'history'; -import { BehaviorSubject } from 'rxjs'; +import { createMemoryHistory, History, createHashHistory } from 'history'; -import { I18nProvider } from '@kbn/i18n/react'; - -import { AppMount, LegacyApp, AppMountParameters } from '../types'; -import { httpServiceMock } from '../../http/http_service.mock'; import { AppRouter, AppNotFound } from '../ui'; - -const createMountHandler = (htmlString: string) => - jest.fn(async ({ appBasePath: basename, element: el }: AppMountParameters) => { - el.innerHTML = `
\nbasename: ${basename}\nhtml: ${htmlString}\n
`; - return jest.fn(() => (el.innerHTML = '')); - }); +import { EitherApp, MockedMounterMap, MockedMounterTuple } from '../test_types'; +import { createRenderer, createAppMounter, createLegacyAppMounter } from './utils'; describe('AppContainer', () => { - let apps: Map, Parameters>>; - let legacyApps: Map; + let mounters: MockedMounterMap; let history: History; - let router: ReactWrapper; - let redirectTo: jest.Mock; - let currentAppId$: BehaviorSubject; + let update: ReturnType; - const navigate = async (path: string) => { + const navigate = (path: string) => { history.push(path); - router.update(); - // flushes any pending promises - return new Promise(resolve => setImmediate(resolve)); + return update(); }; + const mockMountersToMounters = () => + new Map([...mounters].map(([appId, { mounter }]) => [appId, mounter])); + beforeEach(() => { - redirectTo = jest.fn(); - apps = new Map([ - ['app1', createMountHandler('App 1')], - ['app2', createMountHandler('
App 2
')], - ]); - legacyApps = new Map([ - ['legacyApp1', { id: 'legacyApp1' }], - ['baseApp:legacyApp2', { id: 'baseApp:legacyApp2' }], - ]) as Map; + mounters = new Map([ + createAppMounter('app1', 'App 1'), + createLegacyAppMounter('legacyApp1', jest.fn()), + createAppMounter('app2', '
App 2
'), + createLegacyAppMounter('baseApp:legacyApp2', jest.fn()), + createAppMounter('app3', '
App 3
', '/custom/path'), + ] as Array>); history = createMemoryHistory(); - currentAppId$ = new BehaviorSubject(undefined); - // Use 'asdf' as the basepath - const http = httpServiceMock.createStartContract({ basePath: '/asdf' }); - router = mount( - - - - ); + update = createRenderer(); }); - it('calls mountHandler and returned unmount function when navigating between apps', async () => { - await navigate('/app/app1'); - expect(apps.get('app1')!).toHaveBeenCalled(); - expect(router.html()).toMatchInlineSnapshot(` + it('calls mount handler and returned unmount function when navigating between apps', async () => { + const dom1 = await navigate('/app/app1'); + const app1 = mounters.get('app1')!; + + expect(app1.mounter.mount).toHaveBeenCalled(); + expect(dom1?.html()).toMatchInlineSnapshot(` "
- basename: /asdf/app/app1 + basename: /app/app1 html: App 1
" `); - const app1Unmount = await apps.get('app1')!.mock.results[0].value; - await navigate('/app/app2'); - expect(app1Unmount).toHaveBeenCalled(); + const app1Unmount = await app1.mounter.mount.mock.results[0].value; + const dom2 = await navigate('/app/app2'); - expect(apps.get('app2')!).toHaveBeenCalled(); - expect(router.html()).toMatchInlineSnapshot(` + expect(app1Unmount).toHaveBeenCalled(); + expect(mounters.get('app2')!.mounter.mount).toHaveBeenCalled(); + expect(dom2?.html()).toMatchInlineSnapshot(` "
- basename: /asdf/app/app2 + basename: /app/app2 html:
App 2
" `); }); - it('updates currentApp$ after mounting', async () => { - await navigate('/app/app1'); - expect(currentAppId$.value).toEqual('app1'); - await navigate('/app/app2'); - expect(currentAppId$.value).toEqual('app2'); + it('should not mount when partial route path matches', async () => { + mounters.set(...createAppMounter('spaces', '
Custom Space
', '/spaces/fake-login')); + mounters.set(...createAppMounter('login', '
Login Page
', '/fake-login')); + history = createMemoryHistory(); + update = createRenderer(); + + await navigate('/fake-login'); + + expect(mounters.get('spaces')!.mounter.mount).not.toHaveBeenCalled(); + expect(mounters.get('login')!.mounter.mount).toHaveBeenCalled(); + }); + + it('should not mount when partial route path has higher specificity', async () => { + mounters.set(...createAppMounter('login', '
Login Page
', '/fake-login')); + mounters.set(...createAppMounter('spaces', '
Custom Space
', '/spaces/fake-login')); + history = createMemoryHistory(); + update = createRenderer(); + + await navigate('/spaces/fake-login'); + + expect(mounters.get('spaces')!.mounter.mount).toHaveBeenCalled(); + expect(mounters.get('login')!.mounter.mount).not.toHaveBeenCalled(); }); - it('sets window.location.href when navigating to legacy apps', async () => { + it('should not remount when changing pages within app', async () => { + const { mounter, unmount } = mounters.get('app1')!; + await navigate('/app/app1/page1'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + + // Navigating to page within app does not trigger re-render + await navigate('/app/app1/page2'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + expect(unmount).not.toHaveBeenCalled(); + }); + + it('should not remount when going back within app', async () => { + const { mounter, unmount } = mounters.get('app1')!; + await navigate('/app/app1/page1'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + + // Hitting back button within app does not trigger re-render + await navigate('/app/app1/page2'); + history.goBack(); + await update(); + expect(mounter.mount).toHaveBeenCalledTimes(1); + expect(unmount).not.toHaveBeenCalled(); + }); + + it('should not remount when when changing pages within app using hash history', async () => { + history = createHashHistory(); + update = createRenderer(); + + const { mounter, unmount } = mounters.get('app1')!; + await navigate('/app/app1/page1'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + + // Changing hash history does not trigger re-render + await navigate('/app/app1/page2'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + expect(unmount).not.toHaveBeenCalled(); + }); + + it('should unmount when changing between apps', async () => { + const { mounter, unmount } = mounters.get('app1')!; + await navigate('/app/app1/page1'); + expect(mounter.mount).toHaveBeenCalledTimes(1); + + // Navigating to other app triggers unmount + await navigate('/app/app2/page1'); + expect(unmount).toHaveBeenCalledTimes(1); + }); + + it('calls legacy mount handler', async () => { await navigate('/app/legacyApp1'); - expect(redirectTo).toHaveBeenCalledWith('/asdf/app/legacyApp1'); + expect(mounters.get('legacyApp1')!.mounter.mount.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + Object { + "appBasePath": "/app/legacyApp1", + "element":
, + }, + ] + `); }); it('handles legacy apps with subapps', async () => { await navigate('/app/baseApp'); - expect(redirectTo).toHaveBeenCalledWith('/asdf/app/baseApp'); + expect(mounters.get('baseApp:legacyApp2')!.mounter.mount.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + Object { + "appBasePath": "/app/baseApp", + "element":
, + }, + ] + `); }); it('displays error page if no app is found', async () => { - await navigate('/app/unknown'); - expect(router.exists(AppNotFound)).toBe(true); + const dom = await navigate('/app/unknown'); + + expect(dom?.exists(AppNotFound)).toBe(true); }); }); diff --git a/src/core/public/application/integration_tests/utils.tsx b/src/core/public/application/integration_tests/utils.tsx new file mode 100644 index 0000000000000..6367d1fa12697 --- /dev/null +++ b/src/core/public/application/integration_tests/utils.tsx @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { ReactElement } from 'react'; +import { mount } from 'enzyme'; + +import { I18nProvider } from '@kbn/i18n/react'; + +import { App, LegacyApp, AppMountParameters } from '../types'; +import { MockedMounter, MockedMounterTuple } from '../test_types'; + +type Dom = ReturnType | null; +type Renderer = () => Dom | Promise; + +export const createRenderer = (element: ReactElement | null): Renderer => { + const dom: Dom = element && mount({element}); + + return () => + new Promise(async resolve => { + if (dom) { + dom.update(); + } + setImmediate(() => resolve(dom)); // flushes any pending promises + }); +}; + +export const createAppMounter = ( + appId: string, + html: string, + appRoute = `/app/${appId}` +): MockedMounterTuple => { + const unmount = jest.fn(); + return [ + appId, + { + mounter: { + appRoute, + appBasePath: appRoute, + mount: jest.fn(async ({ appBasePath: basename, element }: AppMountParameters) => { + Object.assign(element, { + innerHTML: `
\nbasename: ${basename}\nhtml: ${html}\n
`, + }); + unmount.mockImplementation(() => Object.assign(element, { innerHTML: '' })); + return unmount; + }), + }, + unmount, + }, + ]; +}; + +export const createLegacyAppMounter = ( + appId: string, + legacyMount: MockedMounter['mount'] +): MockedMounterTuple => [ + appId, + { + mounter: { + appRoute: `/app/${appId.split(':')[0]}`, + appBasePath: `/app/${appId.split(':')[0]}`, + unmountBeforeMounting: true, + mount: legacyMount, + }, + unmount: jest.fn(), + }, +]; diff --git a/src/core/public/application/test_types.ts b/src/core/public/application/test_types.ts new file mode 100644 index 0000000000000..3d992cb950eb4 --- /dev/null +++ b/src/core/public/application/test_types.ts @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { App, LegacyApp, Mounter, AppUnmount } from './types'; +import { ApplicationService } from './application_service'; + +/** @internal */ +export type ApplicationServiceContract = PublicMethodsOf; +/** @internal */ +export type EitherApp = App | LegacyApp; +/** @internal */ +export type MockedUnmount = jest.Mocked; +/** @internal */ +export type MockedMounter = jest.Mocked>>; +/** @internal */ +export type MockedMounterTuple = [ + string, + { mounter: MockedMounter; unmount: MockedUnmount } +]; +/** @internal */ +export type MockedMounterMap = Map< + string, + { mounter: MockedMounter; unmount: MockedUnmount } +>; +/** @internal */ +export type MockLifecycle< + T extends keyof ApplicationService, + U = Parameters[0] +> = { [P in keyof U]: jest.Mocked }; diff --git a/src/core/public/application/types.ts b/src/core/public/application/types.ts index fd009066fc664..c026851af7eb8 100644 --- a/src/core/public/application/types.ts +++ b/src/core/public/application/types.ts @@ -17,7 +17,7 @@ * under the License. */ -import { Observable, Subject } from 'rxjs'; +import { Observable } from 'rxjs'; import { Capabilities } from './capabilities'; import { ChromeStart } from '../chrome'; @@ -89,6 +89,13 @@ export interface App extends AppBase { * Takes precedence over chrome service visibility settings. */ chromeless?: boolean; + + /** + * Override the application's routing path from `/app/${id}`. + * Must be unique across registered applications. Should not include the + * base path from HTTP. + */ + appRoute?: string; } /** @internal */ @@ -177,7 +184,8 @@ export interface AppMountParameters { element: HTMLElement; /** - * The base path for configuring the application's router. + * The route path for configuring navigation to the application. + * This string should not include the base path from HTTP. 
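+ * For example, an application registered with `appRoute: '/my-app'` would receive `appBasePath: '/my-app'` here, without the HTTP base path prepended (illustrative note based on the registration example below).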
* * @example * @@ -189,6 +197,7 @@ export interface AppMountParameters { * setup({ application }) { * application.register({ * id: 'my-app', + * appRoute: '/my-app', * async mount(params) { * const { renderApp } = await import('./application'); * return renderApp(params); @@ -229,6 +238,23 @@ export interface AppMountParameters { */ export type AppUnmount = () => void; +/** @internal */ +export type AppMounter = (params: AppMountParameters) => Promise; + +/** @internal */ +export type LegacyAppMounter = (params: AppMountParameters) => void; + +/** @internal */ +export type Mounter = SelectivePartial< + { + appRoute: string; + appBasePath: string; + mount: T extends LegacyApp ? LegacyAppMounter : AppMounter; + unmountBeforeMounting: T extends LegacyApp ? true : boolean; + }, + T extends LegacyApp ? never : 'unmountBeforeMounting' +>; + /** @public */ export interface ApplicationSetup { /** @@ -352,6 +378,12 @@ export interface InternalApplicationStart ): void; // Internal APIs - currentAppId$: Subject; + currentAppId$: Observable; getComponent(): JSX.Element | null; } + +/** @internal */ +type SelectivePartial = Partial> & + Required>> extends infer U + ? { [P in keyof U]: U[P] } + : never; diff --git a/src/core/public/application/ui/app_container.tsx b/src/core/public/application/ui/app_container.tsx index 9c2bb30e79503..153582e805fa1 100644 --- a/src/core/public/application/ui/app_container.tsx +++ b/src/core/public/application/ui/app_container.tsx @@ -17,95 +17,60 @@ * under the License. */ -import React from 'react'; -import { RouteComponentProps } from 'react-router-dom'; -import { Subject } from 'rxjs'; - -import { LegacyApp, AppMount, AppUnmount } from '../types'; -import { HttpStart } from '../../http'; +import React, { + Fragment, + FunctionComponent, + useLayoutEffect, + useRef, + useState, + MutableRefObject, +} from 'react'; + +import { AppUnmount, Mounter } from '../types'; import { AppNotFound } from './app_not_found_screen'; -interface Props extends RouteComponentProps<{ appId: string }> { - apps: ReadonlyMap; - legacyApps: ReadonlyMap; - basePath: HttpStart['basePath']; - currentAppId$: Subject; - /** - * Only necessary for redirecting to legacy apps - * @deprecated - */ - redirectTo: (path: string) => void; -} - -interface State { - appNotFound: boolean; -} - -export class AppContainer extends React.Component { - private readonly containerDiv = React.createRef(); - private unmountFunc?: AppUnmount; - - state: State = { appNotFound: false }; - - componentDidMount() { - this.mountApp(); - } - - componentWillUnmount() { - this.unmountApp(); - } - - componentDidUpdate(prevProps: Props) { - if (prevProps.match.params.appId !== this.props.match.params.appId) { - this.unmountApp(); - this.mountApp(); - } - } - - async mountApp() { - const { apps, legacyApps, match, basePath, currentAppId$, redirectTo } = this.props; - const { appId } = match.params; - - const mount = apps.get(appId); - if (mount) { - this.unmountFunc = await mount({ - appBasePath: basePath.prepend(`/app/${appId}`), - element: this.containerDiv.current!, - }); - currentAppId$.next(appId); - this.setState({ appNotFound: false }); - return; - } - - const legacyApp = findLegacyApp(appId, legacyApps); - if (legacyApp) { - this.unmountApp(); - redirectTo(basePath.prepend(`/app/${appId}`)); - this.setState({ appNotFound: false }); - return; - } - - this.setState({ appNotFound: true }); - } - - async unmountApp() { - if (this.unmountFunc) { - this.unmountFunc(); - this.unmountFunc = undefined; - } - } - - render() { - 
return ( - - {this.state.appNotFound && } -
- - ); - } +interface Props { + appId: string; + mounter?: Mounter; } -function findLegacyApp(appId: string, apps: ReadonlyMap) { - const matchingApps = [...apps.entries()].filter(([id]) => id.split(':')[0] === appId); - return matchingApps.length ? matchingApps[0][1] : null; -} +export const AppContainer: FunctionComponent = ({ mounter, appId }: Props) => { + const [appNotFound, setAppNotFound] = useState(false); + const elementRef = useRef(null); + const unmountRef: MutableRefObject = useRef(null); + + useLayoutEffect(() => { + const unmount = () => { + if (unmountRef.current) { + unmountRef.current(); + unmountRef.current = null; + } + }; + const mount = async () => { + if (!mounter) { + return setAppNotFound(true); + } + + if (mounter.unmountBeforeMounting) { + unmount(); + } + + unmountRef.current = + (await mounter.mount({ + appBasePath: mounter.appBasePath, + element: elementRef.current!, + })) || null; + setAppNotFound(false); + }; + + mount(); + return unmount; + }, [mounter]); + + return ( + + {appNotFound && } +
+ + ); +}; diff --git a/src/core/public/application/ui/app_router.tsx b/src/core/public/application/ui/app_router.tsx index 67701a33dabf4..8db46f9794277 100644 --- a/src/core/public/application/ui/app_router.tsx +++ b/src/core/public/application/ui/app_router.tsx @@ -17,37 +17,53 @@ * under the License. */ +import React, { FunctionComponent } from 'react'; import { History } from 'history'; -import React from 'react'; -import { Router, Route } from 'react-router-dom'; -import { Subject } from 'rxjs'; +import { Router, Route, RouteComponentProps, Switch } from 'react-router-dom'; -import { LegacyApp, AppMount } from '../types'; +import { Mounter } from '../types'; import { AppContainer } from './app_container'; -import { HttpStart } from '../../http'; interface Props { - apps: ReadonlyMap; - legacyApps: ReadonlyMap; - basePath: HttpStart['basePath']; - currentAppId$: Subject; + mounters: Map; history: History; - /** - * Only necessary for redirecting to legacy apps - * @deprecated - */ - redirectTo?: (path: string) => void; } -export const AppRouter: React.FunctionComponent = ({ - history, - redirectTo = (path: string) => (window.location.href = path), - ...otherProps -}) => ( +interface Params { + appId: string; +} + +export const AppRouter: FunctionComponent = ({ history, mounters }) => ( - } - /> + + {[...mounters].flatMap(([appId, mounter]) => + // Remove /app paths from the routes as they will be handled by the + // "named" route parameter `:appId` below + mounter.appBasePath.startsWith('/app') + ? [] + : [ + } + />, + ] + )} + ) => { + // Find the mounter including legacy mounters with subapps: + const [id, mounter] = mounters.has(appId) + ? [appId, mounters.get(appId)] + : [...mounters].filter(([key]) => key.split(':')[0] === appId)[0] ?? []; + + return ; + }} + /> + ); diff --git a/src/core/public/chrome/chrome_service.test.ts b/src/core/public/chrome/chrome_service.test.ts index 9656739421686..d9c35b20db03b 100644 --- a/src/core/public/chrome/chrome_service.test.ts +++ b/src/core/public/chrome/chrome_service.test.ts @@ -211,14 +211,14 @@ describe('start', () => { new FakeApp('beta', true), new FakeApp('gamma', false), ]); - const { availableApps, currentAppId$ } = startDeps.application; + const { availableApps, navigateToApp } = startDeps.application; const { chrome, service } = await start({ startDeps }); const promise = chrome .getIsVisible$() .pipe(toArray()) .toPromise(); - [...availableApps.keys()].forEach(appId => currentAppId$.next(appId)); + [...availableApps.keys()].forEach(appId => navigateToApp(appId)); service.stop(); await expect(promise).resolves.toMatchInlineSnapshot(` @@ -233,14 +233,14 @@ describe('start', () => { it('changing visibility has no effect on chrome-hiding application', async () => { const startDeps = defaultStartDeps([new FakeApp('alpha', true)]); - const { currentAppId$ } = startDeps.application; + const { navigateToApp } = startDeps.application; const { chrome, service } = await start({ startDeps }); const promise = chrome .getIsVisible$() .pipe(toArray()) .toPromise(); - currentAppId$.next('alpha'); + navigateToApp('alpha'); chrome.setIsVisible(true); service.stop(); diff --git a/src/core/public/chrome/chrome_service.tsx b/src/core/public/chrome/chrome_service.tsx index 25c00836a4db7..18c0c9870d72f 100644 --- a/src/core/public/chrome/chrome_service.tsx +++ b/src/core/public/chrome/chrome_service.tsx @@ -127,7 +127,7 @@ export class ChromeService { ) ); this.isVisible$ = combineLatest(this.appHidden$, this.toggleHidden$).pipe( - map(([appHidden, 
chromeHidden]) => !(appHidden || chromeHidden)), + map(([appHidden, toggleHidden]) => !(appHidden || toggleHidden)), takeUntil(this.stop$) ); } diff --git a/src/core/public/core_system.ts b/src/core/public/core_system.ts index abc4c144356e8..485c11aae6508 100644 --- a/src/core/public/core_system.ts +++ b/src/core/public/core_system.ts @@ -174,7 +174,7 @@ export class CoreSystem { [this.legacy.legacyId, [...pluginDependencies.keys()]], ]), }); - const application = this.application.setup({ context }); + const application = this.application.setup({ context, http, injectedMetadata }); const core: InternalCoreSetup = { application, @@ -211,7 +211,7 @@ export class CoreSystem { const injectedMetadata = await this.injectedMetadata.start(); const uiSettings = await this.uiSettings.start(); const docLinks = await this.docLinks.start({ injectedMetadata }); - const http = await this.http.start({ injectedMetadata, fatalErrors: this.fatalErrorsSetup }); + const http = await this.http.start({ injectedMetadata, fatalErrors: this.fatalErrorsSetup! }); const savedObjects = await this.savedObjects.start({ http }); const i18n = await this.i18n.start(); const application = await this.application.start({ http, injectedMetadata }); @@ -307,6 +307,7 @@ export class CoreSystem { this.uiSettings.stop(); this.chrome.stop(); this.i18n.stop(); + this.application.stop(); this.rootDomElement.textContent = ''; } } diff --git a/src/core/public/fatal_errors/__snapshots__/fatal_errors_screen.test.tsx.snap b/src/core/public/fatal_errors/__snapshots__/fatal_errors_screen.test.tsx.snap index 08b72c35ee2d3..b9a4f775c2b56 100644 --- a/src/core/public/fatal_errors/__snapshots__/fatal_errors_screen.test.tsx.snap +++ b/src/core/public/fatal_errors/__snapshots__/fatal_errors_screen.test.tsx.snap @@ -2,20 +2,15 @@ exports[`rendering render matches snapshot 1`] = ` - + { - it(`allows paths that don't start with /`, () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('bar'); - }); - - it(`allows paths that end with '/'`, () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar/'); - }); -}); - -describe('#isAnonymous', () => { - it('returns true for registered paths', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); - }); - - it('returns true for paths registered with a trailing slash, but call "isAnonymous" with no trailing slash', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar/'); - expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); - }); - - it('returns true for paths registered without a trailing slash, but call "isAnonymous" with a trailing slash', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/foo/bar/')).toBe(true); - }); - - it('returns true for paths registered without a starting slash', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('bar'); - expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); - }); - - it('returns true for paths registered with a starting slash', () => { - const basePath = new 
BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); - }); - - it('when there is no basePath and calling "isAnonymous" without a starting slash, returns true for paths registered with a starting slash', () => { - const basePath = new BasePath('/'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('bar')).toBe(true); - }); - - it('when there is no basePath and calling "isAnonymous" with a starting slash, returns true for paths registered with a starting slash', () => { - const basePath = new BasePath('/'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/bar')).toBe(true); - }); - - it('returns true for paths whose capitalization is different', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/BAR'); - expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); - }); - - it('returns false for other paths', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/foo/foo')).toBe(false); - }); - - it('returns false for sub-paths of registered paths', () => { - const basePath = new BasePath('/foo'); - const anonymousPaths = new AnonymousPaths(basePath); - anonymousPaths.register('/bar'); - expect(anonymousPaths.isAnonymous('/foo/bar/baz')).toBe(false); - }); -}); diff --git a/src/core/public/http/anonymous_paths.ts b/src/core/public/http/anonymous_paths.ts deleted file mode 100644 index 300c4d64df353..0000000000000 --- a/src/core/public/http/anonymous_paths.ts +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { IAnonymousPaths, IBasePath } from 'src/core/public'; - -export class AnonymousPaths implements IAnonymousPaths { - private readonly paths = new Set(); - - constructor(private basePath: IBasePath) {} - - public isAnonymous(path: string): boolean { - const pathWithoutBasePath = this.basePath.remove(path); - return this.paths.has(this.normalizePath(pathWithoutBasePath)); - } - - public register(path: string) { - this.paths.add(this.normalizePath(path)); - } - - private normalizePath(path: string) { - // always lower-case it - let normalized = path.toLowerCase(); - - // remove the slash from the end - if (normalized.endsWith('/')) { - normalized = normalized.slice(0, normalized.length - 1); - } - - // put a slash at the start - if (!normalized.startsWith('/')) { - normalized = `/${normalized}`; - } - - // it's normalized!!! 
- return normalized; - } -} diff --git a/src/core/public/http/anonymous_paths_service.test.ts b/src/core/public/http/anonymous_paths_service.test.ts new file mode 100644 index 0000000000000..515715d9a613d --- /dev/null +++ b/src/core/public/http/anonymous_paths_service.test.ts @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { AnonymousPathsService } from './anonymous_paths_service'; +import { BasePath } from './base_path'; + +describe('#setup()', () => { + describe('#register', () => { + it(`allows paths that don't start with /`, () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('bar'); + }); + + it(`allows paths that end with '/'`, () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar/'); + }); + }); + + describe('#isAnonymous', () => { + it('returns true for registered paths', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); + }); + + it('returns true for paths registered with a trailing slash, but call "isAnonymous" with no trailing slash', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar/'); + expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); + }); + + it('returns true for paths registered without a trailing slash, but call "isAnonymous" with a trailing slash', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/foo/bar/')).toBe(true); + }); + + it('returns true for paths registered without a starting slash', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('bar'); + expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); + }); + + it('returns true for paths registered with a starting slash', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); + }); + + it('when there is no basePath and calling "isAnonymous" without a starting slash, returns true for paths registered with a starting slash', () => { + const basePath = new BasePath('/'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + 
anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('bar')).toBe(true); + }); + + it('when there is no basePath and calling "isAnonymous" with a starting slash, returns true for paths registered with a starting slash', () => { + const basePath = new BasePath('/'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/bar')).toBe(true); + }); + + it('returns true for paths whose capitalization is different', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/BAR'); + expect(anonymousPaths.isAnonymous('/foo/bar')).toBe(true); + }); + + it('returns false for other paths', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/foo/foo')).toBe(false); + }); + + it('returns false for sub-paths of registered paths', () => { + const basePath = new BasePath('/foo'); + const anonymousPaths = new AnonymousPathsService().setup({ basePath }); + anonymousPaths.register('/bar'); + expect(anonymousPaths.isAnonymous('/foo/bar/baz')).toBe(false); + }); + }); +}); diff --git a/src/core/public/http/anonymous_paths_service.ts b/src/core/public/http/anonymous_paths_service.ts new file mode 100644 index 0000000000000..ee9b3578c0270 --- /dev/null +++ b/src/core/public/http/anonymous_paths_service.ts @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { IAnonymousPaths, IBasePath } from 'src/core/public'; +import { CoreService } from '../../types'; + +interface Deps { + basePath: IBasePath; +} + +export class AnonymousPathsService implements CoreService { + private readonly paths = new Set(); + + public setup({ basePath }: Deps) { + return { + isAnonymous: (path: string): boolean => { + const pathWithoutBasePath = basePath.remove(path); + return this.paths.has(normalizePath(pathWithoutBasePath)); + }, + + register: (path: string) => { + this.paths.add(normalizePath(path)); + }, + + normalizePath, + }; + } + + public start(deps: Deps) { + return this.setup(deps); + } + + public stop() {} +} + +const normalizePath = (path: string) => { + // always lower-case it + let normalized = path.toLowerCase(); + + // remove the slash from the end + if (normalized.endsWith('/')) { + normalized = normalized.slice(0, normalized.length - 1); + } + + // put a slash at the start + if (!normalized.startsWith('/')) { + normalized = `/${normalized}`; + } + + // it's normalized!!! 
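+ // taken together, these steps make register() and isAnonymous() agree regardless of casing, a trailing slash, or a missing leading slash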
+ return normalized; +}; diff --git a/src/core/public/http/base_path_service.test.ts b/src/core/public/http/base_path.test.ts similarity index 98% rename from src/core/public/http/base_path_service.test.ts rename to src/core/public/http/base_path.test.ts index 65403c906e614..63b7fa61cee84 100644 --- a/src/core/public/http/base_path_service.test.ts +++ b/src/core/public/http/base_path.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { BasePath } from './base_path_service'; +import { BasePath } from './base_path'; describe('BasePath', () => { describe('#get()', () => { diff --git a/src/core/public/http/base_path_service.ts b/src/core/public/http/base_path.ts similarity index 100% rename from src/core/public/http/base_path_service.ts rename to src/core/public/http/base_path.ts diff --git a/src/core/public/http/fetch.test.ts b/src/core/public/http/fetch.test.ts new file mode 100644 index 0000000000000..adb3d696a962f --- /dev/null +++ b/src/core/public/http/fetch.test.ts @@ -0,0 +1,569 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +// @ts-ignore +import fetchMock from 'fetch-mock/es5/client'; +import { readFileSync } from 'fs'; +import { join } from 'path'; + +import { Fetch } from './fetch'; +import { BasePath } from './base_path'; +import { IHttpResponse } from './types'; + +function delay(duration: number) { + return new Promise(r => setTimeout(r, duration)); +} + +describe('Fetch', () => { + const fetchInstance = new Fetch({ + basePath: new BasePath('http://localhost/myBase'), + kibanaVersion: 'VERSION', + }); + afterEach(() => { + fetchMock.restore(); + }); + + describe('http requests', () => { + it('should use supplied request method', async () => { + fetchMock.post('*', {}); + await fetchInstance.fetch('/my/path', { method: 'POST' }); + + expect(fetchMock.lastOptions()!.method).toBe('POST'); + }); + + it('should use supplied Content-Type', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path', { headers: { 'Content-Type': 'CustomContentType' } }); + + expect(fetchMock.lastOptions()!.headers).toMatchObject({ + 'content-type': 'CustomContentType', + }); + }); + + it('should use supplied pathname and querystring', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path', { query: { a: 'b' } }); + + expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path?a=b'); + }); + + it('should use supplied headers', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path', { + headers: { myHeader: 'foo' }, + }); + + expect(fetchMock.lastOptions()!.headers).toEqual({ + 'content-type': 'application/json', + 'kbn-version': 'VERSION', + myheader: 'foo', + }); + }); + + it('should return response', async () => { + fetchMock.get('*', { foo: 'bar' }); + const json = await fetchInstance.fetch('/my/path'); + expect(json).toEqual({ foo: 'bar' }); + }); + + it('should prepend url with basepath by default', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path'); + expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path'); + }); + + it('should not prepend url with basepath when disabled', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('my/path', { prependBasePath: false }); + expect(fetchMock.lastUrl()).toBe('/my/path'); + }); + + it('should not include undefined query params', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path', { query: { a: undefined } }); + expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path'); + }); + + it('should make request with defaults', async () => { + fetchMock.get('*', {}); + await fetchInstance.fetch('/my/path'); + + const lastCall = fetchMock.lastCall(); + + expect(lastCall!.request.credentials).toBe('same-origin'); + expect(lastCall![1]).toMatchObject({ + method: 'GET', + headers: { + 'content-type': 'application/json', + 'kbn-version': 'VERSION', + }, + }); + }); + + it('should expose detailed response object when asResponse = true', async () => { + fetchMock.get('*', { foo: 'bar' }); + + const response = await fetchInstance.fetch('/my/path', { asResponse: true }); + + expect(response.request).toBeInstanceOf(Request); + expect(response.response).toBeInstanceOf(Response); + expect(response.body).toEqual({ foo: 'bar' }); + }); + + it('should reject on network error', async () => { + expect.assertions(1); + fetchMock.get('*', { status: 500 }); + + await expect(fetchInstance.fetch('/my/path')).rejects.toThrow(/Internal Server Error/); + }); + + it('should contain error message when throwing response', async () => { + 
fetchMock.get('*', { status: 404, body: { foo: 'bar' } }); + + await expect(fetchInstance.fetch('/my/path')).rejects.toMatchObject({ + message: 'Not Found', + body: { + foo: 'bar', + }, + response: { + status: 404, + url: 'http://localhost/myBase/my/path', + }, + }); + }); + + it('should support get() helper', async () => { + fetchMock.get('*', {}); + await fetchInstance.get('/my/path', { method: 'POST' }); + + expect(fetchMock.lastOptions()!.method).toBe('GET'); + }); + + it('should support head() helper', async () => { + fetchMock.head('*', {}); + await fetchInstance.head('/my/path', { method: 'GET' }); + + expect(fetchMock.lastOptions()!.method).toBe('HEAD'); + }); + + it('should support post() helper', async () => { + fetchMock.post('*', {}); + await fetchInstance.post('/my/path', { method: 'GET', body: '{}' }); + + expect(fetchMock.lastOptions()!.method).toBe('POST'); + }); + + it('should support put() helper', async () => { + fetchMock.put('*', {}); + await fetchInstance.put('/my/path', { method: 'GET', body: '{}' }); + + expect(fetchMock.lastOptions()!.method).toBe('PUT'); + }); + + it('should support patch() helper', async () => { + fetchMock.patch('*', {}); + await fetchInstance.patch('/my/path', { method: 'GET', body: '{}' }); + + expect(fetchMock.lastOptions()!.method).toBe('PATCH'); + }); + + it('should support delete() helper', async () => { + fetchMock.delete('*', {}); + await fetchInstance.delete('/my/path', { method: 'GET' }); + + expect(fetchMock.lastOptions()!.method).toBe('DELETE'); + }); + + it('should support options() helper', async () => { + fetchMock.mock('*', { method: 'OPTIONS' }); + await fetchInstance.options('/my/path', { method: 'GET' }); + + expect(fetchMock.lastOptions()!.method).toBe('OPTIONS'); + }); + + it('should make requests for NDJSON content', async () => { + const content = readFileSync(join(__dirname, '_import_objects.ndjson'), { + encoding: 'utf-8', + }); + const body = new FormData(); + + body.append('file', content); + fetchMock.post('*', { + body: content, + headers: { 'Content-Type': 'application/ndjson' }, + }); + + const data = await fetchInstance.post('/my/path', { + body, + headers: { + 'Content-Type': undefined, + }, + }); + + expect(data).toBeInstanceOf(Blob); + + const ndjson = await new Response(data).text(); + + expect(ndjson).toEqual(content); + }); + }); + + describe('interception', () => { + beforeEach(async () => { + fetchMock.get('*', { foo: 'bar' }); + }); + + afterEach(() => { + fetchMock.restore(); + fetchInstance.removeAllInterceptors(); + }); + + it('should make request and receive response', async () => { + fetchInstance.intercept({}); + + const body = await fetchInstance.fetch('/my/path'); + + expect(fetchMock.called()).toBe(true); + expect(body).toEqual({ foo: 'bar' }); + }); + + it('should be able to manipulate request instance', async () => { + fetchInstance.intercept({ + request(request) { + request.headers.set('Content-Type', 'CustomContentType'); + }, + }); + fetchInstance.intercept({ + request(request) { + return new Request('/my/route', request); + }, + }); + + const body = await fetchInstance.fetch('/my/path'); + + expect(fetchMock.called()).toBe(true); + expect(body).toEqual({ foo: 'bar' }); + expect(fetchMock.lastOptions()!.headers).toMatchObject({ + 'content-type': 'CustomContentType', + }); + expect(fetchMock.lastUrl()).toBe('/my/route'); + }); + + it('should call interceptors in correct order', async () => { + const order: string[] = []; + + fetchInstance.intercept({ + request() { + order.push('Request 1'); + 
}, + response() { + order.push('Response 1'); + }, + }); + fetchInstance.intercept({ + request() { + order.push('Request 2'); + }, + response() { + order.push('Response 2'); + }, + }); + fetchInstance.intercept({ + request() { + order.push('Request 3'); + }, + response() { + order.push('Response 3'); + }, + }); + + const body = await fetchInstance.fetch('/my/path'); + + expect(fetchMock.called()).toBe(true); + expect(body).toEqual({ foo: 'bar' }); + expect(order).toEqual([ + 'Request 3', + 'Request 2', + 'Request 1', + 'Response 1', + 'Response 2', + 'Response 3', + ]); + }); + + it('should skip remaining interceptors when controller halts during request', async () => { + const usedSpy = jest.fn(); + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ request: unusedSpy, response: unusedSpy }); + fetchInstance.intercept({ + request(request, controller) { + controller.halt(); + }, + response: unusedSpy, + }); + fetchInstance.intercept({ + request: usedSpy, + response: unusedSpy, + }); + + fetchInstance.fetch('/my/path').then(unusedSpy, unusedSpy); + await delay(1000); + + expect(unusedSpy).toHaveBeenCalledTimes(0); + expect(usedSpy).toHaveBeenCalledTimes(1); + expect(fetchMock.called()).toBe(false); + }); + + it('should skip remaining interceptors when controller halts during response', async () => { + const usedSpy = jest.fn(); + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ + request: usedSpy, + response(response, controller) { + controller.halt(); + }, + }); + fetchInstance.intercept({ request: usedSpy, response: unusedSpy }); + fetchInstance.intercept({ request: usedSpy, response: unusedSpy }); + + fetchInstance.fetch('/my/path').then(unusedSpy, unusedSpy); + await delay(1000); + + expect(fetchMock.called()).toBe(true); + expect(usedSpy).toHaveBeenCalledTimes(3); + expect(unusedSpy).toHaveBeenCalledTimes(0); + }); + + it('should skip remaining interceptors when controller halts during responseError', async () => { + fetchMock.post('*', 401); + + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ + responseError(response, controller) { + controller.halt(); + }, + }); + fetchInstance.intercept({ response: unusedSpy, responseError: unusedSpy }); + + fetchInstance.post('/my/path').then(unusedSpy, unusedSpy); + await delay(1000); + + expect(fetchMock.called()).toBe(true); + expect(unusedSpy).toHaveBeenCalledTimes(0); + }); + + it('should not fetch if exception occurs during request interception', async () => { + const usedSpy = jest.fn(); + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ + request: unusedSpy, + requestError: usedSpy, + response: unusedSpy, + responseError: unusedSpy, + }); + fetchInstance.intercept({ + request() { + throw new Error('Interception Error'); + }, + response: unusedSpy, + responseError: unusedSpy, + }); + fetchInstance.intercept({ request: usedSpy, response: unusedSpy, responseError: unusedSpy }); + + await expect(fetchInstance.fetch('/my/path')).rejects.toThrow(/Interception Error/); + expect(fetchMock.called()).toBe(false); + expect(unusedSpy).toHaveBeenCalledTimes(0); + expect(usedSpy).toHaveBeenCalledTimes(2); + }); + + it('should succeed if request throws but caught by interceptor', async () => { + const usedSpy = jest.fn(); + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ + request: unusedSpy, + requestError({ request }) { + return new Request('/my/route', request); + }, + response: usedSpy, + }); + fetchInstance.intercept({ + request() { + throw new Error('Interception Error'); + }, + response: usedSpy, + 
}); + fetchInstance.intercept({ request: usedSpy, response: usedSpy }); + + await expect(fetchInstance.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); + expect(fetchMock.called()).toBe(true); + expect(unusedSpy).toHaveBeenCalledTimes(0); + expect(usedSpy).toHaveBeenCalledTimes(4); + }); + + it('should accumulate request information', async () => { + const routes = ['alpha', 'beta', 'gamma']; + const createRequest = jest.fn( + (request: Request) => new Request(`/api/${routes.shift()}`, request) + ); + + fetchInstance.intercept({ + request: createRequest, + }); + fetchInstance.intercept({ + requestError(httpErrorRequest) { + return httpErrorRequest.request; + }, + }); + fetchInstance.intercept({ + request(request) { + throw new Error('Invalid'); + }, + }); + fetchInstance.intercept({ + request: createRequest, + }); + fetchInstance.intercept({ + request: createRequest, + }); + + await expect(fetchInstance.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); + expect(fetchMock.called()).toBe(true); + expect(routes.length).toBe(0); + expect(createRequest.mock.calls[0][0].url).toContain('/my/route'); + expect(createRequest.mock.calls[1][0].url).toContain('/api/alpha'); + expect(createRequest.mock.calls[2][0].url).toContain('/api/beta'); + expect(fetchMock.lastCall()!.request.url).toContain('/api/gamma'); + }); + + it('should accumulate response information', async () => { + const bodies = ['alpha', 'beta', 'gamma']; + const createResponse = jest.fn((httpResponse: IHttpResponse) => ({ + body: bodies.shift(), + })); + + fetchInstance.intercept({ + response: createResponse, + }); + fetchInstance.intercept({ + response: createResponse, + }); + fetchInstance.intercept({ + response(httpResponse) { + throw new Error('Invalid'); + }, + }); + fetchInstance.intercept({ + responseError({ error, ...httpResponse }) { + return httpResponse; + }, + }); + fetchInstance.intercept({ + response: createResponse, + }); + + await expect(fetchInstance.fetch('/my/route')).resolves.toEqual('gamma'); + expect(fetchMock.called()).toBe(true); + expect(bodies.length).toBe(0); + expect(createResponse.mock.calls[0][0].body).toEqual({ foo: 'bar' }); + expect(createResponse.mock.calls[1][0].body).toBe('alpha'); + expect(createResponse.mock.calls[2][0].body).toBe('beta'); + }); + + describe('request availability during interception', () => { + it('should be available to responseError when response throws', async () => { + let spiedRequest: Request | undefined; + + fetchInstance.intercept({ + response() { + throw new Error('Internal Server Error'); + }, + }); + fetchInstance.intercept({ + responseError({ request }) { + spiedRequest = request; + }, + }); + + await expect(fetchInstance.fetch('/my/path')).rejects.toThrow(); + expect(fetchMock.called()).toBe(true); + expect(spiedRequest).toBeDefined(); + }); + }); + + describe('response availability during interception', () => { + it('should be available to responseError when network request fails', async () => { + fetchMock.restore(); + fetchMock.get('*', { status: 500 }); + + let spiedResponse: Response | undefined; + + fetchInstance.intercept({ + responseError({ response }) { + spiedResponse = response; + }, + }); + + await expect(fetchInstance.fetch('/my/path')).rejects.toThrow(); + expect(spiedResponse).toBeDefined(); + }); + }); + + it('should actually halt request interceptors in reverse order', async () => { + const unusedSpy = jest.fn(); + + fetchInstance.intercept({ request: unusedSpy }); + fetchInstance.intercept({ + request(request, controller) { + 
controller.halt(); + }, + }); + + fetchInstance.fetch('/my/path'); + await delay(500); + + expect(unusedSpy).toHaveBeenCalledTimes(0); + }); + + it('should recover from failing request interception via request error interceptor', async () => { + const usedSpy = jest.fn(); + + fetchInstance.intercept({ + requestError(httpErrorRequest) { + return httpErrorRequest.request; + }, + response: usedSpy, + }); + + fetchInstance.intercept({ + request(request, controller) { + throw new Error('Request Error'); + }, + response: usedSpy, + }); + + await expect(fetchInstance.fetch('/my/path')).resolves.toEqual({ foo: 'bar' }); + expect(usedSpy).toHaveBeenCalledTimes(2); + }); + }); +}); diff --git a/src/core/public/http/fetch.ts b/src/core/public/http/fetch.ts index 472b617cacd7f..b86f1f5c08029 100644 --- a/src/core/public/http/fetch.ts +++ b/src/core/public/http/fetch.ts @@ -35,20 +35,30 @@ interface Params { const JSON_CONTENT = /^(application\/(json|x-javascript)|text\/(x-)?javascript|x-json)(;.*)?$/; const NDJSON_CONTENT = /^(application\/ndjson)(;.*)?$/; -export class FetchService { +export class Fetch { private readonly interceptors = new Set(); constructor(private readonly params: Params) {} public intercept(interceptor: HttpInterceptor) { this.interceptors.add(interceptor); - return () => this.interceptors.delete(interceptor); + return () => { + this.interceptors.delete(interceptor); + }; } public removeAllInterceptors() { this.interceptors.clear(); } + public readonly delete = this.shorthand('DELETE'); + public readonly get = this.shorthand('GET'); + public readonly head = this.shorthand('HEAD'); + public readonly options = this.shorthand('options'); + public readonly patch = this.shorthand('PATCH'); + public readonly post = this.shorthand('POST'); + public readonly put = this.shorthand('PUT'); + public fetch: HttpHandler = async ( path: string, options: HttpFetchOptions = {} @@ -152,4 +162,9 @@ export class FetchService { return new HttpResponse({ request, response, body }); } + + private shorthand(method: string) { + return (path: string, options: HttpFetchOptions = {}) => + this.fetch(path, { ...options, method }); + } } diff --git a/src/core/public/http/http_service.mock.ts b/src/core/public/http/http_service.mock.ts index 5887e7b3e96d0..1111fd39ec78e 100644 --- a/src/core/public/http/http_service.mock.ts +++ b/src/core/public/http/http_service.mock.ts @@ -20,7 +20,7 @@ import { HttpService } from './http_service'; import { HttpSetup } from './types'; import { BehaviorSubject } from 'rxjs'; -import { BasePath } from './base_path_service'; +import { BasePath } from './base_path'; export type HttpSetupMock = jest.Mocked & { basePath: BasePath; @@ -41,15 +41,13 @@ const createServiceMock = ({ basePath = '' } = {}): HttpSetupMock => ({ register: jest.fn(), isAnonymous: jest.fn(), }, - addLoadingCount: jest.fn(), + addLoadingCountSource: jest.fn(), getLoadingCount$: jest.fn().mockReturnValue(new BehaviorSubject(0)), - stop: jest.fn(), intercept: jest.fn(), - removeAllInterceptors: jest.fn(), }); const createMock = ({ basePath = '' } = {}) => { - const mocked: jest.Mocked> = { + const mocked: jest.Mocked> = { setup: jest.fn(), start: jest.fn(), stop: jest.fn(), diff --git a/src/legacy/ui/public/saved_objects/saved_object_registry.ts b/src/core/public/http/http_service.test.mocks.ts similarity index 76% rename from src/legacy/ui/public/saved_objects/saved_object_registry.ts rename to src/core/public/http/http_service.test.mocks.ts index 34b91267bfb32..e60dad0509699 100644 --- 
a/src/legacy/ui/public/saved_objects/saved_object_registry.ts +++ b/src/core/public/http/http_service.test.mocks.ts @@ -17,10 +17,9 @@ * under the License. */ -import { uiRegistry } from '../registry/_registry'; +import { loadingCountServiceMock } from './loading_count_service.mock'; -export const SavedObjectRegistryProvider = uiRegistry({ - name: 'savedObjects', - index: ['loaderProperties.name'], - order: ['loaderProperties.name'], -}); +export const loadingServiceMock = loadingCountServiceMock.create(); +jest.doMock('./loading_count_service', () => ({ + LoadingCountService: jest.fn(() => loadingServiceMock), +})); diff --git a/src/core/public/http/http_service.test.ts b/src/core/public/http/http_service.test.ts index 09f3cca419e4d..f95d25d116976 100644 --- a/src/core/public/http/http_service.test.ts +++ b/src/core/public/http/http_service.test.ts @@ -17,692 +17,22 @@ * under the License. */ -import * as Rx from 'rxjs'; -import { toArray } from 'rxjs/operators'; // @ts-ignore import fetchMock from 'fetch-mock/es5/client'; -import { readFileSync } from 'fs'; -import { join } from 'path'; -import { setup, SetupTap } from '../../../test_utils/public/http_test_setup'; -import { IHttpResponse } from './types'; - -function delay(duration: number) { - return new Promise(r => setTimeout(r, duration)); -} - -const setupFakeBasePath: SetupTap = injectedMetadata => { - injectedMetadata.getBasePath.mockReturnValue('/foo/bar'); -}; - -describe('basePath.get()', () => { - it('returns an empty string if no basePath is injected', () => { - const { http } = setup(injectedMetadata => { - injectedMetadata.getBasePath.mockReturnValue(undefined as any); - }); - - expect(http.basePath.get()).toBe(''); - }); - - it('returns the injected basePath', () => { - const { http } = setup(setupFakeBasePath); - - expect(http.basePath.get()).toBe('/foo/bar'); - }); -}); - -describe('http requests', () => { - afterEach(() => { - fetchMock.restore(); - }); - - it('should use supplied request method', async () => { - const { http } = setup(); - - fetchMock.post('*', {}); - await http.fetch('/my/path', { method: 'POST' }); - - expect(fetchMock.lastOptions()!.method).toBe('POST'); - }); - - it('should use supplied Content-Type', async () => { - const { http } = setup(); - - fetchMock.get('*', {}); - await http.fetch('/my/path', { headers: { 'Content-Type': 'CustomContentType' } }); - - expect(fetchMock.lastOptions()!.headers).toMatchObject({ - 'content-type': 'CustomContentType', - }); - }); - - it('should use supplied pathname and querystring', async () => { - const { http } = setup(); - - fetchMock.get('*', {}); - await http.fetch('/my/path', { query: { a: 'b' } }); - - expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path?a=b'); - }); - - it('should use supplied headers', async () => { - const { http } = setup(); - - fetchMock.get('*', {}); - await http.fetch('/my/path', { - headers: { myHeader: 'foo' }, - }); - - expect(fetchMock.lastOptions()!.headers).toEqual({ - 'content-type': 'application/json', - 'kbn-version': 'kibanaVersion', - myheader: 'foo', - }); - }); - - it('should return response', async () => { - const { http } = setup(); - fetchMock.get('*', { foo: 'bar' }); - const json = await http.fetch('/my/path'); - expect(json).toEqual({ foo: 'bar' }); - }); - - it('should prepend url with basepath by default', async () => { - const { http } = setup(); - fetchMock.get('*', {}); - await http.fetch('/my/path'); - expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path'); - }); - - it('should not 
prepend url with basepath when disabled', async () => { - const { http } = setup(); - fetchMock.get('*', {}); - await http.fetch('my/path', { prependBasePath: false }); - expect(fetchMock.lastUrl()).toBe('/my/path'); - }); - - it('should not include undefined query params', async () => { - const { http } = setup(); - fetchMock.get('*', {}); - await http.fetch('/my/path', { query: { a: undefined } }); - expect(fetchMock.lastUrl()).toBe('http://localhost/myBase/my/path'); - }); - - it('should make request with defaults', async () => { - const { http } = setup(); - - fetchMock.get('*', {}); - await http.fetch('/my/path'); - - const lastCall = fetchMock.lastCall(); - - expect(lastCall!.request.credentials).toBe('same-origin'); - expect(lastCall![1]).toMatchObject({ - method: 'GET', - headers: { - 'content-type': 'application/json', - 'kbn-version': 'kibanaVersion', - }, - }); - }); - - it('should expose detailed response object when asResponse = true', async () => { - const { http } = setup(); - - fetchMock.get('*', { foo: 'bar' }); - - const response = await http.fetch('/my/path', { asResponse: true }); - - expect(response.request).toBeInstanceOf(Request); - expect(response.response).toBeInstanceOf(Response); - expect(response.body).toEqual({ foo: 'bar' }); - }); - - it('should reject on network error', async () => { - const { http } = setup(); - - expect.assertions(1); - fetchMock.get('*', { status: 500 }); - - await expect(http.fetch('/my/path')).rejects.toThrow(/Internal Server Error/); - }); - - it('should contain error message when throwing response', async () => { - const { http } = setup(); - - fetchMock.get('*', { status: 404, body: { foo: 'bar' } }); - - await expect(http.fetch('/my/path')).rejects.toMatchObject({ - message: 'Not Found', - body: { - foo: 'bar', - }, - response: { - status: 404, - url: 'http://localhost/myBase/my/path', - }, - }); - }); - - it('should support get() helper', async () => { - const { http } = setup(); - - fetchMock.get('*', {}); - await http.get('/my/path', { method: 'POST' }); - - expect(fetchMock.lastOptions()!.method).toBe('GET'); - }); - - it('should support head() helper', async () => { - const { http } = setup(); - - fetchMock.head('*', {}); - await http.head('/my/path', { method: 'GET' }); - - expect(fetchMock.lastOptions()!.method).toBe('HEAD'); - }); - - it('should support post() helper', async () => { - const { http } = setup(); - - fetchMock.post('*', {}); - await http.post('/my/path', { method: 'GET', body: '{}' }); - - expect(fetchMock.lastOptions()!.method).toBe('POST'); - }); - - it('should support put() helper', async () => { - const { http } = setup(); - - fetchMock.put('*', {}); - await http.put('/my/path', { method: 'GET', body: '{}' }); - - expect(fetchMock.lastOptions()!.method).toBe('PUT'); - }); - - it('should support patch() helper', async () => { - const { http } = setup(); - - fetchMock.patch('*', {}); - await http.patch('/my/path', { method: 'GET', body: '{}' }); - - expect(fetchMock.lastOptions()!.method).toBe('PATCH'); - }); - - it('should support delete() helper', async () => { - const { http } = setup(); - - fetchMock.delete('*', {}); - await http.delete('/my/path', { method: 'GET' }); - - expect(fetchMock.lastOptions()!.method).toBe('DELETE'); - }); - - it('should support options() helper', async () => { - const { http } = setup(); - - fetchMock.mock('*', { method: 'OPTIONS' }); - await http.options('/my/path', { method: 'GET' }); - - expect(fetchMock.lastOptions()!.method).toBe('OPTIONS'); - }); - - it('should make 
requests for NDJSON content', async () => { - const { http } = setup(); - const content = readFileSync(join(__dirname, '_import_objects.ndjson'), { encoding: 'utf-8' }); - const body = new FormData(); - - body.append('file', content); - fetchMock.post('*', { - body: content, - headers: { 'Content-Type': 'application/ndjson' }, - }); - - const data = await http.post('/my/path', { - body, - headers: { - 'Content-Type': undefined, - }, - }); - - expect(data).toBeInstanceOf(Blob); - - const ndjson = await new Response(data).text(); - - expect(ndjson).toEqual(content); - }); -}); - -describe('interception', () => { - const { http } = setup(); - - beforeEach(() => { - fetchMock.get('*', { foo: 'bar' }); - }); - - afterEach(() => { - fetchMock.restore(); - http.removeAllInterceptors(); - }); - - it('should make request and receive response', async () => { - http.intercept({}); - - const body = await http.fetch('/my/path'); - - expect(fetchMock.called()).toBe(true); - expect(body).toEqual({ foo: 'bar' }); - }); - - it('should be able to manipulate request instance', async () => { - http.intercept({ - request(request) { - request.headers.set('Content-Type', 'CustomContentType'); - }, - }); - http.intercept({ - request(request) { - return new Request('/my/route', request); - }, - }); - - const body = await http.fetch('/my/path'); - - expect(fetchMock.called()).toBe(true); - expect(body).toEqual({ foo: 'bar' }); - expect(fetchMock.lastOptions()!.headers).toMatchObject({ - 'content-type': 'CustomContentType', - }); - expect(fetchMock.lastUrl()).toBe('/my/route'); - }); - - it('should call interceptors in correct order', async () => { - const order: string[] = []; - - http.intercept({ - request() { - order.push('Request 1'); - }, - response() { - order.push('Response 1'); - }, - }); - http.intercept({ - request() { - order.push('Request 2'); - }, - response() { - order.push('Response 2'); - }, - }); - http.intercept({ - request() { - order.push('Request 3'); - }, - response() { - order.push('Response 3'); - }, - }); - - const body = await http.fetch('/my/path'); - - expect(fetchMock.called()).toBe(true); - expect(body).toEqual({ foo: 'bar' }); - expect(order).toEqual([ - 'Request 3', - 'Request 2', - 'Request 1', - 'Response 1', - 'Response 2', - 'Response 3', - ]); - }); - - it('should skip remaining interceptors when controller halts during request', async () => { - const usedSpy = jest.fn(); - const unusedSpy = jest.fn(); - - http.intercept({ request: unusedSpy, response: unusedSpy }); - http.intercept({ - request(request, controller) { - controller.halt(); - }, - response: unusedSpy, - }); - http.intercept({ - request: usedSpy, - response: unusedSpy, - }); - - http.fetch('/my/path').then(unusedSpy, unusedSpy); - await delay(1000); - - expect(unusedSpy).toHaveBeenCalledTimes(0); - expect(usedSpy).toHaveBeenCalledTimes(1); - expect(fetchMock.called()).toBe(false); - }); - - it('should skip remaining interceptors when controller halts during response', async () => { - const usedSpy = jest.fn(); - const unusedSpy = jest.fn(); - - http.intercept({ - request: usedSpy, - response(response, controller) { - controller.halt(); - }, - }); - http.intercept({ request: usedSpy, response: unusedSpy }); - http.intercept({ request: usedSpy, response: unusedSpy }); - - http.fetch('/my/path').then(unusedSpy, unusedSpy); - await delay(1000); - - expect(fetchMock.called()).toBe(true); - expect(usedSpy).toHaveBeenCalledTimes(3); - expect(unusedSpy).toHaveBeenCalledTimes(0); - }); - - it('should skip remaining 
interceptors when controller halts during responseError', async () => { - fetchMock.post('*', 401); - - const unusedSpy = jest.fn(); - - http.intercept({ - responseError(response, controller) { - controller.halt(); - }, - }); - http.intercept({ response: unusedSpy, responseError: unusedSpy }); - - http.post('/my/path').then(unusedSpy, unusedSpy); - await delay(1000); - - expect(fetchMock.called()).toBe(true); - expect(unusedSpy).toHaveBeenCalledTimes(0); - }); - - it('should not fetch if exception occurs during request interception', async () => { - const usedSpy = jest.fn(); - const unusedSpy = jest.fn(); - - http.intercept({ - request: unusedSpy, - requestError: usedSpy, - response: unusedSpy, - responseError: unusedSpy, - }); - http.intercept({ - request() { - throw new Error('Interception Error'); - }, - response: unusedSpy, - responseError: unusedSpy, - }); - http.intercept({ request: usedSpy, response: unusedSpy, responseError: unusedSpy }); - - await expect(http.fetch('/my/path')).rejects.toThrow(/Interception Error/); - expect(fetchMock.called()).toBe(false); - expect(unusedSpy).toHaveBeenCalledTimes(0); - expect(usedSpy).toHaveBeenCalledTimes(2); - }); - - it('should succeed if request throws but caught by interceptor', async () => { - const usedSpy = jest.fn(); - const unusedSpy = jest.fn(); - - http.intercept({ - request: unusedSpy, - requestError({ request }) { - return new Request('/my/route', request); - }, - response: usedSpy, - }); - http.intercept({ - request() { - throw new Error('Interception Error'); - }, - response: usedSpy, - }); - http.intercept({ request: usedSpy, response: usedSpy }); - - await expect(http.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); - expect(fetchMock.called()).toBe(true); - expect(unusedSpy).toHaveBeenCalledTimes(0); - expect(usedSpy).toHaveBeenCalledTimes(4); - }); - - it('should accumulate request information', async () => { - const routes = ['alpha', 'beta', 'gamma']; - const createRequest = jest.fn( - (request: Request) => new Request(`/api/${routes.shift()}`, request) - ); - - http.intercept({ - request: createRequest, - }); - http.intercept({ - requestError(httpErrorRequest) { - return httpErrorRequest.request; - }, - }); - http.intercept({ - request(request) { - throw new Error('Invalid'); - }, - }); - http.intercept({ - request: createRequest, - }); - http.intercept({ - request: createRequest, - }); - - await expect(http.fetch('/my/route')).resolves.toEqual({ foo: 'bar' }); - expect(fetchMock.called()).toBe(true); - expect(routes.length).toBe(0); - expect(createRequest.mock.calls[0][0].url).toContain('/my/route'); - expect(createRequest.mock.calls[1][0].url).toContain('/api/alpha'); - expect(createRequest.mock.calls[2][0].url).toContain('/api/beta'); - expect(fetchMock.lastCall()!.request.url).toContain('/api/gamma'); - }); - - it('should accumulate response information', async () => { - const bodies = ['alpha', 'beta', 'gamma']; - const createResponse = jest.fn((httpResponse: IHttpResponse) => ({ - body: bodies.shift(), - })); - - http.intercept({ - response: createResponse, - }); - http.intercept({ - response: createResponse, - }); - http.intercept({ - response(httpResponse) { - throw new Error('Invalid'); - }, - }); - http.intercept({ - responseError({ error, ...httpResponse }) { - return httpResponse; - }, - }); - http.intercept({ - response: createResponse, - }); - - await expect(http.fetch('/my/route')).resolves.toEqual('gamma'); - expect(fetchMock.called()).toBe(true); - expect(bodies.length).toBe(0); - 
expect(createResponse.mock.calls[0][0].body).toEqual({ foo: 'bar' }); - expect(createResponse.mock.calls[1][0].body).toBe('alpha'); - expect(createResponse.mock.calls[2][0].body).toBe('beta'); - }); - - describe('request availability during interception', () => { - it('should be available to responseError when response throws', async () => { - let spiedRequest: Request | undefined; - - http.intercept({ - response() { - throw new Error('Internal Server Error'); - }, - }); - http.intercept({ - responseError({ request }) { - spiedRequest = request; - }, - }); - - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(fetchMock.called()).toBe(true); - expect(spiedRequest).toBeDefined(); - }); - }); - - describe('response availability during interception', () => { - it('should be available to responseError when network request fails', async () => { - fetchMock.restore(); - fetchMock.get('*', { status: 500 }); - - let spiedResponse: Response | undefined; - - http.intercept({ - responseError({ response }) { - spiedResponse = response; - }, - }); - - await expect(http.fetch('/my/path')).rejects.toThrow(); - expect(spiedResponse).toBeDefined(); - }); - }); - - it('should actually halt request interceptors in reverse order', async () => { - const unusedSpy = jest.fn(); - - http.intercept({ request: unusedSpy }); - http.intercept({ - request(request, controller) { - controller.halt(); - }, - }); - - http.fetch('/my/path'); - await delay(500); - - expect(unusedSpy).toHaveBeenCalledTimes(0); - }); - - it('should recover from failing request interception via request error interceptor', async () => { - const usedSpy = jest.fn(); - - http.intercept({ - requestError(httpErrorRequest) { - return httpErrorRequest.request; - }, - response: usedSpy, - }); - - http.intercept({ - request(request, controller) { - throw new Error('Request Error'); - }, - response: usedSpy, - }); - - await expect(http.fetch('/my/path')).resolves.toEqual({ foo: 'bar' }); - expect(usedSpy).toHaveBeenCalledTimes(2); - }); -}); - -describe('addLoadingCount()', () => { - it('subscribes to passed in sources, unsubscribes on stop', () => { - const { httpService, http } = setup(); - - const unsubA = jest.fn(); - const subA = jest.fn().mockReturnValue(unsubA); - http.addLoadingCount(new Rx.Observable(subA)); - expect(subA).toHaveBeenCalledTimes(1); - expect(unsubA).not.toHaveBeenCalled(); - - const unsubB = jest.fn(); - const subB = jest.fn().mockReturnValue(unsubB); - http.addLoadingCount(new Rx.Observable(subB)); - expect(subB).toHaveBeenCalledTimes(1); - expect(unsubB).not.toHaveBeenCalled(); - +import { loadingServiceMock } from './http_service.test.mocks'; + +import { fatalErrorsServiceMock } from '../fatal_errors/fatal_errors_service.mock'; +import { injectedMetadataServiceMock } from '../injected_metadata/injected_metadata_service.mock'; +import { HttpService } from './http_service'; + +describe('#stop()', () => { + it('calls loadingCount.stop()', () => { + const injectedMetadata = injectedMetadataServiceMock.createSetupContract(); + const fatalErrors = fatalErrorsServiceMock.createSetupContract(); + const httpService = new HttpService(); + httpService.setup({ fatalErrors, injectedMetadata }); + httpService.start({ fatalErrors, injectedMetadata }); httpService.stop(); - - expect(subA).toHaveBeenCalledTimes(1); - expect(unsubA).toHaveBeenCalledTimes(1); - expect(subB).toHaveBeenCalledTimes(1); - expect(unsubB).toHaveBeenCalledTimes(1); - }); - - it('adds a fatal error if source observables emit an error', async () => { - 
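The interception tests above exercise the `http.intercept()` contract, which survives the refactor (it is re-exposed from the new `Fetch` service in `http_service.ts` below). As a rough sketch of how consuming code might use it — the function name, header, and import path are illustrative, not part of this change:

```ts
// Hypothetical consumer wiring; `http` is core's HttpSetup handed to a plugin's setup(core).
import { HttpSetup } from 'src/core/public';

export function registerTracingInterceptor(http: HttpSetup) {
  const unregister = http.intercept({
    request(request) {
      // Mutating (or returning a new) Request rewrites the outgoing call.
      request.headers.set('x-trace-id', 'example');
    },
    responseError({ response }, controller) {
      // halt() skips the remaining interceptors and leaves the fetch promise pending.
      if (response && response.status === 401) {
        controller.halt();
      }
    },
  });

  // intercept() hands back an unregister function; removeAllInterceptors() is removed in this change.
  return unregister;
}
```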
const { http, fatalErrors } = setup(); - - http.addLoadingCount(Rx.throwError(new Error('foo bar'))); - expect(fatalErrors.add.mock.calls).toMatchSnapshot(); - }); - - it('adds a fatal error if source observable emits a negative number', async () => { - const { http, fatalErrors } = setup(); - - http.addLoadingCount(Rx.of(1, 2, 3, 4, -9)); - expect(fatalErrors.add.mock.calls).toMatchSnapshot(); - }); -}); - -describe('getLoadingCount$()', () => { - it('emits 0 initially, the right count when sources emit their own count, and ends with zero', async () => { - const { httpService, http } = setup(); - - const countA$ = new Rx.Subject(); - const countB$ = new Rx.Subject(); - const countC$ = new Rx.Subject(); - const promise = http - .getLoadingCount$() - .pipe(toArray()) - .toPromise(); - - http.addLoadingCount(countA$); - http.addLoadingCount(countB$); - http.addLoadingCount(countC$); - - countA$.next(100); - countB$.next(10); - countC$.next(1); - countA$.complete(); - countB$.next(20); - countC$.complete(); - countB$.next(0); - - httpService.stop(); - expect(await promise).toMatchSnapshot(); - }); - - it('only emits when loading count changes', async () => { - const { httpService, http } = setup(); - - const count$ = new Rx.Subject(); - const promise = http - .getLoadingCount$() - .pipe(toArray()) - .toPromise(); - - http.addLoadingCount(count$); - count$.next(0); - count$.next(0); - count$.next(0); - count$.next(0); - count$.next(0); - count$.next(1); - count$.next(1); - httpService.stop(); - - expect(await promise).toMatchSnapshot(); + expect(loadingServiceMock.stop).toHaveBeenCalled(); }); }); diff --git a/src/core/public/http/http_service.ts b/src/core/public/http/http_service.ts index 477bcd6152d44..567cdd310cbdf 100644 --- a/src/core/public/http/http_service.ts +++ b/src/core/public/http/http_service.ts @@ -17,32 +17,52 @@ * under the License. 
*/ -import { HttpSetup, HttpStart, HttpServiceBase } from './types'; -import { setup } from './http_setup'; +import { HttpSetup, HttpStart } from './types'; import { InjectedMetadataSetup } from '../injected_metadata'; import { FatalErrorsSetup } from '../fatal_errors'; +import { BasePath } from './base_path'; +import { AnonymousPathsService } from './anonymous_paths_service'; +import { LoadingCountService } from './loading_count_service'; +import { Fetch } from './fetch'; +import { CoreService } from '../../types'; interface HttpDeps { injectedMetadata: InjectedMetadataSetup; - fatalErrors: FatalErrorsSetup | null; + fatalErrors: FatalErrorsSetup; } /** @internal */ -export class HttpService { - private service!: HttpServiceBase; +export class HttpService implements CoreService { + private readonly anonymousPaths = new AnonymousPathsService(); + private readonly loadingCount = new LoadingCountService(); - public setup(deps: HttpDeps): HttpSetup { - this.service = setup(deps.injectedMetadata, deps.fatalErrors); - return this.service; + public setup({ injectedMetadata, fatalErrors }: HttpDeps): HttpSetup { + const kibanaVersion = injectedMetadata.getKibanaVersion(); + const basePath = new BasePath(injectedMetadata.getBasePath()); + const fetchService = new Fetch({ basePath, kibanaVersion }); + const loadingCount = this.loadingCount.setup({ fatalErrors }); + + return { + basePath, + anonymousPaths: this.anonymousPaths.setup({ basePath }), + intercept: fetchService.intercept.bind(fetchService), + fetch: fetchService.fetch.bind(fetchService), + delete: fetchService.delete.bind(fetchService), + get: fetchService.get.bind(fetchService), + head: fetchService.head.bind(fetchService), + options: fetchService.options.bind(fetchService), + patch: fetchService.patch.bind(fetchService), + post: fetchService.post.bind(fetchService), + put: fetchService.put.bind(fetchService), + ...loadingCount, + }; } - public start(deps: HttpDeps): HttpStart { - return this.service || this.setup(deps); + public start(deps: HttpDeps) { + return this.setup(deps); } public stop() { - if (this.service) { - this.service.stop(); - } + this.loadingCount.stop(); } } diff --git a/src/core/public/http/http_setup.ts b/src/core/public/http/http_setup.ts deleted file mode 100644 index c63750849f13a..0000000000000 --- a/src/core/public/http/http_setup.ts +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
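With this refactor, `HttpService.setup()` returns the flat `HttpSetup` contract directly: base path helpers, `fetch` plus its method shorthands, and the loading-count APIs. A minimal sketch of a plugin consuming that contract — the plugin, route, and logging are made up for the example:

```ts
import { CoreSetup, Plugin } from 'src/core/public';

export class ExamplePlugin implements Plugin {
  public setup(core: CoreSetup) {
    // The method shorthands force their verb; any options.method passed in is overridden.
    core.http.get('/api/example/things').then(things => console.log(things));

    // fetch() defaults to GET, prepends the base path, and returns the parsed JSON body.
    core.http.fetch('/api/example/things', { query: { page: 1 } });

    // basePath lives directly on the contract, handy for building hrefs.
    const href = core.http.basePath.prepend('/app/example');
    console.log(href);
  }

  public start() {}
}
```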
- */ - -import { BehaviorSubject, Observable, Subject } from 'rxjs'; -import { - distinctUntilChanged, - endWith, - map, - pairwise, - startWith, - takeUntil, - tap, -} from 'rxjs/operators'; -import { InjectedMetadataSetup } from '../injected_metadata'; -import { FatalErrorsSetup } from '../fatal_errors'; -import { HttpFetchOptions, HttpServiceBase } from './types'; -import { HttpInterceptController } from './http_intercept_controller'; -import { HttpInterceptHaltError } from './http_intercept_halt_error'; -import { BasePath } from './base_path_service'; -import { AnonymousPaths } from './anonymous_paths'; -import { FetchService } from './fetch'; - -export function checkHalt(controller: HttpInterceptController, error?: Error) { - if (error instanceof HttpInterceptHaltError) { - throw error; - } else if (controller.halted) { - throw new HttpInterceptHaltError(); - } -} - -export const setup = ( - injectedMetadata: InjectedMetadataSetup, - fatalErrors: FatalErrorsSetup | null -): HttpServiceBase => { - const loadingCount$ = new BehaviorSubject(0); - const stop$ = new Subject(); - const kibanaVersion = injectedMetadata.getKibanaVersion(); - const basePath = new BasePath(injectedMetadata.getBasePath()); - const anonymousPaths = new AnonymousPaths(basePath); - - const fetchService = new FetchService({ basePath, kibanaVersion }); - - function shorthand(method: string) { - return (path: string, options: HttpFetchOptions = {}) => - fetchService.fetch(path, { ...options, method }); - } - - function stop() { - stop$.next(); - loadingCount$.complete(); - } - - function addLoadingCount(count$: Observable) { - count$ - .pipe( - distinctUntilChanged(), - - tap(count => { - if (count < 0) { - throw new Error( - 'Observables passed to loadingCount.add() must only emit positive numbers' - ); - } - }), - - // use takeUntil() so that we can finish each stream on stop() the same way we do when they complete, - // by removing the previous count from the total - takeUntil(stop$), - endWith(0), - startWith(0), - pairwise(), - map(([prev, next]) => next - prev) - ) - .subscribe({ - next: delta => { - loadingCount$.next(loadingCount$.getValue() + delta); - }, - error: error => { - if (fatalErrors) { - fatalErrors.add(error); - } - }, - }); - } - - function getLoadingCount$() { - return loadingCount$.pipe(distinctUntilChanged()); - } - - return { - stop, - basePath, - anonymousPaths, - intercept: fetchService.intercept.bind(fetchService), - removeAllInterceptors: fetchService.removeAllInterceptors.bind(fetchService), - fetch: fetchService.fetch.bind(fetchService), - delete: shorthand('DELETE'), - get: shorthand('GET'), - head: shorthand('HEAD'), - options: shorthand('OPTIONS'), - patch: shorthand('PATCH'), - post: shorthand('POST'), - put: shorthand('PUT'), - addLoadingCount, - getLoadingCount$, - }; -}; diff --git a/src/core/public/http/loading_count_service.mock.ts b/src/core/public/http/loading_count_service.mock.ts new file mode 100644 index 0000000000000..79928aa4b160d --- /dev/null +++ b/src/core/public/http/loading_count_service.mock.ts @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
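The deleted `addLoadingCount` above (and its replacement in `loading_count_service.ts` further down) turns every source observable into deltas, so a source that completes or is stopped automatically subtracts its last value from the running total. A stripped-down sketch of that pipeline with the same rxjs operators, outside of core:

```ts
import { BehaviorSubject, Observable, Subject, of } from 'rxjs';
import { endWith, map, pairwise, startWith, takeUntil } from 'rxjs/operators';

const total$ = new BehaviorSubject(0);
const stop$ = new Subject<void>();

function addSource(count$: Observable<number>) {
  count$
    .pipe(
      takeUntil(stop$), // stopping the service behaves like the source completing
      endWith(0),       // ...and completion resets this source's contribution
      startWith(0),
      pairwise(),
      map(([prev, next]) => next - prev) // emit only the change, not the absolute count
    )
    .subscribe(delta => total$.next(total$.getValue() + delta));
}

addSource(of(3)); // total$ emits 0 -> 3 -> 0, because of(3) completes immediately
```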
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { LoadingCountSetup, LoadingCountService } from './loading_count_service'; +import { BehaviorSubject } from 'rxjs'; + +const createSetupContractMock = () => { + const setupContract: jest.Mocked = { + addLoadingCountSource: jest.fn(), + getLoadingCount$: jest.fn(), + }; + setupContract.getLoadingCount$.mockReturnValue(new BehaviorSubject(0)); + return setupContract; +}; + +type LoadingCountServiceContract = PublicMethodsOf; +const createServiceMock = () => { + const mocked: jest.Mocked = { + setup: jest.fn(), + start: jest.fn(), + stop: jest.fn(), + }; + + mocked.setup.mockReturnValue(createSetupContractMock()); + mocked.start.mockReturnValue(createSetupContractMock()); + + return mocked; +}; + +export const loadingCountServiceMock = { + create: createServiceMock, + createSetupContract: createSetupContractMock, + createStartContract: createSetupContractMock, +}; diff --git a/src/core/public/http/loading_count_service.test.ts b/src/core/public/http/loading_count_service.test.ts new file mode 100644 index 0000000000000..3ba4d315178cc --- /dev/null +++ b/src/core/public/http/loading_count_service.test.ts @@ -0,0 +1,152 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
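The mock above follows core's usual `create*Mock` factory pattern. A possible consumer-side test, assuming a Jest environment (the test name and the code under test are invented for illustration):

```ts
import { of } from 'rxjs';
import { loadingCountServiceMock } from './loading_count_service.mock';

it('registers a loading source without a real service', () => {
  const loadingCount = loadingCountServiceMock.createSetupContract();

  // Code under test would normally receive this via core's http contract.
  loadingCount.addLoadingCountSource(of(1));

  expect(loadingCount.addLoadingCountSource).toHaveBeenCalledTimes(1);
  // The mock's getLoadingCount$() is pre-wired to a BehaviorSubject(0).
  loadingCount.getLoadingCount$().subscribe(count => expect(count).toBe(0));
});
```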
+ */ + +import { Observable, throwError, of, Subject } from 'rxjs'; +import { toArray } from 'rxjs/operators'; + +import { fatalErrorsServiceMock } from '../fatal_errors/fatal_errors_service.mock'; +import { LoadingCountService } from './loading_count_service'; + +describe('LoadingCountService', () => { + const setup = () => { + const fatalErrors = fatalErrorsServiceMock.createSetupContract(); + const service = new LoadingCountService(); + const loadingCount = service.setup({ fatalErrors }); + return { fatalErrors, loadingCount, service }; + }; + + describe('addLoadingCountSource()', () => { + it('subscribes to passed in sources, unsubscribes on stop', () => { + const { service, loadingCount } = setup(); + + const unsubA = jest.fn(); + const subA = jest.fn().mockReturnValue(unsubA); + loadingCount.addLoadingCountSource(new Observable(subA)); + expect(subA).toHaveBeenCalledTimes(1); + expect(unsubA).not.toHaveBeenCalled(); + + const unsubB = jest.fn(); + const subB = jest.fn().mockReturnValue(unsubB); + loadingCount.addLoadingCountSource(new Observable(subB)); + expect(subB).toHaveBeenCalledTimes(1); + expect(unsubB).not.toHaveBeenCalled(); + + service.stop(); + + expect(subA).toHaveBeenCalledTimes(1); + expect(unsubA).toHaveBeenCalledTimes(1); + expect(subB).toHaveBeenCalledTimes(1); + expect(unsubB).toHaveBeenCalledTimes(1); + }); + + it('adds a fatal error if source observables emit an error', () => { + const { loadingCount, fatalErrors } = setup(); + + loadingCount.addLoadingCountSource(throwError(new Error('foo bar'))); + expect(fatalErrors.add.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + [Error: foo bar], + ], + ] + `); + }); + + it('adds a fatal error if source observable emits a negative number', () => { + const { loadingCount, fatalErrors } = setup(); + + loadingCount.addLoadingCountSource(of(1, 2, 3, 4, -9)); + expect(fatalErrors.add.mock.calls).toMatchInlineSnapshot(` + Array [ + Array [ + [Error: Observables passed to loadingCount.add() must only emit positive numbers], + ], + ] + `); + }); + }); + + describe('getLoadingCount$()', () => { + it('emits 0 initially, the right count when sources emit their own count, and ends with zero', async () => { + const { service, loadingCount } = setup(); + + const countA$ = new Subject(); + const countB$ = new Subject(); + const countC$ = new Subject(); + const promise = loadingCount + .getLoadingCount$() + .pipe(toArray()) + .toPromise(); + + loadingCount.addLoadingCountSource(countA$); + loadingCount.addLoadingCountSource(countB$); + loadingCount.addLoadingCountSource(countC$); + + countA$.next(100); + countB$.next(10); + countC$.next(1); + countA$.complete(); + countB$.next(20); + countC$.complete(); + countB$.next(0); + + service.stop(); + expect(await promise).toMatchInlineSnapshot(` + Array [ + 0, + 100, + 110, + 111, + 11, + 21, + 20, + 0, + ] + `); + }); + + it('only emits when loading count changes', async () => { + const { service, loadingCount } = setup(); + + const count$ = new Subject(); + const promise = loadingCount + .getLoadingCount$() + .pipe(toArray()) + .toPromise(); + + loadingCount.addLoadingCountSource(count$); + count$.next(0); + count$.next(0); + count$.next(0); + count$.next(0); + count$.next(0); + count$.next(1); + count$.next(1); + service.stop(); + + expect(await promise).toMatchInlineSnapshot(` + Array [ + 0, + 1, + 0, + ] + `); + }); + }); +}); diff --git a/src/core/public/http/loading_count_service.ts b/src/core/public/http/loading_count_service.ts new file mode 100644 index 
0000000000000..14b945e0801ca --- /dev/null +++ b/src/core/public/http/loading_count_service.ts @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { BehaviorSubject, Observable, Subject } from 'rxjs'; +import { + distinctUntilChanged, + endWith, + map, + pairwise, + startWith, + takeUntil, + tap, +} from 'rxjs/operators'; +import { FatalErrorsSetup } from '../fatal_errors'; +import { CoreService } from '../../types'; + +/** @public */ +export interface LoadingCountSetup { + addLoadingCountSource(countSource$: Observable): void; + + getLoadingCount$(): Observable; +} + +/** + * See {@link LoadingCountSetup}. + * @public + */ +export type LoadingCountStart = LoadingCountSetup; + +/** @internal */ +export class LoadingCountService implements CoreService { + private readonly stop$ = new Subject(); + private readonly loadingCount$ = new BehaviorSubject(0); + + public setup({ fatalErrors }: { fatalErrors: FatalErrorsSetup }) { + return { + getLoadingCount$: () => this.loadingCount$.pipe(distinctUntilChanged()), + addLoadingCountSource: (count$: Observable) => { + count$ + .pipe( + distinctUntilChanged(), + + tap(count => { + if (count < 0) { + throw new Error( + 'Observables passed to loadingCount.add() must only emit positive numbers' + ); + } + }), + + // use takeUntil() so that we can finish each stream on stop() the same way we do when they complete, + // by removing the previous count from the total + takeUntil(this.stop$), + endWith(0), + startWith(0), + pairwise(), + map(([prev, next]) => next - prev) + ) + .subscribe({ + next: delta => { + this.loadingCount$.next(this.loadingCount$.getValue() + delta); + }, + error: error => fatalErrors.add(error), + }); + }, + }; + } + + public start({ fatalErrors }: { fatalErrors: FatalErrorsSetup }) { + return this.setup({ fatalErrors }); + } + + public stop() { + this.stop$.next(); + this.loadingCount$.complete(); + } +} diff --git a/src/core/public/http/types.ts b/src/core/public/http/types.ts index 48385a72325db..27ffddc79cf65 100644 --- a/src/core/public/http/types.ts +++ b/src/core/public/http/types.ts @@ -20,10 +20,7 @@ import { Observable } from 'rxjs'; /** @public */ -export interface HttpServiceBase { - /** @internal */ - stop(): void; - +export interface HttpSetup { /** * APIs for manipulating the basePath on URL segments. */ @@ -41,11 +38,6 @@ export interface HttpServiceBase { */ intercept(interceptor: HttpInterceptor): () => void; - /** - * Removes all configured interceptors. - */ - removeAllInterceptors(): void; - /** Makes an HTTP request. Defaults to a GET request unless overriden. See {@link HttpHandler} for options. */ fetch: HttpHandler; /** Makes an HTTP request with the DELETE method. See {@link HttpHandler} for options. 
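`LoadingCountService` is what backs `http.addLoadingCountSource()` and `getLoadingCount$()` on the public contract (the rename from `addLoadingCount` appears in `types.ts` below). A hedged sketch of a plugin feeding its own in-flight counter into it — the service, route, and import path are invented for the example:

```ts
import { BehaviorSubject } from 'rxjs';
import { HttpSetup } from 'src/core/public';

export class SlowSearchService {
  private readonly inFlight$ = new BehaviorSubject(0);

  constructor(private readonly http: HttpSetup) {
    // Core sums this with every other source and drives the global loading indicator.
    http.addLoadingCountSource(this.inFlight$.asObservable());
  }

  public async search(term: string) {
    this.inFlight$.next(this.inFlight$.getValue() + 1);
    try {
      return await this.http.get('/api/slow_search', { query: { term } });
    } finally {
      this.inFlight$.next(this.inFlight$.getValue() - 1);
    }
  }
}
```

Note that sources may emit zero but never a negative number; the counter above only ever decrements back to its previous value, so it stays within that contract.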
*/ @@ -68,7 +60,7 @@ export interface HttpServiceBase { * more than 0. * @param countSource$ an Observable to subscribe to for loading count updates. */ - addLoadingCount(countSource$: Observable): void; + addLoadingCountSource(countSource$: Observable): void; /** * Get the sum of all loading count sources as a single Observable. @@ -76,6 +68,12 @@ export interface HttpServiceBase { getLoadingCount$(): Observable; } +/** + * See {@link HttpSetup} + * @public + */ +export type HttpStart = HttpSetup; + /** * APIs for manipulating the basePath on URL segments. * @public @@ -112,18 +110,6 @@ export interface IAnonymousPaths { register(path: string): void; } -/** - * See {@link HttpServiceBase} - * @public - */ -export type HttpSetup = HttpServiceBase; - -/** - * See {@link HttpServiceBase} - * @public - */ -export type HttpStart = HttpServiceBase; - /** @public */ export interface HttpHeadersInit { [name: string]: any; diff --git a/src/core/public/index.ts b/src/core/public/index.ts index f83ca2564de8e..7488f9b973b71 100644 --- a/src/core/public/index.ts +++ b/src/core/public/index.ts @@ -121,7 +121,6 @@ export { } from './saved_objects'; export { - HttpServiceBase, HttpHeadersInit, HttpRequestInit, HttpFetchOptions, diff --git a/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap b/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap index 626c91b6a9668..9bd686776138f 100644 --- a/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap +++ b/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap @@ -31,7 +31,7 @@ Array [ ] `; -exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"
Flyout content
"`; +exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"
Flyout content
"`; exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 1`] = ` Array [ @@ -74,4 +74,4 @@ Array [ ] `; -exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; +exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; diff --git a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap index 3928c54f90179..131ec836f5252 100644 --- a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap +++ b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap @@ -29,7 +29,7 @@ Array [ ] `; -exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; +exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; exports[`ModalService openModal() with a currently active modal replaces the current modal with a new one 1`] = ` Array [ diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 83b4e67c1cb15..f61741571dc1d 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -18,6 +18,7 @@ import { UserProvidedValues as UserProvidedValues_2 } from 'src/core/server/type // @public export interface App extends AppBase { + appRoute?: string; chromeless?: boolean; mount: AppMount | AppMountDeprecated; } @@ -544,8 +545,8 @@ export interface HttpRequestInit { } // @public (undocumented) -export interface HttpServiceBase { - addLoadingCount(countSource$: Observable): void; +export interface HttpSetup { + addLoadingCountSource(countSource$: Observable): void; anonymousPaths: IAnonymousPaths; basePath: IBasePath; delete: HttpHandler; @@ -558,16 +559,10 @@ export interface HttpServiceBase { patch: HttpHandler; post: HttpHandler; put: HttpHandler; - removeAllInterceptors(): void; - // @internal (undocumented) - stop(): void; } // @public -export type HttpSetup = HttpServiceBase; - -// @public -export type HttpStart = HttpServiceBase; +export type HttpStart = HttpSetup; // @public export interface I18nStart { @@ -877,7 +872,7 @@ export interface SavedObjectsBulkUpdateOptions { // @public export class SavedObjectsClient { // @internal - constructor(http: HttpServiceBase); + constructor(http: HttpSetup); bulkCreate: (objects?: SavedObjectsBulkCreateObject[], options?: SavedObjectsBulkCreateOptions) => Promise>; bulkGet: (objects?: { id: string; diff --git a/src/core/public/saved_objects/saved_objects_client.ts b/src/core/public/saved_objects/saved_objects_client.ts index c71fe51956c28..dab98ee66cdb1 100644 --- a/src/core/public/saved_objects/saved_objects_client.ts +++ b/src/core/public/saved_objects/saved_objects_client.ts @@ -36,7 +36,7 @@ import { // eslint-disable-next-line @kbn/eslint/no-restricted-paths } from '../../../legacy/ui/public/error_auto_create_index/error_auto_create_index'; import { SimpleSavedObject } from './simple_saved_object'; -import { HttpFetchOptions, HttpServiceBase } from '../http'; +import { HttpFetchOptions, HttpSetup } from '../http'; type SavedObjectsFindOptions = Omit; @@ -158,7 +158,7 @@ export type SavedObjectsClientContract = PublicMethodsOf; * @public */ export class SavedObjectsClient { - private http: HttpServiceBase; + private http: HttpSetup; private batchQueue: BatchQueueEntry[]; /** @@ -194,7 +194,7 @@ export class SavedObjectsClient { ); /** @internal */ - constructor(http: HttpServiceBase) { + constructor(http: HttpSetup) { this.http = http; this.batchQueue = []; } diff --git a/src/core/public/ui_settings/ui_settings_service.test.ts b/src/core/public/ui_settings/ui_settings_service.test.ts index afb68c4844901..2747a78d93fa6 100644 --- a/src/core/public/ui_settings/ui_settings_service.test.ts +++ b/src/core/public/ui_settings/ui_settings_service.test.ts @@ -38,7 +38,7 @@ describe('#stop', () => { it('stops the uiSettingsClient and uiSettingsApi', async () => { const service = new UiSettingsService(); let loadingCount$: Rx.Observable; - defaultDeps.http.addLoadingCount.mockImplementation(obs$ => (loadingCount$ = obs$)); + defaultDeps.http.addLoadingCountSource.mockImplementation(obs$ => (loadingCount$ = obs$)); const client = service.setup(defaultDeps); service.stop(); diff --git a/src/core/public/ui_settings/ui_settings_service.ts b/src/core/public/ui_settings/ui_settings_service.ts index 5a03cd1cfeedc..1e01d15fa337b 
100644 --- a/src/core/public/ui_settings/ui_settings_service.ts +++ b/src/core/public/ui_settings/ui_settings_service.ts @@ -38,7 +38,7 @@ export class UiSettingsService { public setup({ http, injectedMetadata }: UiSettingsServiceDeps): IUiSettingsClient { this.uiSettingsApi = new UiSettingsApi(http); - http.addLoadingCount(this.uiSettingsApi.getLoadingCount$()); + http.addLoadingCountSource(this.uiSettingsApi.getLoadingCount$()); // TODO: Migrate away from legacyMetadata https://github.com/elastic/kibana/issues/22779 const legacyMetadata = injectedMetadata.getLegacyMetadata(); diff --git a/src/legacy/core_plugins/kibana/public/visualize/saved_visualizations/saved_visualization_register.js b/src/core/server/config/config.mock.ts similarity index 73% rename from src/legacy/core_plugins/kibana/public/visualize/saved_visualizations/saved_visualization_register.js rename to src/core/server/config/config.mock.ts index c50cda56c7151..e098fa142b9d1 100644 --- a/src/legacy/core_plugins/kibana/public/visualize/saved_visualizations/saved_visualization_register.js +++ b/src/core/server/config/config.mock.ts @@ -17,9 +17,18 @@ * under the License. */ -import { SavedObjectRegistryProvider } from 'ui/saved_objects/saved_object_registry'; -import './saved_visualizations'; +import { Config } from './config'; -SavedObjectRegistryProvider.register(savedVisualizations => { - return savedVisualizations; +type ConfigMock = jest.Mocked; + +const createConfigMock = (): ConfigMock => ({ + has: jest.fn(), + get: jest.fn(), + set: jest.fn(), + getFlattenedPaths: jest.fn(), + toRaw: jest.fn(), }); + +export const configMock = { + create: createConfigMock, +}; diff --git a/src/core/server/http/http_server.mocks.ts b/src/core/server/http/http_server.mocks.ts index 8469a1d23a44b..ba742292e9e83 100644 --- a/src/core/server/http/http_server.mocks.ts +++ b/src/core/server/http/http_server.mocks.ts @@ -77,7 +77,7 @@ function createKibanaRequestMock({ body: schema.object({}, { allowUnknowns: true }), query: schema.object({}, { allowUnknowns: true }), } - ) as KibanaRequest, Readonly<{}>, Readonly<{}>>; + ); } type DeepPartial = T extends any[] diff --git a/src/core/server/http/http_server.test.ts b/src/core/server/http/http_server.test.ts index 27d9f530050be..df357aeaf2731 100644 --- a/src/core/server/http/http_server.test.ts +++ b/src/core/server/http/http_server.test.ts @@ -27,10 +27,18 @@ import supertest from 'supertest'; import { ByteSizeValue, schema } from '@kbn/config-schema'; import { HttpConfig } from './http_config'; -import { Router } from './router'; +import { + Router, + KibanaRequest, + KibanaResponseFactory, + RequestHandler, + RouteValidationResultFactory, + RouteValidationFunction, +} from './router'; import { loggingServiceMock } from '../logging/logging_service.mock'; import { HttpServer } from './http_server'; import { Readable } from 'stream'; +import { RequestHandlerContext } from 'kibana/server'; const cookieOptions = { name: 'sid', @@ -288,6 +296,229 @@ test('valid body', async () => { }); }); +test('valid body with validate function', async () => { + const router = new Router('/foo', logger, enhanceWithContext); + + router.post( + { + path: '/', + validate: { + body: ({ bar, baz } = {}, { ok, badRequest }) => { + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }, + }, + }, + (context, req, res) => { + return res.ok({ body: req.body }); + } + ); + + const { registerRouter, server: innerServer } = 
await server.setup(config); + registerRouter(router); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); +}); + +test('not inline validation - specifying params', async () => { + const router = new Router('/foo', logger, enhanceWithContext); + + const bodyValidation = ( + { bar, baz }: any = {}, + { ok, badRequest }: RouteValidationResultFactory + ) => { + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }; + + router.post( + { + path: '/', + validate: { + body: bodyValidation, + }, + }, + (context, req, res) => { + return res.ok({ body: req.body }); + } + ); + + const { registerRouter, server: innerServer } = await server.setup(config); + registerRouter(router); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); +}); + +test('not inline validation - specifying validation handler', async () => { + const router = new Router('/foo', logger, enhanceWithContext); + + const bodyValidation: RouteValidationFunction<{ bar: string; baz: number }> = ( + { bar, baz } = {}, + { ok, badRequest } + ) => { + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }; + + router.post( + { + path: '/', + validate: { + body: bodyValidation, + }, + }, + (context, req, res) => { + return res.ok({ body: req.body }); + } + ); + + const { registerRouter, server: innerServer } = await server.setup(config); + registerRouter(router); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); +}); + +// https://github.com/elastic/kibana/issues/47047 +test('not inline handler - KibanaRequest', async () => { + const router = new Router('/foo', logger, enhanceWithContext); + + const handler = ( + context: RequestHandlerContext, + req: KibanaRequest, + res: KibanaResponseFactory + ) => { + const body = { + bar: req.body.bar.toUpperCase(), + baz: req.body.baz.toString(), + }; + + return res.ok({ body }); + }; + + router.post( + { + path: '/', + validate: { + body: ({ bar, baz } = {}, { ok, badRequest }) => { + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }, + }, + }, + handler + ); + + const { registerRouter, server: innerServer } = await server.setup(config); + registerRouter(router); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'TEST', baz: '123' }); + }); +}); + +test('not inline handler - RequestHandler', async () => { + const router = new Router('/foo', logger, enhanceWithContext); + + const handler: RequestHandler = ( + context, + req, + res + ) => { + const body = { + bar: req.body.bar.toUpperCase(), + baz: req.body.baz.toString(), + }; + + return res.ok({ body }); + }; + + router.post( + { + path: '/', + validate: { + body: ({ bar, baz } = {}, { ok, badRequest }) => { + if (typeof bar 
=== 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }, + }, + }, + handler + ); + + const { registerRouter, server: innerServer } = await server.setup(config); + registerRouter(router); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'TEST', baz: '123' }); + }); +}); + test('invalid body', async () => { const router = new Router('/foo', logger, enhanceWithContext); diff --git a/src/core/server/http/http_service.mock.ts b/src/core/server/http/http_service.mock.ts index 1668b409050b7..700ae04f00d47 100644 --- a/src/core/server/http/http_service.mock.ts +++ b/src/core/server/http/http_service.mock.ts @@ -20,6 +20,7 @@ import { Server } from 'hapi'; import { CspConfig } from '../csp'; import { mockRouter } from './router/router.mock'; +import { configMock } from '../config/config.mock'; import { InternalHttpServiceSetup } from './types'; import { HttpService } from './http_service'; import { OnPreAuthToolkit } from './lifecycle/on_pre_auth'; @@ -28,13 +29,14 @@ import { sessionStorageMock } from './cookie_session_storage.mocks'; import { OnPostAuthToolkit } from './lifecycle/on_post_auth'; import { OnPreResponseToolkit } from './lifecycle/on_pre_response'; +type BasePathMocked = jest.Mocked; export type HttpServiceSetupMock = jest.Mocked & { - basePath: jest.Mocked; + basePath: BasePathMocked; }; -const createBasePathMock = (): jest.Mocked => ({ - serverBasePath: '/mock-server-basepath', - get: jest.fn(), +const createBasePathMock = (serverBasePath = '/mock-server-basepath'): BasePathMocked => ({ + serverBasePath, + get: jest.fn().mockReturnValue(serverBasePath), set: jest.fn(), prepend: jest.fn(), remove: jest.fn(), @@ -44,9 +46,12 @@ const createSetupContractMock = () => { const setupContract: HttpServiceSetupMock = { // we can mock other hapi server methods when we need it server: ({ + name: 'http-server-test', + version: 'kibana', route: jest.fn(), start: jest.fn(), stop: jest.fn(), + config: jest.fn().mockReturnValue(configMock.create()), } as unknown) as jest.MockedClass, createCookieSessionStorageFactory: jest.fn(), registerOnPreAuth: jest.fn(), diff --git a/src/core/server/http/index.ts b/src/core/server/http/index.ts index 21de3945f1044..55ba813484268 100644 --- a/src/core/server/http/index.ts +++ b/src/core/server/http/index.ts @@ -47,10 +47,16 @@ export { RouteMethod, RouteRegistrar, RouteConfigOptions, - RouteSchemas, RouteConfigOptionsBody, RouteContentType, validBodyOutput, + RouteValidatorConfig, + RouteValidationSpec, + RouteValidationFunction, + RouteValidatorOptions, + RouteValidationError, + RouteValidatorFullConfig, + RouteValidationResultFactory, } from './router'; export { BasePathProxyServer } from './base_path_proxy_server'; export { OnPreAuthHandler, OnPreAuthToolkit } from './lifecycle/on_pre_auth'; diff --git a/src/core/server/http/integration_tests/router.test.ts b/src/core/server/http/integration_tests/router.test.ts index 6117190c57ba8..c3b9b20d84865 100644 --- a/src/core/server/http/integration_tests/router.test.ts +++ b/src/core/server/http/integration_tests/router.test.ts @@ -642,6 +642,116 @@ describe('Response factory', () => { }); }); + it('validate function in body', async () => { + const { server: innerServer, createRouter } = await server.setup(setupDeps); + const router = createRouter('/foo'); + + router.post( + { + path: '/', + 
validate: { + body: ({ bar, baz } = {}, { ok, badRequest }) => { + if (typeof bar === 'string' && typeof baz === 'number') { + return ok({ bar, baz }); + } else { + return badRequest('Wrong payload', ['body']); + } + }, + }, + }, + (context, req, res) => { + return res.ok({ body: req.body }); + } + ); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: '123', + }) + .expect(400) + .then(res => { + expect(res.body).toEqual({ + error: 'Bad Request', + message: '[request body.body]: Wrong payload', + statusCode: 400, + }); + }); + }); + + it('@kbn/config-schema validation in body', async () => { + const { server: innerServer, createRouter } = await server.setup(setupDeps); + const router = createRouter('/foo'); + + router.post( + { + path: '/', + validate: { + body: schema.object({ + bar: schema.string(), + baz: schema.number(), + }), + }, + }, + (context, req, res) => { + return res.ok({ body: req.body }); + } + ); + + await server.start(); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 123, + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: '123', // Automatic casting happens + }) + .expect(200) + .then(res => { + expect(res.body).toEqual({ bar: 'test', baz: 123 }); + }); + + await supertest(innerServer.listener) + .post('/foo/') + .send({ + bar: 'test', + baz: 'test', // Can't cast it into number + }) + .expect(400) + .then(res => { + expect(res.body).toEqual({ + error: 'Bad Request', + message: '[request body.baz]: expected value of type [number] but got [string]', + statusCode: 400, + }); + }); + }); + it('401 Unauthorized', async () => { const { server: innerServer, createRouter } = await server.setup(setupDeps); const router = createRouter('/'); diff --git a/src/core/server/http/router/error_wrapper.ts b/src/core/server/http/router/error_wrapper.ts index c4b4d3840d1b9..8f895753c38c3 100644 --- a/src/core/server/http/router/error_wrapper.ts +++ b/src/core/server/http/router/error_wrapper.ts @@ -18,19 +18,18 @@ */ import Boom from 'boom'; -import { ObjectType, TypeOf } from '@kbn/config-schema'; import { KibanaRequest } from './request'; import { KibanaResponseFactory } from './response'; import { RequestHandler } from './router'; import { RequestHandlerContext } from '../../../server'; import { RouteMethod } from './route'; -export const wrapErrors =

( +export const wrapErrors = ( handler: RequestHandler ): RequestHandler => { return async ( context: RequestHandlerContext, - request: KibanaRequest, TypeOf, TypeOf, RouteMethod>, + request: KibanaRequest, response: KibanaResponseFactory ) => { try { diff --git a/src/core/server/http/router/index.ts b/src/core/server/http/router/index.ts index 35bfb3ba9c33a..084d30d694474 100644 --- a/src/core/server/http/router/index.ts +++ b/src/core/server/http/router/index.ts @@ -31,7 +31,6 @@ export { RouteMethod, RouteConfig, RouteConfigOptions, - RouteSchemas, RouteContentType, RouteConfigOptionsBody, validBodyOutput, @@ -55,3 +54,13 @@ export { } from './response'; export { IKibanaSocket } from './socket'; + +export { + RouteValidatorConfig, + RouteValidationSpec, + RouteValidationFunction, + RouteValidatorOptions, + RouteValidationError, + RouteValidatorFullConfig, + RouteValidationResultFactory, +} from './validator'; diff --git a/src/core/server/http/router/request.test.ts b/src/core/server/http/router/request.test.ts index ebb7ffa7a6fc9..51162a2c258e9 100644 --- a/src/core/server/http/router/request.test.ts +++ b/src/core/server/http/router/request.test.ts @@ -18,6 +18,7 @@ */ import { KibanaRequest } from './request'; import { httpServerMock } from '../http_server.mocks'; +import { schema } from '@kbn/config-schema'; describe('KibanaRequest', () => { describe('get all headers', () => { @@ -64,4 +65,56 @@ describe('KibanaRequest', () => { }); }); }); + + describe('RouteSchema type inferring', () => { + it('should work with config-schema', () => { + const body = Buffer.from('body!'); + const request = { + ...httpServerMock.createRawRequest({ + params: { id: 'params' }, + query: { search: 'query' }, + }), + payload: body, // Set outside because the mock is using `merge` by lodash and breaks the Buffer into arrays + } as any; + const kibanaRequest = KibanaRequest.from(request, { + params: schema.object({ id: schema.string() }), + query: schema.object({ search: schema.string() }), + body: schema.buffer(), + }); + expect(kibanaRequest.params).toStrictEqual({ id: 'params' }); + expect(kibanaRequest.params.id.toUpperCase()).toEqual('PARAMS'); // infers it's a string + expect(kibanaRequest.query).toStrictEqual({ search: 'query' }); + expect(kibanaRequest.query.search.toUpperCase()).toEqual('QUERY'); // infers it's a string + expect(kibanaRequest.body).toEqual(body); + expect(kibanaRequest.body.byteLength).toBeGreaterThan(0); // infers it's a buffer + }); + + it('should work with ValidationFunction', () => { + const body = Buffer.from('body!'); + const request = { + ...httpServerMock.createRawRequest({ + params: { id: 'params' }, + query: { search: 'query' }, + }), + payload: body, // Set outside because the mock is using `merge` by lodash and breaks the Buffer into arrays + } as any; + const kibanaRequest = KibanaRequest.from(request, { + params: schema.object({ id: schema.string() }), + query: schema.object({ search: schema.string() }), + body: (data, { ok, badRequest }) => { + if (Buffer.isBuffer(data)) { + return ok(data); + } else { + return badRequest('It should be a Buffer', []); + } + }, + }); + expect(kibanaRequest.params).toStrictEqual({ id: 'params' }); + expect(kibanaRequest.params.id.toUpperCase()).toEqual('PARAMS'); // infers it's a string + expect(kibanaRequest.query).toStrictEqual({ search: 'query' }); + expect(kibanaRequest.query.search.toUpperCase()).toEqual('QUERY'); // infers it's a string + expect(kibanaRequest.body).toEqual(body); + 
expect(kibanaRequest.body.byteLength).toBeGreaterThan(0); // infers it's a buffer + }); + }); }); diff --git a/src/core/server/http/router/request.ts b/src/core/server/http/router/request.ts index b132899910569..47b001700b015 100644 --- a/src/core/server/http/router/request.ts +++ b/src/core/server/http/router/request.ts @@ -20,13 +20,11 @@ import { Url } from 'url'; import { Request } from 'hapi'; -import { ObjectType, Type, TypeOf } from '@kbn/config-schema'; - -import { Stream } from 'stream'; import { deepFreeze, RecursiveReadonly } from '../../../utils'; import { Headers } from './headers'; -import { RouteMethod, RouteSchemas, RouteConfigOptions, validBodyOutput } from './route'; +import { RouteMethod, RouteConfigOptions, validBodyOutput } from './route'; import { KibanaSocket, IKibanaSocket } from './socket'; +import { RouteValidator, RouteValidatorFullConfig } from './validator'; const requestSymbol = Symbol('request'); @@ -70,12 +68,13 @@ export class KibanaRequest< * instance of a KibanaRequest. * @internal */ - public static from< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type - >(req: Request, routeSchemas?: RouteSchemas, withoutSecretHeaders: boolean = true) { - const requestParts = KibanaRequest.validate(req, routeSchemas); + public static from( + req: Request, + routeSchemas: RouteValidator | RouteValidatorFullConfig = {}, + withoutSecretHeaders: boolean = true + ) { + const routeValidator = RouteValidator.from(routeSchemas); + const requestParts = KibanaRequest.validate(req, routeValidator); return new KibanaRequest( req, requestParts.params, @@ -91,40 +90,17 @@ export class KibanaRequest< * received in the route handler. * @internal */ - private static validate< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type - >( + private static validate( req: Request, - routeSchemas: RouteSchemas | undefined + routeValidator: RouteValidator ): { - params: TypeOf

; - query: TypeOf; - body: TypeOf; + params: P; + query: Q; + body: B; } { - if (routeSchemas === undefined) { - return { - body: {}, - params: {}, - query: {}, - }; - } - - const params = - routeSchemas.params === undefined - ? {} - : routeSchemas.params.validate(req.params, {}, 'request params'); - - const query = - routeSchemas.query === undefined - ? {} - : routeSchemas.query.validate(req.query, {}, 'request query'); - - const body = - routeSchemas.body === undefined - ? {} - : routeSchemas.body.validate(req.payload, {}, 'request body'); + const params = routeValidator.getParams(req.params, 'request params'); + const query = routeValidator.getQuery(req.query, 'request query'); + const body = routeValidator.getBody(req.payload, 'request body'); return { query, params, body }; } diff --git a/src/core/server/http/router/route.ts b/src/core/server/http/router/route.ts index 129cf4c922ffd..4439a80b1eac7 100644 --- a/src/core/server/http/router/route.ts +++ b/src/core/server/http/router/route.ts @@ -17,8 +17,7 @@ * under the License. */ -import { ObjectType, Type } from '@kbn/config-schema'; -import { Stream } from 'stream'; +import { RouteValidatorFullConfig } from './validator'; /** * The set of common HTTP methods supported by Kibana routing. @@ -124,12 +123,7 @@ export interface RouteConfigOptions { * Route specific configuration. * @public */ -export interface RouteConfig< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type, - Method extends RouteMethod -> { +export interface RouteConfig { /** * The endpoint _within_ the router path to register the route. * @@ -201,25 +195,10 @@ export interface RouteConfig< * }); * ``` */ - validate: RouteSchemas | false; + validate: RouteValidatorFullConfig | false; /** * Additional route options {@link RouteConfigOptions}. */ options?: RouteConfigOptions; } - -/** - * RouteSchemas contains the schemas for validating the different parts of a - * request. 
- * @public - */ -export interface RouteSchemas< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type -> { - params?: P; - query?: Q; - body?: B; -} diff --git a/src/core/server/http/router/router.test.ts b/src/core/server/http/router/router.test.ts index f5469a95b5106..a936da6a40a9f 100644 --- a/src/core/server/http/router/router.test.ts +++ b/src/core/server/http/router/router.test.ts @@ -20,6 +20,7 @@ import { Router } from './router'; import { loggingServiceMock } from '../../logging/logging_service.mock'; import { schema } from '@kbn/config-schema'; + const logger = loggingServiceMock.create().get(); const enhanceWithContext = (fn: (...args: any[]) => any) => fn.bind(null, {}); @@ -38,12 +39,15 @@ describe('Router', () => { const router = new Router('', logger, enhanceWithContext); expect(() => router.get( - // we use 'any' because validate requires @kbn/config-schema usage - { path: '/', validate: { params: { validate: () => 'error' } } } as any, + // we use 'any' because validate requires valid Type or function usage + { + path: '/', + validate: { params: { validate: () => 'error' } } as any, + }, (context, req, res) => res.ok({}) ) ).toThrowErrorMatchingInlineSnapshot( - `"Expected a valid schema declared with '@kbn/config-schema' package at key: [params]."` + `"Expected a valid validation logic declared with '@kbn/config-schema' package or a RouteValidationFunction at key: [params]."` ); }); diff --git a/src/core/server/http/router/router.ts b/src/core/server/http/router/router.ts index 3bed8fe4186ac..bb56ee3727d1a 100644 --- a/src/core/server/http/router/router.ts +++ b/src/core/server/http/router/router.ts @@ -17,24 +17,18 @@ * under the License. */ -import { ObjectType, TypeOf, Type } from '@kbn/config-schema'; import { Request, ResponseObject, ResponseToolkit } from 'hapi'; import Boom from 'boom'; -import { Stream } from 'stream'; +import { Type } from '@kbn/config-schema'; import { Logger } from '../../logging'; import { KibanaRequest } from './request'; import { KibanaResponseFactory, kibanaResponseFactory, IKibanaResponse } from './response'; -import { - RouteConfig, - RouteConfigOptions, - RouteMethod, - RouteSchemas, - validBodyOutput, -} from './route'; +import { RouteConfig, RouteConfigOptions, RouteMethod, validBodyOutput } from './route'; import { HapiResponseAdapter } from './response_adapter'; import { RequestHandlerContext } from '../../../server'; import { wrapErrors } from './error_wrapper'; +import { RouteValidator } from './validator'; interface RouterRoute { method: RouteMethod; @@ -48,11 +42,7 @@ interface RouterRoute { * * @public */ -export type RouteRegistrar = < - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type ->( +export type RouteRegistrar = ( route: RouteConfig, handler: RequestHandler ) => void; @@ -108,9 +98,7 @@ export interface IRouter { * Wrap a router handler to catch and converts legacy boom errors to proper custom errors. * @param handler {@link RequestHandler} - a route handler to wrap */ - handleLegacyErrors:

( - handler: RequestHandler - ) => RequestHandler; + handleLegacyErrors: (handler: RequestHandler) => RequestHandler; /** * Returns all routes registered with this router. @@ -120,12 +108,9 @@ export interface IRouter { getRoutes: () => RouterRoute[]; } -export type ContextEnhancer< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType, - Method extends RouteMethod -> = (handler: RequestHandler) => RequestHandlerEnhanced; +export type ContextEnhancer = ( + handler: RequestHandler +) => RequestHandlerEnhanced; function getRouteFullPath(routerPath: string, routePath: string) { // If router's path ends with slash and route's path starts with slash, @@ -140,11 +125,10 @@ function getRouteFullPath(routerPath: string, routePath: string) { * @returns Route schemas if `validate` is specified on the route, otherwise * undefined. */ -function routeSchemasFromRouteConfig< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type ->(route: RouteConfig, routeMethod: RouteMethod) { +function routeSchemasFromRouteConfig( + route: RouteConfig, + routeMethod: RouteMethod +) { // The type doesn't allow `validate` to be undefined, but it can still // happen when it's used from JavaScript. if (route.validate === undefined) { @@ -155,15 +139,17 @@ function routeSchemasFromRouteConfig< if (route.validate !== false) { Object.entries(route.validate).forEach(([key, schema]) => { - if (!(schema instanceof Type)) { + if (!(schema instanceof Type || typeof schema === 'function')) { throw new Error( - `Expected a valid schema declared with '@kbn/config-schema' package at key: [${key}].` + `Expected a valid validation logic declared with '@kbn/config-schema' package or a RouteValidationFunction at key: [${key}].` ); } }); } - return route.validate ? route.validate : undefined; + if (route.validate) { + return RouteValidator.from(route.validate); + } } /** @@ -174,12 +160,7 @@ function routeSchemasFromRouteConfig< */ function validOptions( method: RouteMethod, - routeConfig: RouteConfig< - ObjectType, - ObjectType, - ObjectType | Type | Type, - typeof method - > + routeConfig: RouteConfig ) { const shouldNotHavePayload = ['head', 'get'].includes(method); const { options = {}, validate } = routeConfig; @@ -225,11 +206,7 @@ export class Router implements IRouter { private readonly log: Logger, private readonly enhanceWithContext: ContextEnhancer ) { - const buildMethod = (method: Method) => < - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type - >( + const buildMethod = (method: Method) => ( route: RouteConfig, handler: RequestHandler ) => { @@ -260,17 +237,11 @@ export class Router implements IRouter { return [...this.routes]; } - public handleLegacyErrors
( - handler: RequestHandler - ): RequestHandler { + public handleLegacyErrors(handler: RequestHandler): RequestHandler { return wrapErrors(handler); } - private async handle< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type - >({ + private async handle({ routeSchemas, request, responseToolkit, @@ -279,9 +250,9 @@ export class Router implements IRouter { request: Request; responseToolkit: ResponseToolkit; handler: RequestHandlerEnhanced; - routeSchemas?: RouteSchemas; + routeSchemas?: RouteValidator; }) { - let kibanaRequest: KibanaRequest, TypeOf, TypeOf, typeof request.method>; + let kibanaRequest: KibanaRequest; const hapiResponseAdapter = new HapiResponseAdapter(responseToolkit); try { kibanaRequest = KibanaRequest.from(request, routeSchemas); @@ -303,16 +274,14 @@ type WithoutHeadArgument = T extends (first: any, ...rest: infer Params) => i ? (...rest: Params) => Return : never; -type RequestHandlerEnhanced< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type, - Method extends RouteMethod -> = WithoutHeadArgument>; +type RequestHandlerEnhanced = WithoutHeadArgument< + RequestHandler +>; /** * A function executed when route path matched requested resource path. * Request handler is expected to return a result of one of {@link KibanaResponseFactory} functions. + * @param context {@link RequestHandlerContext} - the core context exposed for this request. * @param request {@link KibanaRequest} - object containing information about requested resource, * such as path, method, headers, parameters, query, body, etc. * @param response {@link KibanaResponseFactory} - a set of helper functions used to respond to a request. @@ -344,12 +313,12 @@ type RequestHandlerEnhanced< * @public */ export type RequestHandler< - P extends ObjectType, - Q extends ObjectType, - B extends ObjectType | Type | Type, + P = unknown, + Q = unknown, + B = unknown, Method extends RouteMethod = any > = ( context: RequestHandlerContext, - request: KibanaRequest, TypeOf, TypeOf, Method>, + request: KibanaRequest, response: KibanaResponseFactory ) => IKibanaResponse | Promise>; diff --git a/src/core/server/http/router/validator/index.ts b/src/core/server/http/router/validator/index.ts new file mode 100644 index 0000000000000..edb116c40144a --- /dev/null +++ b/src/core/server/http/router/validator/index.ts @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export { + RouteValidator, + RouteValidatorConfig, + RouteValidationSpec, + RouteValidationFunction, + RouteValidatorOptions, + RouteValidatorFullConfig, + RouteValidationResultFactory, +} from './validator'; +export { RouteValidationError } from './validator_error'; diff --git a/src/core/server/http/router/validator/validator.test.ts b/src/core/server/http/router/validator/validator.test.ts new file mode 100644 index 0000000000000..729eb1b60c10a --- /dev/null +++ b/src/core/server/http/router/validator/validator.test.ts @@ -0,0 +1,135 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { RouteValidationError, RouteValidator } from './'; +import { schema, Type } from '@kbn/config-schema'; + +describe('Router validator', () => { + it('should validate and infer the type from a function', () => { + const validator = RouteValidator.from({ + params: ({ foo }, validationResult) => { + if (typeof foo === 'string') { + return validationResult.ok({ foo }); + } + return validationResult.badRequest('Not a string', ['foo']); + }, + }); + expect(validator.getParams({ foo: 'bar' })).toStrictEqual({ foo: 'bar' }); + expect(validator.getParams({ foo: 'bar' }).foo.toUpperCase()).toBe('BAR'); // It knows it's a string! :) + expect(() => validator.getParams({ foo: 1 })).toThrowError('[foo]: Not a string'); + expect(() => validator.getParams({})).toThrowError('[foo]: Not a string'); + + expect(() => validator.getParams(undefined)).toThrowError( + "Cannot destructure property `foo` of 'undefined' or 'null'." + ); + expect(() => validator.getParams({}, 'myField')).toThrowError('[myField.foo]: Not a string'); + + expect(validator.getBody(undefined)).toStrictEqual({}); + expect(validator.getQuery(undefined)).toStrictEqual({}); + }); + + it('should validate and infer the type from a function that does not use the resolver', () => { + const validator = RouteValidator.from({ + params: data => { + if (typeof data.foo === 'string') { + return { value: { foo: data.foo as string } }; + } + return { error: new RouteValidationError('Not a string', ['foo']) }; + }, + }); + expect(validator.getParams({ foo: 'bar' })).toStrictEqual({ foo: 'bar' }); + expect(validator.getParams({ foo: 'bar' }).foo.toUpperCase()).toBe('BAR'); // It knows it's a string! 
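// --- Illustrative sketch (not part of this test file) ----------------------------------
// A hedged example of how a plugin route could combine the two validation styles this
// change enables: a @kbn/config-schema object for `params` and a custom
// RouteValidationFunction for `body`. The path and field names are hypothetical, and
// `router` is assumed to come from `core.http.createRouter()` in a plugin's setup().
import { schema } from '@kbn/config-schema';

router.post(
  {
    path: '/api/my_plugin/pets/{id}',
    validate: {
      params: schema.object({ id: schema.string() }),
      body: (data, { ok, badRequest }) => {
        const { name } = data || {};
        // Return the validated (and now typed) payload, or a bad request error.
        return typeof name === 'string' ? ok({ name }) : badRequest('Wrong payload', ['body']);
      },
    },
  },
  async (context, req, res) => res.ok({ body: { id: req.params.id, name: req.body.name } })
);
// ----------------------------------------------------------------------------------------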
:) + expect(() => validator.getParams({ foo: 1 })).toThrowError('[foo]: Not a string'); + expect(() => validator.getParams({})).toThrowError('[foo]: Not a string'); + + expect(() => validator.getParams(undefined)).toThrowError( + `Cannot read property 'foo' of undefined` + ); + expect(() => validator.getParams({}, 'myField')).toThrowError('[myField.foo]: Not a string'); + + expect(validator.getBody(undefined)).toStrictEqual({}); + expect(validator.getQuery(undefined)).toStrictEqual({}); + }); + + it('should validate and infer the type from a config-schema ObjectType', () => { + const schemaValidation = RouteValidator.from({ + params: schema.object({ + foo: schema.string(), + }), + }); + + expect(schemaValidation.getParams({ foo: 'bar' })).toStrictEqual({ foo: 'bar' }); + expect(schemaValidation.getParams({ foo: 'bar' }).foo.toUpperCase()).toBe('BAR'); // It knows it's a string! :) + expect(() => schemaValidation.getParams({ foo: 1 })).toThrowError( + '[foo]: expected value of type [string] but got [number]' + ); + expect(() => schemaValidation.getParams({})).toThrowError( + '[foo]: expected value of type [string] but got [undefined]' + ); + expect(() => schemaValidation.getParams(undefined)).toThrowError( + '[foo]: expected value of type [string] but got [undefined]' + ); + expect(() => schemaValidation.getParams({}, 'myField')).toThrowError( + '[myField.foo]: expected value of type [string] but got [undefined]' + ); + }); + + it('should validate and infer the type from a config-schema non-ObjectType', () => { + const schemaValidation = RouteValidator.from({ params: schema.buffer() }); + + const foo = Buffer.from('hi!'); + expect(schemaValidation.getParams(foo)).toStrictEqual(foo); + expect(schemaValidation.getParams(foo).byteLength).toBeGreaterThan(0); // It knows it's a buffer! :) + expect(() => schemaValidation.getParams({ foo: 1 })).toThrowError( + 'expected value of type [Buffer] but got [Object]' + ); + expect(() => schemaValidation.getParams({})).toThrowError( + 'expected value of type [Buffer] but got [Object]' + ); + expect(() => schemaValidation.getParams(undefined)).toThrowError( + `expected value of type [Buffer] but got [undefined]` + ); + expect(() => schemaValidation.getParams({}, 'myField')).toThrowError( + '[myField]: expected value of type [Buffer] but got [Object]' + ); + }); + + it('should catch the errors thrown by the validate function', () => { + const validator = RouteValidator.from({ + params: data => { + throw new Error('Something went terribly wrong'); + }, + }); + + expect(() => validator.getParams({ foo: 1 })).toThrowError('Something went terribly wrong'); + expect(() => validator.getParams({}, 'myField')).toThrowError( + '[myField]: Something went terribly wrong' + ); + }); + + it('should not accept invalid validation options', () => { + const wrongValidateSpec = RouteValidator.from({ + params: { validate: (data: T): T => data } as Type, + }); + + expect(() => wrongValidateSpec.getParams({ foo: 1 })).toThrowError( + 'The validation rule provided in the handler is not valid' + ); + }); +}); diff --git a/src/core/server/http/router/validator/validator.ts b/src/core/server/http/router/validator/validator.ts new file mode 100644 index 0000000000000..65c0a934e6ef0 --- /dev/null +++ b/src/core/server/http/router/validator/validator.ts @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { ValidationError, Type, schema, ObjectType } from '@kbn/config-schema'; +import { Stream } from 'stream'; +import { RouteValidationError } from './validator_error'; + +/** + * Validation result factory to be used in the custom validation function to return the valid data or validation errors + * + * See {@link RouteValidationFunction}. + * + * @public + */ +export interface RouteValidationResultFactory { + ok: (value: T) => { value: T }; + badRequest: (error: Error | string, path?: string[]) => { error: RouteValidationError }; +} + +/** + * The custom validation function if @kbn/config-schema is not a valid solution for your specific plugin requirements. + * + * @example + * + * The validation should look something like: + * ```typescript + * interface MyExpectedBody { + * bar: string; + * baz: number; + * } + * + * const myBodyValidation: RouteValidationFunction = (data, validationResult) => { + * const { ok, badRequest } = validationResult; + * const { bar, baz } = data || {}; + * if (typeof bar === 'string' && typeof baz === 'number') { + * return ok({ bar, baz }); + * } else { + * return badRequest('Wrong payload', ['body']); + * } + * } + * ``` + * + * @public + */ +export type RouteValidationFunction = ( + data: any, + validationResult: RouteValidationResultFactory +) => + | { + value: T; + error?: never; + } + | { + value?: never; + error: RouteValidationError; + }; + +/** + * Allowed property validation options: either @kbn/config-schema validations or custom validation functions + * + * See {@link RouteValidationFunction} for custom validation. + * + * @public + */ +export type RouteValidationSpec = ObjectType | Type | RouteValidationFunction; + +// Ugly as hell but we need this conditional typing to have proper type inference +type RouteValidationResultType | undefined> = NonNullable< + T extends RouteValidationFunction + ? ReturnType['value'] + : T extends Type + ? ReturnType + : undefined +>; + +/** + * The configuration object to the RouteValidator class. + * Set `params`, `query` and/or `body` to specify the validation logic to follow for that property. + * + * @public + */ +export interface RouteValidatorConfig { + /** + * Validation logic for the URL params + * @public + */ + params?: RouteValidationSpec
; + /** + * Validation logic for the Query params + * @public + */ + query?: RouteValidationSpec; + /** + * Validation logic for the body payload + * @public + */ + body?: RouteValidationSpec; +} + +/** + * Additional options for the RouteValidator class to modify its default behaviour. + * + * @public + */ +export interface RouteValidatorOptions { + /** + * Set the `unsafe` config to avoid running some additional internal *safe* validations on top of your custom validation + * @public + */ + unsafe?: { + params?: boolean; + query?: boolean; + body?: boolean; + }; +} + +/** + * Route validations config and options merged into one object + * @public + */ +export type RouteValidatorFullConfig = RouteValidatorConfig & + RouteValidatorOptions; + +/** + * Route validator class to define the validation logic for each new route. + * + * @internal + */ +export class RouteValidator
{ + public static from
( + opts: RouteValidator | RouteValidatorFullConfig + ) { + if (opts instanceof RouteValidator) { + return opts; + } + const { params, query, body, ...options } = opts; + return new RouteValidator({ params, query, body }, options); + } + + private static ResultFactory: RouteValidationResultFactory = { + ok: (value: T) => ({ value }), + badRequest: (error: Error | string, path?: string[]) => ({ + error: new RouteValidationError(error, path), + }), + }; + + private constructor( + private readonly config: RouteValidatorConfig, + private readonly options: RouteValidatorOptions = {} + ) {} + + /** + * Get validated URL params + * @internal + */ + public getParams(data: unknown, namespace?: string): Readonly
{ + return this.validate(this.config.params, this.options.unsafe?.params, data, namespace); + } + + /** + * Get validated query params + * @internal + */ + public getQuery(data: unknown, namespace?: string): Readonly { + return this.validate(this.config.query, this.options.unsafe?.query, data, namespace); + } + + /** + * Get validated body + * @internal + */ + public getBody(data: unknown, namespace?: string): Readonly { + return this.validate(this.config.body, this.options.unsafe?.body, data, namespace); + } + + /** + * Has body validation + * @internal + */ + public hasBody(): boolean { + return typeof this.config.body !== 'undefined'; + } + + private validate( + validationRule?: RouteValidationSpec, + unsafe?: boolean, + data?: unknown, + namespace?: string + ): RouteValidationResultType { + if (typeof validationRule === 'undefined') { + return {}; + } + let precheckedData = this.preValidateSchema(data).validate(data, {}, namespace); + + if (unsafe !== true) { + precheckedData = this.safetyPrechecks(precheckedData, namespace); + } + + const customCheckedData = this.customValidation(validationRule, precheckedData, namespace); + + if (unsafe === true) { + return customCheckedData; + } + + return this.safetyPostchecks(customCheckedData, namespace); + } + + private safetyPrechecks(data: T, namespace?: string): T { + // We can add any pre-validation safety logic in here + return data; + } + + private safetyPostchecks(data: T, namespace?: string): T { + // We can add any post-validation safety logic in here + return data; + } + + private customValidation( + validationRule: RouteValidationSpec, + data?: unknown, + namespace?: string + ): RouteValidationResultType { + if (validationRule instanceof Type) { + return validationRule.validate(data, {}, namespace); + } else if (typeof validationRule === 'function') { + return this.validateFunction(validationRule, data, namespace); + } else { + throw new ValidationError( + new RouteValidationError(`The validation rule provided in the handler is not valid`), + namespace + ); + } + } + + private validateFunction( + validateFn: RouteValidationFunction, + data: unknown, + namespace?: string + ): T { + let result: ReturnType; + try { + result = validateFn(data, RouteValidator.ResultFactory); + } catch (err) { + result = { error: new RouteValidationError(err) }; + } + + if (result.error) { + throw new ValidationError(result.error, namespace); + } + return result.value; + } + + private preValidateSchema(data: any) { + if (Buffer.isBuffer(data)) { + // if options.body.parse !== true + return schema.buffer(); + } else if (data instanceof Stream) { + // if options.body.output === 'stream' + return schema.stream(); + } else { + return schema.maybe(schema.nullable(schema.object({}, { allowUnknowns: true }))); + } + } +} diff --git a/src/legacy/ui/ui_render/lib/merge_variables.ts b/src/core/server/http/router/validator/validator_error.ts similarity index 61% rename from src/legacy/ui/ui_render/lib/merge_variables.ts rename to src/core/server/http/router/validator/validator_error.ts index 0f65c7825bdba..d306db4ad1cf4 100644 --- a/src/legacy/ui/ui_render/lib/merge_variables.ts +++ b/src/core/server/http/router/validator/validator_error.ts @@ -17,23 +17,18 @@ * under the License. */ -const ELIGIBLE_FLAT_MERGE_KEYS = ['uiCapabilities']; +import { SchemaTypeError } from '@kbn/config-schema'; -export function mergeVariables(...sources: Array>) { - const result: Record = {}; +/** + * Error to return when the validation is not successful. 
+ * @public + */ +export class RouteValidationError extends SchemaTypeError { + constructor(error: Error | string, path: string[] = []) { + super(error, path); - for (const source of sources) { - Object.entries(source).forEach(([key, value]) => { - if (ELIGIBLE_FLAT_MERGE_KEYS.includes(key)) { - result[key] = { - ...value, - ...result[key], - }; - } else if (!result.hasOwnProperty(key)) { - result[key] = value; - } - }); + // Set the prototype explicitly, see: + // https://github.com/Microsoft/TypeScript/wiki/Breaking-Changes#extending-built-ins-like-error-array-and-map-may-no-longer-work + Object.setPrototypeOf(this, RouteValidationError.prototype); } - - return result; } diff --git a/src/core/server/index.ts b/src/core/server/index.ts index 2aaa8306e871f..953fa0738597c 100644 --- a/src/core/server/index.ts +++ b/src/core/server/index.ts @@ -41,9 +41,10 @@ import { ElasticsearchServiceSetup, IScopedClusterClient } from './elasticsearch'; import { HttpServiceSetup } from './http'; +import { IScopedRenderingClient } from './rendering'; import { PluginsServiceSetup, PluginsServiceStart, PluginOpaqueId } from './plugins'; import { ContextSetup } from './context'; -import { IUiSettingsClient, UiSettingsServiceSetup } from './ui_settings'; +import { IUiSettingsClient, UiSettingsServiceSetup, UiSettingsServiceStart } from './ui_settings'; import { SavedObjectsClientContract } from './saved_objects/types'; import { SavedObjectsServiceSetup, SavedObjectsServiceStart } from './saved_objects'; import { CapabilitiesSetup, CapabilitiesStart } from './capabilities'; @@ -134,15 +135,22 @@ export { RouteRegistrar, RouteMethod, RouteConfigOptions, - RouteSchemas, RouteConfigOptionsBody, RouteContentType, validBodyOutput, + RouteValidatorConfig, + RouteValidationSpec, + RouteValidationFunction, + RouteValidatorOptions, + RouteValidatorFullConfig, + RouteValidationResultFactory, + RouteValidationError, SessionStorage, SessionStorageCookieOptions, SessionCookieValidationResult, SessionStorageFactory, } from './http'; +export { RenderingServiceSetup, IRenderOptions, LegacyRenderOptions } from './rendering'; export { Logger, LoggerFactory, LogMeta, LogRecord, LogLevel } from './logging'; export { @@ -204,6 +212,7 @@ export { UiSettingsParams, UiSettingsType, UiSettingsServiceSetup, + UiSettingsServiceStart, UserProvidedValues, } from './ui_settings'; @@ -222,23 +231,35 @@ export { SavedObjectsMigrationVersion, } from './types'; -export { LegacyServiceSetupDeps, LegacyServiceStartDeps } from './legacy'; +export { + LegacyServiceSetupDeps, + LegacyServiceStartDeps, + LegacyServiceDiscoverPlugins, + LegacyConfig, + LegacyUiExports, + LegacyInternals, +} from './legacy'; /** * Plugin specific context passed to a route handler. 
* * Provides the following clients: + * - {@link IScopedRenderingClient | rendering} - Rendering client + * which uses the data of the incoming request * - {@link SavedObjectsClient | savedObjects.client} - Saved Objects client * which uses the credentials of the incoming request * - {@link ScopedClusterClient | elasticsearch.dataClient} - Elasticsearch * data client which uses the credentials of the incoming request * - {@link ScopedClusterClient | elasticsearch.adminClient} - Elasticsearch * admin client which uses the credentials of the incoming request + * - {@link IUiSettingsClient | uiSettings.client} - uiSettings client + * which uses the credentials of the incoming request * * @public */ export interface RequestHandlerContext { core: { + rendering: IScopedRenderingClient; savedObjects: { client: SavedObjectsClientContract; }; @@ -284,12 +305,15 @@ export interface CoreStart { capabilities: CapabilitiesStart; /** {@link SavedObjectsServiceStart} */ savedObjects: SavedObjectsServiceStart; + /** {@link UiSettingsServiceStart} */ + uiSettings: UiSettingsServiceStart; } export { CapabilitiesSetup, CapabilitiesStart, ContextSetup, + IScopedRenderingClient, PluginsServiceSetup, PluginsServiceStart, PluginOpaqueId, diff --git a/src/core/server/internal_types.ts b/src/core/server/internal_types.ts index 06cf848bff25a..be4d830c55eab 100644 --- a/src/core/server/internal_types.ts +++ b/src/core/server/internal_types.ts @@ -17,15 +17,15 @@ * under the License. */ +import { CapabilitiesSetup, CapabilitiesStart } from './capabilities'; +import { ContextSetup } from './context'; import { InternalElasticsearchServiceSetup } from './elasticsearch'; import { InternalHttpServiceSetup } from './http'; -import { InternalUiSettingsServiceSetup } from './ui_settings'; -import { ContextSetup } from './context'; import { - InternalSavedObjectsServiceStart, InternalSavedObjectsServiceSetup, + InternalSavedObjectsServiceStart, } from './saved_objects'; -import { CapabilitiesSetup, CapabilitiesStart } from './capabilities'; +import { InternalUiSettingsServiceSetup, InternalUiSettingsServiceStart } from './ui_settings'; import { UuidServiceSetup } from './uuid'; /** @internal */ @@ -45,4 +45,5 @@ export interface InternalCoreSetup { export interface InternalCoreStart { capabilities: CapabilitiesStart; savedObjects: InternalSavedObjectsServiceStart; + uiSettings: InternalUiSettingsServiceStart; } diff --git a/src/core/server/legacy/config/ensure_valid_configuration.ts b/src/core/server/legacy/config/ensure_valid_configuration.ts index 026683a7b7cb0..a68d3df577a89 100644 --- a/src/core/server/legacy/config/ensure_valid_configuration.ts +++ b/src/core/server/legacy/config/ensure_valid_configuration.ts @@ -19,7 +19,7 @@ import { getUnusedConfigKeys } from './get_unused_config_keys'; import { ConfigService } from '../../config'; -import { LegacyServiceDiscoverPlugins } from '../legacy_service'; +import { LegacyServiceDiscoverPlugins } from '../types'; import { CriticalError } from '../../errors'; export async function ensureValidConfiguration( diff --git a/src/core/server/legacy/config/get_unused_config_keys.test.ts b/src/core/server/legacy/config/get_unused_config_keys.test.ts index bf011fa01a342..c4452fc6a1209 100644 --- a/src/core/server/legacy/config/get_unused_config_keys.test.ts +++ b/src/core/server/legacy/config/get_unused_config_keys.test.ts @@ -17,8 +17,7 @@ * under the License. 
*/ -import { LegacyPluginSpec } from '../plugins/find_legacy_plugin_specs'; -import { LegacyConfig } from './types'; +import { LegacyPluginSpec, LegacyConfig, LegacyVars } from '../types'; import { getUnusedConfigKeys } from './get_unused_config_keys'; describe('getUnusedConfigKeys', () => { @@ -26,7 +25,7 @@ describe('getUnusedConfigKeys', () => { jest.resetAllMocks(); }); - const getConfig = (values: Record = {}): LegacyConfig => + const getConfig = (values: LegacyVars = {}): LegacyConfig => ({ get: () => values as any, } as LegacyConfig); diff --git a/src/core/server/legacy/config/get_unused_config_keys.ts b/src/core/server/legacy/config/get_unused_config_keys.ts index 73cc7d8c50474..e425082ba126d 100644 --- a/src/core/server/legacy/config/get_unused_config_keys.ts +++ b/src/core/server/legacy/config/get_unused_config_keys.ts @@ -22,8 +22,7 @@ import { difference, get, set } from 'lodash'; import { getTransform } from '../../../../legacy/deprecation/index'; import { unset, getFlattenedObject } from '../../../../legacy/utils'; import { hasConfigPathIntersection } from '../../config'; -import { LegacyPluginSpec } from '../plugins/find_legacy_plugin_specs'; -import { LegacyConfig } from './types'; +import { LegacyPluginSpec, LegacyConfig, LegacyVars } from '../types'; const getFlattenedKeys = (object: object) => Object.keys(getFlattenedObject(object)); @@ -37,7 +36,7 @@ export async function getUnusedConfigKeys({ coreHandledConfigPaths: string[]; pluginSpecs: LegacyPluginSpec[]; disabledPluginSpecs: LegacyPluginSpec[]; - settings: Record; + settings: LegacyVars; legacyConfig: LegacyConfig; }) { // transform deprecated plugin settings diff --git a/src/core/server/legacy/config/index.ts b/src/core/server/legacy/config/index.ts index c3f308fd6d903..f10e3f22d53c5 100644 --- a/src/core/server/legacy/config/index.ts +++ b/src/core/server/legacy/config/index.ts @@ -20,9 +20,3 @@ export { ensureValidConfiguration } from './ensure_valid_configuration'; export { LegacyObjectToConfigAdapter } from './legacy_object_to_config_adapter'; export { convertLegacyDeprecationProvider } from './legacy_deprecation_adapters'; -export { - LegacyConfig, - LegacyConfigDeprecation, - LegacyConfigDeprecationFactory, - LegacyConfigDeprecationProvider, -} from './types'; diff --git a/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts b/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts index 144e057c118f7..8651d05064492 100644 --- a/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts +++ b/src/core/server/legacy/config/legacy_deprecation_adapters.test.ts @@ -17,11 +17,11 @@ * under the License. 
*/ -import { convertLegacyDeprecationProvider } from './legacy_deprecation_adapters'; -import { LegacyConfigDeprecationProvider } from './types'; import { ConfigDeprecation } from '../../config'; import { configDeprecationFactory } from '../../config/deprecation/deprecation_factory'; import { applyDeprecations } from '../../config/deprecation/apply_deprecations'; +import { LegacyConfigDeprecationProvider } from '../types'; +import { convertLegacyDeprecationProvider } from './legacy_deprecation_adapters'; jest.spyOn(configDeprecationFactory, 'unusedFromRoot'); jest.spyOn(configDeprecationFactory, 'renameFromRoot'); diff --git a/src/core/server/legacy/config/legacy_deprecation_adapters.ts b/src/core/server/legacy/config/legacy_deprecation_adapters.ts index b0e3bc37e1510..1e0733969e662 100644 --- a/src/core/server/legacy/config/legacy_deprecation_adapters.ts +++ b/src/core/server/legacy/config/legacy_deprecation_adapters.ts @@ -18,8 +18,8 @@ */ import { ConfigDeprecation, ConfigDeprecationProvider } from '../../config/deprecation'; -import { LegacyConfigDeprecation, LegacyConfigDeprecationProvider } from './index'; import { configDeprecationFactory } from '../../config/deprecation/deprecation_factory'; +import { LegacyConfigDeprecation, LegacyConfigDeprecationProvider } from '../types'; const convertLegacyDeprecation = ( legacyDeprecation: LegacyConfigDeprecation diff --git a/src/core/server/legacy/config/legacy_object_to_config_adapter.ts b/src/core/server/legacy/config/legacy_object_to_config_adapter.ts index ffcbfda4e024d..bdcde8262ef98 100644 --- a/src/core/server/legacy/config/legacy_object_to_config_adapter.ts +++ b/src/core/server/legacy/config/legacy_object_to_config_adapter.ts @@ -19,6 +19,7 @@ import { ConfigPath } from '../../config'; import { ObjectToConfigAdapter } from '../../config/object_to_config_adapter'; +import { LegacyVars } from '../types'; /** * Represents logging config supported by the legacy platform. @@ -77,7 +78,7 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter { }; } - private static transformPlugins(configValue: Record) { + private static transformPlugins(configValue: LegacyVars) { // These properties are the only ones we use from the existing `plugins` config node // since `scanDirs` isn't respected by new platform plugin discovery. return { @@ -94,7 +95,7 @@ export class LegacyObjectToConfigAdapter extends ObjectToConfigAdapter { case 'server': return LegacyObjectToConfigAdapter.transformServer(configValue); case 'plugins': - return LegacyObjectToConfigAdapter.transformPlugins(configValue as Record); + return LegacyObjectToConfigAdapter.transformPlugins(configValue as LegacyVars); default: return configValue; } diff --git a/src/core/server/legacy/config/types.ts b/src/core/server/legacy/config/types.ts deleted file mode 100644 index cac1002d6c244..0000000000000 --- a/src/core/server/legacy/config/types.ts +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * New platform representation of the legacy configuration (KibanaConfig) - * - * @internal - */ -export interface LegacyConfig { - get(key?: string): T; - has(key: string): boolean; - set(key: string, value: any): void; - set(config: Record): void; -} - -/** - * Representation of a legacy configuration deprecation factory used for - * legacy plugin deprecations. - * - * @internal - */ -export interface LegacyConfigDeprecationFactory { - rename(oldKey: string, newKey: string): LegacyConfigDeprecation; - unused(unusedKey: string): LegacyConfigDeprecation; -} - -/** - * Representation of a legacy configuration deprecation. - * - * @internal - */ -export type LegacyConfigDeprecation = ( - settings: Record, - log: (msg: string) => void -) => void; - -/** - * Representation of a legacy configuration deprecation provider. - * - * @internal - */ -export type LegacyConfigDeprecationProvider = ( - factory: LegacyConfigDeprecationFactory -) => LegacyConfigDeprecation[] | Promise; diff --git a/src/core/server/legacy/index.ts b/src/core/server/legacy/index.ts index 10686fc521d35..208e9b1167253 100644 --- a/src/core/server/legacy/index.ts +++ b/src/core/server/legacy/index.ts @@ -18,6 +18,10 @@ */ /** @internal */ -export { LegacyObjectToConfigAdapter, ensureValidConfiguration, LegacyConfig } from './config'; +export { LegacyObjectToConfigAdapter, ensureValidConfiguration } from './config'; /** @internal */ -export { LegacyService, LegacyServiceSetupDeps, LegacyServiceStartDeps } from './legacy_service'; +export { LegacyInternals } from './legacy_internals'; +/** @internal */ +export { LegacyService, ILegacyService } from './legacy_service'; +/** @internal */ +export * from './types'; diff --git a/src/core/server/legacy/legacy_internals.test.ts b/src/core/server/legacy/legacy_internals.test.ts new file mode 100644 index 0000000000000..dcab62627442b --- /dev/null +++ b/src/core/server/legacy/legacy_internals.test.ts @@ -0,0 +1,211 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { Server } from 'hapi'; + +import { configMock } from '../config/config.mock'; +import { httpServiceMock } from '../http/http_service.mock'; +import { httpServerMock } from '../http/http_server.mocks'; +import { findLegacyPluginSpecsMock } from './legacy_service.test.mocks'; +import { LegacyInternals } from './legacy_internals'; +import { ILegacyInternals, LegacyConfig, LegacyVars, LegacyUiExports } from './types'; + +function varsProvider(vars: LegacyVars, configValue?: any) { + return { + fn: jest.fn().mockReturnValue(vars), + pluginSpec: { + readConfigValue: jest.fn().mockReturnValue(configValue), + }, + }; +} + +describe('LegacyInternals', () => { + describe('getInjectedUiAppVars()', () => { + let uiExports: LegacyUiExports; + let config: LegacyConfig; + let server: Server; + let legacyInternals: ILegacyInternals; + + beforeEach(async () => { + uiExports = findLegacyPluginSpecsMock().uiExports; + config = configMock.create() as any; + server = httpServiceMock.createSetupContract().server; + legacyInternals = new LegacyInternals(uiExports, config, server); + }); + + it('gets with no injectors', async () => { + await expect(legacyInternals.getInjectedUiAppVars('core')).resolves.toMatchInlineSnapshot( + `Object {}` + ); + }); + + it('gets with no matching injectors', async () => { + const injector = jest.fn().mockResolvedValue({ not: 'core' }); + legacyInternals.injectUiAppVars('not-core', injector); + + await expect(legacyInternals.getInjectedUiAppVars('core')).resolves.toMatchInlineSnapshot( + `Object {}` + ); + expect(injector).not.toHaveBeenCalled(); + }); + + it('gets with single matching injector', async () => { + const injector = jest.fn().mockResolvedValue({ is: 'core' }); + legacyInternals.injectUiAppVars('core', injector); + + await expect(legacyInternals.getInjectedUiAppVars('core')).resolves.toMatchInlineSnapshot(` + Object { + "is": "core", + } + `); + expect(injector).toHaveBeenCalled(); + }); + + it('gets with multiple matching injectors', async () => { + const injectors = [ + jest.fn().mockResolvedValue({ is: 'core' }), + jest.fn().mockReturnValue({ sync: 'injector' }), + jest.fn().mockResolvedValue({ is: 'merged-core' }), + ]; + + injectors.forEach(injector => legacyInternals.injectUiAppVars('core', injector)); + + await expect(legacyInternals.getInjectedUiAppVars('core')).resolves.toMatchInlineSnapshot(` + Object { + "is": "merged-core", + "sync": "injector", + } + `); + expect(injectors[0]).toHaveBeenCalled(); + expect(injectors[1]).toHaveBeenCalled(); + expect(injectors[2]).toHaveBeenCalled(); + }); + }); + + describe('getVars()', () => { + let uiExports: LegacyUiExports; + let config: LegacyConfig; + let server: Server; + let legacyInternals: LegacyInternals; + + beforeEach(async () => { + uiExports = findLegacyPluginSpecsMock().uiExports; + config = configMock.create() as any; + server = httpServiceMock.createSetupContract().server; + legacyInternals = new LegacyInternals(uiExports, config, server); + }); + + it('gets: no default injectors, no injected vars replacers, no ui app injectors, no inject arg', async () => { + const vars = await legacyInternals.getVars('core', httpServerMock.createRawRequest()); + + expect(vars).toMatchInlineSnapshot(`Object {}`); + }); + + it('gets: with default injectors, no injected vars replacers, no ui app injectors, no inject arg', async () => { + uiExports.defaultInjectedVarProviders = [ + varsProvider({ alpha: 'alpha' }), + varsProvider({ gamma: 'gamma' }), + varsProvider({ alpha: 'beta' }), + ]; + + const vars = 
await legacyInternals.getVars('core', httpServerMock.createRawRequest()); + + expect(vars).toMatchInlineSnapshot(` + Object { + "alpha": "beta", + "gamma": "gamma", + } + `); + }); + + it('gets: no default injectors, with injected vars replacers, with ui app injectors, no inject arg', async () => { + uiExports.injectedVarsReplacers = [ + jest.fn(async vars => ({ ...vars, added: 'key' })), + jest.fn(vars => vars), + jest.fn(vars => ({ replaced: 'all' })), + jest.fn(async vars => ({ ...vars, added: 'last-key' })), + ]; + + const request = httpServerMock.createRawRequest(); + const vars = await legacyInternals.getVars('core', request); + + expect(vars).toMatchInlineSnapshot(` + Object { + "added": "last-key", + "replaced": "all", + } + `); + }); + + it('gets: no default injectors, no injected vars replacers, with ui app injectors, no inject arg', async () => { + legacyInternals.injectUiAppVars('core', async () => ({ is: 'core' })); + legacyInternals.injectUiAppVars('core', () => ({ sync: 'injector' })); + legacyInternals.injectUiAppVars('core', async () => ({ is: 'merged-core' })); + + const vars = await legacyInternals.getVars('core', httpServerMock.createRawRequest()); + + expect(vars).toMatchInlineSnapshot(` + Object { + "is": "merged-core", + "sync": "injector", + } + `); + }); + + it('gets: no default injectors, no injected vars replacers, no ui app injectors, with inject arg', async () => { + const vars = await legacyInternals.getVars('core', httpServerMock.createRawRequest(), { + injected: 'arg', + }); + + expect(vars).toMatchInlineSnapshot(` + Object { + "injected": "arg", + } + `); + }); + + it('gets: with default injectors, with injected vars replacers, with ui app injectors, with inject arg', async () => { + uiExports.defaultInjectedVarProviders = [ + varsProvider({ alpha: 'alpha' }), + varsProvider({ gamma: 'gamma' }), + varsProvider({ alpha: 'beta' }), + ]; + uiExports.injectedVarsReplacers = [jest.fn(async vars => ({ ...vars, gamma: 'delta' }))]; + + legacyInternals.injectUiAppVars('core', async () => ({ is: 'core' })); + legacyInternals.injectUiAppVars('core', () => ({ sync: 'injector' })); + legacyInternals.injectUiAppVars('core', async () => ({ is: 'merged-core' })); + + const vars = await legacyInternals.getVars('core', httpServerMock.createRawRequest(), { + injected: 'arg', + sync: 'arg', + }); + + expect(vars).toMatchInlineSnapshot(` + Object { + "alpha": "beta", + "gamma": "delta", + "injected": "arg", + "is": "merged-core", + "sync": "arg", + } + `); + }); + }); +}); diff --git a/src/core/server/legacy/legacy_internals.ts b/src/core/server/legacy/legacy_internals.ts new file mode 100644 index 0000000000000..3bf54e5f75dce --- /dev/null +++ b/src/core/server/legacy/legacy_internals.ts @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Server } from 'hapi'; + +import { LegacyRequest } from '../http'; +import { mergeVars } from './merge_vars'; +import { ILegacyInternals, LegacyVars, VarsInjector, LegacyConfig, LegacyUiExports } from './types'; + +/** + * @internal + * @deprecated + */ +export class LegacyInternals implements ILegacyInternals { + private readonly injectors = new Map>(); + private cachedDefaultVars?: LegacyVars; + + constructor( + private readonly uiExports: LegacyUiExports, + private readonly config: LegacyConfig, + private readonly server: Server + ) {} + + private get defaultVars(): LegacyVars { + if (this.cachedDefaultVars) { + return this.cachedDefaultVars; + } + + const { defaultInjectedVarProviders = [] } = this.uiExports; + + return (this.cachedDefaultVars = defaultInjectedVarProviders.reduce( + (vars, { fn, pluginSpec }) => + mergeVars(vars, fn(this.server, pluginSpec.readConfigValue(this.config, []))), + {} + )); + } + + private replaceVars(vars: LegacyVars, request: LegacyRequest) { + const { injectedVarsReplacers = [] } = this.uiExports; + + return injectedVarsReplacers.reduce( + async (injected, replacer) => replacer(await injected, request, this.server), + Promise.resolve(vars) + ); + } + + public injectUiAppVars(id: string, injector: VarsInjector) { + if (!this.injectors.has(id)) { + this.injectors.set(id, new Set()); + } + + this.injectors.get(id)!.add(injector); + } + + public getInjectedUiAppVars(id: string) { + return [...(this.injectors.get(id) || [])].reduce( + async (promise, injector) => ({ + ...(await promise), + ...(await injector()), + }), + Promise.resolve({}) + ); + } + + public async getVars(id: string, request: LegacyRequest, injected: LegacyVars = {}) { + return this.replaceVars( + mergeVars(this.defaultVars, await this.getInjectedUiAppVars(id), injected), + request + ); + } +} diff --git a/src/core/server/legacy/legacy_service.mock.ts b/src/core/server/legacy/legacy_service.mock.ts index ac0319cdf4eb5..495141cdcb58d 100644 --- a/src/core/server/legacy/legacy_service.mock.ts +++ b/src/core/server/legacy/legacy_service.mock.ts @@ -17,23 +17,33 @@ * under the License. 
*/ -import { LegacyServiceDiscoverPlugins } from './legacy_service'; +import { LegacyService } from './legacy_service'; +import { LegacyServiceDiscoverPlugins, LegacyServiceSetupDeps } from './types'; -const createDiscoverMock = () => { - const setupContract: DeeplyMockedKeys = { - pluginSpecs: [], - disabledPluginSpecs: [], - uiExports: {} as any, - settings: {}, - pluginExtendedConfig: { - get: jest.fn(), - has: jest.fn(), - set: jest.fn(), - } as any, - }; - return setupContract; -}; +type LegacyServiceMock = jest.Mocked & { legacyId: symbol }>; + +const createDiscoverPluginsMock = (): LegacyServiceDiscoverPlugins => ({ + pluginSpecs: [], + uiExports: {} as any, + navLinks: [], + pluginExtendedConfig: { + get: jest.fn(), + has: jest.fn(), + set: jest.fn(), + }, + disabledPluginSpecs: [], + settings: {}, +}); +const createLegacyServiceMock = (): LegacyServiceMock => ({ + legacyId: Symbol(), + discoverPlugins: jest.fn().mockResolvedValue(createDiscoverPluginsMock()), + setup: jest.fn(), + start: jest.fn(), + stop: jest.fn(), +}); export const legacyServiceMock = { - createDiscover: createDiscoverMock, + create: createLegacyServiceMock, + createSetupContract: (deps: LegacyServiceSetupDeps) => createLegacyServiceMock().setup(deps), + createDiscoverPlugins: createDiscoverPluginsMock, }; diff --git a/src/core/server/legacy/legacy_service.test.mocks.ts b/src/core/server/legacy/legacy_service.test.mocks.ts index e8d4a0ed0bd4d..451a75ced7ae2 100644 --- a/src/core/server/legacy/legacy_service.test.mocks.ts +++ b/src/core/server/legacy/legacy_service.test.mocks.ts @@ -17,18 +17,19 @@ * under the License. */ -export const findLegacyPluginSpecsMock = jest - .fn() - .mockImplementation((settings: Record) => ({ - pluginSpecs: [], - pluginExtendedConfig: { - has: jest.fn(), - get: jest.fn(() => settings), - set: jest.fn(), - }, - disabledPluginSpecs: [], - uiExports: [], - })); +import { LegacyVars } from './types'; + +export const findLegacyPluginSpecsMock = jest.fn().mockImplementation((settings: LegacyVars) => ({ + pluginSpecs: [], + pluginExtendedConfig: { + has: jest.fn(), + get: jest.fn().mockReturnValue(settings), + set: jest.fn(), + }, + disabledPluginSpecs: [], + uiExports: {}, + navLinks: [], +})); jest.doMock('./plugins/find_legacy_plugin_specs.ts', () => ({ findLegacyPluginSpecs: findLegacyPluginSpecsMock, })); diff --git a/src/core/server/legacy/legacy_service.test.ts b/src/core/server/legacy/legacy_service.test.ts index 17ec1e9756432..608392e4943f9 100644 --- a/src/core/server/legacy/legacy_service.test.ts +++ b/src/core/server/legacy/legacy_service.test.ts @@ -25,15 +25,14 @@ jest.mock('./config/legacy_deprecation_adapters', () => ({ import { findLegacyPluginSpecsMock } from './legacy_service.test.mocks'; import { BehaviorSubject, throwError } from 'rxjs'; -import { LegacyService, LegacyServiceSetupDeps, LegacyServiceStartDeps } from '.'; + // @ts-ignore: implicit any for JS file -import MockClusterManager from '../../../cli/cluster/cluster_manager'; +import { ClusterManager as MockClusterManager } from '../../../cli/cluster/cluster_manager'; import KbnServer from '../../../legacy/server/kbn_server'; import { Config, Env, ObjectToConfigAdapter } from '../config'; import { getEnvOptions } from '../config/__mocks__/env'; import { BasePathProxyServer } from '../http'; import { DiscoveredPlugin } from '../plugins'; -import { findLegacyPluginSpecs } from './plugins/find_legacy_plugin_specs'; import { configServiceMock } from '../config/config_service.mock'; import { loggingServiceMock } from 
'../logging/logging_service.mock'; @@ -42,7 +41,11 @@ import { httpServiceMock } from '../http/http_service.mock'; import { uiSettingsServiceMock } from '../ui_settings/ui_settings_service.mock'; import { savedObjectsServiceMock } from '../saved_objects/saved_objects_service.mock'; import { capabilitiesServiceMock } from '../capabilities/capabilities_service.mock'; +import { setupMock as renderingServiceMock } from '../rendering/__mocks__/rendering_service'; import { uuidServiceMock } from '../uuid/uuid_service.mock'; +import { findLegacyPluginSpecs } from './plugins'; +import { LegacyVars, LegacyServiceSetupDeps, LegacyServiceStartDeps } from './types'; +import { LegacyService } from './legacy_service'; const MockKbnServer: jest.Mock = KbnServer as any; @@ -89,6 +92,7 @@ beforeEach(() => { browserConfigs: new Map(), }, }, + rendering: renderingServiceMock, uuid: uuidSetup, }, plugins: { 'plugin-id': 'plugin-value' }, @@ -98,6 +102,7 @@ beforeEach(() => { core: { capabilities: capabilitiesServiceMock.createStartContract(), savedObjects: savedObjectsServiceMock.createStartContract(), + uiSettings: uiSettingsServiceMock.createStartContract(), plugins: { contracts: new Map() }, }, plugins: {}, @@ -137,7 +142,7 @@ describe('once LegacyService is set up with connection info', () => { { path: { autoListen: true }, server: { autoListen: true } }, // Because of the mock, path also gets the value expect.objectContaining({ get: expect.any(Function) }), expect.any(Object), - { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] } + { disabledPluginSpecs: [], pluginSpecs: [], uiExports: {}, navLinks: [] } ); expect(MockKbnServer.mock.calls[0][1].get()).toEqual({ path: { autoListen: true }, @@ -167,7 +172,7 @@ describe('once LegacyService is set up with connection info', () => { { path: { autoListen: false }, server: { autoListen: true } }, expect.objectContaining({ get: expect.any(Function) }), expect.any(Object), - { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] } + { disabledPluginSpecs: [], pluginSpecs: [], uiExports: {}, navLinks: [] } ); expect(MockKbnServer.mock.calls[0][1].get()).toEqual({ path: { autoListen: false }, @@ -308,7 +313,7 @@ describe('once LegacyService is set up without connection info', () => { { path: {}, server: { autoListen: true } }, expect.objectContaining({ get: expect.any(Function) }), expect.any(Object), - { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] } + { disabledPluginSpecs: [], pluginSpecs: [], uiExports: {}, navLinks: [] } ); expect(MockKbnServer.mock.calls[0][1].get()).toEqual({ path: {}, @@ -354,9 +359,15 @@ describe('once LegacyService is set up in `devClusterMaster` mode', () => { await devClusterLegacyService.setup(setupDeps); await devClusterLegacyService.start(startDeps); - const [[cliArgs, , basePathProxy]] = MockClusterManager.create.mock.calls; - expect(cliArgs.basePath).toBe(false); - expect(basePathProxy).not.toBeDefined(); + expect(MockClusterManager).toHaveBeenCalledTimes(1); + expect(MockClusterManager).toHaveBeenCalledWith( + expect.objectContaining({ silent: true, basePath: false }), + expect.objectContaining({ + get: expect.any(Function), + set: expect.any(Function), + }), + undefined + ); }); test('creates ClusterManager with base path proxy.', async () => { @@ -376,24 +387,30 @@ describe('once LegacyService is set up in `devClusterMaster` mode', () => { await devClusterLegacyService.setup(setupDeps); await devClusterLegacyService.start(startDeps); - expect(MockClusterManager.create).toBeCalledTimes(1); - - const 
[[cliArgs, , basePathProxy]] = MockClusterManager.create.mock.calls; - expect(cliArgs.basePath).toEqual(true); - expect(basePathProxy).toBeInstanceOf(BasePathProxyServer); + expect(MockClusterManager).toHaveBeenCalledTimes(1); + expect(MockClusterManager).toHaveBeenCalledWith( + expect.objectContaining({ quiet: true, basePath: true }), + expect.objectContaining({ + get: expect.any(Function), + set: expect.any(Function), + }), + expect.any(BasePathProxyServer) + ); }); }); -test('Cannot start without setup phase', async () => { - const legacyService = new LegacyService({ - coreId, - env, - logger, - configService: configService as any, +describe('start', () => { + test('Cannot start without setup phase', async () => { + const legacyService = new LegacyService({ + coreId, + env, + logger, + configService: configService as any, + }); + await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot( + `"Legacy service is not setup yet."` + ); }); - await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot( - `"Legacy service is not setup yet."` - ); }); describe('#discoverPlugins()', () => { @@ -427,7 +444,8 @@ describe('#discoverPlugins()', () => { ], pluginExtendedConfig: settings, disabledPluginSpecs: [], - uiExports: [], + uiExports: {}, + navLinks: [], }) as any ); @@ -458,15 +476,16 @@ test('Sets the server.uuid property on the legacy configuration', async () => { const configSetMock = jest.fn(); - findLegacyPluginSpecsMock.mockImplementation((settings: Record) => ({ + findLegacyPluginSpecsMock.mockImplementation((settings: LegacyVars) => ({ pluginSpecs: [], pluginExtendedConfig: { has: jest.fn(), - get: jest.fn(() => settings), + get: jest.fn().mockReturnValue(settings), set: configSetMock, }, disabledPluginSpecs: [], - uiExports: [], + uiExports: {}, + navLinks: [], })); await legacyService.discoverPlugins(); diff --git a/src/core/server/legacy/legacy_service.ts b/src/core/server/legacy/legacy_service.ts index 1bba38433d7f4..2ed87f4c6d488 100644 --- a/src/core/server/legacy/legacy_service.ts +++ b/src/core/server/legacy/legacy_service.ts @@ -19,24 +19,30 @@ import { combineLatest, ConnectableObservable, EMPTY, Observable, Subscription } from 'rxjs'; import { first, map, publishReplay, tap } from 'rxjs/operators'; + import { CoreService } from '../../types'; -import { CoreSetup, CoreStart } from '../'; -import { InternalCoreSetup, InternalCoreStart } from '../internal_types'; -import { SavedObjectsLegacyUiExports } from '../types'; import { Config, ConfigDeprecationProvider } from '../config'; import { CoreContext } from '../core_context'; import { CspConfigType, config as cspConfig } from '../csp'; import { DevConfig, DevConfigType, config as devConfig } from '../dev'; import { BasePathProxyServer, HttpConfig, HttpConfigType, config as httpConfig } from '../http'; import { Logger } from '../logging'; -import { PluginsServiceSetup, PluginsServiceStart } from '../plugins'; -import { findLegacyPluginSpecs } from './plugins'; -import { LegacyPluginSpec } from './plugins/find_legacy_plugin_specs'; import { PathConfigType } from '../path'; -import { LegacyConfig, convertLegacyDeprecationProvider } from './config'; +import { findLegacyPluginSpecs } from './plugins'; +import { convertLegacyDeprecationProvider } from './config'; +import { + LegacyServiceSetupDeps, + LegacyServiceStartDeps, + LegacyPlugins, + LegacyServiceDiscoverPlugins, + LegacyConfig, + LegacyVars, +} from './types'; +import { LegacyInternals } from './legacy_internals'; 
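// --- Illustrative sketch (not part of this import block) -------------------------------
// A hedged outline of the LegacyService lifecycle exercised by the tests above:
// discoverPlugins() and setup() must run before start(), otherwise start() rejects with
// "Legacy service is not setup yet.". `coreContext`, `setupDeps` and `startDeps` are
// assumed to be built the same way the surrounding tests build them.
import { LegacyService } from './legacy_service';

const legacyService = new LegacyService(coreContext);

await legacyService.discoverPlugins(); // resolves plugin specs, uiExports and navLinks
await legacyService.setup(setupDeps); // propagates server.uuid into the legacy config
await legacyService.start(startDeps); // creates the ClusterManager or the KbnServer
// ----------------------------------------------------------------------------------------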
+import { CoreSetup, CoreStart } from '..'; interface LegacyKbnServer { - applyLoggingConfiguration: (settings: Readonly>) => void; + applyLoggingConfiguration: (settings: Readonly) => void; listen: () => Promise; ready: () => Promise; close: () => Promise; @@ -53,43 +59,14 @@ function getLegacyRawConfig(config: Config, pathConfig: PathConfigType) { return { ...rawConfig, - path: pathConfig, // We rely heavily in the default value of 'path.data' in the legacy world and, since it has been moved to NP, it won't show up in RawConfig - }; -} - -/** - * @public - * @deprecated - */ -export interface LegacyServiceSetupDeps { - core: InternalCoreSetup & { - plugins: PluginsServiceSetup; - }; - plugins: Record; -} - -/** - * @public - * @deprecated - */ -export interface LegacyServiceStartDeps { - core: InternalCoreStart & { - plugins: PluginsServiceStart; + // We rely heavily in the default value of 'path.data' in the legacy world and, + // since it has been moved to NP, it won't show up in RawConfig. + path: pathConfig, }; - plugins: Record; } /** @internal */ -export interface LegacyServiceDiscoverPlugins { - pluginSpecs: LegacyPluginSpec[]; - disabledPluginSpecs: LegacyPluginSpec[]; - uiExports: SavedObjectsLegacyUiExports; - pluginExtendedConfig: LegacyConfig; - settings: Record; -} - -/** @internal */ -export type ILegacyService = Pick; +export type ILegacyService = PublicMethodsOf; /** @internal */ export class LegacyService implements CoreService { @@ -101,16 +78,10 @@ export class LegacyService implements CoreService { private kbnServer?: LegacyKbnServer; private configSubscription?: Subscription; private setupDeps?: LegacyServiceSetupDeps; - private update$: ConnectableObservable<[Config, PathConfigType]> | undefined; - private legacyRawConfig: LegacyConfig | undefined; - private legacyPlugins: - | { - pluginSpecs: LegacyPluginSpec[]; - disabledPluginSpecs: LegacyPluginSpec[]; - uiExports: SavedObjectsLegacyUiExports; - } - | undefined; - private settings: Record | undefined; + private update$?: ConnectableObservable<[Config, PathConfigType]>; + private legacyRawConfig?: LegacyConfig; + private legacyPlugins?: LegacyPlugins; + private settings?: LegacyVars; constructor(private readonly coreContext: CoreContext) { const { logger, configService, env } = coreContext; @@ -153,12 +124,14 @@ export class LegacyService implements CoreService { pluginExtendedConfig, disabledPluginSpecs, uiExports, + navLinks, } = await findLegacyPluginSpecs(this.settings, this.coreContext.logger); this.legacyPlugins = { pluginSpecs, disabledPluginSpecs, uiExports, + navLinks, }; const deprecationProviders = await pluginSpecs @@ -188,6 +161,7 @@ export class LegacyService implements CoreService { pluginSpecs, disabledPluginSpecs, uiExports, + navLinks, pluginExtendedConfig, settings: this.settings, }; @@ -195,35 +169,37 @@ export class LegacyService implements CoreService { public async setup(setupDeps: LegacyServiceSetupDeps) { this.log.debug('setting up legacy service'); - if (!this.legacyRawConfig || !this.legacyPlugins || !this.settings) { + + if (!this.legacyPlugins) { throw new Error( 'Legacy service has not discovered legacy plugins yet. Ensure LegacyService.discoverPlugins() is called before LegacyService.setup()' ); } - // propagate the instance uuid to the legacy config, as it was the legacy way to access it. - this.legacyRawConfig.set('server.uuid', setupDeps.core.uuid.getInstanceUuid()); + // propagate the instance uuid to the legacy config, as it was the legacy way to access it. 
+ this.legacyRawConfig!.set('server.uuid', setupDeps.core.uuid.getInstanceUuid()); this.setupDeps = setupDeps; } public async start(startDeps: LegacyServiceStartDeps) { const { setupDeps } = this; - if (!setupDeps || !this.legacyRawConfig || !this.legacyPlugins || !this.settings) { + + if (!setupDeps || !this.legacyPlugins) { throw new Error('Legacy service is not setup yet.'); } + this.log.debug('starting legacy service'); // Receive initial config and create kbnServer/ClusterManager. - if (this.coreContext.env.isDevClusterMaster) { - await this.createClusterManager(this.legacyRawConfig); + await this.createClusterManager(this.legacyRawConfig!); } else { this.kbnServer = await this.createKbnServer( - this.settings, - this.legacyRawConfig, + this.settings!, + this.legacyRawConfig!, setupDeps, startDeps, - this.legacyPlugins + this.legacyPlugins! ); } } @@ -244,7 +220,7 @@ export class LegacyService implements CoreService { private async createClusterManager(config: LegacyConfig) { const basePathProxy$ = this.coreContext.env.cliArgs.basePath - ? combineLatest(this.devConfig$, this.httpConfig$).pipe( + ? combineLatest([this.devConfig$, this.httpConfig$]).pipe( first(), map( ([dev, http]) => @@ -253,7 +229,9 @@ export class LegacyService implements CoreService { ) : EMPTY; - require('../../../cli/cluster/cluster_manager').create( + // eslint-disable-next-line @typescript-eslint/no-var-requires + const { ClusterManager } = require('../../../cli/cluster/cluster_manager'); + return new ClusterManager( this.coreContext.env.cliArgs, config, await basePathProxy$.toPromise() @@ -261,15 +239,11 @@ export class LegacyService implements CoreService { } private async createKbnServer( - settings: Record, + settings: LegacyVars, config: LegacyConfig, setupDeps: LegacyServiceSetupDeps, startDeps: LegacyServiceStartDeps, - legacyPlugins: { - pluginSpecs: LegacyPluginSpec[]; - disabledPluginSpecs: LegacyPluginSpec[]; - uiExports: SavedObjectsLegacyUiExports; - } + legacyPlugins: LegacyPlugins ) { const coreSetup: CoreSetup = { capabilities: setupDeps.core.capabilities, @@ -310,6 +284,7 @@ export class LegacyService implements CoreService { const coreStart: CoreStart = { capabilities: startDeps.core.capabilities, savedObjects: { getScopedClient: startDeps.core.savedObjects.getScopedClient }, + uiSettings: { asScopedToClient: startDeps.core.uiSettings.asScopedToClient }, }; // eslint-disable-next-line @typescript-eslint/no-var-requires @@ -335,8 +310,10 @@ export class LegacyService implements CoreService { kibanaMigrator: startDeps.core.savedObjects.migrator, uiPlugins: setupDeps.core.plugins.uiPlugins, elasticsearch: setupDeps.core.elasticsearch, + rendering: setupDeps.core.rendering, uiSettings: setupDeps.core.uiSettings, savedObjectsClientProvider: startDeps.core.savedObjects.clientProvider, + legacy: new LegacyInternals(legacyPlugins.uiExports, config, setupDeps.core.http.server), }, logger: this.coreContext.logger, }, diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.ts b/src/core/server/legacy/logging/appenders/legacy_appender.ts index 011dfae8a5cef..6d82d929e7daa 100644 --- a/src/core/server/legacy/logging/appenders/legacy_appender.ts +++ b/src/core/server/legacy/logging/appenders/legacy_appender.ts @@ -21,6 +21,7 @@ import { schema } from '@kbn/config-schema'; import { DisposableAppender } from '../../../logging/appenders/appenders'; import { LogRecord } from '../../../logging/log_record'; import { LegacyLoggingServer } from '../legacy_logging_server'; +import { LegacyVars } from 
'../../types'; /** * Simple appender that just forwards `LogRecord` to the legacy KbnServer log. @@ -34,7 +35,7 @@ export class LegacyAppender implements DisposableAppender { private readonly loggingServer: LegacyLoggingServer; - constructor(legacyLoggingConfig: Readonly>) { + constructor(legacyLoggingConfig: Readonly) { this.loggingServer = new LegacyLoggingServer(legacyLoggingConfig); } diff --git a/src/core/server/legacy/logging/legacy_logging_server.ts b/src/core/server/legacy/logging/legacy_logging_server.ts index 57706bcac2232..85a8686b4eded 100644 --- a/src/core/server/legacy/logging/legacy_logging_server.ts +++ b/src/core/server/legacy/logging/legacy_logging_server.ts @@ -25,9 +25,10 @@ import { Config } from '../../../../legacy/server/config'; import { setupLogging } from '../../../../legacy/server/logging'; import { LogLevel } from '../../logging/log_level'; import { LogRecord } from '../../logging/log_record'; +import { LegacyVars } from '../../types'; export const metadataSymbol = Symbol('log message with metadata'); -export function attachMetaData(message: string, metadata: Record = {}) { +export function attachMetaData(message: string, metadata: LegacyVars = {}) { return { [metadataSymbol]: { message, @@ -50,7 +51,7 @@ interface PluginRegisterParams { options: PluginRegisterParams['options'] ) => Promise; }; - options: Record; + options: LegacyVars; } /** @@ -84,7 +85,7 @@ export class LegacyLoggingServer { private onPostStopCallback?: () => void; - constructor(legacyLoggingConfig: Readonly>) { + constructor(legacyLoggingConfig: Readonly) { // We set `ops.interval` to max allowed number and `ops` filter to value // that doesn't exist to avoid logging of ops at all, if turned on it will be // logged by the "legacy" Kibana. diff --git a/src/legacy/ui/ui_render/lib/merge_variables.test.ts b/src/core/server/legacy/merge_vars.test.ts similarity index 58% rename from src/legacy/ui/ui_render/lib/merge_variables.test.ts rename to src/core/server/legacy/merge_vars.test.ts index 4d69216bc0bfd..d977ee292d039 100644 --- a/src/legacy/ui/ui_render/lib/merge_variables.test.ts +++ b/src/core/server/legacy/merge_vars.test.ts @@ -17,29 +17,26 @@ * under the License. 
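// --- Illustrative sketch (not part of the diff): attachMetaData() from
// legacy_logging_server.ts above stores a message and its structured metadata under a
// symbol key so the legacy logging pipeline can recover the metadata later. This is a
// self-contained copy for demonstration only; the statusCode/path metadata is made up.
const logMetadataSymbol = Symbol('log message with metadata');

function attachLogMetaData(message: string, metadata: Record<string, any> = {}) {
  return {
    [logMetadataSymbol]: {
      message,
      metadata,
    },
  };
}

const entry = attachLogMetaData('request completed', { statusCode: 200, path: '/api/status' });
// The symbol keeps the structured payload separate from the plain message string.
console.log(entry[logMetadataSymbol].message, entry[logMetadataSymbol].metadata);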
*/ -import { mergeVariables } from './merge_variables'; +import { mergeVars } from './merge_vars'; -describe('mergeVariables', () => { +describe('mergeVars', () => { it('merges two objects together', () => { - const someVariables = { - name: 'value', - canFoo: true, - nested: { - anotherVariable: 'ok', - }, - }; - - const otherVariables = { + const first = { otherName: 'value', otherCanFoo: true, otherNested: { otherAnotherVariable: 'ok', }, }; + const second = { + name: 'value', + canFoo: true, + nested: { + anotherVariable: 'ok', + }, + }; - const result = mergeVariables(someVariables, otherVariables); - - expect(result).toEqual({ + expect(mergeVars(first, second)).toEqual({ name: 'value', canFoo: true, nested: { @@ -54,86 +51,76 @@ describe('mergeVariables', () => { }); it('does not mutate the source objects', () => { - const original = { - var1: 'original', + const first = { + var1: 'first', }; - - const set1 = { - var1: 'value1', - var2: 'value1', + const second = { + var1: 'second', + var2: 'second', }; - - const set2 = { - var1: 'value2', - var2: 'value2', - var3: 'value2', + const third = { + var1: 'third', + var2: 'third', + var3: 'third', }; - - const set3 = { - var1: 'value3', - var2: 'value3', - var3: 'value3', - var4: 'value3', + const fourth = { + var1: 'fourth', + var2: 'fourth', + var3: 'fourth', + var4: 'fourth', }; - mergeVariables(original, set1, set2, set3); + mergeVars(first, second, third, fourth); - expect(original).toEqual({ var1: 'original' }); - expect(set1).toEqual({ var1: 'value1', var2: 'value1' }); - expect(set2).toEqual({ var1: 'value2', var2: 'value2', var3: 'value2' }); - expect(set3).toEqual({ var1: 'value3', var2: 'value3', var3: 'value3', var4: 'value3' }); + expect(first).toEqual({ var1: 'first' }); + expect(second).toEqual({ var1: 'second', var2: 'second' }); + expect(third).toEqual({ var1: 'third', var2: 'third', var3: 'third' }); + expect(fourth).toEqual({ var1: 'fourth', var2: 'fourth', var3: 'fourth', var4: 'fourth' }); }); - it('merges multiple objects together, preferring the leftmost values', () => { - const original = { - var1: 'original', + it('merges multiple objects together with precedence increasing from left-to-right', () => { + const first = { + var1: 'first', + var2: 'first', + var3: 'first', + var4: 'first', }; - - const set1 = { - var1: 'value1', - var2: 'value1', + const second = { + var1: 'second', + var2: 'second', + var3: 'second', }; - - const set2 = { - var1: 'value2', - var2: 'value2', - var3: 'value2', + const third = { + var1: 'third', + var2: 'third', }; - - const set3 = { - var1: 'value3', - var2: 'value3', - var3: 'value3', - var4: 'value3', + const fourth = { + var1: 'fourth', }; - const result = mergeVariables(original, set1, set2, set3); - - expect(result).toEqual({ - var1: 'original', - var2: 'value1', - var3: 'value2', - var4: 'value3', + expect(mergeVars(first, second, third, fourth)).toEqual({ + var1: 'fourth', + var2: 'third', + var3: 'second', + var4: 'first', }); }); - it('retains the original variable value if a duplicate entry is found', () => { - const someVariables = { - name: 'value', - canFoo: true, + it('overwrites the original variable value if a duplicate entry is found', () => { + const first = { nested: { - anotherVariable: 'ok', + otherAnotherVariable: 'ok', }, }; - - const otherVariables = { + const second = { + name: 'value', + canFoo: true, nested: { - otherAnotherVariable: 'ok', + anotherVariable: 'ok', }, }; - const result = mergeVariables(someVariables, otherVariables); - 
expect(result).toEqual({ + expect(mergeVars(first, second)).toEqual({ name: 'value', canFoo: true, nested: { @@ -143,55 +130,61 @@ describe('mergeVariables', () => { }); it('combines entries within "uiCapabilities"', () => { - const someVariables = { - name: 'value', - canFoo: true, + const first = { uiCapabilities: { firstCapability: 'ok', + sharedCapability: 'shared', }, }; - - const otherVariables = { + const second = { + name: 'value', + canFoo: true, uiCapabilities: { secondCapability: 'ok', }, }; + const third = { + name: 'value', + canFoo: true, + uiCapabilities: { + thirdCapability: 'ok', + sharedCapability: 'blocked', + }, + }; - const result = mergeVariables(someVariables, otherVariables); - - expect(result).toEqual({ + expect(mergeVars(first, second, third)).toEqual({ name: 'value', canFoo: true, uiCapabilities: { firstCapability: 'ok', secondCapability: 'ok', + thirdCapability: 'ok', + sharedCapability: 'blocked', }, }); }); it('does not deeply combine entries within "uiCapabilities"', () => { - const someVariables = { - name: 'value', - canFoo: true, + const first = { uiCapabilities: { firstCapability: 'ok', nestedCapability: { - nestedProp: 'nestedValue', + otherNestedProp: 'otherNestedValue', }, }, }; - - const otherVariables = { + const second = { + name: 'value', + canFoo: true, uiCapabilities: { secondCapability: 'ok', nestedCapability: { - otherNestedProp: 'otherNestedValue', + nestedProp: 'nestedValue', }, }, }; - const result = mergeVariables(someVariables, otherVariables); - expect(result).toEqual({ + expect(mergeVars(first, second)).toEqual({ name: 'value', canFoo: true, uiCapabilities: { diff --git a/src/core/server/legacy/merge_vars.ts b/src/core/server/legacy/merge_vars.ts new file mode 100644 index 0000000000000..a1d43af2f861d --- /dev/null +++ b/src/core/server/legacy/merge_vars.ts @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { LegacyVars } from './types'; + +const ELIGIBLE_FLAT_MERGE_KEYS = ['uiCapabilities']; + +export function mergeVars(...sources: LegacyVars[]): LegacyVars { + return Object.assign( + {}, + ...sources, + ...ELIGIBLE_FLAT_MERGE_KEYS.flatMap(key => + sources.some(source => key in source) + ? 
[{ [key]: Object.assign({}, ...sources.map(source => source[key] || {})) }] + : [] + ) + ); +} diff --git a/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts b/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts index 0a49154801e56..d2e7a39236d0a 100644 --- a/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts +++ b/src/core/server/legacy/plugins/find_legacy_plugin_specs.ts @@ -19,25 +19,77 @@ import { Observable, merge, forkJoin } from 'rxjs'; import { toArray, tap, distinct, map } from 'rxjs/operators'; + import { findPluginSpecs, defaultConfig, // @ts-ignore } from '../../../../legacy/plugin_discovery/find_plugin_specs.js'; -import { LoggerFactory } from '../../logging'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths import { collectUiExports as collectLegacyUiExports } from '../../../../legacy/ui/ui_exports/collect_ui_exports'; -import { LegacyConfig, LegacyConfigDeprecationProvider } from '../config'; -export interface LegacyPluginPack { - getPath(): string; +import { LoggerFactory } from '../../logging'; +import { + LegacyUiExports, + LegacyNavLink, + LegacyPluginSpec, + LegacyPluginPack, + LegacyConfig, +} from '../types'; + +const REMOVE_FROM_ARRAY: LegacyNavLink[] = []; + +function getUiAppsNavLinks({ uiAppSpecs = [] }: LegacyUiExports, pluginSpecs: LegacyPluginSpec[]) { + return uiAppSpecs.flatMap(spec => { + if (!spec) { + return REMOVE_FROM_ARRAY; + } + + const id = spec.pluginId || spec.id; + + if (!id) { + throw new Error('Every app must specify an id'); + } + + if (spec.pluginId && !pluginSpecs.some(plugin => plugin.getId() === spec.pluginId)) { + throw new Error(`Unknown plugin id "${spec.pluginId}"`); + } + + const listed = typeof spec.listed === 'boolean' ? spec.listed : true; + + if (spec.hidden || !listed) { + return REMOVE_FROM_ARRAY; + } + + return { + id, + title: spec.title, + order: typeof spec.order === 'number' ? spec.order : 0, + icon: spec.icon, + euiIconType: spec.euiIconType, + url: spec.url || `/app/${id}`, + linkToLastSubUrl: spec.linkToLastSubUrl, + }; + }); } -export interface LegacyPluginSpec { - getId: () => unknown; - getExpectedKibanaVersion: () => string; - getConfigPrefix: () => string; - getDeprecationsProvider: () => LegacyConfigDeprecationProvider | undefined; +function getNavLinks(uiExports: LegacyUiExports, pluginSpecs: LegacyPluginSpec[]) { + return (uiExports.navLinkSpecs || []) + .map(spec => ({ + id: spec.id, + title: spec.title, + order: typeof spec.order === 'number' ? spec.order : 0, + url: spec.url, + subUrlBase: spec.subUrlBase || spec.url, + icon: spec.icon, + euiIconType: spec.euiIconType, + linkToLastSub: 'linkToLastSubUrl' in spec ? spec.linkToLastSubUrl : false, + hidden: 'hidden' in spec ? spec.hidden : false, + disabled: 'disabled' in spec ? 
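// --- Illustrative usage sketch (not part of the diff) for mergeVars() from merge_vars.ts
// above: ordinary keys follow Object.assign semantics (the right-most source wins), while
// the "uiCapabilities" key is collected across all sources and flat-merged one level deep.
// The variable names and values below are made up.
import { mergeVars } from './merge_vars';

const fromCore = {
  basePath: '/core',
  uiCapabilities: { navLinks: { discover: true }, catalogue: { apm: true } },
};
const fromPlugin = {
  basePath: '/plugin',
  uiCapabilities: { navLinks: { dashboards: true } },
};

const merged = mergeVars(fromCore, fromPlugin);
// merged.basePath === '/plugin'            (later source wins)
// merged.uiCapabilities ===
//   { navLinks: { dashboards: true }, catalogue: { apm: true } }
// Both sources contribute to uiCapabilities, but the merge is shallow: the later
// `navLinks` object replaces the earlier one rather than being deep-merged, and none of
// the source objects are mutated.
console.log(merged);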
spec.disabled : false, + tooltip: spec.tooltip || '', + })) + .concat(getUiAppsNavLinks(uiExports, pluginSpecs)) + .sort((a, b) => a.order - b.order); } export async function findLegacyPluginSpecs(settings: unknown, loggerFactory: LoggerFactory) { @@ -128,11 +180,14 @@ export async function findLegacyPluginSpecs(settings: unknown, loggerFactory: Lo spec$.pipe(toArray()), log$.pipe(toArray()) ).toPromise(); + const uiExports = collectLegacyUiExports(pluginSpecs); + const navLinks = getNavLinks(uiExports, pluginSpecs); return { disabledPluginSpecs, pluginSpecs, pluginExtendedConfig: configToMutate, - uiExports: collectLegacyUiExports(pluginSpecs), + uiExports, + navLinks, }; } diff --git a/src/core/server/legacy/plugins/index.ts b/src/core/server/legacy/plugins/index.ts index 7c69546f0c4de..a6d55e1da7839 100644 --- a/src/core/server/legacy/plugins/index.ts +++ b/src/core/server/legacy/plugins/index.ts @@ -16,4 +16,5 @@ * specific language governing permissions and limitations * under the License. */ + export { findLegacyPluginSpecs } from './find_legacy_plugin_specs'; diff --git a/src/core/server/legacy/types.ts b/src/core/server/legacy/types.ts new file mode 100644 index 0000000000000..6ec893be9b310 --- /dev/null +++ b/src/core/server/legacy/types.ts @@ -0,0 +1,222 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Server } from 'hapi'; + +import { ChromeNavLink } from '../../public'; +import { LegacyRequest } from '../http'; +import { InternalCoreSetup, InternalCoreStart } from '../internal_types'; +import { PluginsServiceSetup, PluginsServiceStart } from '../plugins'; +import { RenderingServiceSetup } from '../rendering'; +import { SavedObjectsLegacyUiExports } from '../types'; + +/** + * @internal + * @deprecated + */ +export type LegacyVars = Record; + +type LegacyCoreSetup = InternalCoreSetup & { + plugins: PluginsServiceSetup; + rendering: RenderingServiceSetup; +}; +type LegacyCoreStart = InternalCoreStart & { plugins: PluginsServiceStart }; + +/** + * New platform representation of the legacy configuration (KibanaConfig) + * + * @internal + * @deprecated + */ +export interface LegacyConfig { + get(key?: string): T; + has(key: string): boolean; + set(key: string, value: any): void; + set(config: LegacyVars): void; +} + +/** + * Representation of a legacy configuration deprecation factory used for + * legacy plugin deprecations. + * + * @internal + * @deprecated + */ +export interface LegacyConfigDeprecationFactory { + rename(oldKey: string, newKey: string): LegacyConfigDeprecation; + unused(unusedKey: string): LegacyConfigDeprecation; +} + +/** + * Representation of a legacy configuration deprecation. 
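// --- Illustrative sketch (not part of the diff): the runtime shape of the nav links that
// findLegacyPluginSpecs() now derives from uiExports.navLinkSpecs and uiExports.uiAppSpecs
// and returns as `navLinks`. Ids, titles and urls are made up; the defaults mirror
// getNavLinks()/getUiAppsNavLinks() above.
const navLinkFromSpec = {
  id: 'kibana:discover',
  title: 'Discover',
  order: 0,                              // missing or non-numeric `order` defaults to 0
  url: '/app/kibana#/discover',
  subUrlBase: '/app/kibana#/discover',   // falls back to `url` when the spec has no subUrlBase
  linkToLastSub: false,                  // false unless the spec sets linkToLastSubUrl
  hidden: false,
  disabled: false,
  tooltip: '',
};

// uiAppSpecs contribute as well: the id prefers `spec.pluginId` over `spec.id`, hidden or
// unlisted apps are dropped, the url defaults to `/app/${id}`, and the combined list is
// sorted by `order`, ascending.
const navLinkFromApp = {
  id: 'my_legacy_app',
  title: 'My legacy app',
  order: 0,
  url: '/app/my_legacy_app',
  linkToLastSubUrl: undefined,
};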
+ * + * @internal + * @deprecated + */ +export type LegacyConfigDeprecation = (settings: LegacyVars, log: (msg: string) => void) => void; + +/** + * Representation of a legacy configuration deprecation provider. + * + * @internal + * @deprecated + */ +export type LegacyConfigDeprecationProvider = ( + factory: LegacyConfigDeprecationFactory +) => LegacyConfigDeprecation[] | Promise; + +/** + * @internal + * @deprecated + */ +export interface LegacyPluginPack { + getPath(): string; +} + +/** + * @internal + * @deprecated + */ +export interface LegacyPluginSpec { + getId: () => unknown; + getExpectedKibanaVersion: () => string; + getConfigPrefix: () => string; + getDeprecationsProvider: () => LegacyConfigDeprecationProvider | undefined; +} + +/** + * @internal + * @deprecated + */ +export interface VarsProvider { + fn: (server: Server, configValue: any) => LegacyVars; + pluginSpec: { + readConfigValue(config: any, key: string | string[]): any; + }; +} + +/** + * @internal + * @deprecated + */ +export type VarsInjector = () => LegacyVars; + +/** + * @internal + * @deprecated + */ +export type VarsReplacer = ( + vars: LegacyVars, + request: LegacyRequest, + server: Server +) => LegacyVars | Promise; + +/** + * @internal + * @deprecated + */ +export type LegacyNavLinkSpec = Record & ChromeNavLink; + +/** + * @internal + * @deprecated + */ +export type LegacyAppSpec = Pick< + ChromeNavLink, + 'title' | 'order' | 'icon' | 'euiIconType' | 'url' | 'linkToLastSubUrl' | 'hidden' +> & { pluginId?: string; id?: string; listed?: boolean }; + +/** + * @internal + * @deprecated + */ +export type LegacyNavLink = Omit & { + order: number; +}; + +/** + * @internal + * @deprecated + */ +export type LegacyUiExports = SavedObjectsLegacyUiExports & { + defaultInjectedVarProviders?: VarsProvider[]; + injectedVarsReplacers?: VarsReplacer[]; + navLinkSpecs?: LegacyNavLinkSpec[] | null; + uiAppSpecs?: Array; + unknown?: [{ pluginSpec: LegacyPluginSpec; type: unknown }]; +}; + +/** + * @public + * @deprecated + */ +export interface LegacyServiceSetupDeps { + core: LegacyCoreSetup; + plugins: Record; +} + +/** + * @public + * @deprecated + */ +export interface LegacyServiceStartDeps { + core: LegacyCoreStart; + plugins: Record; +} + +/** + * @internal + * @deprecated + */ +export interface ILegacyInternals { + /** + * Inject UI app vars for a particular plugin + */ + injectUiAppVars(id: string, injector: VarsInjector): void; + + /** + * Get all the merged injected UI app vars for a particular plugin + */ + getInjectedUiAppVars(id: string): Promise; + + /** + * Get the metadata vars for a particular plugin + */ + getVars(id: string, request: LegacyRequest, injected?: LegacyVars): Promise; +} + +/** + * @internal + * @deprecated + */ +export interface LegacyPlugins { + disabledPluginSpecs: LegacyPluginSpec[]; + pluginSpecs: LegacyPluginSpec[]; + uiExports: LegacyUiExports; + navLinks: LegacyNavLink[]; +} + +/** + * @internal + * @deprecated + */ +export interface LegacyServiceDiscoverPlugins extends LegacyPlugins { + pluginExtendedConfig: LegacyConfig; + settings: LegacyVars; +} diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts index 3a68b18409b0a..53849b040c413 100644 --- a/src/core/server/mocks.ts +++ b/src/core/server/mocks.ts @@ -121,6 +121,7 @@ function createCoreStartMock() { const mock: MockedKeys = { capabilities: capabilitiesServiceMock.createStartContract(), savedObjects: savedObjectsServiceMock.createStartContract(), + uiSettings: uiSettingsServiceMock.createStartContract(), }; return mock; @@ 
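// --- Illustrative sketch (not part of the diff): a deprecations provider typed against the
// LegacyConfigDeprecationProvider / LegacyConfigDeprecationFactory contracts declared in
// types.ts above. The config keys are made up, and how the provider gets wired into a
// legacy plugin spec is not shown here.
import { LegacyConfigDeprecationProvider } from './types';

const deprecationsProvider: LegacyConfigDeprecationProvider = ({ rename, unused }) => [
  // Each factory call returns a LegacyConfigDeprecation: (settings, log) => void.
  rename('myPlugin.oldSetting', 'myPlugin.newSetting'),
  unused('myPlugin.removedSetting'),
];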
-143,6 +144,7 @@ function createInternalCoreStartMock() { const startDeps: InternalCoreStart = { capabilities: capabilitiesServiceMock.createStartContract(), savedObjects: savedObjectsServiceMock.createStartContract(), + uiSettings: uiSettingsServiceMock.createStartContract(), }; return startDeps; } diff --git a/src/core/server/plugins/plugin_context.ts b/src/core/server/plugins/plugin_context.ts index 04a7547fd3747..6e9a7967e9eca 100644 --- a/src/core/server/plugins/plugin_context.ts +++ b/src/core/server/plugins/plugin_context.ts @@ -200,6 +200,11 @@ export function createPluginStartContext( capabilities: { resolveCapabilities: deps.capabilities.resolveCapabilities, }, - savedObjects: { getScopedClient: deps.savedObjects.getScopedClient }, + savedObjects: { + getScopedClient: deps.savedObjects.getScopedClient, + }, + uiSettings: { + asScopedToClient: deps.uiSettings.asScopedToClient, + }, }; } diff --git a/src/core/server/plugins/plugins_service.mock.ts b/src/core/server/plugins/plugins_service.mock.ts index 8d3c6a8c909a2..5a52ebccbd472 100644 --- a/src/core/server/plugins/plugins_service.mock.ts +++ b/src/core/server/plugins/plugins_service.mock.ts @@ -17,28 +17,28 @@ * under the License. */ -import { PluginsService } from './plugins_service'; +import { PluginsService, PluginsServiceSetup } from './plugins_service'; -type ServiceContract = PublicMethodsOf; -const createServiceMock = () => { - const mocked: jest.Mocked = { - discover: jest.fn(), - setup: jest.fn(), - start: jest.fn(), - stop: jest.fn(), - }; - mocked.setup.mockResolvedValue({ - contracts: new Map(), - uiPlugins: { - browserConfigs: new Map(), - internal: new Map(), - public: new Map(), - }, - }); - mocked.start.mockResolvedValue({ contracts: new Map() }); - return mocked; -}; +type PluginsServiceMock = jest.Mocked>; + +const createSetupContractMock = (): PluginsServiceSetup => ({ + contracts: new Map(), + uiPlugins: { + browserConfigs: new Map(), + internal: new Map(), + public: new Map(), + }, +}); +const createStartContractMock = () => ({ contracts: new Map() }); +const createServiceMock = (): PluginsServiceMock => ({ + discover: jest.fn(), + setup: jest.fn().mockResolvedValue(createSetupContractMock()), + start: jest.fn().mockResolvedValue(createStartContractMock()), + stop: jest.fn(), +}); export const pluginServiceMock = { create: createServiceMock, + createSetupContract: createSetupContractMock, + createStartContract: createStartContractMock, }; diff --git a/src/core/server/rendering/__mocks__/params.ts b/src/core/server/rendering/__mocks__/params.ts new file mode 100644 index 0000000000000..392b2f0c5e2a4 --- /dev/null +++ b/src/core/server/rendering/__mocks__/params.ts @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
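// --- Illustrative sketch (not part of the diff): with the plugin_context.ts change above,
// a plugin's start contract now exposes `uiSettings.asScopedToClient`, so request-scoped
// uiSettings can be read next to the scoped saved-objects client. The plugin class, the
// handler, and the 'src/core/server' import path are assumptions for the example.
import { CoreStart, KibanaRequest } from 'src/core/server';

class MyPlugin {
  public start(core: CoreStart) {
    return {
      async isDarkMode(request: KibanaRequest) {
        const savedObjects = core.savedObjects.getScopedClient(request);
        const uiSettings = core.uiSettings.asScopedToClient(savedObjects);
        return Boolean(await uiSettings.get('theme:darkMode'));
      },
    };
  }
}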
+ */ + +import { mockCoreContext } from '../../core_context.mock'; +import { httpServiceMock } from '../../http/http_service.mock'; +import { pluginServiceMock } from '../../plugins/plugins_service.mock'; +import { legacyServiceMock } from '../../legacy/legacy_service.mock'; + +const context = mockCoreContext.create(); +const http = httpServiceMock.createSetupContract(); +const plugins = pluginServiceMock.createSetupContract(); +const legacyPlugins = legacyServiceMock.createDiscoverPlugins(); + +export const mockRenderingServiceParams = context; +export const mockRenderingSetupDeps = { + http, + legacyPlugins, + plugins, +}; diff --git a/src/core/server/rendering/__mocks__/rendering_service.ts b/src/core/server/rendering/__mocks__/rendering_service.ts new file mode 100644 index 0000000000000..33dca7cc0d30e --- /dev/null +++ b/src/core/server/rendering/__mocks__/rendering_service.ts @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { RenderingService as Service } from '../rendering_service'; +import { RenderingServiceSetup } from '../types'; +import { mockRenderingServiceParams } from './params'; + +type IRenderingService = PublicMethodsOf; + +export const setupMock: jest.Mocked = { + render: jest.fn(), +}; +export const mockSetup = jest.fn().mockResolvedValue(setupMock); +export const mockStart = jest.fn(); +export const mockStop = jest.fn(); +export const mockRenderingService: jest.Mocked = { + setup: mockSetup, + start: mockStart, + stop: mockStop, +}; +export const RenderingService = jest.fn( + () => mockRenderingService +); diff --git a/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap b/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap new file mode 100644 index 0000000000000..edde1dee85f4f --- /dev/null +++ b/src/core/server/rendering/__snapshots__/rendering_service.test.ts.snap @@ -0,0 +1,719 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`RenderingService setup() render() renders "core" from legacy request 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + 
"translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:core", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": false, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "core" page 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:core", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": false, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "core" page driven by settings 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:core", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object { + "theme:darkMode": Object { + "userValue": true, + }, + }, + }, + "version": Any, + }, + "legacyMode": false, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "core" page for blank basepath 1`] = ` +Object { + "basePath": "", + "branch": Any, + 
"buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:core", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": false, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "core" with excluded user settings 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:core", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": false, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "legacy" page 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": 
Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:legacy", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": true, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "legacy" page for blank basepath 1`] = ` +Object { + "basePath": "", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:legacy", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": true, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "legacy" with custom vars 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:legacy", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": true, + "uiPlugins": Array [], + "vars": Object { + "fake": "__TEST_TOKEN__", + }, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "legacy" with excluded user settings 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + 
"dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:legacy", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": true, + "uiPlugins": Array [], + "vars": Object {}, + "version": Any, +} +`; + +exports[`RenderingService setup() render() renders "legacy" with excluded user settings and custom vars 1`] = ` +Object { + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNumber": Any, + "csp": Object { + "warnLegacyBrowsers": true, + }, + "env": Object { + "binDir": Any, + "cliArgs": Object { + "basePath": false, + "dev": true, + "open": false, + "optimize": false, + "oss": false, + "quiet": false, + "repl": false, + "silent": false, + "watch": false, + }, + "configDir": Any, + "configs": Array [], + "homeDir": Any, + "isDevClusterMaster": false, + "logDir": Any, + "mode": Object { + "dev": true, + "name": "development", + "prod": false, + }, + "packageInfo": Object { + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "dist": false, + "version": Any, + }, + "pluginSearchPaths": Any, + "staticFilesDir": Any, + }, + "i18n": Object { + "translationsUrl": "/mock-server-basepath/translations/en.json", + }, + "legacyMetadata": Object { + "app": Object {}, + "basePath": "/mock-server-basepath", + "branch": Any, + "buildNum": Any, + "buildSha": Any, + "bundleId": "app:legacy", + "devMode": true, + "nav": Array [], + "serverName": "http-server-test", + "uiSettings": Object { + "defaults": Object { + "registered": Object { + "name": "title", + }, + }, + "user": Object {}, + }, + "version": Any, + }, + "legacyMode": true, + "uiPlugins": Array [], + "vars": Object { + "fake": "__TEST_TOKEN__", + }, + "version": Any, +} +`; diff --git a/test/plugin_functional/plugins/ui_settings_plugin/public/index.ts b/src/core/server/rendering/index.ts similarity index 89% rename from test/plugin_functional/plugins/ui_settings_plugin/public/index.ts rename to src/core/server/rendering/index.ts index 3c5997132d460..233f4b26a70db 100644 --- a/test/plugin_functional/plugins/ui_settings_plugin/public/index.ts +++ b/src/core/server/rendering/index.ts @@ -16,6 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import { UiSettingsPlugin } from './plugin'; -export const plugin = () => new UiSettingsPlugin(); +export { RenderingService } from './rendering_service'; +export * from './types'; diff --git a/src/core/server/rendering/rendering_service.test.ts b/src/core/server/rendering/rendering_service.test.ts new file mode 100644 index 0000000000000..63145f2b30573 --- /dev/null +++ b/src/core/server/rendering/rendering_service.test.ts @@ -0,0 +1,185 @@ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { load } from 'cheerio'; + +import { httpServerMock } from '../http/http_server.mocks'; +import { uiSettingsServiceMock } from '../ui_settings/ui_settings_service.mock'; +import { mockRenderingServiceParams, mockRenderingSetupDeps } from './__mocks__/params'; +import { RenderingServiceSetup } from './types'; +import { RenderingService } from './rendering_service'; + +const INJECTED_METADATA = { + version: expect.any(String), + branch: expect.any(String), + buildNumber: expect.any(Number), + env: { + binDir: expect.any(String), + configDir: expect.any(String), + homeDir: expect.any(String), + logDir: expect.any(String), + packageInfo: { + branch: expect.any(String), + buildNum: expect.any(Number), + buildSha: expect.any(String), + version: expect.any(String), + }, + pluginSearchPaths: expect.any(Array), + staticFilesDir: expect.any(String), + }, + legacyMetadata: { + branch: expect.any(String), + buildNum: expect.any(Number), + buildSha: expect.any(String), + version: expect.any(String), + }, +}; +const { createKibanaRequest, createRawRequest } = httpServerMock; +const legacyApp = { getId: () => 'legacy' }; + +describe('RenderingService', () => { + let service: RenderingService; + + beforeEach(() => { + jest.clearAllMocks(); + service = new RenderingService(mockRenderingServiceParams); + }); + + describe('setup()', () => { + it('creates instance of RenderingServiceSetup', async () => { + const rendering = await service.setup(mockRenderingSetupDeps); + + expect(rendering.render).toBeInstanceOf(Function); + }); + + describe('render()', () => { + let uiSettings: ReturnType; + let render: RenderingServiceSetup['render']; + + beforeEach(async () => { + uiSettings = uiSettingsServiceMock.createClient(); + uiSettings.getRegistered.mockReturnValue({ + registered: { name: 'title' }, + }); + render = (await service.setup(mockRenderingSetupDeps)).render; + }); + + it('renders "core" page', async () => { + const content = await render(createKibanaRequest(), uiSettings); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "core" page for blank basepath', async () => { + mockRenderingSetupDeps.http.basePath.get.mockReturnValueOnce(''); + + const content = await render(createKibanaRequest(), uiSettings); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "core" page driven by settings', async () => { + uiSettings.getUserProvided.mockResolvedValue({ 'theme:darkMode': { userValue: true } }); + const content = await render(createKibanaRequest(), uiSettings); + const dom = load(content); + const data 
= JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "core" with excluded user settings', async () => { + const content = await render(createKibanaRequest(), uiSettings, { + includeUserSettings: false, + }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "core" from legacy request', async () => { + const content = await render(createRawRequest(), uiSettings); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "legacy" page', async () => { + const content = await render(createRawRequest(), uiSettings, { app: legacyApp }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "legacy" page for blank basepath', async () => { + mockRenderingSetupDeps.http.basePath.get.mockReturnValueOnce(''); + + const content = await render(createRawRequest(), uiSettings, { app: legacyApp }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "legacy" with custom vars', async () => { + const content = await render(createRawRequest(), uiSettings, { + app: legacyApp, + vars: { + fake: '__TEST_TOKEN__', + }, + }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "legacy" with excluded user settings', async () => { + const content = await render(createRawRequest(), uiSettings, { + app: legacyApp, + includeUserSettings: false, + }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + + it('renders "legacy" with excluded user settings and custom vars', async () => { + const content = await render(createRawRequest(), uiSettings, { + app: legacyApp, + includeUserSettings: false, + vars: { + fake: '__TEST_TOKEN__', + }, + }); + const dom = load(content); + const data = JSON.parse(dom('kbn-injected-metadata').attr('data')); + + expect(data).toMatchSnapshot(INJECTED_METADATA); + }); + }); + }); +}); diff --git a/src/core/server/rendering/rendering_service.tsx b/src/core/server/rendering/rendering_service.tsx new file mode 100644 index 0000000000000..41810c6a10655 --- /dev/null +++ b/src/core/server/rendering/rendering_service.tsx @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React from 'react'; +import { renderToStaticMarkup } from 'react-dom/server'; +import { take } from 'rxjs/operators'; + +import { i18n } from '@kbn/i18n'; + +import { CoreService } from '../../types'; +import { CoreContext } from '../core_context'; +import { Template } from './views'; +import { + RenderingSetupDeps, + RenderingServiceSetup, + RenderingMetadata, + LegacyRenderOptions, +} from './types'; + +/** @internal */ +export class RenderingService implements CoreService { + constructor(private readonly coreContext: CoreContext) {} + + public async setup({ + http, + legacyPlugins, + plugins, + }: RenderingSetupDeps): Promise { + async function getUiConfig(pluginId: string) { + const browserConfig = plugins.uiPlugins.browserConfigs.get(pluginId); + + return ((await browserConfig?.pipe(take(1)).toPromise()) ?? {}) as Record; + } + + return { + render: async ( + request, + uiSettings, + { + app = { getId: () => 'core' }, + includeUserSettings = true, + vars = {}, + }: LegacyRenderOptions = {} + ) => { + const { env } = this.coreContext; + const basePath = http.basePath.get(request); + const settings = { + defaults: uiSettings.getRegistered(), + user: includeUserSettings ? await uiSettings.getUserProvided() : {}, + }; + const appId = app.getId(); + const metadata: RenderingMetadata = { + strictCsp: http.csp.strict, + uiPublicUrl: `${basePath}/ui`, + bootstrapScriptUrl: `${basePath}/bundles/app/${appId}/bootstrap.js`, + i18n: i18n.translate, + locale: i18n.getLocale(), + darkMode: settings.user?.['theme:darkMode']?.userValue + ? Boolean(settings.user['theme:darkMode'].userValue) + : false, + injectedMetadata: { + version: env.packageInfo.version, + buildNumber: env.packageInfo.buildNum, + branch: env.packageInfo.branch, + basePath, + env, + legacyMode: appId !== 'core', + i18n: { + translationsUrl: `${basePath}/translations/${i18n.getLocale()}.json`, + }, + csp: { warnLegacyBrowsers: http.csp.warnLegacyBrowsers }, + vars, + uiPlugins: await Promise.all( + [...plugins.uiPlugins.public].map(async ([id, plugin]) => ({ + id, + plugin, + config: await getUiConfig(id), + })) + ), + legacyMetadata: { + app, + bundleId: `app:${appId}`, + nav: legacyPlugins.navLinks, + version: env.packageInfo.version, + branch: env.packageInfo.branch, + buildNum: env.packageInfo.buildNum, + buildSha: env.packageInfo.buildSha, + serverName: http.server.name, + devMode: env.mode.dev, + basePath, + uiSettings: settings, + }, + }, + }; + + return `${renderToStaticMarkup(
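// --- Illustrative usage sketch (not part of the diff): the RenderingServiceSetup contract
// added by this PR exposes a single render() function, exercised in
// rendering_service.test.ts above. The route-handler wiring below is assumed; the legacy
// app id and the `fake` var mirror the values used in the tests.
import { RenderingServiceSetup } from './types';

async function renderPages(
  rendering: RenderingServiceSetup,
  request: Parameters<RenderingServiceSetup['render']>[0],
  uiSettings: Parameters<RenderingServiceSetup['render']>[1]
) {
  // Default: the "core" app bootstrap page, including the user's uiSettings.
  const coreHtml = await rendering.render(request, uiSettings);

  // Legacy page: the app identifies itself via getId(), user settings can be skipped, and
  // extra vars end up in the injected metadata's `vars` object.
  const legacyHtml = await rendering.render(request, uiSettings, {
    app: { getId: () => 'myLegacyApp' },
    includeUserSettings: false,
    vars: { fake: '__TEST_TOKEN__' },
  });

  return { coreHtml, legacyHtml };
}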