diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f4e620dea95a9..8a8cc5c5e448c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,10 +7,12 @@ /x-pack/plugins/discover_enhanced/ @elastic/kibana-app /x-pack/plugins/lens/ @elastic/kibana-app /x-pack/plugins/graph/ @elastic/kibana-app +/src/plugins/advanced_settings/ @elastic/kibana-app /src/plugins/charts/ @elastic/kibana-app /src/plugins/dashboard/ @elastic/kibana-app /src/plugins/discover/ @elastic/kibana-app /src/plugins/input_control_vis/ @elastic/kibana-app +/src/plugins/management/ @elastic/kibana-app /src/plugins/kibana_legacy/ @elastic/kibana-app /src/plugins/vis_default_editor/ @elastic/kibana-app /src/plugins/vis_type_markdown/ @elastic/kibana-app @@ -38,7 +40,6 @@ /examples/url_generators_explorer/ @elastic/kibana-app-arch /packages/elastic-datemath/ @elastic/kibana-app-arch /packages/kbn-interpreter/ @elastic/kibana-app-arch -/src/plugins/advanced_settings/ @elastic/kibana-app-arch /src/plugins/bfetch/ @elastic/kibana-app-arch /src/plugins/data/ @elastic/kibana-app-arch /src/plugins/embeddable/ @elastic/kibana-app-arch @@ -47,7 +48,6 @@ /src/plugins/kibana_react/ @elastic/kibana-app-arch /src/plugins/kibana_react/public/code_editor @elastic/kibana-canvas /src/plugins/kibana_utils/ @elastic/kibana-app-arch -/src/plugins/management/ @elastic/kibana-app-arch /src/plugins/navigation/ @elastic/kibana-app-arch /src/plugins/share/ @elastic/kibana-app-arch /src/plugins/ui_actions/ @elastic/kibana-app-arch diff --git a/docs/management/advanced-options.asciidoc b/docs/management/advanced-options.asciidoc index ed20166c87f29..1bae04cc2e58b 100644 --- a/docs/management/advanced-options.asciidoc +++ b/docs/management/advanced-options.asciidoc @@ -1,9 +1,10 @@ [[advanced-options]] == Advanced Settings -The *Advanced Settings* UI enables you to edit settings that control the behavior of Kibana. -For example, you can change the format used to display dates, specify the default index pattern, and set the precision -for displayed decimal values. +The *Advanced Settings* UI enables you to edit settings that control the +behavior of Kibana. For example, you can change the format used to display dates, +specify the default index pattern, and set the precision for displayed decimal +values. . Open the menu, then go to *Stack Management > {kib} > Advanced Settings*. . Scroll or search for the setting you want to modify. @@ -15,8 +16,9 @@ for displayed decimal values. [[settings-read-only-access]] === [xpack]#Read only access# When you have insufficient privileges to edit advanced settings, the following -indicator in Kibana will be displayed. The buttons to edit settings won't be visible. -For more information on granting access to Kibana see <>. +indicator in Kibana will be displayed. The buttons to edit settings won't be +visible. For more information on granting access to Kibana, see +<>. [role="screenshot"] image::images/settings-read-only-badge.png[Example of Advanced Settings Management's read only access indicator in Kibana's header] @@ -25,12 +27,11 @@ image::images/settings-read-only-badge.png[Example of Advanced Settings Manageme [[kibana-settings-reference]] === Kibana settings reference -WARNING: Modifying a setting can affect {kib} -performance and cause problems that are -difficult to diagnose. Setting a property value to a blank field reverts -to the default behavior, which might not be -compatible with other configuration settings. Deleting a custom setting -removes it from {kib} permanently. 
+WARNING: Modifying a setting can affect {kib} performance and cause problems +that are difficult to diagnose. Setting a property value to a blank field +reverts to the default behavior, which might not be compatible with other +configuration settings. Deleting a custom setting removes it from {kib} +permanently. [float] @@ -38,72 +39,159 @@ removes it from {kib} permanently. ==== General [horizontal] -`csv:quoteValues`:: Set this property to `true` to quote exported values. -`csv:separator`:: A string that serves as the separator for exported values. -`dateFormat`:: The format to use for displaying https://momentjs.com/docs/#/displaying/format/[pretty formatted dates]. -`dateFormat:dow`:: The day that a week should start on. -`dateFormat:scaled`:: The values that define the format to use to render ordered time-based data. Formatted timestamps must -adapt to the interval between measurements. Keys are http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals]. -`dateFormat:tz`:: The timezone that Kibana uses. The default value of `Browser` uses the timezone detected by the browser. -`dateNanosFormat`:: The format to use for displaying https://momentjs.com/docs/#/displaying/format/[pretty formatted dates] of {ref}/date_nanos.html[Elasticsearch date_nanos type]. -`defaultIndex`:: The index to access if no index is set. The default is `null`. -`defaultRoute`:: The default route when opening Kibana. Use this setting to route users to a specific dashboard, application, or saved object as they enter each space. -`fields:popularLimit`:: The top N most popular fields to show. -`filterEditor:suggestValues`:: Set this property to `false` to prevent the filter editor from suggesting values for fields. -`filters:pinnedByDefault`:: Set this property to `true` to make filters have a global state (be pinned) by default. -`format:bytes:defaultPattern`:: The default <> format for the "bytes" format. -`format:currency:defaultPattern`:: The default <> format for the "currency" format. -`format:defaultTypeMap`:: A map of the default format name for each field type. Field types that are not explicitly -mentioned use "\_default_". -`format:number:defaultLocale`:: The <> locale. -`format:number:defaultPattern`:: The <> for the "number" format. -`format:percent:defaultPattern`:: The <> for the "percent" format. -`histogram:barTarget`:: When date histograms use the `auto` interval, Kibana attempts to generate this number of bars. -`histogram:maxBars`:: Date histograms are not generated with more bars than the value of this property, scaling values -when necessary. -`history:limit`:: In fields that have history, such as query inputs, show this many recent values. -`indexPattern:placeholder`:: The default placeholder value to use in Management > Index Patterns > Create Index Pattern. -`metaFields`:: Fields that exist outside of `_source`. Kibana merges these fields -into the document when displaying it. -`metrics:max_buckets`:: The maximum numbers of buckets that a single -data source can return. This might arise when the user selects a -short interval (for example, 1s) for a long time period (1 year). -`pageNavigation`:: The style of navigation menu for Kibana. -Choices are Legacy, the legacy style where every plugin is represented in the nav, -and Modern, a new format that bundles related plugins together in flyaway nested navigation. -`query:allowLeadingWildcards`:: Allows a wildcard (*) as the first character -in a query clause. 
Only applies when experimental query features are -enabled in the query bar. To disallow leading wildcards in Lucene queries, -use `query:queryString:options`. -`query:queryString:options`:: Options for the Lucene query string parser. Only -used when "Query language" is set to Lucene. -`savedObjects:listingLimit`:: The number of objects to fetch for lists of saved objects. -The default value is 1000. Do not set above 10000. -`savedObjects:perPage`:: The number of objects to show on each page of the -list of saved objects. The default is 5. -`search:queryLanguage`:: The query language to use in the query bar. -Choices are <>, a language built specifically for {kib}, and the <>. -`shortDots:enable`:: Set this property to `true` to shorten long -field names in visualizations. For example, show `f.b.baz` instead of `foo.bar.baz`. -`sort:options`:: Options for the Elasticsearch {ref}/search-request-body.html#request-body-search-sort[sort] parameter. -`state:storeInSessionStorage`:: [experimental] Kibana tracks UI state in the -URL, which can lead to problems when there is a lot of state information, -and the URL gets very long. -Enabling this setting stores part of the URL in your browser session to keep the -URL short. -`theme:darkMode`:: Set to `true` to enable a dark mode for the {kib} UI. You must -refresh the page to apply the setting. -`timepicker:quickRanges`:: The list of ranges to show in the Quick section of -the time filter. This should be an array of objects, with each object containing -`from`, `to` (see {ref}/common-options.html#date-math[accepted formats]), -and `display` (the title to be displayed). -`timepicker:refreshIntervalDefaults`:: The default refresh interval for the time filter. Example: `{ "display": "15 seconds", "pause": true, "value": 15000 }`. -`timepicker:timeDefaults`:: The default selection in the time filter. -`truncate:maxHeight`:: The maximum height that a cell occupies in a table. Set to 0 to disable +[[csv-quotevalues]]`csv:quoteValues`:: +Set this property to `true` to quote exported values. + +[[csv-separator]]`csv:separator`:: +A string that serves as the separator for exported values. + +[[dateformat]]`dateFormat`:: +The format to use for displaying +https://momentjs.com/docs/#/displaying/format/[pretty formatted dates]. + +[[dateformat-dow]]`dateFormat:dow`:: +The day that a week should start on. + +[[dateformat-scaled]]`dateFormat:scaled`:: +The values that define the format to use to render ordered time-based data. +Formatted timestamps must adapt to the interval between measurements. Keys are +http://en.wikipedia.org/wiki/ISO_8601#Time_intervals[ISO8601 intervals]. + +[[dateformat-tz]]`dateFormat:tz`:: +The timezone that Kibana uses. The default value of `Browser` uses the timezone +detected by the browser. + +[[datenanosformat]]`dateNanosFormat`:: +The format to use for displaying +https://momentjs.com/docs/#/displaying/format/[pretty formatted dates] of +{ref}/date_nanos.html[Elasticsearch date_nanos type]. + +[[defaultindex]]`defaultIndex`:: +The index to access if no index is set. The default is `null`. + +[[defaultroute]]`defaultRoute`:: +The default route when opening Kibana. Use this setting to route users to a +specific dashboard, application, or saved object as they enter each space. + +[[fields-popularlimit]]`fields:popularLimit`:: +The top N most popular fields to show. + +[[filtereditor-suggestvalues]]`filterEditor:suggestValues`:: +Set this property to `false` to prevent the filter editor from suggesting values +for fields. 
+ +[[filters-pinnedbydefault]]`filters:pinnedByDefault`:: +Set this property to `true` to make filters have a global state (be pinned) by +default. + +[[format-bytes-defaultpattern]]`format:bytes:defaultPattern`:: +The default <> format for the "bytes" format. + +[[format-currency-defaultpattern]]`format:currency:defaultPattern`:: +The default <> format for the "currency" format. + +[[format-defaulttypemap]]`format:defaultTypeMap`:: +A map of the default format name for each field type. Field types that are not +explicitly mentioned use "\_default_". + +[[format-number-defaultlocale]]`format:number:defaultLocale`:: +The <> locale. + +[[format-number-defaultpattern]]`format:number:defaultPattern`:: +The <> for the "number" format. + +[[format-percent-defaultpattern]]`format:percent:defaultPattern`:: +The <> for the "percent" format. + +[[histogram-bartarget]]`histogram:barTarget`:: +When date histograms use the `auto` interval, Kibana attempts to generate this +number of bars. + +[[histogram-maxbars]]`histogram:maxBars`:: +Date histograms are not generated with more bars than the value of this property, +scaling values when necessary. + +[[history-limit]]`history:limit`:: +In fields that have history, such as query inputs, show this many recent values. + +[[indexpattern-placeholder]]`indexPattern:placeholder`:: +The default placeholder value to use in +*Management > Index Patterns > Create Index Pattern*. + +[[metafields]]`metaFields`:: +Fields that exist outside of `_source`. Kibana merges these fields into the +document when displaying it. + +[[metrics-maxbuckets]]`metrics:max_buckets`:: +The maximum numbers of buckets that a single data source can return. This might +arise when the user selects a short interval (for example, 1s) for a long time +period (1 year). + +[[pagenavigation]]`pageNavigation`:: +The style of navigation menu for Kibana. Choices are Legacy, the legacy style +where every plugin is represented in the nav, and Modern, a new format that +bundles related plugins together in flyaway nested navigation. + +[[query-allowleadingwildcards]]`query:allowLeadingWildcards`:: +Allows a wildcard (*) as the first character in a query clause. Only applies +when experimental query features are enabled in the query bar. To disallow +leading wildcards in Lucene queries, use `query:queryString:options`. + +[[query-querystring-options]]`query:queryString:options`:: +Options for the Lucene query string parser. Only used when "Query language" is +set to Lucene. + +[[savedobjects-listinglimit]]`savedObjects:listingLimit`:: +The number of objects to fetch for lists of saved objects. The default value +is 1000. Do not set above 10000. + +[[savedobjects-perpage]]`savedObjects:perPage`:: +The number of objects to show on each page of the list of saved objects. The +default is 5. + +[[search-querylanguage]]`search:queryLanguage`:: +The query language to use in the query bar. Choices are <>, a +language built specifically for {kib}, and the +<>. + +[[shortdots-enable]]`shortDots:enable`:: +Set this property to `true` to shorten long field names in visualizations. For +example, show `f.b.baz` instead of `foo.bar.baz`. + +[[sort-options]]`sort:options`:: Options for the Elasticsearch +{ref}/search-request-body.html#request-body-search-sort[sort] parameter. + +[[state-storeinsessionstorage]]`state:storeInSessionStorage`:: +experimental:[] +Kibana tracks UI state in the URL, which can lead to problems +when there is a lot of state information, and the URL gets very long. 
Enabling +this setting stores part of the URL in your browser session to keep the URL +short. + +[[theme-darkmode]]`theme:darkMode`:: +Set to `true` to enable a dark mode for the {kib} UI. You must refresh the page +to apply the setting. + +[[timepicker-quickranges]]`timepicker:quickRanges`:: +The list of ranges to show in the Quick section of the time filter. This should +be an array of objects, with each object containing `from`, `to` (see +{ref}/common-options.html#date-math[accepted formats]), and `display` (the title +to be displayed). + +[[timepicker-refreshintervaldefaults]]`timepicker:refreshIntervalDefaults`:: +The default refresh interval for the time filter. Example: +`{ "display": "15 seconds", "pause": true, "value": 15000 }`. + +[[timepicker-timedefaults]]`timepicker:timeDefaults`:: +The default selection in the time filter. + +[[truncate-maxheight]]`truncate:maxHeight`:: +The maximum height that a cell occupies in a table. Set to 0 to disable truncation. -`xPack:defaultAdminEmail`:: Email address for X-Pack admin operations, such as -cluster alert notifications from Monitoring. + +[[xpack-defaultadminemail]]`xPack:defaultAdminEmail`:: +Email address for {xpack} admin operations, such as cluster alert notifications +from *{stack-monitor-app}*. [float] @@ -111,15 +199,17 @@ cluster alert notifications from Monitoring. ==== Accessibility [horizontal] -`accessibility:disableAnimations`:: Turns off all unnecessary animations in the -{kib} UI. Refresh the page to apply the changes. +[[accessibility-disableanimations]]`accessibility:disableAnimations`:: +Turns off all unnecessary animations in the {kib} UI. Refresh the page to apply +the changes. [float] [[kibana-dashboard-settings]] ==== Dashboard [horizontal] -`xpackDashboardMode:roles`:: **Deprecated. Use <> instead.** +[[xpackdashboardmode-roles]]`xpackDashboardMode:roles`:: +**Deprecated. Use <> instead.** The roles that belong to <>. [float] @@ -127,38 +217,63 @@ The roles that belong to <>. ==== Discover [horizontal] -`context:defaultSize`:: The number of surrounding entries to display in the context view. The default value is 5. -`context:step`:: The number by which to increment or decrement the context size. The default value is 5. -`context:tieBreakerFields`:: A comma-separated list of fields to use -for breaking a tie between documents that have the same timestamp value. The first -field that is present and sortable in the current index pattern is used. -`defaultColumns`:: The columns that appear by default on the Discover page. -The default is `_source`. -`discover:aggs:terms:size`:: The number terms that are visualized when clicking -the Visualize button in the field drop down. The default is `20`. -`discover:sampleSize`:: The number of rows to show in the Discover table. -`discover:sort:defaultOrder`:: The default sort direction for time-based index patterns. -`discover:searchOnPageLoad`:: Controls whether a search is executed when Discover first loads. -This setting does not have an effect when loading a saved search. -`doc_table:hideTimeColumn`:: Hides the "Time" column in Discover and in all saved searches on dashboards. -`doc_table:highlight`:: Highlights results in Discover and saved searches on dashboards. -Highlighting slows requests when -working on big documents. +[[context-defaultsize]]`context:defaultSize`:: +The number of surrounding entries to display in the context view. The default +value is 5. + +[[context-step]]`context:step`:: +The number by which to increment or decrement the context size. 
The default +value is 5. + +[[context-tiebreakerfields]]`context:tieBreakerFields`:: +A comma-separated list of fields to use for breaking a tie between documents +that have the same timestamp value. The first field that is present and sortable +in the current index pattern is used. + +[[defaultcolumns]]`defaultColumns`:: +The columns that appear by default on the *Discover* page. The default is +`_source`. + +[[discover-aggs-terms-size]]`discover:aggs:terms:size`:: +The number terms that are visualized when clicking the *Visualize* button in the +field drop down. The default is `20`. + +[[discover-samplesize]]`discover:sampleSize`:: +The number of rows to show in the *Discover* table. + +[[discover-sort-defaultorder]]`discover:sort:defaultOrder`:: +The default sort direction for time-based index patterns. + +[[discover-searchonpageload]]`discover:searchOnPageLoad`:: +Controls whether a search is executed when *Discover* first loads. This setting +does not have an effect when loading a saved search. + +[[doctable-hidetimecolumn]]`doc_table:hideTimeColumn`:: +Hides the "Time" column in *Discover* and in all saved searches on dashboards. + +[[doctable-highlight]]`doc_table:highlight`:: +Highlights results in *Discover* and saved searches on dashboards. Highlighting +slows requests when working on big documents. [float] [[kibana-ml-settings]] ==== Machine learning [horizontal] -`ml:anomalyDetection:results:enableTimeDefaults`:: Use the default time filter -in the *Single Metric Viewer* and *Anomaly Explorer*. If this setting is -disabled, the results for the full time range are shown. -`ml:anomalyDetection:results:timeDefaults`:: Sets the default time filter for -viewing {anomaly-job} results. This setting must contain `from` and `to` values (see {ref}/common-options.html#date-math[accepted formats]). It is ignored -unless `ml:anomalyDetection:results:enableTimeDefaults` is enabled. -`ml:fileDataVisualizerMaxFileSize`:: Sets the file size limit when importing -data in the {data-viz}. The default value is `100MB`. The highest supported -value for this setting is `1GB`. +[[ml-anomalydetection-results-enabletimedefaults]]`ml:anomalyDetection:results:enableTimeDefaults`:: +Use the default time filter in the *Single Metric Viewer* and +*Anomaly Explorer*. If this setting is disabled, the results for the full time +range are shown. + +[[ml-anomalydetection-results-timedefaults]]`ml:anomalyDetection:results:timeDefaults`:: +Sets the default time filter for viewing {anomaly-job} results. This setting +must contain `from` and `to` values (see +{ref}/common-options.html#date-math[accepted formats]). It is ignored unless +`ml:anomalyDetection:results:enableTimeDefaults` is enabled. + +[[ml-filedatavisualizermaxfilesize]]`ml:fileDataVisualizerMaxFileSize`:: +Sets the file size limit when importing data in the {data-viz}. The default +value is `100MB`. The highest supported value for this setting is `1GB`. [float] @@ -166,18 +281,26 @@ value for this setting is `1GB`. ==== Notifications [horizontal] -`notifications:banner`:: A custom banner intended for temporary notices to all users. -Supports https://help.github.com/en/articles/basic-writing-and-formatting-syntax[Markdown]. -`notifications:lifetime:banner`:: The duration, in milliseconds, for banner -notification displays. The default value is 3000000. Set this field to `Infinity` -to disable banner notifications. -`notifications:lifetime:error`:: The duration, in milliseconds, for error -notification displays. The default value is 300000. 
Set this field to `Infinity` to disable error notifications. -`notifications:lifetime:info`:: The duration, in milliseconds, for information notification displays. -The default value is 5000. Set this field to `Infinity` to disable information notifications. -`notifications:lifetime:warning`:: The duration, in milliseconds, for warning notification -displays. The default value is 10000. Set this field to `Infinity` to disable warning notifications. +[[notifications-banner]]`notifications:banner`:: +A custom banner intended for temporary notices to all users. Supports +https://help.github.com/en/articles/basic-writing-and-formatting-syntax[Markdown]. +[[notifications-lifetime-banner]]`notifications:lifetime:banner`:: +The duration, in milliseconds, for banner notification displays. The default +value is 3000000. Set this field to `Infinity` to disable banner notifications. + +[[notificatios-lifetime-error]]`notifications:lifetime:error`:: +The duration, in milliseconds, for error notification displays. The default +value is 300000. Set this field to `Infinity` to disable error notifications. + +[[notifications-lifetime-info]]`notifications:lifetime:info`:: +The duration, in milliseconds, for information notification displays. The +default value is 5000. Set this field to `Infinity` to disable information +notifications. + +[[notifications-lifetime-warning]]`notifications:lifetime:warning`:: +The duration, in milliseconds, for warning notification displays. The default +value is 10000. Set this field to `Infinity` to disable warning notifications. [float] @@ -185,7 +308,8 @@ displays. The default value is 10000. Set this field to `Infinity` to disable wa ==== Reporting [horizontal] -`xpackReporting:customPdfLogo`:: A custom image to use in the footer of the PDF. +[[xpackreporting-custompdflogo]]`xpackReporting:customPdfLogo`:: +A custom image to use in the footer of the PDF. [float] @@ -193,9 +317,10 @@ displays. The default value is 10000. Set this field to `Infinity` to disable wa ==== Rollup [horizontal] -`rollups:enableIndexPatterns`:: Enables the creation of index patterns that -capture rollup indices, which in turn enables visualizations based on rollup data. -Refresh the page to apply the changes. +[[rollups-enableindexpatterns]]`rollups:enableIndexPatterns`:: +Enables the creation of index patterns that capture rollup indices, which in +turn enables visualizations based on rollup data. Refresh the page to apply the +changes. [float] @@ -203,67 +328,117 @@ Refresh the page to apply the changes. ==== Search [horizontal] -`courier:batchSearches`:: **Deprecated in 7.6. Starting in 8.0, this setting will be optimized internally.** -When disabled, dashboard panels will load individually, and search requests will terminate when -users navigate away or update the query. When enabled, dashboard panels will load together when all of the data is loaded, -and searches will not terminate. -`courier:customRequestPreference`:: {ref}/search-request-body.html#request-body-search-preference[Request preference] +[[courier-batchsearches]]`courier:batchSearches`:: +**Deprecated in 7.6. Starting in 8.0, this setting will be optimized internally.** +When disabled, dashboard panels will load individually, and search requests will +terminate when users navigate away or update the query. When enabled, dashboard +panels will load together when all of the data is loaded, and searches will not +terminate. 
+ +[[courier-customrequestpreference]]`courier:customRequestPreference`:: +{ref}/search-request-body.html#request-body-search-preference[Request preference] to use when `courier:setRequestPreference` is set to "custom". -`courier:ignoreFilterIfFieldNotInIndex`:: Skips filters that apply to fields that don't exist in the index for a visualization. -Useful when dashboards consist of visualizations from multiple index patterns. -`courier:maxConcurrentShardRequests`:: Controls the {ref}/search-multi-search.html[max_concurrent_shard_requests] + +[[courier-ignorefilteriffieldnotinindex]]`courier:ignoreFilterIfFieldNotInIndex`:: +Skips filters that apply to fields that don't exist in the index for a +visualization. Useful when dashboards consist of visualizations from multiple +index patterns. + +[[courier-maxconcurrentshardrequests]]`courier:maxConcurrentShardRequests`:: +Controls the {ref}/search-multi-search.html[max_concurrent_shard_requests] setting used for `_msearch` requests sent by {kib}. Set to 0 to disable this config and use the {es} default. -`courier:setRequestPreference`:: Enables you to set which shards handle your search requests. -* *Session ID:* Restricts operations to execute all search requests on the same shards. -This has the benefit of reusing shard caches across requests. -* *Custom:* Allows you to define your own preference. Use `courier:customRequestPreference` -to customize your preference value. + +[[courier-setrequestpreference]]`courier:setRequestPreference`:: +Enables you to set which shards handle your search requests. +* *Session ID:* Restricts operations to execute all search requests on the same +shards. This has the benefit of reusing shard caches across requests. +* *Custom:* Allows you to define your own preference. Use +`courier:customRequestPreference` to customize your preference value. * *None:* Do not set a preference. This might provide better performance because requests can be spread across all shard copies. However, results might be inconsistent because different shards might be in different refresh states. -`search:includeFrozen`:: Includes {ref}/frozen-indices.html[frozen indices] in results. -Searching through frozen indices -might increase the search time. This setting is off by default. Users must opt-in to include frozen indices. -`search:timeout`:: Change the maximum timeout for a search session or set to 0 to disable the timeout and allow queries to run to completion. + +[[search-includefrozen]]`search:includeFrozen`:: +Includes {ref}/frozen-indices.html[frozen indices] in results. Searching through +frozen indices might increase the search time. This setting is off by default. +Users must opt-in to include frozen indices. + +[[search-timeout]]`search:timeout`:: Change the maximum timeout for a search +session or set to 0 to disable the timeout and allow queries to run to +completion. [float] [[kibana-siem-settings]] -==== Security Solution +==== Security solution [horizontal] -`securitySolution:defaultAnomalyScore`:: The threshold above which Machine Learning job anomalies are displayed in the Security app. -`securitySolution:defaultIndex`:: A comma-delimited list of Elasticsearch indices from which the Security app collects events. -`securitySolution:ipReputationLinks`:: A JSON array containing links for verifying the reputation of an IP address. The links are displayed on -{security-guide}/network-page-overview.html[IP detail] pages. -`securitySolution:enableNewsFeed`:: Enables the security news feed on the Security *Overview* -page. 
-`securitySolution:newsFeedUrl`:: The URL from which the security news feed content is -retrieved. -`securitySolution:refreshIntervalDefaults`:: The default refresh interval for the Security time filter, in milliseconds. -`securitySolution:timeDefaults`:: The default period of time in the Security time filter. +[[securitysolution-defaultanomalyscore]]`securitySolution:defaultAnomalyScore`:: +The threshold above which {ml} job anomalies are displayed in the {security-app}. + +[[securitysolution-defaultindex]]`securitySolution:defaultIndex`:: +A comma-delimited list of {es} indices from which the {security-app} collects +events. + +[[securitysolution-ipreputationlinks]]`securitySolution:ipReputationLinks`:: +A JSON array containing links for verifying the reputation of an IP address. The +links are displayed on {security-guide}/network-page-overview.html[IP detail] +pages. + +[[securitysolution-enablenewsfeed]]`securitySolution:enableNewsFeed`:: Enables +the security news feed on the Security *Overview* page. + +[[securitysolution-newsfeedurl]]`securitySolution:newsFeedUrl`:: +The URL from which the security news feed content is retrieved. + +[[securitysolution-refreshintervaldefaults]]`securitySolution:refreshIntervalDefaults`:: +The default refresh interval for the Security time filter, in milliseconds. + +[[securitysolution-timedefaults]]`securitySolution:timeDefaults`:: +The default period of time in the Security time filter. [float] [[kibana-timelion-settings]] ==== Timelion [horizontal] -`timelion:default_columns`:: The default number of columns to use on a Timelion sheet. -`timelion:default_rows`:: The default number of rows to use on a Timelion sheet. -`timelion:es.default_index`:: The default index when using the `.es()` query. -`timelion:es.timefield`:: The default field containing a timestamp when using the `.es()` query. -`timelion:graphite.url`:: [experimental] Used with graphite queries, this is the URL of your graphite host -in the form https://www.hostedgraphite.com/UID/ACCESS_KEY/graphite. This URL can be -selected from an allow-list configured in the `kibana.yml` under `timelion.graphiteUrls`. -`timelion:max_buckets`:: The maximum number of buckets a single data source can return. -This value is used for calculating automatic intervals in visualizations. -`timelion:min_interval`:: The smallest interval to calculate when using "auto". -`timelion:quandl.key`:: [experimental] Used with quandl queries, this is your API key from https://www.quandl.com/[www.quandl.com]. -`timelion:showTutorial`:: Shows the Timelion tutorial -to users when they first open the Timelion app. -`timelion:target_buckets`:: Used for calculating automatic intervals in visualizations, -this is the number of buckets to try to represent. +[[timelion-defaultcolumns]]`timelion:default_columns`:: +The default number of columns to use on a Timelion sheet. + +[[timelion-defaultrows]]`timelion:default_rows`:: +The default number of rows to use on a Timelion sheet. + +[[timelion-esdefaultindex]]`timelion:es.default_index`:: +The default index when using the `.es()` query. + +[[timelion-estimefield]]`timelion:es.timefield`:: +The default field containing a timestamp when using the `.es()` query. + +[[timelion-graphite-url]]`timelion:graphite.url`:: +experimental:[] +Used with graphite queries, this is the URL of your graphite host +in the form https://www.hostedgraphite.com/UID/ACCESS_KEY/graphite. This URL can +be selected from an allow-list configured in the `kibana.yml` under +`timelion.graphiteUrls`. 
+ +[[timelion-maxbuckets]]`timelion:max_buckets`:: +The maximum number of buckets a single data source can return. This value is +used for calculating automatic intervals in visualizations. +[[timelion-mininterval]]`timelion:min_interval`:: +The smallest interval to calculate when using "auto". + +[[timelion-quandlkey]]`timelion:quandl.key`:: +experimental:[] +Used with quandl queries, this is your API key from +https://www.quandl.com/[www.quandl.com]. + +[[timelion-showtutorial]]`timelion:showTutorial`:: +Shows the Timelion tutorial to users when they first open the Timelion app. + +[[timelion-targetbuckets]]`timelion:target_buckets`:: +Used for calculating automatic intervals in visualizations, this is the number +of buckets to try to represent. [float] @@ -271,20 +446,32 @@ this is the number of buckets to try to represent. ==== Visualization [horizontal] -`visualization:colorMapping`:: Maps values to specified colors in visualizations. -`visualization:dimmingOpacity`:: The opacity of the chart items that are dimmed -when highlighting another element of the chart. The lower this number, the more -the highlighted element stands out. This must be a number between 0 and 1. -`visualization:loadingDelay`:: The time to wait before dimming visualizations -during a query. -`visualization:regionmap:showWarnings`:: Shows -a warning in a region map when terms cannot be joined to a shape. -`visualization:tileMap:WMSdefaults`:: The default properties for the WMS map server support in the coordinate map. -`visualization:tileMap:maxPrecision`:: The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high, +[[visualization-colormapping]]`visualization:colorMapping`:: +Maps values to specified colors in visualizations. + +[[visualization-dimmingopacity]]`visualization:dimmingOpacity`:: +The opacity of the chart items that are dimmed when highlighting another element +of the chart. The lower this number, the more the highlighted element stands out. +This must be a number between 0 and 1. + +[[visualization-loadingdelay]]`visualization:loadingDelay`:: +The time to wait before dimming visualizations during a query. + +[[visualization-regionmap-showwarnings]]`visualization:regionmap:showWarnings`:: +Shows a warning in a region map when terms cannot be joined to a shape. + +[[visualization-tilemap-wmsdefaults]]`visualization:tileMap:WMSdefaults`:: +The default properties for the WMS map server support in the coordinate map. + +[[visualization-tilemap-maxprecision]]`visualization:tileMap:maxPrecision`:: +The maximum geoHash precision displayed on tile maps: 7 is high, 10 is very high, and 12 is the maximum. See this {ref}/search-aggregations-bucket-geohashgrid-aggregation.html#_cell_dimensions_at_the_equator[explanation of cell dimensions]. -`visualize:enableLabs`:: Enables users to create, view, and edit experimental visualizations. -If disabled, only visualizations that are considered production-ready are available to the user. + +[[visualize-enablelabs]]`visualize:enableLabs`:: +Enables users to create, view, and edit experimental visualizations. If disabled, +only visualizations that are considered production-ready are available to the +user. 
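+
+For example, `visualization:colorMapping` expects a JSON object that maps a
+value (such as a series label) to a color. The labels and hex colors below are
+purely illustrative, not defaults:
+
+[source,json]
+----
+{
+  "Count": "#6092C0",
+  "Average bytes": "#D36086"
+}
+----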
[float] diff --git a/docs/management/images/add_remote_cluster.png b/docs/management/images/add_remote_cluster.png deleted file mode 100755 index 160d29b741c62..0000000000000 Binary files a/docs/management/images/add_remote_cluster.png and /dev/null differ diff --git a/docs/management/images/auto_follow_pattern.png b/docs/management/images/auto_follow_pattern.png deleted file mode 100755 index f80de9352280f..0000000000000 Binary files a/docs/management/images/auto_follow_pattern.png and /dev/null differ diff --git a/docs/management/images/cross-cluster-replication-list-view.png b/docs/management/images/cross-cluster-replication-list-view.png deleted file mode 100755 index 4c45174cff7f1..0000000000000 Binary files a/docs/management/images/cross-cluster-replication-list-view.png and /dev/null differ diff --git a/docs/management/images/remote-clusters-list-view.png b/docs/management/images/remote-clusters-list-view.png deleted file mode 100755 index c28379863b74b..0000000000000 Binary files a/docs/management/images/remote-clusters-list-view.png and /dev/null differ diff --git a/docs/management/managing-ccr.asciidoc b/docs/management/managing-ccr.asciidoc deleted file mode 100644 index 9c06e479e28b2..0000000000000 --- a/docs/management/managing-ccr.asciidoc +++ /dev/null @@ -1,80 +0,0 @@ -[role="xpack"] -[[managing-cross-cluster-replication]] -== Cross-Cluster Replication - -Use *Cross-Cluster Replication* to reproduce indices in -remote clusters on a local cluster. {ref}/xpack-ccr.html[Cross-cluster replication] -is commonly used to provide remote backups for disaster recovery and for -geo-proximite copies of data. - -To get started, open the menu, then go to *Stack Management > Data > Cross-Cluster Replication*. - -[role="screenshot"] -image::images/cross-cluster-replication-list-view.png[][Cross-cluster replication list view] - -[float] -=== Prerequisites - -* You must have a {ref}/modules-remote-clusters.html[remote cluster]. -* Leader indices must meet {ref}/ccr-requirements.html[these requirements]. -* The Elasticsearch version of the local cluster must be the same as or newer than the remote cluster. -Refer to {ref}/ccr-overview.html[this document] for more information. - -[float] -=== Required permissions - -The `manage` and `manage_ccr` cluster privileges are required to access *Cross-Cluster Replication*. - -You can add these privileges in *Stack Management > Security > Roles*. - -[float] -[[configure-replication]] -=== Configure replication - -Replication requires a leader index, the index being replicated, and a -follower index, which will contain the leader index's replicated data. -The follower index is passive in that it can read requests and searches, -but cannot accept direct writes. Only the leader index is active for direct writes. - -You can configure follower indices in two ways: - -* Create specific follower indices -* Create follower indices from an auto-follow pattern - -[float] -==== Create specific follower indices - -To replicate data from existing indices, or set up local followers on a case-by-case basis, -go to *Follower indices*. When you create the follower index, you must reference the -remote cluster and the leader index that you created in the remote cluster. - -[role="screenshot"] -image::images/follower_indices.png[][UI for adding follower indices] - -[float] -==== Create follower indices from an auto-follow pattern - -To automatically detect and follow new indices when they are created on a remote cluster, -go to *Auto-follow patterns*. 
Creating an auto-follow pattern is useful when you have -time series data, like event logs, on the remote cluster that is created or rolled over on a daily basis. - -When creating the pattern, you must reference the remote cluster that you -connected to your local cluster. You must also specify a collection of index patterns -that match the indices you want to automatically follow. - -Once you configure an -auto-follow pattern, any time a new index with a name that matches the pattern is -created in the remote cluster, a follower index is automatically configured in the local cluster. - -[role="screenshot"] -image::images/auto_follow_pattern.png[UI for adding an auto-follow pattern] - -[float] -[[manage-replication]] -=== Manage replication - -Use the list views in *Cross-Cluster Replication* to monitor whether the replication is active and -pause and resume replication. You can also edit and remove the follower indices and auto-follow patterns. - -For an example of cross-cluster replication, -refer to https://www.elastic.co/blog/bi-directional-replication-with-elasticsearch-cross-cluster-replication-ccr[Bi-directional replication with Elasticsearch cross-cluster replication]. diff --git a/docs/management/managing-remote-clusters.asciidoc b/docs/management/managing-remote-clusters.asciidoc deleted file mode 100644 index 92e0fa822b056..0000000000000 --- a/docs/management/managing-remote-clusters.asciidoc +++ /dev/null @@ -1,50 +0,0 @@ -[[working-remote-clusters]] -== Remote Clusters - -Use *Remote Clusters* to establish a unidirectional -connection from your cluster to other clusters. This functionality is -required for {ref}/xpack-ccr.html[cross-cluster replication] and -{ref}/modules-cross-cluster-search.html[cross-cluster search]. - -To get started, open the menu, then go to *Stack Management > Data > Remote Clusters*. - -[role="screenshot"] -image::images/remote-clusters-list-view.png[Remote Clusters list view, including Add a remote cluster button] - -[float] -=== Required permissions - -The `manage` cluster privilege is required to access *Remote Clusters*. - -You can add this privilege in *Stack Management > Security > Roles*. - -[float] -[[managing-remote-clusters]] -=== Add a remote cluster - -A {ref}/modules-remote-clusters.html[remote cluster] connection works by configuring a remote cluster and -connecting to a limited number of nodes, called {ref}/modules-remote-clusters.html#sniff-mode[seed nodes], -in that cluster. -Alternatively, you can define a single proxy address for the remote cluster. - -By default, a cross-cluster request, such as a cross-cluster search or -replication request, fails if any cluster in the request is unavailable. -To skip a cluster when its unavailable, -set *Skip if unavailable* to true. - -Once you add a remote cluster, you can configure <> -to reproduce indices in the remote cluster on a local cluster. - -[role="screenshot"] -image::images/add_remote_cluster.png[][UI for adding a remote cluster] - -To create an index pattern to search across clusters, -use the same syntax that you’d use in a raw cross-cluster search request in {es}: :. -See <> for examples. - -[float] -[[manage-remote-clusters]] -=== Manage remote clusters - -From the *Remote Clusters* list view, you can drill down into each cluster and -view its status. You can also edit and delete a cluster. 
diff --git a/docs/redirects.asciidoc b/docs/redirects.asciidoc index 42d1d89145d79..5067bc08bec99 100644 --- a/docs/redirects.asciidoc +++ b/docs/redirects.asciidoc @@ -100,3 +100,15 @@ This content has moved to the <> page. == TSVB This page was deleted. See <>. + +[role="exclude",id="managing-cross-cluster-replication"] +== Cross-Cluster Replication + +This content has moved. See +{ref}/ccr-getting-started.html[Set up cross-cluster replication]. + +[role="exclude",id="working-remote-clusters"] +== Remote clusters + +This content has moved. See +{ref}/ccr-getting-started.html#ccr-getting-started-remote-cluster[Connect to a remote cluster]. diff --git a/docs/settings/monitoring-settings.asciidoc b/docs/settings/monitoring-settings.asciidoc index 6c8632efa9cc0..917821ad09e2f 100644 --- a/docs/settings/monitoring-settings.asciidoc +++ b/docs/settings/monitoring-settings.asciidoc @@ -33,7 +33,7 @@ For more information, see |=== | `monitoring.enabled` | Set to `true` (default) to enable the {monitor-features} in {kib}. Unlike the - `monitoring.ui.enabled` setting, when this setting is `false`, the + <> setting, when this setting is `false`, the monitoring back-end does not run and {kib} stats are not sent to the monitoring cluster. @@ -44,7 +44,7 @@ a|`monitoring.cluster_alerts.` | `monitoring.ui.elasticsearch.hosts` | Specifies the location of the {es} cluster where your monitoring data is stored. - By default, this is the same as `elasticsearch.hosts`. This setting enables + By default, this is the same as <>. This setting enables you to use a single {kib} instance to search and visualize data in your production cluster as well as monitor data sent to a dedicated monitoring cluster. @@ -58,7 +58,7 @@ a|`monitoring.cluster_alerts.` cluster uses the authenticated user's credentials, which must be the same on both the {es} monitoring cluster and the {es} production cluster. + + - If not set, {kib} uses the value of the `elasticsearch.username` setting. + If not set, {kib} uses the value of the <> setting. | `monitoring.ui.elasticsearch.password` | Specifies the password used by {kib} monitoring to establish a persistent connection @@ -69,11 +69,11 @@ a|`monitoring.cluster_alerts.` cluster uses the authenticated user's credentials, which must be the same on both the {es} monitoring cluster and the {es} production cluster. + + - If not set, {kib} uses the value of the `elasticsearch.password` setting. + If not set, {kib} uses the value of the <> setting. | `monitoring.ui.elasticsearch.pingTimeout` | Specifies the time in milliseconds to wait for {es} to respond to internal - health checks. By default, it matches the `elasticsearch.pingTimeout` setting, + health checks. By default, it matches the <> setting, which has a default value of `30000`. |=== @@ -112,7 +112,7 @@ about configuring {kib}, see | Specifies the number of log entries to display in *{stack-monitor-app}*. Defaults to `10`. The maximum value is `50`. -| `monitoring.ui.enabled` +|[[monitoring-ui-enabled]] `monitoring.ui.enabled` | Set to `false` to hide *{stack-monitor-app}*. The monitoring back-end continues to run as an agent for sending {kib} stats to the monitoring cluster. Defaults to `true`. 
diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc index 27ef089f5847d..adfc3964d4204 100644 --- a/docs/settings/reporting-settings.asciidoc +++ b/docs/settings/reporting-settings.asciidoc @@ -20,7 +20,7 @@ You can configure `xpack.reporting` settings in your `kibana.yml` to: | [[xpack-enable-reporting]]`xpack.reporting.enabled` {ess-icon} | Set to `false` to disable the {report-features}. -| `xpack.reporting.encryptionKey` {ess-icon} +|[[xpack-reporting-encryptionKey]] `xpack.reporting.encryptionKey` {ess-icon} | Set to an alphanumeric, at least 32 characters long text string. By default, {kib} will generate a random key when it starts, which will cause pending reports to fail after restart. Configure this setting to preserve the same key across multiple restarts and multiple instances of {kib}. @@ -53,20 +53,20 @@ proxy host requires that the {kib} server has network access to the proxy. [cols="2*<"] |=== | `xpack.reporting.kibanaServer.port` - | The port for accessing {kib}, if different from the `server.port` value. + | The port for accessing {kib}, if different from the <> value. | `xpack.reporting.kibanaServer.protocol` | The protocol for accessing {kib}, typically `http` or `https`. -| `xpack.reporting.kibanaServer.hostname` - | The hostname for accessing {kib}, if different from the `server.host` value. +|[[xpack-kibanaServer-hostname]] `xpack.reporting.kibanaServer.hostname` + | The hostname for accessing {kib}, if different from the <> value. |=== [NOTE] ============ Reporting authenticates requests on the Kibana page only when the hostname matches the -`xpack.reporting.kibanaServer.hostname` setting. Therefore Reporting would fail if the +<> setting. Therefore Reporting would fail if the set value redirects to another server. For that reason, `"0"` is an invalid setting because, in the Reporting browser, it becomes an automatic redirect to `"0.0.0.0"`. ============ @@ -97,8 +97,8 @@ reports, you might need to change the following settings. [NOTE] ============ Running multiple instances of {kib} in a cluster for load balancing of -reporting requires identical values for `xpack.reporting.encryptionKey` and, if -security is enabled, `xpack.security.encryptionKey`. +reporting requires identical values for <> and, if +security is enabled, <>. ============ [cols="2*<"] @@ -177,7 +177,7 @@ available, but there will likely be errors in the visualizations in the report. [[reporting-chromium-settings]] ==== Chromium settings -When `xpack.reporting.capture.browser.type` is set to `chromium` (default) you can also specify the following settings. +When <> is set to `chromium` (default) you can also specify the following settings. [cols="2*<"] |=== @@ -246,7 +246,7 @@ a| `xpack.reporting.capture.browser` | Reporting uses a weekly index in {es} to store the reporting job and the report content. The index is automatically created if it does not already exist. Configure this to a unique value, beginning with `.reporting-`, for every - {kib} instance that has a unique `kibana.index` setting. Defaults to `.reporting`. + {kib} instance that has a unique <> setting. Defaults to `.reporting`. | `xpack.reporting.roles.allow` | Specifies the roles in addition to superusers that can use reporting. 
diff --git a/docs/settings/security-settings.asciidoc b/docs/settings/security-settings.asciidoc index b6eecc6ea9f04..00e5f973f7d87 100644 --- a/docs/settings/security-settings.asciidoc +++ b/docs/settings/security-settings.asciidoc @@ -190,26 +190,26 @@ You can configure the following settings in the `kibana.yml` file. | `xpack.security.cookieName` | Sets the name of the cookie used for the session. The default value is `"sid"`. -| `xpack.security.encryptionKey` +|[[xpack-security-encryptionKey]] `xpack.security.encryptionKey` | An arbitrary string of 32 characters or more that is used to encrypt session information. Do **not** expose this key to users of {kib}. By default, a value is automatically generated in memory. If you use that default behavior, all sessions are invalidated when {kib} restarts. In addition, high-availability deployments of {kib} will behave unexpectedly if this setting isn't the same for all instances of {kib}. -| `xpack.security.secureCookies` +|[[xpack-security-secureCookies]] `xpack.security.secureCookies` | Sets the `secure` flag of the session cookie. The default value is `false`. It - is automatically set to `true` if `server.ssl.enabled` is set to `true`. Set + is automatically set to `true` if <> is set to `true`. Set this to `true` if SSL is configured outside of {kib} (for example, you are routing requests through a load balancer or proxy). | `xpack.security.sameSiteCookies` {ess-icon} | Sets the `SameSite` attribute of the session cookie. This allows you to declare whether your cookie should be restricted to a first-party or same-site context. Valid values are `Strict`, `Lax`, `None`. - This is *not set* by default, which modern browsers will treat as `Lax`. If you use Kibana embedded in an iframe in modern browsers, you might need to set it to `None`. Setting this value to `None` requires cookies to be sent over a secure connection by setting `xpack.security.secureCookies: true`. + This is *not set* by default, which modern browsers will treat as `Lax`. If you use Kibana embedded in an iframe in modern browsers, you might need to set it to `None`. Setting this value to `None` requires cookies to be sent over a secure connection by setting <>: `true`. -| `xpack.security.session.idleTimeout` {ess-icon} - | Ensures that user sessions will expire after a period of inactivity. This and `xpack.security.session.lifespan` are both +|[[xpack-session-idleTimeout]] `xpack.security.session.idleTimeout` {ess-icon} + | Ensures that user sessions will expire after a period of inactivity. This and <> are both highly recommended. By default, this setting is not set. 2+a| @@ -218,9 +218,9 @@ highly recommended. By default, this setting is not set. The format is a string of `[ms\|s\|m\|h\|d\|w\|M\|Y]` (e.g. '20m', '24h', '7d', '1w'). ============ -| `xpack.security.session.lifespan` {ess-icon} +|[[xpack-session-lifespan]] `xpack.security.session.lifespan` {ess-icon} | Ensures that user sessions will expire after the defined time period. This behavior also known as an "absolute timeout". If -this is _not_ set, user sessions could stay active indefinitely. This and `xpack.security.session.idleTimeout` are both highly +this is _not_ set, user sessions could stay active indefinitely. This and <> are both highly recommended. By default, this setting is not set. 
2+a| diff --git a/docs/settings/telemetry-settings.asciidoc b/docs/settings/telemetry-settings.asciidoc index 89c018a86eca6..0329e2f010e80 100644 --- a/docs/settings/telemetry-settings.asciidoc +++ b/docs/settings/telemetry-settings.asciidoc @@ -19,7 +19,7 @@ See our https://www.elastic.co/legal/privacy-statement[Privacy Statement] to lea [cols="2*<"] |=== -| `telemetry.enabled` +|[[telemetry-enabled]] `telemetry.enabled` | Set to `true` to send cluster statistics to Elastic. Reporting your cluster statistics helps us improve your user experience. Your data is never shared with anyone. Set to `false` to disable statistics reporting from any @@ -31,16 +31,16 @@ See our https://www.elastic.co/legal/privacy-statement[Privacy Statement] to lea it is behind a firewall and falls back to `'browser'` to send it from users' browsers when they are navigating through {kib}. Defaults to `'server'`. -| `telemetry.optIn` +|[[telemetry-optIn]] `telemetry.optIn` | Set to `true` to automatically opt into reporting cluster statistics. You can also opt out through *Advanced Settings* in {kib}. Defaults to `true`. | `telemetry.allowChangingOptInStatus` - | Set to `true` to allow overwriting the `telemetry.optIn` setting via the {kib} UI. Defaults to `true`. + + | Set to `true` to allow overwriting the <> setting via the {kib} UI. Defaults to `true`. + |=== [NOTE] ============ -When `false`, `telemetry.optIn` must be `true`. To disable telemetry and not allow users to change that parameter, use `telemetry.enabled`. +When `false`, <> must be `true`. To disable telemetry and not allow users to change that parameter, use <>. ============ diff --git a/docs/setup/secure-settings.asciidoc b/docs/setup/secure-settings.asciidoc index 10380eb5d8fa4..840a18ac03bab 100644 --- a/docs/setup/secure-settings.asciidoc +++ b/docs/setup/secure-settings.asciidoc @@ -19,7 +19,7 @@ bin/kibana-keystore create ---------------------------------------------------------------- The file `kibana.keystore` will be created in the directory defined by the -`path.data` configuration setting. +<> configuration setting. [float] [[list-settings]] diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index f03022e9e9f00..7f48f21db7197 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -20,11 +20,11 @@ which may cause a delay before pages start being served. Set to `false` to disable Console. *Default: `true`* | `cpu.cgroup.path.override:` - | *deprecated* This setting has been renamed to `ops.cGroupOverrides.cpuPath` + | *deprecated* This setting has been renamed to <> and the old name will no longer be supported as of 8.0. | `cpuacct.cgroup.path.override:` - | *deprecated* This setting has been renamed to `ops.cGroupOverrides.cpuAcctPath` + | *deprecated* This setting has been renamed to <> and the old name will no longer be supported as of 8.0. | `csp.rules:` @@ -33,7 +33,7 @@ that disables certain unnecessary and potentially insecure capabilities in the browser. It is strongly recommended that you keep the default CSP rules that ship with {kib}. -| `csp.strict:` +|[[csp-strict]] `csp.strict:` | Blocks {kib} access to any browser that does not enforce even rudimentary CSP rules. In practice, this disables support for older, less safe browsers like Internet Explorer. @@ -43,15 +43,15 @@ For more information, refer to <>. | `csp.warnLegacyBrowsers:` | Shows a warning message after loading {kib} to any browser that does not enforce even rudimentary CSP rules, though {kib} is still accessible. 
This -configuration is effectively ignored when `csp.strict` is enabled. +configuration is effectively ignored when <> is enabled. *Default: `true`* | `elasticsearch.customHeaders:` | Header names and values to send to {es}. Any custom headers cannot be overwritten by client-side headers, regardless of the -`elasticsearch.requestHeadersWhitelist` configuration. *Default: `{}`* +<> configuration. *Default: `{}`* -| `elasticsearch.hosts:` +|[[elasticsearch-hosts]] `elasticsearch.hosts:` | The URLs of the {es} instances to use for all your queries. All nodes listed here must be on the same cluster. *Default: `[ "http://localhost:9200" ]`* + @@ -59,28 +59,28 @@ To enable SSL/TLS for outbound connections to {es}, use the `https` protocol in this setting. | `elasticsearch.logQueries:` - | Log queries sent to {es}. Requires `logging.verbose` set to `true`. + | Log queries sent to {es}. Requires <> set to `true`. This is useful for seeing the query DSL generated by applications that currently do not have an inspector, for example Timelion and Monitoring. *Default: `false`* -| `elasticsearch.pingTimeout:` +|[[elasticsearch-pingTimeout]] `elasticsearch.pingTimeout:` | Time in milliseconds to wait for {es} to respond to pings. -*Default: the value of the `elasticsearch.requestTimeout` setting* +*Default: the value of the <> setting* | `elasticsearch.preserveHost:` | When the value is `true`, {kib} uses the hostname specified in the -`server.host` setting. When the value is `false`, {kib} uses +<> setting. When the value is `false`, {kib} uses the hostname of the host that connects to this {kib} instance. *Default: `true`* -| `elasticsearch.requestHeadersWhitelist:` +|[[elasticsearch-requestHeadersWhitelist]] `elasticsearch.requestHeadersWhitelist:` | List of {kib} client-side headers to send to {es}. To send *no* client-side headers, set this value to [] (an empty list). Removing the `authorization` header from being whitelisted means that you cannot use <> in {kib}. *Default: `[ 'authorization' ]`* -| `elasticsearch.requestTimeout:` +|[[elasticsearch-requestTimeout]] `elasticsearch.requestTimeout:` | Time in milliseconds to wait for responses from the back end or {es}. This value must be a positive integer. *Default: `30000`* @@ -99,7 +99,7 @@ nodes. *Default: `false`* | Update the list of {es} nodes immediately following a connection fault. *Default: `false`* -| `elasticsearch.ssl.alwaysPresentCertificate:` +|[[elasticsearch-ssl-alwaysPresentCertificate]] `elasticsearch.ssl.alwaysPresentCertificate:` | Controls {kib} behavior in regard to presenting a client certificate when requested by {es}. This setting applies to all outbound SSL/TLS connections to {es}, including requests that are proxied for end users. *Default: `false`* @@ -109,7 +109,7 @@ to {es}, including requests that are proxied for end users. *Default: `false`* [WARNING] ============ When {es} uses certificates to authenticate end users with a PKI realm -and `elasticsearch.ssl.alwaysPresentCertificate` is `true`, +and <> is `true`, proxied requests may be executed as the identity that is tied to the {kib} server. ============ @@ -117,7 +117,7 @@ server. [cols="2*<"] |=== -| `elasticsearch.ssl.certificate:` and `elasticsearch.ssl.key:` +|[[elasticsearch-ssl-cert-key]] `elasticsearch.ssl.certificate:` and `elasticsearch.ssl.key:` | Paths to a PEM-encoded X.509 client certificate and its corresponding private key. These are used by {kib} to authenticate itself when making outbound SSL/TLS connections to {es}. 
For this setting to take effect, the @@ -129,27 +129,29 @@ be set to `"required"` or `"optional"` to request a client certificate from [NOTE] ============ -These settings cannot be used in conjunction with `elasticsearch.ssl.keystore.path`. +These settings cannot be used in conjunction with +<>. ============ [cols="2*<"] |=== -| `elasticsearch.ssl.certificateAuthorities:` +|[[elasticsearch-ssl-certificateAuthorities]] `elasticsearch.ssl.certificateAuthorities:` | Paths to one or more PEM-encoded X.509 certificate authority (CA) certificates, which make up a trusted certificate chain for {es}. This chain is used by {kib} to establish trust when making outbound SSL/TLS connections to {es}. + In addition to this setting, trusted certificates may be specified via -`elasticsearch.ssl.keystore.path` and/or `elasticsearch.ssl.truststore.path`. +<> and/or +<>. | `elasticsearch.ssl.keyPassphrase:` | The password that decrypts the private key that is specified -via `elasticsearch.ssl.key`. This value is optional, as the key may not be +via <>. This value is optional, as the key may not be encrypted. -| `elasticsearch.ssl.keystore.path:` +|[[elasticsearch-ssl-keystore-path]] `elasticsearch.ssl.keystore.path:` | Path to a PKCS#12 keystore that contains an X.509 client certificate and it's corresponding private key. These are used by {kib} to authenticate itself when making outbound SSL/TLS connections to {es}. For this setting, you must also set @@ -160,15 +162,15 @@ If the keystore contains any additional certificates, they are used as a trusted certificate chain for {es}. This chain is used by {kib} to establish trust when making outbound SSL/TLS connections to {es}. In addition to this setting, trusted certificates may be specified via -`elasticsearch.ssl.certificateAuthorities` and/or -`elasticsearch.ssl.truststore.path`. +<> and/or +<>. |=== [NOTE] ============ This setting cannot be used in conjunction with -`elasticsearch.ssl.certificate` or `elasticsearch.ssl.key`. +<> or <>. ============ [cols="2*<"] @@ -176,24 +178,24 @@ This setting cannot be used in conjunction with | `elasticsearch.ssl.keystore.password:` | The password that decrypts the keystore specified via -`elasticsearch.ssl.keystore.path`. If the keystore has no password, leave this +<>. If the keystore has no password, leave this as blank. If the keystore has an empty password, set this to `""`. -| `elasticsearch.ssl.truststore.path:`:: +|[[elasticsearch-ssl-truststore-path]] `elasticsearch.ssl.truststore.path:` | Path to a PKCS#12 trust store that contains one or more X.509 certificate authority (CA) certificates, which make up a trusted certificate chain for {es}. This chain is used by {kib} to establish trust when making outbound SSL/TLS connections to {es}. + In addition to this setting, trusted certificates may be specified via -`elasticsearch.ssl.certificateAuthorities` and/or -`elasticsearch.ssl.keystore.path`. +<> and/or +<>. |`elasticsearch.ssl.truststore.password:` | The password that decrypts the trust store specified via -`elasticsearch.ssl.truststore.path`. If the trust store has no password, -leave this as blank. If the trust store has an empty password, set this to `""`. +<>. If the trust store +has no password, leave this as blank. If the trust store has an empty password, set this to `""`. | `elasticsearch.ssl.verificationMode:` | Controls the verification of the server certificate that {kib} receives when @@ -206,7 +208,7 @@ verification entirely. 
*Default: `"full"`* | Time in milliseconds to wait for {es} at {kib} startup before retrying. *Default: `5000`* -| `elasticsearch.username:` and `elasticsearch.password:` +|[[elasticsearch-user-passwd]] `elasticsearch.username:` and `elasticsearch.password:` | If your {es} is protected with basic authentication, these settings provide the username and password that the {kib} server uses to perform maintenance on the {kib} index at startup. {kib} users still need to authenticate with @@ -220,7 +222,7 @@ on the {kib} index at startup. {kib} users still need to authenticate with Please use the `defaultRoute` advanced setting instead. The default application to load. *Default: `"home"`* -| `kibana.index:` +|[[kibana-index]] `kibana.index:` | {kib} uses an index in {es} to store saved searches, visualizations, and dashboards. {kib} creates a new index if the index doesn’t already exist. If you configure a custom index, the name must be lowercase, and conform to the @@ -236,7 +238,7 @@ This value must be a whole number greater than zero. *Default: `"1000"`* suggestions. This value must be a whole number greater than zero. *Default: `"100000"`* -| `logging.dest:` +|[[logging-dest]] `logging.dest:` | Enables you to specify a file where {kib} stores log output. *Default: `stdout`* @@ -244,7 +246,7 @@ suggestions. This value must be a whole number greater than zero. | Logs output as JSON. When set to `true`, the logs are formatted as JSON strings that include timestamp, log level, context, message text, and any other metadata that may be associated with the log message. -When `logging.dest.stdout` is set, and there is no interactive terminal ("TTY"), +When <> is set, and there is no interactive terminal ("TTY"), this setting defaults to `true`. *Default: `false`* | `logging.quiet:` @@ -271,7 +273,7 @@ The following example shows a valid logging rotate configuration: | `logging.rotate.enabled:` | experimental[] Set the value of this setting to `true` to -enable log rotation. If you do not have a `logging.dest` set that is different from `stdout` +enable log rotation. If you do not have a <> set that is different from `stdout` that feature would not take any effect. *Default: `false`* | `logging.rotate.everyBytes:` @@ -286,9 +288,9 @@ option has to be in the range of 2 to 1024 files. *Default: `7`* | `logging.rotate.pollingInterval:` | experimental[] The number of milliseconds for the polling strategy in case -the `logging.rotate.usePolling` is enabled. `logging.rotate.usePolling` must be in the 5000 to 3600000 millisecond range. *Default: `10000`* +the <> is enabled. `logging.rotate.usePolling` must be in the 5000 to 3600000 millisecond range. *Default: `10000`* -| `logging.rotate.usePolling:` +|[[logging-rotate-usePolling]] `logging.rotate.usePolling:` | experimental[] By default we try to understand the best way to monitoring the log file and warning about it. Please be aware there are some systems where watch api is not accurate. In those cases, in order to get the feature working, the `polling` method could be used enabling that option. *Default: `false`* @@ -308,8 +310,8 @@ requests. *Default: `false`* | `map.includeElasticMapsService:` {ess-icon} | Set to `false` to disable connections to Elastic Maps Service. -When `includeElasticMapsService` is turned off, only the vector layers configured by `map.regionmap` -and the tile layer configured by `map.tilemap.url` are available in <>. 
*Default: `true`* +When `includeElasticMapsService` is turned off, only the vector layers configured by <> +and the tile layer configured by <> are available in <>. *Default: `true`* | `map.proxyElasticMapsServiceInMaps:` | Set to `true` to proxy all <> Elastic Maps Service @@ -427,7 +429,7 @@ override this parameter to use their own Tile Map Service. For example: system for the {kib} UI notification center. Set to `false` to disable the newsfeed system. *Default: `true`* -| `path.data:` +|[[path-data]] `path.data:` | The path where {kib} stores persistent data not saved in {es}. *Default: `data`* @@ -438,17 +440,17 @@ not saved in {es}. *Default: `data`* | Set the interval in milliseconds to sample system and process performance metrics. The minimum value is 100. *Default: `5000`* -| `ops.cGroupOverrides.cpuPath:` +|[[ops-cGroupOverrides-cpuPath]] `ops.cGroupOverrides.cpuPath:` | Override for cgroup cpu path when mounted in a manner that is inconsistent with `/proc/self/cgroup`. -| `ops.cGroupOverrides.cpuAcctPath:` +|[[ops-cGroupOverrides-cpuAcctPath]] `ops.cGroupOverrides.cpuAcctPath:` | Override for cgroup cpuacct path when mounted in a manner that is inconsistent with `/proc/self/cgroup`. -| `server.basePath:` +|[[server-basePath]] `server.basePath:` | Enables you to specify a path to mount {kib} at if you are -running behind a proxy. Use the `server.rewriteBasePath` setting to tell {kib} +running behind a proxy. Use the <> setting to tell {kib} if it should remove the basePath from requests it receives, and to prevent a deprecation warning at startup. This setting cannot end in a slash (`/`). @@ -458,19 +460,19 @@ deprecation warning at startup. This setting cannot end in a slash (`/`). | `server.compression.referrerWhitelist:` | Specifies an array of trusted hostnames, such as the {kib} host, or a reverse proxy sitting in front of it. This determines whether HTTP compression may be used for responses, based on the request `Referer` header. -This setting may not be used when `server.compression.enabled` is set to `false`. *Default: `none`* +This setting may not be used when <> is set to `false`. *Default: `none`* | `server.customResponseHeaders:` {ess-icon} | Header names and values to send on all responses to the client from the {kib} server. *Default: `{}`* -| `server.host:` +|[[server-host]] `server.host:` | This setting specifies the host of the back end server. To allow remote users to connect, set the value to the IP address or DNS name of the {kib} server. *Default: `"localhost"`* | `server.keepaliveTimeout:` | The number of milliseconds to wait for additional data before restarting -the `server.socketTimeout` counter. *Default: `"120000"`* +the <> counter. *Default: `"120000"`* | `server.maxPayloadBytes:` | The maximum payload size in bytes @@ -480,28 +482,28 @@ for incoming server requests. *Default: `1048576`* | A human-readable display name that identifies this {kib} instance. *Default: `"your-hostname"`* -| `server.port:` +|[[server-port]] `server.port:` | {kib} is served by a back end server. This setting specifies the port to use. *Default: `5601`* -| `server.requestId.allowFromAnyIp:` +|[[server-requestId-allowFromAnyIp]] `server.requestId.allowFromAnyIp:` | Sets whether or not the X-Opaque-Id header should be trusted from any IP address for identifying requests in logs and forwarded to Elasticsearch. | `server.requestId.ipAllowlist:` - | A list of IPv4 and IPv6 address which the `X-Opaque-Id` header should be trusted from. 
Normally this would be set to the IP addresses of the load balancers or reverse-proxy that end users use to access Kibana. If any are set, `server.requestId.allowFromAnyIp` must also be set to `false.` + | A list of IPv4 and IPv6 address which the `X-Opaque-Id` header should be trusted from. Normally this would be set to the IP addresses of the load balancers or reverse-proxy that end users use to access Kibana. If any are set, <> must also be set to `false.` -| `server.rewriteBasePath:` +|[[server-rewriteBasePath]] `server.rewriteBasePath:` | Specifies whether {kib} should -rewrite requests that are prefixed with `server.basePath` or require that they +rewrite requests that are prefixed with <> or require that they are rewritten by your reverse proxy. In {kib} 6.3 and earlier, the default is `false`. In {kib} 7.x, the setting is deprecated. In {kib} 8.0 and later, the default is `true`. *Default: `deprecated`* -| `server.socketTimeout:` +|[[server-socketTimeout]] `server.socketTimeout:` | The number of milliseconds to wait before closing an inactive socket. *Default: `"120000"`* -| `server.ssl.certificate:` and `server.ssl.key:` +|[[server-ssl-cert-key]] `server.ssl.certificate:` and `server.ssl.key:` | Paths to a PEM-encoded X.509 server certificate and its corresponding private key. These are used by {kib} to establish trust when receiving inbound SSL/TLS connections from users. @@ -509,18 +511,18 @@ are used by {kib} to establish trust when receiving inbound SSL/TLS connections [NOTE] ============ -These settings cannot be used in conjunction with `server.ssl.keystore.path`. +These settings cannot be used in conjunction with <>. ============ [cols="2*<"] |=== -| `server.ssl.certificateAuthorities:` +|[[server-ssl-certificateAuthorities]] `server.ssl.certificateAuthorities:` | Paths to one or more PEM-encoded X.509 certificate authority (CA) certificates which make up a trusted certificate chain for {kib}. This chain is used by {kib} to establish trust when receiving inbound SSL/TLS connections from end users. If PKI authentication is enabled, this chain is also used by {kib} to verify client certificates from end users. + -In addition to this setting, trusted certificates may be specified via `server.ssl.keystore.path` and/or `server.ssl.truststore.path`. +In addition to this setting, trusted certificates may be specified via <> and/or <>. | `server.ssl.cipherSuites:` | Details on the format, and the valid options, are available via the @@ -533,53 +535,53 @@ connections. Valid values are `"required"`, `"optional"`, and `"none"`. Using `" client presents a certificate, using `"optional"` will allow a client to present a certificate if it has one, and using `"none"` will prevent a client from presenting a certificate. *Default: `"none"`* -| `server.ssl.enabled:` +|[[server-ssl-enabled]] `server.ssl.enabled:` | Enables SSL/TLS for inbound connections to {kib}. When set to `true`, a certificate and its -corresponding private key must be provided. These can be specified via `server.ssl.keystore.path` or the combination of -`server.ssl.certificate` and `server.ssl.key`. *Default: `false`* +corresponding private key must be provided. These can be specified via <> or the combination of +<> and <>. *Default: `false`* | `server.ssl.keyPassphrase:` - | The password that decrypts the private key that is specified via `server.ssl.key`. This value + | The password that decrypts the private key that is specified via <>. This value is optional, as the key may not be encrypted. 
-| `server.ssl.keystore.path:` +|[[server-ssl-keystore-path]] `server.ssl.keystore.path:` | Path to a PKCS#12 keystore that contains an X.509 server certificate and its corresponding private key. If the keystore contains any additional certificates, those will be used as a trusted certificate chain for {kib}. All of these are used by {kib} to establish trust when receiving inbound SSL/TLS connections from end users. The certificate chain is also used by {kib} to verify client certificates from end users when PKI authentication is enabled. + -In addition to this setting, trusted certificates may be specified via `server.ssl.certificateAuthorities` and/or -`server.ssl.truststore.path`. +In addition to this setting, trusted certificates may be specified via <> and/or +<>. |=== [NOTE] ============ -This setting cannot be used in conjunction with `server.ssl.certificate` or `server.ssl.key` +This setting cannot be used in conjunction with <> or <> ============ [cols="2*<"] |=== | `server.ssl.keystore.password:` - | The password that will be used to decrypt the keystore specified via `server.ssl.keystore.path`. If the + | The password that will be used to decrypt the keystore specified via <>. If the keystore has no password, leave this unset. If the keystore has an empty password, set this to `""`. -| `server.ssl.truststore.path:` +|[[server-ssl-truststore-path]] `server.ssl.truststore.path:` | Path to a PKCS#12 trust store that contains one or more X.509 certificate authority (CA) certificates which make up a trusted certificate chain for {kib}. This chain is used by {kib} to establish trust when receiving inbound SSL/TLS connections from end users. If PKI authentication is enabled, this chain is also used by {kib} to verify client certificates from end users. + -In addition to this setting, trusted certificates may be specified via `server.ssl.certificateAuthorities` and/or -`server.ssl.keystore.path`. +In addition to this setting, trusted certificates may be specified via <> and/or +<>. | `server.ssl.truststore.password:` - | The password that will be used to decrypt the trust store specified via `server.ssl.truststore.path`. If + | The password that will be used to decrypt the trust store specified via <>. If the trust store has no password, leave this unset. If the trust store has an empty password, set this to `""`. | `server.ssl.redirectHttpFromPort:` | {kib} binds to this port and redirects -all http requests to https over the port configured as `server.port`. +all http requests to https over the port configured as <>. | `server.ssl.supportedProtocols:` | An array of supported protocols with versions. @@ -588,7 +590,7 @@ Valid protocols: `TLSv1`, `TLSv1.1`, `TLSv1.2`. *Default: TLSv1.1, TLSv1.2* | [[settings-xsrf-whitelist]] `server.xsrf.whitelist:` | It is not recommended to disable protections for arbitrary API endpoints. Instead, supply the `kbn-xsrf` header. -The `server.xsrf.whitelist` setting requires the following format: +The <> setting requires the following format: |=== @@ -608,18 +610,18 @@ The `server.xsrf.whitelist` setting requires the following format: setting this to `true` enables unauthenticated users to access the {kib} server status API and status page. *Default: `false`* -| `telemetry.allowChangingOptInStatus` +|[[telemetry-allowChangingOptInStatus]] `telemetry.allowChangingOptInStatus` | When `true`, users are able to change the telemetry setting at a later time in <>. 
When `false`, -{kib} looks at the value of `telemetry.optIn` to determine whether to send -telemetry data or not. `telemetry.allowChangingOptInStatus` and `telemetry.optIn` +{kib} looks at the value of <> to determine whether to send +telemetry data or not. <> and <> cannot be `false` at the same time. *Default: `true`*. -| `telemetry.optIn` +|[[settings-telemetry-optIn]] `telemetry.optIn` | When `true`, telemetry data is sent to Elastic. When `false`, collection of telemetry data is disabled. To enable telemetry and prevent users from disabling it, -set `telemetry.allowChangingOptInStatus` to `false` and `telemetry.optIn` to `true`. +set <> to `false` and <> to `true`. *Default: `true`* | `telemetry.enabled` diff --git a/docs/user/management.asciidoc b/docs/user/management.asciidoc index bc96463f6efba..e0d550a15a907 100644 --- a/docs/user/management.asciidoc +++ b/docs/user/management.asciidoc @@ -58,12 +58,12 @@ years of historical data in combination with your raw data. | {ref}/transforms.html[Transforms] |Use transforms to pivot existing {es} indices into summarized or entity-centric indices. -| <> +| {ref}/ccr-getting-started.html[Cross-Cluster Replication] |Replicate indices on a remote cluster and copy them to a follower index on a local cluster. This is important for disaster recovery. It also keeps data local for faster queries. -| <> +| {ref}/ccr-getting-started.html#ccr-getting-started-remote-cluster[Remote Clusters] |Manage your remote clusters for use with cross-cluster search and cross-cluster replication. You can add and remove remote clusters, and check their connectivity. |=== @@ -180,8 +180,6 @@ include::{kib-repo-dir}/management/alerting/connector-management.asciidoc[] include::{kib-repo-dir}/management/managing-beats.asciidoc[] -include::{kib-repo-dir}/management/managing-ccr.asciidoc[] - include::{kib-repo-dir}/management/index-lifecycle-policies/intro-to-lifecycle-policies.asciidoc[] include::{kib-repo-dir}/management/index-lifecycle-policies/create-policy.asciidoc[] @@ -200,8 +198,6 @@ include::{kib-repo-dir}/management/managing-licenses.asciidoc[] include::{kib-repo-dir}/management/numeral.asciidoc[] -include::{kib-repo-dir}/management/managing-remote-clusters.asciidoc[] - include::{kib-repo-dir}/management/rollups/create_and_manage_rollups.asciidoc[] include::{kib-repo-dir}/management/managing-saved-objects.asciidoc[] diff --git a/src/plugins/embeddable/public/lib/embeddables/error_embeddable.test.tsx b/src/plugins/embeddable/public/lib/embeddables/error_embeddable.test.tsx index 715827a72c61b..17a2ac3b2a32b 100644 --- a/src/plugins/embeddable/public/lib/embeddables/error_embeddable.test.tsx +++ b/src/plugins/embeddable/public/lib/embeddables/error_embeddable.test.tsx @@ -17,43 +17,36 @@ * under the License. 
*/ import React from 'react'; +import { wait } from '@testing-library/dom'; +import { cleanup, render } from '@testing-library/react/pure'; import { ErrorEmbeddable } from './error_embeddable'; import { EmbeddableRoot } from './embeddable_root'; -import { mount } from 'enzyme'; + +afterEach(cleanup); test('ErrorEmbeddable renders an embeddable', async () => { const embeddable = new ErrorEmbeddable('some error occurred', { id: '123', title: 'Error' }); - const component = mount(); - expect( - component.getDOMNode().querySelectorAll('[data-test-subj="embeddableStackError"]').length - ).toBe(1); - expect( - component.getDOMNode().querySelectorAll('[data-test-subj="errorMessageMarkdown"]').length - ).toBe(1); - expect( - component - .getDOMNode() - .querySelectorAll('[data-test-subj="errorMessageMarkdown"]')[0] - .innerHTML.includes('some error occurred') - ).toBe(true); + const { getByTestId, getByText } = render(); + + expect(getByTestId('embeddableStackError')).toBeVisible(); + await wait(() => getByTestId('errorMessageMarkdown')); // wait for lazy markdown component + expect(getByText(/some error occurred/i)).toBeVisible(); }); test('ErrorEmbeddable renders an embeddable with markdown message', async () => { const error = '[some link](http://localhost:5601/takeMeThere)'; const embeddable = new ErrorEmbeddable(error, { id: '123', title: 'Error' }); - const component = mount(); - expect( - component.getDOMNode().querySelectorAll('[data-test-subj="embeddableStackError"]').length - ).toBe(1); - expect( - component.getDOMNode().querySelectorAll('[data-test-subj="errorMessageMarkdown"]').length - ).toBe(1); - expect( - component - .getDOMNode() - .querySelectorAll('[data-test-subj="errorMessageMarkdown"]')[0] - .innerHTML.includes( - 'some link' - ) - ).toBe(true); + const { getByTestId, getByText } = render(); + + expect(getByTestId('embeddableStackError')).toBeVisible(); + await wait(() => getByTestId('errorMessageMarkdown')); // wait for lazy markdown component + expect(getByText(/some link/i)).toMatchInlineSnapshot(` + + some link + + `); }); diff --git a/src/plugins/kibana_react/public/code_editor/code_editor.tsx b/src/plugins/kibana_react/public/code_editor/code_editor.tsx index f049085ccff61..85b2c327e8b21 100644 --- a/src/plugins/kibana_react/public/code_editor/code_editor.tsx +++ b/src/plugins/kibana_react/public/code_editor/code_editor.tsx @@ -186,3 +186,7 @@ export class CodeEditor extends React.Component { } }; } + +// React.lazy requires default export +// eslint-disable-next-line import/no-default-export +export default CodeEditor; diff --git a/src/plugins/kibana_react/public/code_editor/index.tsx b/src/plugins/kibana_react/public/code_editor/index.tsx index 0ef83811d96d3..63e3d330a1c52 100644 --- a/src/plugins/kibana_react/public/code_editor/index.tsx +++ b/src/plugins/kibana_react/public/code_editor/index.tsx @@ -17,11 +17,23 @@ * under the License. 
*/ import React from 'react'; +import { EuiDelayRender, EuiLoadingContent } from '@elastic/eui'; import { useUiSetting } from '../ui_settings'; -import { CodeEditor as BaseEditor, Props } from './code_editor'; +import type { Props } from './code_editor'; + +const LazyBaseEditor = React.lazy(() => import('./code_editor')); + +const Fallback = () => ( + + + +); export const CodeEditor: React.FunctionComponent = (props) => { const darkMode = useUiSetting('theme:darkMode'); - - return ; + return ( + }> + + + ); }; diff --git a/src/plugins/kibana_react/public/markdown/index.tsx b/src/plugins/kibana_react/public/markdown/index.tsx index cacf223cf33ed..4d6fd0b742ee4 100644 --- a/src/plugins/kibana_react/public/markdown/index.tsx +++ b/src/plugins/kibana_react/public/markdown/index.tsx @@ -17,5 +17,27 @@ * under the License. */ -export { MarkdownSimple } from './markdown_simple'; -export { Markdown } from './markdown'; +import React from 'react'; +import { EuiLoadingContent, EuiDelayRender } from '@elastic/eui'; +import type { MarkdownSimpleProps } from './markdown_simple'; +import type { MarkdownProps } from './markdown'; + +const Fallback = () => ( + + + +); + +const LazyMarkdownSimple = React.lazy(() => import('./markdown_simple')); +export const MarkdownSimple = (props: MarkdownSimpleProps) => ( + }> + + +); + +const LazyMarkdown = React.lazy(() => import('./markdown')); +export const Markdown = (props: MarkdownProps) => ( + }> + + +); diff --git a/src/plugins/kibana_react/public/markdown/markdown.tsx b/src/plugins/kibana_react/public/markdown/markdown.tsx index 15d1c4931e60b..8bb61bc71862e 100644 --- a/src/plugins/kibana_react/public/markdown/markdown.tsx +++ b/src/plugins/kibana_react/public/markdown/markdown.tsx @@ -84,7 +84,7 @@ export const markdownFactory = memoize( } ); -interface MarkdownProps extends React.HTMLAttributes { +export interface MarkdownProps extends React.HTMLAttributes { className?: string; markdown?: string; openLinksInNewTab?: boolean; @@ -112,3 +112,7 @@ export class Markdown extends PureComponent { ); } } + +// Needed for React.lazy +// eslint-disable-next-line import/no-default-export +export default Markdown; diff --git a/src/plugins/kibana_react/public/markdown/markdown_simple.tsx b/src/plugins/kibana_react/public/markdown/markdown_simple.tsx index a5465fd1c6fc9..71ae4e031abca 100644 --- a/src/plugins/kibana_react/public/markdown/markdown_simple.tsx +++ b/src/plugins/kibana_react/public/markdown/markdown_simple.tsx @@ -24,7 +24,7 @@ const markdownRenderers = { root: Fragment, }; -interface MarkdownSimpleProps { +export interface MarkdownSimpleProps { children: string; } @@ -32,3 +32,7 @@ interface MarkdownSimpleProps { export const MarkdownSimple = ({ children }: MarkdownSimpleProps) => ( {children} ); + +// Needed for React.lazy +// eslint-disable-next-line import/no-default-export +export default MarkdownSimple; diff --git a/src/plugins/kibana_react/public/table_list_view/table_list_view.tsx b/src/plugins/kibana_react/public/table_list_view/table_list_view.tsx index 2fa1debf51b5c..e0e295723a69d 100644 --- a/src/plugins/kibana_react/public/table_list_view/table_list_view.tsx +++ b/src/plugins/kibana_react/public/table_list_view/table_list_view.tsx @@ -556,3 +556,6 @@ class TableListView extends React.Component import('react-markdown')); +const ErrorRenderer = (props: { children: string }) => ( + }> + + +); + interface Mapping { [key: string]: string | { app: string; path: string }; } @@ -96,16 +103,7 @@ export function redirectWhenMissing({ defaultMessage: 'Saved 
object is missing', }), text: (element: HTMLElement) => { - ReactDOM.render( - - {error.message} - , - element - ); + ReactDOM.render({error.message}, element); return () => ReactDOM.unmountComponentAtNode(element); }, }); diff --git a/src/plugins/vis_type_markdown/public/markdown_vis_controller.test.tsx b/src/plugins/vis_type_markdown/public/markdown_vis_controller.test.tsx index ff0cc89a5d9c9..6df205b21d910 100644 --- a/src/plugins/vis_type_markdown/public/markdown_vis_controller.test.tsx +++ b/src/plugins/vis_type_markdown/public/markdown_vis_controller.test.tsx @@ -18,11 +18,14 @@ */ import React from 'react'; -import { render, mount } from 'enzyme'; +import { wait } from '@testing-library/dom'; +import { render, cleanup } from '@testing-library/react/pure'; import { MarkdownVisWrapper } from './markdown_vis_controller'; +afterEach(cleanup); + describe('markdown vis controller', () => { - it('should set html from markdown params', () => { + it('should set html from markdown params', async () => { const vis = { params: { openLinksInNewTab: false, @@ -32,13 +35,22 @@ describe('markdown vis controller', () => { }, }; - const wrapper = render( + const { getByTestId, getByText } = render( ); - expect(wrapper.find('a').text()).toBe('markdown'); + + await wait(() => getByTestId('markdownBody')); + + expect(getByText('markdown')).toMatchInlineSnapshot(` + + markdown + + `); }); - it('should not render the html', () => { + it('should not render the html', async () => { const vis = { params: { openLinksInNewTab: false, @@ -47,13 +59,20 @@ describe('markdown vis controller', () => { }, }; - const wrapper = render( + const { getByTestId, getByText } = render( ); - expect(wrapper.text()).toBe('Testing html\n'); + + await wait(() => getByTestId('markdownBody')); + + expect(getByText(/testing/i)).toMatchInlineSnapshot(` +

+ Testing <a>html</a> +

+ `); }); - it('should update the HTML when render again with changed params', () => { + it('should update the HTML when render again with changed params', async () => { const vis = { params: { openLinksInNewTab: false, @@ -62,13 +81,20 @@ describe('markdown vis controller', () => { }, }; - const wrapper = mount( + const { getByTestId, getByText, rerender } = render( ); - expect(wrapper.text().trim()).toBe('Initial'); + + await wait(() => getByTestId('markdownBody')); + + expect(getByText(/initial/i)).toBeInTheDocument(); + vis.params.markdown = 'Updated'; - wrapper.setProps({ vis }); - expect(wrapper.text().trim()).toBe('Updated'); + rerender( + + ); + + expect(getByText(/Updated/i)).toBeInTheDocument(); }); describe('renderComplete', () => { @@ -86,56 +112,71 @@ describe('markdown vis controller', () => { renderComplete.mockClear(); }); - it('should be called on initial rendering', () => { - mount( + it('should be called on initial rendering', async () => { + const { getByTestId } = render( ); - expect(renderComplete.mock.calls.length).toBe(1); + + await wait(() => getByTestId('markdownBody')); + + expect(renderComplete).toHaveBeenCalledTimes(1); }); - it('should be called on successive render when params change', () => { - mount( + it('should be called on successive render when params change', async () => { + const { getByTestId, rerender } = render( ); - expect(renderComplete.mock.calls.length).toBe(1); + + await wait(() => getByTestId('markdownBody')); + + expect(renderComplete).toHaveBeenCalledTimes(1); + renderComplete.mockClear(); vis.params.markdown = 'changed'; - mount( + + rerender( ); - expect(renderComplete.mock.calls.length).toBe(1); + + expect(renderComplete).toHaveBeenCalledTimes(1); }); - it('should be called on successive render even without data change', () => { - mount( + it('should be called on successive render even without data change', async () => { + const { getByTestId, rerender } = render( ); - expect(renderComplete.mock.calls.length).toBe(1); + + await wait(() => getByTestId('markdownBody')); + + expect(renderComplete).toHaveBeenCalledTimes(1); + renderComplete.mockClear(); - mount( + + rerender( ); - expect(renderComplete.mock.calls.length).toBe(1); + + expect(renderComplete).toHaveBeenCalledTimes(1); }); }); }); diff --git a/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts b/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts index 18ae68ec40fe5..c091d396b4924 100644 --- a/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts +++ b/src/plugins/visualizations/public/embeddable/visualize_embeddable.ts @@ -243,12 +243,6 @@ export class VisualizeEmbeddable dirty = true; } - // propagate the title to the output embeddable - // but only when the visualization is in edit/Visualize mode - if (!this.parent && this.vis.title !== this.output.title) { - this.updateOutput({ title: this.vis.title }); - } - if (this.vis.description && this.domNode) { this.domNode.setAttribute('data-description', this.vis.description); } diff --git a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx index 0423e48bfb41e..65c9a5410d226 100644 --- a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx +++ b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx @@ -267,6 +267,7 @@ export const getTopNavConfig = ( } const currentTitle = savedVis.title; savedVis.title = newTitle; + embeddableHandler.updateInput({ title: 
newTitle }); savedVis.copyOnSave = newCopyOnSave; savedVis.description = newDescription; const saveOptions = { diff --git a/test/functional/apps/discover/_inspector.js b/test/functional/apps/discover/_inspector.js index 900ad28e14e69..fcb66fbd52cf7 100644 --- a/test/functional/apps/discover/_inspector.js +++ b/test/functional/apps/discover/_inspector.js @@ -34,7 +34,8 @@ export default function ({ getService, getPageObjects }) { return hitsCountStatsRow[STATS_ROW_VALUE_INDEX]; } - describe('inspect', () => { + // FLAKY: https://github.com/elastic/kibana/issues/39842 + describe.skip('inspect', () => { before(async () => { await esArchiver.loadIfNeeded('logstash_functional'); await esArchiver.load('discover'); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.test.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.test.ts index 33260b5c9303f..df205dc742b07 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.test.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.test.ts @@ -49,4 +49,9 @@ describe('savedLens', () => { expect(expression.input.filters).toEqual(embeddableFilters); }); + + it('accepts an empty title when title is disabled', () => { + const expression = fn(null, { ...args, title: '' }, {} as any); + expect(expression.input.title).toEqual(''); + }); }); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts index 49b8c5562af65..a823d0606d46f 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_lens.ts @@ -72,7 +72,7 @@ export function savedLens(): ExpressionFunctionDefinition< id: args.id, filters: getQueryFilters(filters), timeRange: args.timerange || defaultTimeRange, - title: args.title ? args.title : undefined, + title: args.title === null ? undefined : args.title, disableTriggers: true, }, embeddableType: EmbeddableTypes.lens, diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_map.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_map.ts index ec640cfb5b299..a64ff7da2aa19 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_map.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_map.ts @@ -93,7 +93,7 @@ export function savedMap(): ExpressionFunctionDefinition< mapCenter: center, hideFilterActions: true, - title: args.title ? args.title : undefined, + title: args.title === null ? 
undefined : args.title, isLayerTOCOpen: false, hiddenLayers: args.hideLayer || [], }, diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.test.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.test.ts index a64fb167dd19f..7902d09a0bdf1 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.test.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.test.ts @@ -36,6 +36,7 @@ describe('savedVisualization', () => { timerange: null, colors: null, hideLegend: null, + title: null, }; it('accepts null context', () => { @@ -50,4 +51,9 @@ describe('savedVisualization', () => { expect(expression.input.filters).toEqual(embeddableFilters); }); + + it('accepts an empty title when title is disabled', () => { + const expression = fn(null, { ...args, title: '' }, {} as any); + expect(expression.input.title).toEqual(''); + }); }); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.ts b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.ts index 2782ca039d7ed..449be2db43d44 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/functions/external/saved_visualization.ts @@ -20,6 +20,7 @@ interface Arguments { timerange: TimeRangeArg | null; colors: SeriesStyle[] | null; hideLegend: boolean | null; + title: string | null; } type Output = EmbeddableExpression; @@ -61,9 +62,14 @@ export function savedVisualization(): ExpressionFunctionDefinition< help: argHelp.hideLegend, required: false, }, + title: { + types: ['string'], + help: argHelp.title, + required: false, + }, }, type: EmbeddableExpressionType, - fn: (input, { id, timerange, colors, hideLegend }) => { + fn: (input, { id, timerange, colors, hideLegend, title }) => { const filters = input ? input.and : []; const visOptions: VisualizeInput['vis'] = {}; @@ -90,6 +96,7 @@ export function savedVisualization(): ExpressionFunctionDefinition< timeRange: timerange || defaultTimeRange, filters: getQueryFilters(filters), vis: visOptions, + title: title === null ? 
undefined : title, }, embeddableType: EmbeddableTypes.visualization, generatedAt: Date.now(), diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.test.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.test.ts index 7bcfd6bef4620..0df39f281da9c 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.test.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.test.ts @@ -52,4 +52,16 @@ describe('toExpression', () => { expect(timerangeExpression.chain[0].arguments.from[0]).toEqual(input.timeRange?.from); expect(timerangeExpression.chain[0].arguments.to[0]).toEqual(input.timeRange?.to); }); + + it('includes empty panel title', () => { + const input: SavedLensInput = { + ...baseEmbeddableInput, + title: '', + }; + + const expression = toExpression(input); + const ast = fromExpression(expression); + + expect(ast.chain[0].arguments).toHaveProperty('title', [input.title]); + }); }); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.ts index 5bb45c5ca129e..a8e200dd3e4ba 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/lens.ts @@ -13,7 +13,7 @@ export function toExpression(input: SavedLensInput): string { expressionParts.push(`id="${input.id}"`); - if (input.title) { + if (input.title !== undefined) { expressionParts.push(`title="${input.title}"`); } diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.test.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.test.ts index d910c734a6974..d2c803a1ff208 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.test.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.test.ts @@ -69,4 +69,16 @@ describe('toExpression', () => { expect(timerangeExpression.chain[0].arguments.from[0]).toEqual(input.timeRange?.from); expect(timerangeExpression.chain[0].arguments.to[0]).toEqual(input.timeRange?.to); }); + + it('includes empty panel title', () => { + const input: MapEmbeddableInput = { + ...baseSavedMapInput, + title: '', + }; + + const expression = toExpression(input); + const ast = fromExpression(expression); + + expect(ast.chain[0].arguments).toHaveProperty('title', [input.title]); + }); }); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.ts index 111fdc71fa242..769c2c9e10e9c 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/map.ts @@ -12,7 +12,7 @@ export function toExpression(input: MapEmbeddableInput): string { expressionParts.push('savedMap'); expressionParts.push(`id="${input.id}"`); - if (input.title) { + if (input.title !== undefined) { expressionParts.push(`title="${input.title}"`); } diff --git 
a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.test.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.test.ts index 07f828755e46f..4550a90ce98a2 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.test.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.test.ts @@ -69,4 +69,16 @@ describe('toExpression', () => { expect(aColor?.chain[0].arguments.color[0]).toBe(colorMap.a); expect(bColor?.chain[0].arguments.color[0]).toBe(colorMap.b); }); + + it('includes empty panel title', () => { + const input = { + ...baseInput, + title: '', + }; + + const expression = toExpression(input); + const ast = fromExpression(expression); + + expect(ast.chain[0].arguments).toHaveProperty('title', [input.title]); + }); }); diff --git a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.ts b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.ts index f03c10e2d424e..a8adbf9d2d860 100644 --- a/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.ts +++ b/x-pack/plugins/canvas/canvas_plugin_src/renderers/embeddable/input_type_to_expression/visualization.ts @@ -12,6 +12,10 @@ export function toExpression(input: VisualizeInput): string { expressionParts.push('savedVisualization'); expressionParts.push(`id="${input.id}"`); + if (input.title !== undefined) { + expressionParts.push(`title="${input.title}"`); + } + if (input.timeRange) { expressionParts.push( `timerange={timerange from="${input.timeRange.from}" to="${input.timeRange.to}"}` diff --git a/x-pack/plugins/canvas/i18n/functions/dict/saved_visualization.ts b/x-pack/plugins/canvas/i18n/functions/dict/saved_visualization.ts index e8cbddc5c1102..257e251fe2bc2 100644 --- a/x-pack/plugins/canvas/i18n/functions/dict/saved_visualization.ts +++ b/x-pack/plugins/canvas/i18n/functions/dict/saved_visualization.ts @@ -29,5 +29,8 @@ export const help: FunctionHelp> = { defaultMessage: `Specifies the option to hide the legend`, } ), + title: i18n.translate('xpack.canvas.functions.savedVisualization.args.titleHelpText', { + defaultMessage: `The title for the visualization object`, + }), }, }; diff --git a/x-pack/plugins/canvas/public/components/datatable/datatable.scss b/x-pack/plugins/canvas/public/components/datatable/datatable.scss index bd11bff18e091..8e36de3b84423 100644 --- a/x-pack/plugins/canvas/public/components/datatable/datatable.scss +++ b/x-pack/plugins/canvas/public/components/datatable/datatable.scss @@ -4,7 +4,6 @@ display: flex; flex-direction: column; justify-content: space-between; - font-size: $euiFontSizeS; .canvasDataTable__tableWrapper { @include euiScrollBar; @@ -33,7 +32,6 @@ .canvasDataTable__th, .canvasDataTable__td { - text-align: left; padding: $euiSizeS $euiSizeXS; border-bottom: $euiBorderThin; } diff --git a/x-pack/plugins/canvas/public/components/expression_input/__stories__/__snapshots__/expression_input.stories.storyshot b/x-pack/plugins/canvas/public/components/expression_input/__stories__/__snapshots__/expression_input.stories.storyshot index 99d5dc3c115be..5c17eb2b68137 100644 --- a/x-pack/plugins/canvas/public/components/expression_input/__stories__/__snapshots__/expression_input.stories.storyshot +++ 
b/x-pack/plugins/canvas/public/components/expression_input/__stories__/__snapshots__/expression_input.stories.storyshot @@ -16,18 +16,7 @@ exports[`Storyshots components/ExpressionInput default 1`] = ` id="generated-id" onBlur={[Function]} onFocus={[Function]} - > -
-
-
+ />
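
Note on the lazy-loading changes above: the `CodeEditor`, `Markdown`, and `MarkdownSimple` wrappers in this diff all follow the same shape, so the pattern is summarized once here. This is only an illustrative sketch, not code from this PR; the module name `./heavy_component`, the wrapper name `HeavyComponent`, and its `Props` type are placeholders.

```tsx
import React from 'react';
import { EuiDelayRender, EuiLoadingContent } from '@elastic/eui';
// Placeholder module: any component that is expensive to load eagerly.
// React.lazy requires it to expose a default export.
import type { Props } from './heavy_component';

const LazyHeavyComponent = React.lazy(() => import('./heavy_component'));

// EuiDelayRender keeps the loading skeleton from flashing when the async
// chunk resolves quickly.
const Fallback = () => (
  <EuiDelayRender>
    <EuiLoadingContent lines={3} />
  </EuiDelayRender>
);

// The named export stays stable for existing consumers, while the heavy
// implementation is code-split into its own async chunk.
export const HeavyComponent: React.FC<Props> = (props) => (
  <React.Suspense fallback={<Fallback />}>
    <LazyHeavyComponent {...props} />
  </React.Suspense>
);
```

As in the diff, the default export is added to the underlying module solely so that `React.lazy` can resolve it; callers continue to import the named wrapper.
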
diff --git a/x-pack/plugins/enterprise_search/public/applications/__mocks__/index.ts b/x-pack/plugins/enterprise_search/public/applications/__mocks__/index.ts index f66235ff44c6a..88a900f69c5ec 100644 --- a/x-pack/plugins/enterprise_search/public/applications/__mocks__/index.ts +++ b/x-pack/plugins/enterprise_search/public/applications/__mocks__/index.ts @@ -6,7 +6,7 @@ export { mockHistory, mockLocation } from './react_router_history.mock'; export { mockKibanaContext } from './kibana_context.mock'; -export { mockLicenseContext } from './license_context.mock'; +export { mockLicensingValues } from './licensing_logic.mock'; export { mockHttpValues } from './http_logic.mock'; export { mockFlashMessagesValues, mockFlashMessagesActions } from './flash_messages_logic.mock'; export { mockAllValues, mockAllActions, setMockValues } from './kea.mock'; diff --git a/x-pack/plugins/enterprise_search/public/applications/__mocks__/kea.mock.ts b/x-pack/plugins/enterprise_search/public/applications/__mocks__/kea.mock.ts index 8e6b0baa5fc00..bad6beaa1652e 100644 --- a/x-pack/plugins/enterprise_search/public/applications/__mocks__/kea.mock.ts +++ b/x-pack/plugins/enterprise_search/public/applications/__mocks__/kea.mock.ts @@ -5,13 +5,17 @@ */ /** + * Combine all shared mock values/actions into a single obj + * * NOTE: These variable names MUST start with 'mock*' in order for * Jest to accept its use within a jest.mock() */ +import { mockLicensingValues } from './licensing_logic.mock'; import { mockHttpValues } from './http_logic.mock'; import { mockFlashMessagesValues, mockFlashMessagesActions } from './flash_messages_logic.mock'; export const mockAllValues = { + ...mockLicensingValues, ...mockHttpValues, ...mockFlashMessagesValues, }; diff --git a/x-pack/plugins/enterprise_search/public/applications/__mocks__/license_context.mock.ts b/x-pack/plugins/enterprise_search/public/applications/__mocks__/licensing_logic.mock.ts similarity index 79% rename from x-pack/plugins/enterprise_search/public/applications/__mocks__/license_context.mock.ts rename to x-pack/plugins/enterprise_search/public/applications/__mocks__/licensing_logic.mock.ts index 7c37ecc7cde1b..51b32e7a877b2 100644 --- a/x-pack/plugins/enterprise_search/public/applications/__mocks__/license_context.mock.ts +++ b/x-pack/plugins/enterprise_search/public/applications/__mocks__/licensing_logic.mock.ts @@ -6,6 +6,8 @@ import { licensingMock } from '../../../../licensing/public/mocks'; -export const mockLicenseContext = { +export const mockLicensingValues = { license: licensingMock.createLicense(), + hasPlatinumLicense: false, + hasGoldLicense: false, }; diff --git a/x-pack/plugins/enterprise_search/public/applications/__mocks__/mount_with_context.mock.tsx b/x-pack/plugins/enterprise_search/public/applications/__mocks__/mount_with_context.mock.tsx index 5e56f17c8e7f3..646c3104c286f 100644 --- a/x-pack/plugins/enterprise_search/public/applications/__mocks__/mount_with_context.mock.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/__mocks__/mount_with_context.mock.tsx @@ -15,8 +15,6 @@ import { getContext, resetContext } from 'kea'; import { I18nProvider } from '@kbn/i18n/react'; import { KibanaContext } from '../'; import { mockKibanaContext } from './kibana_context.mock'; -import { LicenseContext } from '../shared/licensing'; -import { mockLicenseContext } from './license_context.mock'; /** * This helper mounts a component with all the contexts/providers used @@ -34,9 +32,7 @@ export const mountWithContext = (children: React.ReactNode, 
context?: object) => return mount( - - {children} - + {children} ); diff --git a/x-pack/plugins/enterprise_search/public/applications/__mocks__/shallow_usecontext.mock.ts b/x-pack/plugins/enterprise_search/public/applications/__mocks__/shallow_usecontext.mock.ts index 3a2193db646de..df9e58994e36b 100644 --- a/x-pack/plugins/enterprise_search/public/applications/__mocks__/shallow_usecontext.mock.ts +++ b/x-pack/plugins/enterprise_search/public/applications/__mocks__/shallow_usecontext.mock.ts @@ -9,11 +9,10 @@ * Jest to accept its use within a jest.mock() */ import { mockKibanaContext } from './kibana_context.mock'; -import { mockLicenseContext } from './license_context.mock'; jest.mock('react', () => ({ ...(jest.requireActual('react') as object), - useContext: jest.fn(() => ({ ...mockKibanaContext, ...mockLicenseContext })), + useContext: jest.fn(() => ({ ...mockKibanaContext })), useEffect: jest.fn((fn) => fn()), // Calls on mount/every update - use mount for more complex behavior })); diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.test.tsx index 928d92d791094..44afce96c1a6c 100644 --- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.test.tsx @@ -82,9 +82,11 @@ describe('EngineOverview', () => { describe('when on a platinum license', () => { it('renders a 2nd meta engines table & makes a 2nd meta engines API call', async () => { - const wrapper = await mountWithAsyncContext(, { - license: { type: 'platinum', isActive: true }, + setMockValues({ + hasPlatinumLicense: true, + http: { ...mockHttpValues.http, get: mockApi }, }); + const wrapper = await mountWithAsyncContext(); expect(wrapper.find(EngineTable)).toHaveLength(2); expect(mockApi).toHaveBeenNthCalledWith(2, '/api/app_search/engines', { diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.tsx index c0aedbe7dc6b4..0cb9ba106dbb8 100644 --- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/engine_overview/engine_overview.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React, { useContext, useEffect, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { useValues } from 'kea'; import { EuiPageContent, @@ -19,7 +19,7 @@ import { SetAppSearchChrome as SetPageChrome } from '../../../shared/kibana_chro import { SendAppSearchTelemetry as SendTelemetry } from '../../../shared/telemetry'; import { FlashMessages } from '../../../shared/flash_messages'; import { HttpLogic } from '../../../shared/http'; -import { LicenseContext, ILicenseContext, hasPlatinumLicense } from '../../../shared/licensing'; +import { LicensingLogic } from '../../../shared/licensing'; import { EngineIcon } from './assets/engine_icon'; import { MetaEngineIcon } from './assets/meta_engine_icon'; @@ -40,7 +40,7 @@ interface ISetEnginesCallbacks { export const EngineOverview: React.FC = () => { const { http } = useValues(HttpLogic); - const { license } = useContext(LicenseContext) as ILicenseContext; + const { hasPlatinumLicense } = useValues(LicensingLogic); const [isLoading, setIsLoading] = useState(true); const [engines, setEngines] = useState([]); @@ -72,13 +72,13 @@ export const EngineOverview: React.FC = () => { }, [enginesPage]); useEffect(() => { - if (hasPlatinumLicense(license)) { + if (hasPlatinumLicense) { const params = { type: 'meta', pageIndex: metaEnginesPage }; const callbacks = { setResults: setMetaEngines, setResultsTotal: setMetaEnginesTotal }; setEnginesData(params, callbacks); } - }, [license, metaEnginesPage]); + }, [hasPlatinumLicense, metaEnginesPage]); if (isLoading) return ; if (!engines.length) return ; diff --git a/x-pack/plugins/enterprise_search/public/applications/index.test.tsx b/x-pack/plugins/enterprise_search/public/applications/index.test.tsx index 053c450ab925e..6ee63ee22cae2 100644 --- a/x-pack/plugins/enterprise_search/public/applications/index.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/index.test.tsx @@ -6,7 +6,6 @@ import React from 'react'; -import { AppMountParameters } from 'src/core/public'; import { coreMock } from 'src/core/public/mocks'; import { licensingMock } from '../../../licensing/public/mocks'; @@ -15,37 +14,38 @@ import { AppSearch } from './app_search'; import { WorkplaceSearch } from './workplace_search'; describe('renderApp', () => { - let params: AppMountParameters; - const core = coreMock.createStart(); - const plugins = { - licensing: licensingMock.createSetup(), + const kibanaDeps = { + params: coreMock.createAppMountParamters(), + core: coreMock.createStart(), + plugins: { licensing: licensingMock.createStart() }, + } as any; + const pluginData = { + config: {}, + data: {}, } as any; - const config = {}; - const data = {} as any; beforeEach(() => { jest.clearAllMocks(); - params = coreMock.createAppMountParamters(); }); it('mounts and unmounts UI', () => { const MockApp = () =>
<div className="hello-world">Hello world!</div>
; - const unmount = renderApp(MockApp, params, core, plugins, config, data); - expect(params.element.querySelector('.hello-world')).not.toBeNull(); + const unmount = renderApp(MockApp, kibanaDeps, pluginData); + expect(kibanaDeps.params.element.querySelector('.hello-world')).not.toBeNull(); unmount(); - expect(params.element.innerHTML).toEqual(''); + expect(kibanaDeps.params.element.innerHTML).toEqual(''); }); it('renders AppSearch', () => { - renderApp(AppSearch, params, core, plugins, config, data); - expect(params.element.querySelector('.setupGuide')).not.toBeNull(); + renderApp(AppSearch, kibanaDeps, pluginData); + expect(kibanaDeps.params.element.querySelector('.setupGuide')).not.toBeNull(); }); it('renders WorkplaceSearch', () => { - renderApp(WorkplaceSearch, params, core, plugins, config, data); - expect(params.element.querySelector('.setupGuide')).not.toBeNull(); + renderApp(WorkplaceSearch, kibanaDeps, pluginData); + expect(kibanaDeps.params.element.querySelector('.setupGuide')).not.toBeNull(); }); }); diff --git a/x-pack/plugins/enterprise_search/public/applications/index.tsx b/x-pack/plugins/enterprise_search/public/applications/index.tsx index 0869ef7b22729..4a25ecf6067cc 100644 --- a/x-pack/plugins/enterprise_search/public/applications/index.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/index.tsx @@ -14,8 +14,8 @@ import { getContext, resetContext } from 'kea'; import { I18nProvider } from '@kbn/i18n/react'; import { AppMountParameters, CoreStart, ApplicationStart, ChromeBreadcrumb } from 'src/core/public'; -import { ClientConfigType, ClientData, PluginsSetup } from '../plugin'; -import { LicenseProvider } from './shared/licensing'; +import { PluginsStart, ClientConfigType, ClientData } from '../plugin'; +import { mountLicensingLogic } from './shared/licensing'; import { mountHttpLogic } from './shared/http'; import { mountFlashMessagesLogic } from './shared/flash_messages'; import { IExternalUrl } from './shared/enterprise_search_url'; @@ -39,15 +39,18 @@ export const KibanaContext = React.createContext({}); export const renderApp = ( App: React.FC, - params: AppMountParameters, - core: CoreStart, - plugins: PluginsSetup, - config: ClientConfigType, - { externalUrl, errorConnecting, ...initialData }: ClientData + { params, core, plugins }: { params: AppMountParameters; core: CoreStart; plugins: PluginsStart }, + { config, data }: { config: ClientConfigType; data: ClientData } ) => { + const { externalUrl, errorConnecting, ...initialData } = data; + resetContext({ createStore: true }); const store = getContext().store as Store; + const unmountLicensingLogic = mountLicensingLogic({ + license$: plugins.licensing.license$, + }); + const unmountHttpLogic = mountHttpLogic({ http: core.http, errorConnecting, @@ -67,19 +70,18 @@ export const renderApp = ( setDocTitle: core.chrome.docTitle.change, }} > - - - - - - - + + + + + , params.element ); return () => { ReactDOM.unmountComponentAtNode(params.element); + unmountLicensingLogic(); unmountHttpLogic(); unmountFlashMessagesLogic(); }; diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/index.ts b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/index.ts index 29c11ffa1cef8..4e371b337c40a 100644 --- a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/index.ts +++ b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/index.ts @@ -4,5 +4,4 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -export { LicenseContext, LicenseProvider, ILicenseContext } from './license_context'; -export { hasPlatinumLicense, hasGoldLicense } from './license_checks'; +export { LicensingLogic, mountLicensingLogic } from './licensing_logic'; diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.test.ts b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.test.ts deleted file mode 100644 index 40f0f6380c21c..0000000000000 --- a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.test.ts +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { hasPlatinumLicense, hasGoldLicense } from './license_checks'; - -describe('hasPlatinumLicense', () => { - it('is true for platinum licenses', () => { - expect(hasPlatinumLicense({ isActive: true, type: 'platinum' } as any)).toEqual(true); - }); - - it('is true for enterprise licenses', () => { - expect(hasPlatinumLicense({ isActive: true, type: 'enterprise' } as any)).toEqual(true); - }); - - it('is true for trial licenses', () => { - expect(hasPlatinumLicense({ isActive: true, type: 'platinum' } as any)).toEqual(true); - }); - - it('is false if the current license is expired', () => { - expect(hasPlatinumLicense({ isActive: false, type: 'platinum' } as any)).toEqual(false); - expect(hasPlatinumLicense({ isActive: false, type: 'enterprise' } as any)).toEqual(false); - expect(hasPlatinumLicense({ isActive: false, type: 'trial' } as any)).toEqual(false); - }); - - it('is false for licenses below platinum', () => { - expect(hasPlatinumLicense({ isActive: true, type: 'basic' } as any)).toEqual(false); - expect(hasPlatinumLicense({ isActive: false, type: 'standard' } as any)).toEqual(false); - expect(hasPlatinumLicense({ isActive: true, type: 'gold' } as any)).toEqual(false); - }); -}); - -describe('hasGoldLicense', () => { - it('is true for gold+ and trial licenses', () => { - expect(hasGoldLicense({ isActive: true, type: 'gold' } as any)).toEqual(true); - expect(hasGoldLicense({ isActive: true, type: 'platinum' } as any)).toEqual(true); - expect(hasGoldLicense({ isActive: true, type: 'enterprise' } as any)).toEqual(true); - expect(hasGoldLicense({ isActive: true, type: 'trial' } as any)).toEqual(true); - }); - - it('is false if the current license is expired', () => { - expect(hasGoldLicense({ isActive: false, type: 'gold' } as any)).toEqual(false); - expect(hasGoldLicense({ isActive: false, type: 'platinum' } as any)).toEqual(false); - expect(hasGoldLicense({ isActive: false, type: 'enterprise' } as any)).toEqual(false); - expect(hasGoldLicense({ isActive: false, type: 'trial' } as any)).toEqual(false); - }); - - it('is false for licenses below gold', () => { - expect(hasGoldLicense({ isActive: true, type: 'basic' } as any)).toEqual(false); - expect(hasGoldLicense({ isActive: false, type: 'standard' } as any)).toEqual(false); - }); -}); diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.ts b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.ts deleted file mode 100644 index d13d0909243be..0000000000000 --- a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_checks.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { ILicense } from '../../../../../licensing/public'; - -export const hasPlatinumLicense = (license?: ILicense) => { - const qualifyingLicenses = ['platinum', 'enterprise', 'trial']; - return license?.isActive && qualifyingLicenses.includes(license?.type as string); -}; - -export const hasGoldLicense = (license?: ILicense) => { - const qualifyingLicenses = ['gold', 'platinum', 'enterprise', 'trial']; - return license?.isActive && qualifyingLicenses.includes(license?.type as string); -}; diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.test.tsx deleted file mode 100644 index c65474ec1f590..0000000000000 --- a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.test.tsx +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import React, { useContext } from 'react'; - -import { mountWithContext } from '../../__mocks__'; -import { LicenseContext, ILicenseContext } from './'; - -describe('LicenseProvider', () => { - const MockComponent: React.FC = () => { - const { license } = useContext(LicenseContext) as ILicenseContext; - return
<div className="license-test">{license?.type}</div>
; - }; - - it('renders children', () => { - const wrapper = mountWithContext(, { license: { type: 'basic' } }); - - expect(wrapper.find('.license-test')).toHaveLength(1); - expect(wrapper.text()).toEqual('basic'); - }); -}); diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.tsx deleted file mode 100644 index 9b47959ff7544..0000000000000 --- a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/license_context.tsx +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import React from 'react'; -import useObservable from 'react-use/lib/useObservable'; -import { Observable } from 'rxjs'; - -import { ILicense } from '../../../../../licensing/public'; - -export interface ILicenseContext { - license: ILicense; -} -interface ILicenseContextProps { - license$: Observable; - children: React.ReactNode; -} - -export const LicenseContext = React.createContext({}); - -export const LicenseProvider: React.FC = ({ license$, children }) => { - // Listen for changes to license subscription - const license = useObservable(license$); - - // Render rest of application and pass down license via context - return ; -}; diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.test.ts new file mode 100644 index 0000000000000..153a5ae765468 --- /dev/null +++ b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.test.ts @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { resetContext } from 'kea'; +import { BehaviorSubject } from 'rxjs'; + +import { licensingMock } from '../../../../../licensing/public/mocks'; + +import { LicensingLogic, mountLicensingLogic } from './licensing_logic'; + +describe('LicensingLogic', () => { + const mockLicense = licensingMock.createLicense(); + const mockLicense$ = new BehaviorSubject(mockLicense); + const mount = () => mountLicensingLogic({ license$: mockLicense$ }); + + beforeEach(() => { + jest.clearAllMocks(); + resetContext({}); + }); + + describe('setLicense()', () => { + it('sets license value', () => { + mount(); + LicensingLogic.actions.setLicense('test' as any); + expect(LicensingLogic.values.license).toEqual('test'); + }); + }); + + describe('setLicenseSubscription()', () => { + it('sets licenseSubscription value', () => { + mount(); + LicensingLogic.actions.setLicenseSubscription('test' as any); + expect(LicensingLogic.values.licenseSubscription).toEqual('test'); + }); + }); + + describe('licensing subscription', () => { + describe('on mount', () => { + it('subscribes to the license observable', () => { + mount(); + expect(LicensingLogic.values.license).toEqual(mockLicense); + expect(LicensingLogic.values.licenseSubscription).not.toBeNull(); + }); + }); + + describe('on subscription update', () => { + it('updates the license value', () => { + mount(); + + const nextMockLicense = licensingMock.createLicense({ license: { status: 'invalid' } }); + mockLicense$.next(nextMockLicense); + + expect(LicensingLogic.values.license).toEqual(nextMockLicense); + }); + }); + + describe('on unmount', () => { + it('unsubscribes to the license observable', () => { + const mockUnsubscribe = jest.fn(); + const unmount = mountLicensingLogic({ + license$: { subscribe: () => ({ unsubscribe: mockUnsubscribe }) } as any, + }); + unmount(); + expect(mockUnsubscribe).toHaveBeenCalled(); + }); + + it('does not crash if no subscription exists', () => { + const unmount = mount(); + LicensingLogic.actions.setLicenseSubscription(null as any); + unmount(); + }); + }); + }); + + describe('license check selectors', () => { + beforeEach(() => { + mount(); + }); + + const updateLicense = (license: any) => { + const updatedLicense = licensingMock.createLicense({ license }); + mockLicense$.next(updatedLicense); + }; + + describe('hasPlatinumLicense', () => { + it('is true for platinum+ and trial licenses', () => { + updateLicense({ status: 'active', type: 'platinum' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(true); + + updateLicense({ status: 'active', type: 'enterprise' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(true); + + updateLicense({ status: 'active', type: 'trial' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(true); + }); + + it('is false if the current license is expired', () => { + updateLicense({ status: 'expired', type: 'platinum' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + + updateLicense({ status: 'expired', type: 'enterprise' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + + updateLicense({ status: 'expired', type: 'trial' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + }); + + it('is false for licenses below platinum', () => { + updateLicense({ status: 'active', type: 'basic' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + + updateLicense({ status: 'active', type: 'standard' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + + 
updateLicense({ status: 'active', type: 'gold' }); + expect(LicensingLogic.values.hasPlatinumLicense).toEqual(false); + }); + }); + + describe('hasGoldLicense', () => { + it('is true for gold+ and trial licenses', () => { + updateLicense({ status: 'active', type: 'gold' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(true); + + updateLicense({ status: 'active', type: 'platinum' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(true); + + updateLicense({ status: 'active', type: 'enterprise' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(true); + + updateLicense({ status: 'active', type: 'trial' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(true); + }); + + it('is false if the current license is expired', () => { + updateLicense({ status: 'expired', type: 'gold' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + + updateLicense({ status: 'expired', type: 'platinum' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + + updateLicense({ status: 'expired', type: 'enterprise' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + + updateLicense({ status: 'expired', type: 'trial' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + }); + + it('is false for licenses below gold', () => { + updateLicense({ status: 'active', type: 'basic' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + + updateLicense({ status: 'active', type: 'standard' }); + expect(LicensingLogic.values.hasGoldLicense).toEqual(false); + }); + }); + }); +}); diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.ts b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.ts new file mode 100644 index 0000000000000..ae31b2ec6168a --- /dev/null +++ b/x-pack/plugins/enterprise_search/public/applications/shared/licensing/licensing_logic.ts @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { kea, MakeLogicType } from 'kea'; +import { Observable, Subscription } from 'rxjs'; + +import { ILicense } from '../../../../../licensing/public'; + +export interface ILicensingValues { + license: ILicense | null; + licenseSubscription: Subscription | null; + hasPlatinumLicense: boolean; + hasGoldLicense: boolean; +} +export interface ILicensingActions { + setLicense(license: ILicense): ILicense; + setLicenseSubscription(licenseSubscription: Subscription): Subscription; +} + +export const LicensingLogic = kea>({ + path: ['enterprise_search', 'licensing_logic'], + actions: { + setLicense: (license) => license, + setLicenseSubscription: (licenseSubscription) => licenseSubscription, + }, + reducers: { + license: [ + null, + { + setLicense: (_, license) => license, + }, + ], + licenseSubscription: [ + null, + { + setLicenseSubscription: (_, licenseSubscription) => licenseSubscription, + }, + ], + }, + selectors: { + hasPlatinumLicense: [ + (selectors) => [selectors.license], + (license) => { + const qualifyingLicenses = ['platinum', 'enterprise', 'trial']; + return license?.isActive && qualifyingLicenses.includes(license?.type); + }, + ], + hasGoldLicense: [ + (selectors) => [selectors.license], + (license) => { + const qualifyingLicenses = ['gold', 'platinum', 'enterprise', 'trial']; + return license?.isActive && qualifyingLicenses.includes(license?.type); + }, + ], + }, + events: ({ props, actions, values }) => ({ + afterMount: () => { + const licenseSubscription = props.license$.subscribe(async (license: ILicense) => { + actions.setLicense(license); + }); + actions.setLicenseSubscription(licenseSubscription); + }, + beforeUnmount: () => { + if (values.licenseSubscription) values.licenseSubscription.unsubscribe(); + }, + }), +}); + +/** + * Mount/props helper + */ +interface ILicensingLogicProps { + license$: Observable; +} +export const mountLicensingLogic = (props: ILicensingLogicProps) => { + LicensingLogic(props); + const unmount = LicensingLogic.mount(); + return unmount; +}; diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.test.tsx index ce9071ad7b9d0..62c0af31cffd9 100644 --- a/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.test.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.test.tsx @@ -4,9 +4,10 @@ * you may not use this file except in compliance with the Elastic License. 
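For reviewers less familiar with Kea, a minimal sketch of how the new LicensingLogic is meant to be consumed; the component name and import path below are illustrative only, while the useValues pattern matches the not_found.tsx change later in this diff and mountLicensingLogic is the test-only mount helper exercised in licensing_logic.test.ts.

// Illustrative sketch; component name and import path are not part of this PR.
import React from 'react';
import { useValues } from 'kea';
import { LicensingLogic } from '../shared/licensing';

export const UpgradePrompt: React.FC = () => {
  // Selectors recompute whenever the wired-up license$ observable emits a new license.
  const { hasGoldLicense, hasPlatinumLicense } = useValues(LicensingLogic);

  if (hasPlatinumLicense) return null;
  return <p>{hasGoldLicense ? 'Gold license active' : 'Upgrade your license for more features'}</p>;
};

// In tests, the logic is mounted directly against a license observable:
//   const unmount = mountLicensingLogic({ license$: mockLicense$ });
//   ...assert against LicensingLogic.values...
//   unmount();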
*/ -import '../../__mocks__/shallow_usecontext.mock'; +import '../../__mocks__/kea.mock'; -import React, { useContext } from 'react'; +import React from 'react'; +import { useValues } from 'kea'; import { shallow } from 'enzyme'; import { EuiButton as EuiButtonExternal, EuiEmptyPrompt } from '@elastic/eui'; @@ -18,13 +19,6 @@ import { WorkplaceSearchLogo } from './assets/workplace_search_logo'; import { NotFound } from './'; describe('NotFound', () => { - const basicLicense = { isActive: true, type: 'basic' }; - const goldLicense = { isActive: true, type: 'gold' }; - - beforeEach(() => { - (useContext as jest.Mock).mockImplementation(() => ({ license: basicLicense })); - }); - it('renders an App Search 404 view', () => { const wrapper = shallow(); const prompt = wrapper.find(EuiEmptyPrompt).dive().shallow(); @@ -50,7 +44,7 @@ describe('NotFound', () => { }); it('changes the support URL if the user has a gold+ license', () => { - (useContext as jest.Mock).mockImplementation(() => ({ license: goldLicense })); + (useValues as jest.Mock).mockReturnValueOnce({ hasGoldLicense: true }); const wrapper = shallow(); const prompt = wrapper.find(EuiEmptyPrompt).dive().shallow(); diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.tsx index bd988854225fb..40bb5efcc6330 100644 --- a/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.tsx +++ b/x-pack/plugins/enterprise_search/public/applications/shared/not_found/not_found.tsx @@ -4,7 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { useContext } from 'react'; +import React from 'react'; +import { useValues } from 'kea'; import { i18n } from '@kbn/i18n'; import { EuiPageContent, @@ -24,7 +25,7 @@ import { import { EuiButton } from '../react_router_helpers'; import { SetAppSearchChrome, SetWorkplaceSearchChrome } from '../kibana_chrome'; import { SendAppSearchTelemetry, SendWorkplaceSearchTelemetry } from '../telemetry'; -import { LicenseContext, ILicenseContext, hasGoldLicense } from '../licensing'; +import { LicensingLogic } from '../licensing'; import { AppSearchLogo } from './assets/app_search_logo'; import { WorkplaceSearchLogo } from './assets/workplace_search_logo'; @@ -39,8 +40,8 @@ interface NotFoundProps { } export const NotFound: React.FC = ({ product = {} }) => { - const { license } = useContext(LicenseContext) as ILicenseContext; - const supportUrl = hasGoldLicense(license) ? LICENSED_SUPPORT_URL : product.SUPPORT_URL; + const { hasGoldLicense } = useValues(LicensingLogic); + const supportUrl = hasGoldLicense ? 
LICENSED_SUPPORT_URL : product.SUPPORT_URL; let Logo; let SetPageChrome; diff --git a/x-pack/plugins/enterprise_search/public/plugin.ts b/x-pack/plugins/enterprise_search/public/plugin.ts index c23bb23be3979..f59ec830c812f 100644 --- a/x-pack/plugins/enterprise_search/public/plugin.ts +++ b/x-pack/plugins/enterprise_search/public/plugin.ts @@ -7,7 +7,6 @@ import { AppMountParameters, CoreSetup, - CoreStart, HttpSetup, Plugin, PluginInitializerContext, @@ -17,7 +16,7 @@ import { FeatureCatalogueCategory, HomePublicPluginSetup, } from '../../../../src/plugins/home/public'; -import { LicensingPluginSetup } from '../../licensing/public'; +import { LicensingPluginStart } from '../../licensing/public'; import { APP_SEARCH_PLUGIN, ENTERPRISE_SEARCH_PLUGIN, @@ -36,7 +35,9 @@ export interface ClientData extends IInitialAppData { export interface PluginsSetup { home?: HomePublicPluginSetup; - licensing: LicensingPluginSetup; +} +export interface PluginsStart { + licensing: LicensingPluginStart; } export class EnterpriseSearchPlugin implements Plugin { @@ -57,16 +58,17 @@ export class EnterpriseSearchPlugin implements Plugin { appRoute: ENTERPRISE_SEARCH_PLUGIN.URL, category: DEFAULT_APP_CATEGORIES.enterpriseSearch, mount: async (params: AppMountParameters) => { - const [coreStart] = await core.getStartServices(); - const { chrome } = coreStart; - chrome.docTitle.change(ENTERPRISE_SEARCH_PLUGIN.NAME); + const kibanaDeps = await this.getKibanaDeps(core, params); + const pluginData = this.getPluginData(); - await this.getInitialData(coreStart.http); + const { chrome, http } = kibanaDeps.core; + chrome.docTitle.change(ENTERPRISE_SEARCH_PLUGIN.NAME); + await this.getInitialData(http); const { renderApp } = await import('./applications'); const { EnterpriseSearch } = await import('./applications/enterprise_search'); - return renderApp(EnterpriseSearch, params, coreStart, plugins, this.config, this.data); + return renderApp(EnterpriseSearch, kibanaDeps, pluginData); }, }); @@ -77,16 +79,17 @@ export class EnterpriseSearchPlugin implements Plugin { appRoute: APP_SEARCH_PLUGIN.URL, category: DEFAULT_APP_CATEGORIES.enterpriseSearch, mount: async (params: AppMountParameters) => { - const [coreStart] = await core.getStartServices(); - const { chrome } = coreStart; - chrome.docTitle.change(APP_SEARCH_PLUGIN.NAME); + const kibanaDeps = await this.getKibanaDeps(core, params); + const pluginData = this.getPluginData(); - await this.getInitialData(coreStart.http); + const { chrome, http } = kibanaDeps.core; + chrome.docTitle.change(APP_SEARCH_PLUGIN.NAME); + await this.getInitialData(http); const { renderApp } = await import('./applications'); const { AppSearch } = await import('./applications/app_search'); - return renderApp(AppSearch, params, coreStart, plugins, this.config, this.data); + return renderApp(AppSearch, kibanaDeps, pluginData); }, }); @@ -97,11 +100,12 @@ export class EnterpriseSearchPlugin implements Plugin { appRoute: WORKPLACE_SEARCH_PLUGIN.URL, category: DEFAULT_APP_CATEGORIES.enterpriseSearch, mount: async (params: AppMountParameters) => { - const [coreStart] = await core.getStartServices(); - const { chrome } = coreStart; - chrome.docTitle.change(WORKPLACE_SEARCH_PLUGIN.NAME); + const kibanaDeps = await this.getKibanaDeps(core, params); + const pluginData = this.getPluginData(); - await this.getInitialData(coreStart.http); + const { chrome, http } = kibanaDeps.core; + chrome.docTitle.change(WORKPLACE_SEARCH_PLUGIN.NAME); + await this.getInitialData(http); const { renderApp, renderHeaderActions } =
await import('./applications'); const { WorkplaceSearch } = await import('./applications/workplace_search'); @@ -113,7 +117,7 @@ export class EnterpriseSearchPlugin implements Plugin { renderHeaderActions(WorkplaceSearchHeaderActions, element, this.data.externalUrl) ); - return renderApp(WorkplaceSearch, params, coreStart, plugins, this.config, this.data); + return renderApp(WorkplaceSearch, kibanaDeps, pluginData); }, }); @@ -149,10 +153,22 @@ export class EnterpriseSearchPlugin implements Plugin { } } - public start(core: CoreStart) {} + public start() {} public stop() {} + private async getKibanaDeps(core: CoreSetup, params: AppMountParameters) { + // Helper for using start dependencies on mount (instead of setup dependencies) + // and for grouping Kibana-related args together (vs. plugin-specific args) + const [coreStart, pluginsStart] = await core.getStartServices(); + return { params, core: coreStart, plugins: pluginsStart as PluginsStart }; + } + + private getPluginData() { + // Small helper for grouping plugin data related args together + return { config: this.config, data: this.data }; + } + private async getInitialData(http: HttpSetup) { if (!this.config.host) return; // No API to call if (this.hasInitialized) return; // We've already made an initial call diff --git a/x-pack/plugins/security_solution/server/graphql/uncommon_processes/index.ts b/x-pack/plugins/infra/common/http_api/infra_ml/index.ts similarity index 66% rename from x-pack/plugins/security_solution/server/graphql/uncommon_processes/index.ts rename to x-pack/plugins/infra/common/http_api/infra_ml/index.ts index d0da0efd8a560..38684cb22e237 100644 --- a/x-pack/plugins/security_solution/server/graphql/uncommon_processes/index.ts +++ b/x-pack/plugins/infra/common/http_api/infra_ml/index.ts @@ -4,5 +4,4 @@ * you may not use this file except in compliance with the Elastic License. */ -export { createUncommonProcessesResolvers } from './resolvers'; -export { uncommonProcessesSchema } from './schema.gql'; +export * from './results'; diff --git a/x-pack/plugins/infra/common/http_api/infra_ml/results/common.ts b/x-pack/plugins/infra/common/http_api/infra_ml/results/common.ts new file mode 100644 index 0000000000000..0474fbd1cfc2f --- /dev/null +++ b/x-pack/plugins/infra/common/http_api/infra_ml/results/common.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
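To make the new renderApp(App, kibanaDeps, pluginData) call sites easier to review, here is a hedged sketch of the two grouped argument shapes implied by getKibanaDeps and getPluginData; the interface names are illustrative and the real types live alongside renderApp in ./applications.

// Illustrative shapes only; names are assumptions, not part of this PR.
import { AppMountParameters, CoreStart } from 'src/core/public';
import { PluginsStart } from './plugin';

interface KibanaDeps {
  params: AppMountParameters; // mount parameters for the app (element, history, ...)
  core: CoreStart;            // start-contract core services (chrome, http, ...)
  plugins: PluginsStart;      // start-contract plugin dependencies (licensing)
}

interface PluginData {
  config: unknown; // this.config, the enterpriseSearch client config
  data: unknown;   // this.data, the initial app data fetched via getInitialData()
}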
+ */ + +import * as rt from 'io-ts'; + +// [Sort field value, tiebreaker value] +export const paginationCursorRT = rt.tuple([ + rt.union([rt.string, rt.number]), + rt.union([rt.string, rt.number]), +]); + +export type PaginationCursor = rt.TypeOf; + +export const anomalyTypeRT = rt.keyof({ + metrics_hosts: null, + metrics_k8s: null, +}); + +export type AnomalyType = rt.TypeOf; + +const sortOptionsRT = rt.keyof({ + anomalyScore: null, + dataset: null, + startTime: null, +}); + +const sortDirectionsRT = rt.keyof({ + asc: null, + desc: null, +}); + +const paginationPreviousPageCursorRT = rt.type({ + searchBefore: paginationCursorRT, +}); + +const paginationNextPageCursorRT = rt.type({ + searchAfter: paginationCursorRT, +}); + +export const paginationRT = rt.intersection([ + rt.type({ + pageSize: rt.number, + }), + rt.partial({ + cursor: rt.union([paginationPreviousPageCursorRT, paginationNextPageCursorRT]), + }), +]); + +export type Pagination = rt.TypeOf; + +export const sortRT = rt.type({ + field: sortOptionsRT, + direction: sortDirectionsRT, +}); + +export type Sort = rt.TypeOf; diff --git a/x-pack/plugins/infra/common/http_api/infra_ml/results/index.ts b/x-pack/plugins/infra/common/http_api/infra_ml/results/index.ts new file mode 100644 index 0000000000000..efd597a043e07 --- /dev/null +++ b/x-pack/plugins/infra/common/http_api/infra_ml/results/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './metrics_hosts_anomalies'; +export * from './metrics_k8s_anomalies'; +export * from './common'; diff --git a/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_hosts_anomalies.ts b/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_hosts_anomalies.ts new file mode 100644 index 0000000000000..9fdac09fec20e --- /dev/null +++ b/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_hosts_anomalies.ts @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +import { timeRangeRT, routeTimingMetadataRT } from '../../shared'; +import { anomalyTypeRT, paginationCursorRT, sortRT, paginationRT } from './common'; + +export const INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH = + '/api/infra/infra_ml/results/metrics_hosts_anomalies'; + +const metricsHostAnomalyCommonFieldsRT = rt.type({ + id: rt.string, + anomalyScore: rt.number, + typical: rt.number, + actual: rt.number, + type: anomalyTypeRT, + duration: rt.number, + startTime: rt.number, + jobId: rt.string, +}); +const metricsHostsAnomalyRT = metricsHostAnomalyCommonFieldsRT; + +export type MetricsHostsAnomaly = rt.TypeOf; + +export const getMetricsHostsAnomaliesSuccessReponsePayloadRT = rt.intersection([ + rt.type({ + data: rt.intersection([ + rt.type({ + anomalies: rt.array(metricsHostsAnomalyRT), + // Signifies there are more entries backwards or forwards. If this was a request + // for a previous page, there are more previous pages, if this was a request for a next page, + // there are more next pages. 
+ hasMoreEntries: rt.boolean, + }), + rt.partial({ + paginationCursors: rt.type({ + // The cursor to use to fetch the previous page + previousPageCursor: paginationCursorRT, + // The cursor to use to fetch the next page + nextPageCursor: paginationCursorRT, + }), + }), + ]), + }), + rt.partial({ + timing: routeTimingMetadataRT, + }), +]); + +export type GetMetricsHostsAnomaliesSuccessResponsePayload = rt.TypeOf< + typeof getMetricsHostsAnomaliesSuccessReponsePayloadRT +>; + +export const getMetricsHostsAnomaliesRequestPayloadRT = rt.type({ + data: rt.intersection([ + rt.type({ + // the ID of the source configuration + sourceId: rt.string, + // the time range to fetch the log entry anomalies from + timeRange: timeRangeRT, + }), + rt.partial({ + // Pagination properties + pagination: paginationRT, + // Sort properties + sort: sortRT, + // // Dataset filters + // datasets: rt.array(rt.string), + }), + ]), +}); + +export type GetMetricsHostsAnomaliesRequestPayload = rt.TypeOf< + typeof getMetricsHostsAnomaliesRequestPayloadRT +>; diff --git a/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_k8s_anomalies.ts b/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_k8s_anomalies.ts new file mode 100644 index 0000000000000..ab1f245a74c0c --- /dev/null +++ b/x-pack/plugins/infra/common/http_api/infra_ml/results/metrics_k8s_anomalies.ts @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +import { timeRangeRT, routeTimingMetadataRT } from '../../shared'; +import { paginationCursorRT, anomalyTypeRT, sortRT, paginationRT } from './common'; + +export const INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH = + '/api/infra/infra_ml/results/metrics_k8s_anomalies'; + +const metricsK8sAnomalyCommonFieldsRT = rt.type({ + id: rt.string, + anomalyScore: rt.number, + typical: rt.number, + actual: rt.number, + type: anomalyTypeRT, + duration: rt.number, + startTime: rt.number, + jobId: rt.string, +}); +const metricsK8sAnomalyRT = metricsK8sAnomalyCommonFieldsRT; + +export type MetricsK8sAnomaly = rt.TypeOf; + +export const getMetricsK8sAnomaliesSuccessReponsePayloadRT = rt.intersection([ + rt.type({ + data: rt.intersection([ + rt.type({ + anomalies: rt.array(metricsK8sAnomalyRT), + // Signifies there are more entries backwards or forwards. If this was a request + // for a previous page, there are more previous pages, if this was a request for a next page, + // there are more next pages. 
+ hasMoreEntries: rt.boolean, + }), + rt.partial({ + paginationCursors: rt.type({ + // The cursor to use to fetch the previous page + previousPageCursor: paginationCursorRT, + // The cursor to use to fetch the next page + nextPageCursor: paginationCursorRT, + }), + }), + ]), + }), + rt.partial({ + timing: routeTimingMetadataRT, + }), +]); + +export type GetMetricsK8sAnomaliesSuccessResponsePayload = rt.TypeOf< + typeof getMetricsK8sAnomaliesSuccessReponsePayloadRT +>; + +export const getMetricsK8sAnomaliesRequestPayloadRT = rt.type({ + data: rt.intersection([ + rt.type({ + // the ID of the source configuration + sourceId: rt.string, + // the time range to fetch the log entry anomalies from + timeRange: timeRangeRT, + }), + rt.partial({ + // Pagination properties + pagination: paginationRT, + // Sort properties + sort: sortRT, + // Dataset filters + datasets: rt.array(rt.string), + }), + ]), +}); + +export type GetMetricsK8sAnomaliesRequestPayload = rt.TypeOf< + typeof getMetricsK8sAnomaliesRequestPayloadRT +>; diff --git a/x-pack/plugins/infra/common/infra_ml/anomaly_results.ts b/x-pack/plugins/infra/common/infra_ml/anomaly_results.ts new file mode 100644 index 0000000000000..f4497dbba5056 --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/anomaly_results.ts @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export const ML_SEVERITY_SCORES = { + warning: 3, + minor: 25, + major: 50, + critical: 75, +}; + +export type MLSeverityScoreCategories = keyof typeof ML_SEVERITY_SCORES; + +export const ML_SEVERITY_COLORS = { + critical: 'rgb(228, 72, 72)', + major: 'rgb(229, 113, 0)', + minor: 'rgb(255, 221, 0)', + warning: 'rgb(125, 180, 226)', +}; + +export const getSeverityCategoryForScore = ( + score: number +): MLSeverityScoreCategories | undefined => { + if (score >= ML_SEVERITY_SCORES.critical) { + return 'critical'; + } else if (score >= ML_SEVERITY_SCORES.major) { + return 'major'; + } else if (score >= ML_SEVERITY_SCORES.minor) { + return 'minor'; + } else if (score >= ML_SEVERITY_SCORES.warning) { + return 'warning'; + } else { + // Category is too low to include + return undefined; + } +}; + +export const formatAnomalyScore = (score: number) => { + return Math.round(score); +}; + +export const formatOneDecimalPlace = (number: number) => { + return Math.round(number * 10) / 10; +}; + +export const getFriendlyNameForPartitionId = (partitionId: string) => { + return partitionId !== '' ? partitionId : 'unknown'; +}; + +export const compareDatasetsByMaximumAnomalyScore = < + Dataset extends { maximumAnomalyScore: number } +>( + firstDataset: Dataset, + secondDataset: Dataset +) => firstDataset.maximumAnomalyScore - secondDataset.maximumAnomalyScore; diff --git a/x-pack/plugins/infra/common/infra_ml/index.ts b/x-pack/plugins/infra/common/infra_ml/index.ts new file mode 100644 index 0000000000000..88fbbd4f25045 --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/index.ts @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
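Since the pagination and sort codecs above are shared by both anomaly endpoints, a small sketch of how a request fragment validates against them may help; the sample values and the import path are made up.

// Hypothetical values; demonstrates what the io-ts codecs above accept and reject.
import { isRight } from 'fp-ts/lib/Either';
import { paginationRT, sortRT } from './results/common'; // path assumed

// A "next page" request: pageSize plus a searchAfter cursor of [sort field value, tiebreaker].
const pagination = { pageSize: 25, cursor: { searchAfter: [81.5, 'host-123'] } };
console.log(isRight(paginationRT.decode(pagination))); // true

// Sorting is restricted to the whitelisted fields and directions.
console.log(isRight(sortRT.decode({ field: 'anomalyScore', direction: 'desc' }))); // true
console.log(isRight(sortRT.decode({ field: 'hostName', direction: 'desc' }))); // false, unknown field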
+ */ + +export * from './infra_ml'; +export * from './anomaly_results'; +export * from './job_parameters'; +export * from './metrics_hosts_ml'; +export * from './metrics_k8s_ml'; diff --git a/x-pack/plugins/infra/common/infra_ml/infra_ml.ts b/x-pack/plugins/infra/common/infra_ml/infra_ml.ts new file mode 100644 index 0000000000000..680a2a0fef114 --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/infra_ml.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +// combines and abstracts job and datafeed status +export type JobStatus = + | 'unknown' + | 'missing' + | 'initializing' + | 'stopped' + | 'started' + | 'finished' + | 'failed'; + +export type SetupStatus = + | { type: 'initializing' } // acquiring job statuses to determine setup status + | { type: 'unknown' } // job status could not be acquired (failed request etc) + | { type: 'required' } // setup required + | { type: 'pending' } // In the process of setting up the module for the first time or retrying, waiting for response + | { type: 'succeeded' } // setup succeeded, notifying user + | { + type: 'failed'; + reasons: string[]; + } // setup failed, notifying user + | { + type: 'skipped'; + newlyCreated?: boolean; + }; // setup is not necessary + +/** + * Maps a job status to the possibility that results have already been produced + * before this state was reached. + */ +export const isJobStatusWithResults = (jobStatus: JobStatus) => + ['started', 'finished', 'stopped', 'failed'].includes(jobStatus); + +export const isHealthyJobStatus = (jobStatus: JobStatus) => + ['started', 'finished'].includes(jobStatus); + +/** + * Maps a setup status to the possibility that results have already been + * produced before this state was reached. + */ +export const isSetupStatusWithResults = (setupStatus: SetupStatus) => + setupStatus.type === 'skipped'; + +const KIBANA_SAMPLE_DATA_INDICES = ['kibana_sample_data_logs*']; + +export const isExampleDataIndex = (indexName: string) => + KIBANA_SAMPLE_DATA_INDICES.includes(indexName); diff --git a/x-pack/plugins/infra/common/infra_ml/job_parameters.ts b/x-pack/plugins/infra/common/infra_ml/job_parameters.ts new file mode 100644 index 0000000000000..8cd1c9ea6e2ba --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/job_parameters.ts @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
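SetupStatus above is a tagged union, so consumers are expected to switch on type; a minimal, hypothetical consumer could look like the following (the helper itself is not part of this PR).

// Hypothetical helper demonstrating exhaustive handling of SetupStatus.
import { SetupStatus } from '../../common/infra_ml'; // path assumed

const describeSetupStatus = (setupStatus: SetupStatus): string => {
  switch (setupStatus.type) {
    case 'initializing':
      return 'Determining ML job status...';
    case 'unknown':
      return 'ML job status could not be determined.';
    case 'required':
      return 'ML jobs have not been set up yet.';
    case 'pending':
      return 'Setting up ML jobs...';
    case 'succeeded':
      return 'ML jobs were set up successfully.';
    case 'failed':
      return `Setup failed: ${setupStatus.reasons.join(', ')}`;
    case 'skipped':
      return setupStatus.newlyCreated ? 'Jobs were just created.' : 'Setup was not necessary.';
  }
};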
+ */ + +import * as rt from 'io-ts'; + +export const bucketSpan = 900000; + +export const categoriesMessageField = 'message'; + +export const partitionField = 'event.dataset'; + +export const getJobIdPrefix = (spaceId: string, sourceId: string) => + `kibana-metrics-ui-${spaceId}-${sourceId}-`; + +export const getJobId = (spaceId: string, sourceId: string, jobType: string) => + `${getJobIdPrefix(spaceId, sourceId)}${jobType}`; + +export const getDatafeedId = (spaceId: string, sourceId: string, jobType: string) => + `datafeed-${getJobId(spaceId, sourceId, jobType)}`; + +export const datasetFilterRT = rt.union([ + rt.strict({ + type: rt.literal('includeAll'), + }), + rt.strict({ + type: rt.literal('includeSome'), + datasets: rt.array(rt.string), + }), +]); + +export type DatasetFilter = rt.TypeOf; + +export const jobSourceConfigurationRT = rt.partial({ + indexPattern: rt.string, + timestampField: rt.string, + bucketSpan: rt.number, + datasetFilter: datasetFilterRT, +}); + +export type JobSourceConfiguration = rt.TypeOf; + +export const jobCustomSettingsRT = rt.partial({ + job_revision: rt.number, + metrics_source_config: jobSourceConfigurationRT, +}); + +export type JobCustomSettings = rt.TypeOf; + +export const combineDatasetFilters = ( + firstFilter: DatasetFilter, + secondFilter: DatasetFilter +): DatasetFilter => { + if (firstFilter.type === 'includeAll' && secondFilter.type === 'includeAll') { + return { + type: 'includeAll', + }; + } + + const includedDatasets = new Set([ + ...(firstFilter.type === 'includeSome' ? firstFilter.datasets : []), + ...(secondFilter.type === 'includeSome' ? secondFilter.datasets : []), + ]); + + return { + type: 'includeSome', + datasets: [...includedDatasets], + }; +}; + +export const filterDatasetFilter = ( + datasetFilter: DatasetFilter, + predicate: (dataset: string) => boolean +): DatasetFilter => { + if (datasetFilter.type === 'includeAll') { + return datasetFilter; + } else { + const newDatasets = datasetFilter.datasets.filter(predicate); + + if (newDatasets.length > 0) { + return { + type: 'includeSome', + datasets: newDatasets, + }; + } else { + return { + type: 'includeAll', + }; + } + } +}; diff --git a/x-pack/plugins/infra/common/infra_ml/metrics_hosts_ml.ts b/x-pack/plugins/infra/common/infra_ml/metrics_hosts_ml.ts new file mode 100644 index 0000000000000..d09b3be78204f --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/metrics_hosts_ml.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +export const metricsHostsJobTypeRT = rt.keyof({ + hosts_memory_usage: null, + hosts_network_in: null, + hosts_network_out: null, +}); + +export type MetricsHostsJobType = rt.TypeOf; + +export const metricsHostsJobTypes: MetricsHostsJobType[] = [ + 'hosts_memory_usage', + 'hosts_network_in', + 'hosts_network_out', +]; diff --git a/x-pack/plugins/infra/common/infra_ml/metrics_k8s_ml.ts b/x-pack/plugins/infra/common/infra_ml/metrics_k8s_ml.ts new file mode 100644 index 0000000000000..3c27dbb61a14a --- /dev/null +++ b/x-pack/plugins/infra/common/infra_ml/metrics_k8s_ml.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +export const metricK8sJobTypeRT = rt.keyof({ + k8s_memory_usage: null, + k8s_network_in: null, + k8s_network_out: null, +}); + +export type MetricK8sJobType = rt.TypeOf; + +export const metricsK8SJobTypes: MetricK8sJobType[] = [ + 'k8s_memory_usage', + 'k8s_network_in', + 'k8s_network_out', +]; diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_api_types.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_api_types.ts new file mode 100644 index 0000000000000..ee70edc31d49b --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_api_types.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +export const getMlCapabilitiesResponsePayloadRT = rt.type({ + capabilities: rt.type({ + canGetJobs: rt.boolean, + canCreateJob: rt.boolean, + canDeleteJob: rt.boolean, + canOpenJob: rt.boolean, + canCloseJob: rt.boolean, + canForecastJob: rt.boolean, + canGetDatafeeds: rt.boolean, + canStartStopDatafeed: rt.boolean, + canUpdateJob: rt.boolean, + canUpdateDatafeed: rt.boolean, + canPreviewDatafeed: rt.boolean, + }), + isPlatinumOrTrialLicense: rt.boolean, + mlFeatureEnabledInSpace: rt.boolean, + upgradeInProgress: rt.boolean, +}); + +export type GetMlCapabilitiesResponsePayload = rt.TypeOf; diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts new file mode 100644 index 0000000000000..23fa338e74f14 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
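A quick worked example of the ID scheme from job_parameters.ts, combined with the hosts job types above; the space and source IDs are illustrative.

// Illustrative IDs for spaceId 'default' and sourceId 'default'.
import { getJobId, getDatafeedId, metricsHostsJobTypes } from '../../common/infra_ml'; // path assumed

const hostsJobIds = metricsHostsJobTypes.map((jobType) => getJobId('default', 'default', jobType));
// => ['kibana-metrics-ui-default-default-hosts_memory_usage',
//     'kibana-metrics-ui-default-default-hosts_network_in',
//     'kibana-metrics-ui-default-default-hosts_network_out']

const datafeedId = getDatafeedId('default', 'default', 'hosts_memory_usage');
// => 'datafeed-kibana-metrics-ui-default-default-hosts_memory_usage'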
+ */ + +import * as rt from 'io-ts'; +import { pipe } from 'fp-ts/lib/pipeable'; +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { npStart } from '../../../legacy_singletons'; + +import { getDatafeedId, getJobId } from '../../../../common/infra_ml'; +import { throwErrors, createPlainError } from '../../../../common/runtime_types'; + +export const callDeleteJobs = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time + const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { + method: 'POST', + body: JSON.stringify( + deleteJobsRequestPayloadRT.encode({ + jobIds: jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)), + }) + ), + }); + + return pipe( + deleteJobsResponsePayloadRT.decode(deleteJobsResponse), + fold(throwErrors(createPlainError), identity) + ); +}; + +export const callGetJobDeletionTasks = async () => { + const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); + + return pipe( + getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse), + fold(throwErrors(createPlainError), identity) + ); +}; + +export const callStopDatafeeds = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + // Stop datafeed due to https://github.com/elastic/kibana/issues/44652 + const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { + method: 'POST', + body: JSON.stringify( + stopDatafeedsRequestPayloadRT.encode({ + datafeedIds: jobTypes.map((jobType) => getDatafeedId(spaceId, sourceId, jobType)), + }) + ), + }); + + return pipe( + stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse), + fold(throwErrors(createPlainError), identity) + ); +}; + +export const deleteJobsRequestPayloadRT = rt.type({ + jobIds: rt.array(rt.string), +}); + +export type DeleteJobsRequestPayload = rt.TypeOf; + +export const deleteJobsResponsePayloadRT = rt.record( + rt.string, + rt.type({ + deleted: rt.boolean, + }) +); + +export type DeleteJobsResponsePayload = rt.TypeOf; + +export const getJobDeletionTasksResponsePayloadRT = rt.type({ + jobIds: rt.array(rt.string), +}); + +export const stopDatafeedsRequestPayloadRT = rt.type({ + datafeedIds: rt.array(rt.string), +}); + +export const stopDatafeedsResponsePayloadRT = rt.record( + rt.string, + rt.type({ + stopped: rt.boolean, + }) +); diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts new file mode 100644 index 0000000000000..3fddb63f69791 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { pipe } from 'fp-ts/lib/pipeable'; +import * as rt from 'io-ts'; +import { npStart } from '../../../legacy_singletons'; + +import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml'; +import { createPlainError, throwErrors } from '../../../../common/runtime_types'; + +export const callJobsSummaryAPI = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { + method: 'POST', + body: JSON.stringify( + fetchJobStatusRequestPayloadRT.encode({ + jobIds: jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)), + }) + ), + }); + return pipe( + fetchJobStatusResponsePayloadRT.decode(response), + fold(throwErrors(createPlainError), identity) + ); +}; + +export const fetchJobStatusRequestPayloadRT = rt.type({ + jobIds: rt.array(rt.string), +}); + +export type FetchJobStatusRequestPayload = rt.TypeOf; + +const datafeedStateRT = rt.keyof({ + started: null, + stopped: null, + stopping: null, + '': null, +}); + +const jobStateRT = rt.keyof({ + closed: null, + closing: null, + deleting: null, + failed: null, + opened: null, + opening: null, +}); + +const jobCategorizationStatusRT = rt.keyof({ + ok: null, + warn: null, +}); + +const jobModelSizeStatsRT = rt.type({ + categorization_status: jobCategorizationStatusRT, + categorized_doc_count: rt.number, + dead_category_count: rt.number, + frequent_category_count: rt.number, + rare_category_count: rt.number, + total_category_count: rt.number, +}); + +export type JobModelSizeStats = rt.TypeOf; + +export const jobSummaryRT = rt.intersection([ + rt.type({ + id: rt.string, + jobState: jobStateRT, + }), + rt.partial({ + datafeedIndices: rt.array(rt.string), + datafeedState: datafeedStateRT, + fullJob: rt.partial({ + custom_settings: jobCustomSettingsRT, + finished_time: rt.number, + model_size_stats: jobModelSizeStatsRT, + }), + }), +]); + +export type JobSummary = rt.TypeOf; + +export const fetchJobStatusResponsePayloadRT = rt.array(jobSummaryRT); + +export type FetchJobStatusResponsePayload = rt.TypeOf; diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts new file mode 100644 index 0000000000000..d492522c120a1 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
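For reference, a minimal payload that satisfies fetchJobStatusResponsePayloadRT: only id and jobState are required and everything else is optional. The values below are made up.

// Hypothetical response body; decodes successfully against the codec above.
import { isRight } from 'fp-ts/lib/Either';
import { fetchJobStatusResponsePayloadRT } from './ml_get_jobs_summary_api'; // path assumed

const response = [
  {
    id: 'kibana-metrics-ui-default-default-hosts_memory_usage',
    jobState: 'opened',
    datafeedState: 'started',
    fullJob: { finished_time: 1598000000000 },
  },
];
console.log(isRight(fetchJobStatusResponsePayloadRT.decode(response))); // true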
+ */ + +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { pipe } from 'fp-ts/lib/pipeable'; +import * as rt from 'io-ts'; +import { npStart } from '../../../legacy_singletons'; + +import { jobCustomSettingsRT } from '../../../../common/log_analysis'; +import { createPlainError, throwErrors } from '../../../../common/runtime_types'; + +export const callGetMlModuleAPI = async (moduleId: string) => { + const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { + method: 'GET', + }); + + return pipe( + getMlModuleResponsePayloadRT.decode(response), + fold(throwErrors(createPlainError), identity) + ); +}; + +const jobDefinitionRT = rt.type({ + id: rt.string, + config: rt.type({ + custom_settings: jobCustomSettingsRT, + }), +}); + +export type JobDefinition = rt.TypeOf; + +const getMlModuleResponsePayloadRT = rt.type({ + id: rt.string, + jobs: rt.array(jobDefinitionRT), +}); + +export type GetMlModuleResponsePayload = rt.TypeOf; diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts new file mode 100644 index 0000000000000..06b0e075387b0 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { fold } from 'fp-ts/lib/Either'; +import { identity } from 'fp-ts/lib/function'; +import { pipe } from 'fp-ts/lib/pipeable'; +import * as rt from 'io-ts'; +import { npStart } from '../../../legacy_singletons'; + +import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml'; +import { createPlainError, throwErrors } from '../../../../common/runtime_types'; + +export const callSetupMlModuleAPI = async ( + moduleId: string, + start: number | undefined, + end: number | undefined, + spaceId: string, + sourceId: string, + indexPattern: string, + jobOverrides: SetupMlModuleJobOverrides[] = [], + datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], + query?: object +) => { + const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { + method: 'POST', + body: JSON.stringify( + setupMlModuleRequestPayloadRT.encode({ + start, + end, + indexPatternName: indexPattern, + prefix: getJobIdPrefix(spaceId, sourceId), + startDatafeed: true, + jobOverrides, + datafeedOverrides, + query, + }) + ), + }); + + return pipe( + setupMlModuleResponsePayloadRT.decode(response), + fold(throwErrors(createPlainError), identity) + ); +}; + +const setupMlModuleTimeParamsRT = rt.partial({ + start: rt.number, + end: rt.number, +}); + +const setupMlModuleJobOverridesRT = rt.type({ + job_id: rt.string, + custom_settings: jobCustomSettingsRT, +}); + +export type SetupMlModuleJobOverrides = rt.TypeOf; + +const setupMlModuleDatafeedOverridesRT = rt.object; + +export type SetupMlModuleDatafeedOverrides = rt.TypeOf; + +const setupMlModuleRequestParamsRT = rt.intersection([ + rt.strict({ + indexPatternName: rt.string, + prefix: rt.string, + startDatafeed: rt.boolean, + jobOverrides: rt.array(setupMlModuleJobOverridesRT), + datafeedOverrides: rt.array(setupMlModuleDatafeedOverridesRT), + }), + rt.exact( + rt.partial({ + query: rt.object, + }) + ), +]); + +const setupMlModuleRequestPayloadRT = rt.intersection([ + 
setupMlModuleTimeParamsRT, + setupMlModuleRequestParamsRT, +]); + +const setupErrorResponseRT = rt.type({ + msg: rt.string, +}); + +const datafeedSetupResponseRT = rt.intersection([ + rt.type({ + id: rt.string, + started: rt.boolean, + success: rt.boolean, + }), + rt.partial({ + error: setupErrorResponseRT, + }), +]); + +const jobSetupResponseRT = rt.intersection([ + rt.type({ + id: rt.string, + success: rt.boolean, + }), + rt.partial({ + error: setupErrorResponseRT, + }), +]); + +const setupMlModuleResponsePayloadRT = rt.type({ + datafeeds: rt.array(datafeedSetupResponseRT), + jobs: rt.array(jobSetupResponseRT), +}); + +export type SetupMlModuleResponsePayload = rt.TypeOf; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx new file mode 100644 index 0000000000000..f4c90a459af6a --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import createContainer from 'constate'; +import { useMemo, useState, useEffect } from 'react'; +import { fold } from 'fp-ts/lib/Either'; +import { pipe } from 'fp-ts/lib/pipeable'; +import { identity } from 'fp-ts/lib/function'; +import { useTrackedPromise } from '../../utils/use_tracked_promise'; +import { npStart } from '../../legacy_singletons'; +import { + getMlCapabilitiesResponsePayloadRT, + GetMlCapabilitiesResponsePayload, +} from './api/ml_api_types'; +import { throwErrors, createPlainError } from '../../../common/runtime_types'; + +export const useInfraMLCapabilities = () => { + const [mlCapabilities, setMlCapabilities] = useState( + initialMlCapabilities + ); + + const [fetchMlCapabilitiesRequest, fetchMlCapabilities] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); + + return pipe( + getMlCapabilitiesResponsePayloadRT.decode(rawResponse), + fold(throwErrors(createPlainError), identity) + ); + }, + onResolve: (response) => { + setMlCapabilities(response); + }, + }, + [] + ); + + useEffect(() => { + fetchMlCapabilities(); + }, [fetchMlCapabilities]); + + const isLoading = useMemo(() => fetchMlCapabilitiesRequest.state === 'pending', [ + fetchMlCapabilitiesRequest.state, + ]); + + const hasInfraMLSetupCapabilities = mlCapabilities.capabilities.canCreateJob; + const hasInfraMLReadCapabilities = mlCapabilities.capabilities.canGetJobs; + const hasInfraMLCapabilites = + mlCapabilities.isPlatinumOrTrialLicense && mlCapabilities.mlFeatureEnabledInSpace; + + return { + hasInfraMLCapabilites, + hasInfraMLReadCapabilities, + hasInfraMLSetupCapabilities, + isLoading, + }; +}; + +export const [InfraMLCapabilitiesProvider, useInfraMLCapabilitiesContext] = createContainer( + useInfraMLCapabilities +); + +const initialMlCapabilities = { + capabilities: { + canGetJobs: false, + canCreateJob: false, + canDeleteJob: false, + canOpenJob: false, + canCloseJob: false, + canForecastJob: false, + canGetDatafeeds: false, + canStartStopDatafeed: false, + canUpdateJob: false, + canUpdateDatafeed: false, + canPreviewDatafeed: false, + canGetCalendars: false, + canCreateCalendar: false, + canDeleteCalendar: false, + canGetFilters: false, + canCreateFilter: false, + 
canDeleteFilter: false, + canFindFileStructure: false, + canGetDataFrameJobs: false, + canDeleteDataFrameJob: false, + canPreviewDataFrameJob: false, + canCreateDataFrameJob: false, + canStartStopDataFrameJob: false, + }, + isPlatinumOrTrialLicense: false, + mlFeatureEnabledInSpace: false, + upgradeInProgress: false, +}; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx new file mode 100644 index 0000000000000..736982c8043b1 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { getJobId } from '../../../common/infra_ml'; +import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; + +export const cleanUpJobsAndDatafeeds = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + try { + await callStopDatafeeds(spaceId, sourceId, jobTypes); + } catch (err) { + // Proceed only if datafeed has been deleted or didn't exist in the first place + if (err?.res?.status !== 404) { + throw err; + } + } + + return await deleteJobs(spaceId, sourceId, jobTypes); +}; + +const deleteJobs = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); + await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); + return deleteJobsResponse; +}; + +const waitUntilJobsAreDeleted = async ( + spaceId: string, + sourceId: string, + jobTypes: JobType[] +) => { + const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); + while (true) { + const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); + const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); + + if (needToWait) { + await timeout(1000); + } else { + return true; + } + } +}; + +const timeout = (ms: number) => new Promise((res) => setTimeout(res, ms)); diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx new file mode 100644 index 0000000000000..349541d108f5e --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
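A sketch of how the cleanup helper above would be invoked for the hosts module; the space and source IDs are illustrative. Note the ordering it encapsulates: datafeeds are stopped first (404s ignored), then jobs are deleted, then the deletion tasks are polled until the module's job IDs disappear.

// Illustrative invocation; IDs are not from this PR.
import { metricsHostsJobTypes } from '../../../common/infra_ml';
import { cleanUpJobsAndDatafeeds } from './infra_ml_cleanup';

const recreateHostsJobs = async () => {
  await cleanUpJobsAndDatafeeds('default', 'default', metricsHostsJobTypes);
  // ...safe to call the setup module API again from here.
};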
+ */ + +import { useCallback, useMemo } from 'react'; +import { DatasetFilter } from '../../../common/infra_ml'; +import { useTrackedPromise } from '../../utils/use_tracked_promise'; +import { useModuleStatus } from './infra_ml_module_status'; +import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; + +export const useInfraMLModule = ({ + sourceConfiguration, + moduleDescriptor, +}: { + sourceConfiguration: ModuleSourceConfiguration; + moduleDescriptor: ModuleDescriptor; +}) => { + const { spaceId, sourceId, timestampField } = sourceConfiguration; + const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); + + const [, fetchJobStatus] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + dispatchModuleStatus({ type: 'fetchingJobStatuses' }); + return await moduleDescriptor.getJobSummary(spaceId, sourceId); + }, + onResolve: (jobResponse) => { + dispatchModuleStatus({ + type: 'fetchedJobStatuses', + payload: jobResponse, + spaceId, + sourceId, + }); + }, + onReject: () => { + dispatchModuleStatus({ type: 'failedFetchingJobStatuses' }); + }, + }, + [spaceId, sourceId] + ); + + const [, setUpModule] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async ( + selectedIndices: string[], + start: number | undefined, + end: number | undefined, + datasetFilter: DatasetFilter, + partitionField?: string + ) => { + dispatchModuleStatus({ type: 'startedSetup' }); + const setupResult = await moduleDescriptor.setUpModule( + start, + end, + datasetFilter, + { + indices: selectedIndices, + sourceId, + spaceId, + timestampField, + }, + partitionField + ); + const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); + return { setupResult, jobSummaries }; + }, + onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { + dispatchModuleStatus({ + type: 'finishedSetup', + datafeedSetupResults: datafeeds, + jobSetupResults: jobs, + jobSummaries, + spaceId, + sourceId, + }); + }, + onReject: () => { + dispatchModuleStatus({ type: 'failedSetup' }); + }, + }, + [moduleDescriptor.setUpModule, spaceId, sourceId, timestampField] + ); + + const [cleanUpModuleRequest, cleanUpModule] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + return await moduleDescriptor.cleanUpModule(spaceId, sourceId); + }, + }, + [spaceId, sourceId] + ); + + const isCleaningUp = useMemo(() => cleanUpModuleRequest.state === 'pending', [ + cleanUpModuleRequest.state, + ]); + + const cleanUpAndSetUpModule = useCallback( + ( + selectedIndices: string[], + start: number | undefined, + end: number | undefined, + datasetFilter: DatasetFilter, + partitionField?: string + ) => { + dispatchModuleStatus({ type: 'startedSetup' }); + cleanUpModule() + .then(() => { + setUpModule(selectedIndices, start, end, datasetFilter, partitionField); + }) + .catch(() => { + dispatchModuleStatus({ type: 'failedSetup' }); + }); + }, + [cleanUpModule, dispatchModuleStatus, setUpModule] + ); + + const viewResults = useCallback(() => { + dispatchModuleStatus({ type: 'viewedResults' }); + }, [dispatchModuleStatus]); + + const jobIds = useMemo(() => moduleDescriptor.getJobIds(spaceId, sourceId), [ + moduleDescriptor, + spaceId, + sourceId, + ]); + + return { + cleanUpAndSetUpModule, + cleanUpModule, + fetchJobStatus, + isCleaningUp, + jobIds, + jobStatus: moduleStatus.jobStatus, + jobSummaries: moduleStatus.jobSummaries, + lastSetupErrorMessages: 
moduleStatus.lastSetupErrorMessages, + moduleDescriptor, + setUpModule, + setupStatus: moduleStatus.setupStatus, + sourceConfiguration, + viewResults, + }; +}; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_configuration.ts b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_configuration.ts new file mode 100644 index 0000000000000..2d90f5d531010 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_configuration.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useMemo } from 'react'; +import { JobSummary } from './api/ml_get_jobs_summary_api'; +import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; + +export const useInfraMLModuleConfiguration = ({ + moduleDescriptor, + sourceConfiguration, +}: { + moduleDescriptor: ModuleDescriptor; + sourceConfiguration: ModuleSourceConfiguration; +}) => { + const getIsJobConfigurationOutdated = useMemo( + () => isJobConfigurationOutdated(moduleDescriptor, sourceConfiguration), + [sourceConfiguration, moduleDescriptor] + ); + + return { + getIsJobConfigurationOutdated, + }; +}; + +export const isJobConfigurationOutdated = ( + { bucketSpan }: ModuleDescriptor, + currentSourceConfiguration: ModuleSourceConfiguration +) => (jobSummary: JobSummary): boolean => { + if (!jobSummary.fullJob || !jobSummary.fullJob.custom_settings) { + return false; + } + + const jobConfiguration = jobSummary.fullJob.custom_settings.metrics_source_config; + + return !( + jobConfiguration && + jobConfiguration.bucketSpan === bucketSpan && + jobConfiguration.indexPattern && + isSubset( + new Set(jobConfiguration.indexPattern.split(',')), + new Set(currentSourceConfiguration.indices) + ) && + jobConfiguration.timestampField === currentSourceConfiguration.timestampField + ); +}; + +const isSubset = (subset: Set, superset: Set) => { + return Array.from(subset).every((subsetElement) => superset.has(subsetElement)); +}; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx new file mode 100644 index 0000000000000..3c7ffcfd4a4e2 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { useCallback, useMemo, useState } from 'react'; +import { getJobId } from '../../../common/log_analysis'; +import { useTrackedPromise } from '../../utils/use_tracked_promise'; +import { JobSummary } from './api/ml_get_jobs_summary_api'; +import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; +import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; + +export const useInfraMLModuleDefinition = ({ + sourceConfiguration: { spaceId, sourceId }, + moduleDescriptor, +}: { + sourceConfiguration: ModuleSourceConfiguration; + moduleDescriptor: ModuleDescriptor; +}) => { + const [moduleDefinition, setModuleDefinition] = useState< + GetMlModuleResponsePayload | undefined + >(); + + const jobDefinitionByJobId = useMemo( + () => + moduleDefinition + ? moduleDefinition.jobs.reduce>( + (accumulatedJobDefinitions, jobDefinition) => ({ + ...accumulatedJobDefinitions, + [getJobId(spaceId, sourceId, jobDefinition.id)]: jobDefinition, + }), + {} + ) + : {}, + [moduleDefinition, sourceId, spaceId] + ); + + const [fetchModuleDefinitionRequest, fetchModuleDefinition] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + return await moduleDescriptor.getModuleDefinition(); + }, + onResolve: (response) => { + setModuleDefinition(response); + }, + onReject: () => { + setModuleDefinition(undefined); + }, + }, + [moduleDescriptor.getModuleDefinition, spaceId, sourceId] + ); + + const getIsJobDefinitionOutdated = useCallback( + (jobSummary: JobSummary): boolean => { + const jobDefinition: JobDefinition | undefined = jobDefinitionByJobId[jobSummary.id]; + + if (jobDefinition == null) { + return false; + } + + const currentRevision = jobDefinition?.config.custom_settings.job_revision; + return (jobSummary.fullJob?.custom_settings?.job_revision ?? 0) < (currentRevision ?? 0); + }, + [jobDefinitionByJobId] + ); + + return { + fetchModuleDefinition, + fetchModuleDefinitionRequestState: fetchModuleDefinitionRequest.state, + getIsJobDefinitionOutdated, + jobDefinitionByJobId, + moduleDefinition, + }; +}; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_status.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_status.tsx new file mode 100644 index 0000000000000..63d479546b44f --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_status.tsx @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { useReducer } from 'react'; + +import { + JobStatus, + getDatafeedId, + getJobId, + isJobStatusWithResults, + SetupStatus, +} from '../../../common/infra_ml'; +import { FetchJobStatusResponsePayload, JobSummary } from './api/ml_get_jobs_summary_api'; +import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api'; +import { MandatoryProperty } from '../../../common/utility_types'; + +interface StatusReducerState { + jobStatus: Record; + jobSummaries: JobSummary[]; + lastSetupErrorMessages: string[]; + setupStatus: SetupStatus; +} + +type StatusReducerAction = + | { type: 'startedSetup' } + | { + type: 'finishedSetup'; + sourceId: string; + spaceId: string; + jobSetupResults: SetupMlModuleResponsePayload['jobs']; + jobSummaries: FetchJobStatusResponsePayload; + datafeedSetupResults: SetupMlModuleResponsePayload['datafeeds']; + } + | { type: 'failedSetup' } + | { type: 'fetchingJobStatuses' } + | { + type: 'fetchedJobStatuses'; + spaceId: string; + sourceId: string; + payload: FetchJobStatusResponsePayload; + } + | { type: 'failedFetchingJobStatuses' } + | { type: 'viewedResults' }; + +const createInitialState = ({ + jobTypes, +}: { + jobTypes: JobType[]; +}): StatusReducerState => ({ + jobStatus: jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: 'unknown', + }), + {} as Record + ), + jobSummaries: [], + lastSetupErrorMessages: [], + setupStatus: { type: 'initializing' }, +}); + +const createStatusReducer = (jobTypes: JobType[]) => ( + state: StatusReducerState, + action: StatusReducerAction +): StatusReducerState => { + switch (action.type) { + case 'startedSetup': { + return { + ...state, + jobStatus: jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: 'initializing', + }), + {} as Record + ), + setupStatus: { type: 'pending' }, + }; + } + case 'finishedSetup': { + const { datafeedSetupResults, jobSetupResults, jobSummaries, spaceId, sourceId } = action; + const nextJobStatus = jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: + hasSuccessfullyCreatedJob(getJobId(spaceId, sourceId, jobType))(jobSetupResults) && + hasSuccessfullyStartedDatafeed(getDatafeedId(spaceId, sourceId, jobType))( + datafeedSetupResults + ) + ? 'started' + : 'failed', + }), + {} as Record + ); + const nextSetupStatus: SetupStatus = Object.values(nextJobStatus).every( + (jobState) => jobState === 'started' + ) + ? { type: 'succeeded' } + : { + type: 'failed', + reasons: [ + ...Object.values(datafeedSetupResults) + .filter(hasError) + .map((datafeed) => datafeed.error.msg), + ...Object.values(jobSetupResults) + .filter(hasError) + .map((job) => job.error.msg), + ], + }; + + return { + ...state, + jobStatus: nextJobStatus, + jobSummaries, + setupStatus: nextSetupStatus, + }; + } + case 'failedSetup': { + return { + ...state, + jobStatus: jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: 'failed', + }), + {} as Record + ), + setupStatus: { type: 'failed', reasons: ['unknown'] }, + }; + } + case 'fetchingJobStatuses': { + return { + ...state, + setupStatus: + state.setupStatus.type === 'unknown' ? 
{ type: 'initializing' } : state.setupStatus, + }; + } + case 'fetchedJobStatuses': { + const { payload: jobSummaries, spaceId, sourceId } = action; + const { setupStatus } = state; + const nextJobStatus = jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: getJobStatus(getJobId(spaceId, sourceId, jobType))(jobSummaries), + }), + {} as Record + ); + const nextSetupStatus = getSetupStatus(nextJobStatus)(setupStatus); + + return { + ...state, + jobSummaries, + jobStatus: nextJobStatus, + setupStatus: nextSetupStatus, + }; + } + case 'failedFetchingJobStatuses': { + return { + ...state, + setupStatus: { type: 'unknown' }, + jobStatus: jobTypes.reduce( + (accumulatedJobStatus, jobType) => ({ + ...accumulatedJobStatus, + [jobType]: 'unknown', + }), + {} as Record + ), + }; + } + case 'viewedResults': { + return { + ...state, + setupStatus: { type: 'skipped', newlyCreated: true }, + }; + } + default: { + return state; + } + } +}; + +const hasSuccessfullyCreatedJob = (jobId: string) => ( + jobSetupResponses: SetupMlModuleResponsePayload['jobs'] +) => + jobSetupResponses.filter( + (jobSetupResponse) => + jobSetupResponse.id === jobId && jobSetupResponse.success && !jobSetupResponse.error + ).length > 0; + +const hasSuccessfullyStartedDatafeed = (datafeedId: string) => ( + datafeedSetupResponses: SetupMlModuleResponsePayload['datafeeds'] +) => + datafeedSetupResponses.filter( + (datafeedSetupResponse) => + datafeedSetupResponse.id === datafeedId && + datafeedSetupResponse.success && + datafeedSetupResponse.started && + !datafeedSetupResponse.error + ).length > 0; + +const getJobStatus = (jobId: string) => ( + jobSummaries: FetchJobStatusResponsePayload +): JobStatus => { + return ( + jobSummaries + .filter((jobSummary) => jobSummary.id === jobId) + .map( + (jobSummary): JobStatus => { + if (jobSummary.jobState === 'failed' || jobSummary.datafeedState === '') { + return 'failed'; + } else if ( + jobSummary.jobState === 'closed' && + jobSummary.datafeedState === 'stopped' && + jobSummary.fullJob && + jobSummary.fullJob.finished_time != null + ) { + return 'finished'; + } else if ( + jobSummary.jobState === 'closed' || + jobSummary.jobState === 'closing' || + jobSummary.datafeedState === 'stopped' + ) { + return 'stopped'; + } else if (jobSummary.jobState === 'opening') { + return 'initializing'; + } else if (jobSummary.jobState === 'opened' && jobSummary.datafeedState === 'started') { + return 'started'; + } + + return 'unknown'; + } + )[0] || 'missing' + ); +}; + +const getSetupStatus = (everyJobStatus: Record) => ( + previousSetupStatus: SetupStatus +): SetupStatus => { + return Object.entries(everyJobStatus).reduce( + (setupStatus, [, jobStatus]) => { + if (jobStatus === 'missing') { + return { type: 'required' }; + } else if (setupStatus.type === 'required' || setupStatus.type === 'succeeded') { + return setupStatus; + } else if (setupStatus.type === 'skipped' || isJobStatusWithResults(jobStatus)) { + return { + type: 'skipped', + // preserve newlyCreated status + newlyCreated: setupStatus.type === 'skipped' && setupStatus.newlyCreated, + }; + } + + return setupStatus; + }, + previousSetupStatus + ); +}; + +const hasError = ( + value: Value +): value is MandatoryProperty => value.error != null; + +export const useModuleStatus = (jobTypes: JobType[]) => { + return useReducer(createStatusReducer(jobTypes), { jobTypes }, createInitialState); +}; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts 
b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts new file mode 100644 index 0000000000000..a9f2671de8259 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + ValidateLogEntryDatasetsResponsePayload, + ValidationIndicesResponsePayload, +} from '../../../common/http_api/log_analysis'; +import { DatasetFilter } from '../../../common/infra_ml'; +import { DeleteJobsResponsePayload } from './api/ml_cleanup'; +import { FetchJobStatusResponsePayload } from './api/ml_get_jobs_summary_api'; +import { GetMlModuleResponsePayload } from './api/ml_get_module'; +import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api'; + +export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api'; + +export interface ModuleDescriptor { + moduleId: string; + moduleName: string; + moduleDescription: string; + jobTypes: JobType[]; + bucketSpan: number; + getJobIds: (spaceId: string, sourceId: string) => Record; + getJobSummary: (spaceId: string, sourceId: string) => Promise; + getModuleDefinition: () => Promise; + setUpModule: ( + start: number | undefined, + end: number | undefined, + datasetFilter: DatasetFilter, + sourceConfiguration: ModuleSourceConfiguration, + partitionField?: string + ) => Promise; + cleanUpModule: (spaceId: string, sourceId: string) => Promise; + validateSetupIndices: ( + indices: string[], + timestampField: string + ) => Promise; + validateSetupDatasets: ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number + ) => Promise; +} + +export interface ModuleSourceConfiguration { + indices: string[]; + sourceId: string; + spaceId: string; + timestampField: string; +} + +interface ManyCategoriesWarningReason { + type: 'manyCategories'; + categoriesDocumentRatio: number; +} + +interface ManyDeadCategoriesWarningReason { + type: 'manyDeadCategories'; + deadCategoriesRatio: number; +} + +interface ManyRareCategoriesWarningReason { + type: 'manyRareCategories'; + rareCategoriesRatio: number; +} + +interface NoFrequentCategoriesWarningReason { + type: 'noFrequentCategories'; +} + +interface SingleCategoryWarningReason { + type: 'singleCategory'; +} + +export type CategoryQualityWarningReason = + | ManyCategoriesWarningReason + | ManyDeadCategoriesWarningReason + | ManyRareCategoriesWarningReason + | NoFrequentCategoriesWarningReason + | SingleCategoryWarningReason; + +export type CategoryQualityWarningReasonType = CategoryQualityWarningReason['type']; + +export interface CategoryQualityWarning { + type: 'categoryQualityWarning'; + jobId: string; + reasons: CategoryQualityWarningReason[]; +} + +export type QualityWarning = CategoryQualityWarning; diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_setup_state.ts b/x-pack/plugins/infra/public/containers/ml/infra_ml_setup_state.ts new file mode 100644 index 0000000000000..0dfe3b301f240 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_setup_state.ts @@ -0,0 +1,289 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { isEqual } from 'lodash'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { usePrevious } from 'react-use'; +import { + combineDatasetFilters, + DatasetFilter, + filterDatasetFilter, + isExampleDataIndex, +} from '../../../common/infra_ml'; +import { + AvailableIndex, + ValidationIndicesError, + ValidationUIError, +} from '../../components/logging/log_analysis_setup/initial_configuration_step'; +import { useTrackedPromise } from '../../utils/use_tracked_promise'; +import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; + +type SetupHandler = ( + indices: string[], + startTime: number | undefined, + endTime: number | undefined, + datasetFilter: DatasetFilter +) => void; + +interface AnalysisSetupStateArguments { + cleanUpAndSetUpModule: SetupHandler; + moduleDescriptor: ModuleDescriptor; + setUpModule: SetupHandler; + sourceConfiguration: ModuleSourceConfiguration; +} + +const fourWeeksInMs = 86400000 * 7 * 4; + +export const useAnalysisSetupState = ({ + cleanUpAndSetUpModule, + moduleDescriptor: { validateSetupDatasets, validateSetupIndices }, + setUpModule, + sourceConfiguration, +}: AnalysisSetupStateArguments) => { + const [startTime, setStartTime] = useState(Date.now() - fourWeeksInMs); + const [endTime, setEndTime] = useState(undefined); + + const isTimeRangeValid = useMemo( + () => (startTime != null && endTime != null ? startTime < endTime : true), + [endTime, startTime] + ); + + const [validatedIndices, setValidatedIndices] = useState( + sourceConfiguration.indices.map((indexName) => ({ + name: indexName, + validity: 'unknown' as const, + })) + ); + + const updateIndicesWithValidationErrors = useCallback( + (validationErrors: ValidationIndicesError[]) => + setValidatedIndices((availableIndices) => + availableIndices.map((previousAvailableIndex) => { + const indexValiationErrors = validationErrors.filter( + ({ index }) => index === previousAvailableIndex.name + ); + + if (indexValiationErrors.length > 0) { + return { + validity: 'invalid', + name: previousAvailableIndex.name, + errors: indexValiationErrors, + }; + } else if (previousAvailableIndex.validity === 'valid') { + return { + ...previousAvailableIndex, + validity: 'valid', + errors: [], + }; + } else { + return { + validity: 'valid', + name: previousAvailableIndex.name, + isSelected: !isExampleDataIndex(previousAvailableIndex.name), + availableDatasets: [], + datasetFilter: { + type: 'includeAll' as const, + }, + }; + } + }) + ), + [] + ); + + const updateIndicesWithAvailableDatasets = useCallback( + (availableDatasets: Array<{ indexName: string; datasets: string[] }>) => + setValidatedIndices((availableIndices) => + availableIndices.map((previousAvailableIndex) => { + if (previousAvailableIndex.validity !== 'valid') { + return previousAvailableIndex; + } + + const availableDatasetsForIndex = availableDatasets.filter( + ({ indexName }) => indexName === previousAvailableIndex.name + ); + const newAvailableDatasets = availableDatasetsForIndex.flatMap( + ({ datasets }) => datasets + ); + + // filter out datasets that have disappeared if this index' datasets were updated + const newDatasetFilter: DatasetFilter = + availableDatasetsForIndex.length > 0 + ? 
filterDatasetFilter(previousAvailableIndex.datasetFilter, (dataset) => + newAvailableDatasets.includes(dataset) + ) + : previousAvailableIndex.datasetFilter; + + return { + ...previousAvailableIndex, + availableDatasets: newAvailableDatasets, + datasetFilter: newDatasetFilter, + }; + }) + ), + [] + ); + + const validIndexNames = useMemo( + () => validatedIndices.filter((index) => index.validity === 'valid').map((index) => index.name), + [validatedIndices] + ); + + const selectedIndexNames = useMemo( + () => + validatedIndices + .filter((index) => index.validity === 'valid' && index.isSelected) + .map((i) => i.name), + [validatedIndices] + ); + + const datasetFilter = useMemo( + () => + validatedIndices + .flatMap((validatedIndex) => + validatedIndex.validity === 'valid' + ? validatedIndex.datasetFilter + : { type: 'includeAll' as const } + ) + .reduce(combineDatasetFilters, { type: 'includeAll' as const }), + [validatedIndices] + ); + + const [validateIndicesRequest, validateIndices] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + return await validateSetupIndices( + sourceConfiguration.indices, + sourceConfiguration.timestampField + ); + }, + onResolve: ({ data: { errors } }) => { + updateIndicesWithValidationErrors(errors); + }, + onReject: () => { + setValidatedIndices([]); + }, + }, + [sourceConfiguration.indices, sourceConfiguration.timestampField] + ); + + const [validateDatasetsRequest, validateDatasets] = useTrackedPromise( + { + cancelPreviousOn: 'resolution', + createPromise: async () => { + if (validIndexNames.length === 0) { + return { data: { datasets: [] } }; + } + + return await validateSetupDatasets( + validIndexNames, + sourceConfiguration.timestampField, + startTime ?? 0, + endTime ?? Date.now() + ); + }, + onResolve: ({ data: { datasets } }) => { + updateIndicesWithAvailableDatasets(datasets); + }, + }, + [validIndexNames, sourceConfiguration.timestampField, startTime, endTime] + ); + + const setUp = useCallback(() => { + return setUpModule(selectedIndexNames, startTime, endTime, datasetFilter); + }, [setUpModule, selectedIndexNames, startTime, endTime, datasetFilter]); + + const cleanUpAndSetUp = useCallback(() => { + return cleanUpAndSetUpModule(selectedIndexNames, startTime, endTime, datasetFilter); + }, [cleanUpAndSetUpModule, selectedIndexNames, startTime, endTime, datasetFilter]); + + const isValidating = useMemo( + () => validateIndicesRequest.state === 'pending' || validateDatasetsRequest.state === 'pending', + [validateDatasetsRequest.state, validateIndicesRequest.state] + ); + + const validationErrors = useMemo(() => { + if (isValidating) { + return []; + } + + return [ + // validate request status + ...(validateIndicesRequest.state === 'rejected' || + validateDatasetsRequest.state === 'rejected' + ? [{ error: 'NETWORK_ERROR' as const }] + : []), + // validation request results + ...validatedIndices.reduce((errors, index) => { + return index.validity === 'invalid' && selectedIndexNames.includes(index.name) + ? [...errors, ...index.errors] + : errors; + }, []), + // index count + ...(selectedIndexNames.length === 0 ? [{ error: 'TOO_FEW_SELECTED_INDICES' as const }] : []), + // time range + ...(!isTimeRangeValid ? 
[{ error: 'INVALID_TIME_RANGE' as const }] : []), + ]; + }, [ + isValidating, + validateIndicesRequest.state, + validateDatasetsRequest.state, + validatedIndices, + selectedIndexNames, + isTimeRangeValid, + ]); + + const prevStartTime = usePrevious(startTime); + const prevEndTime = usePrevious(endTime); + const prevValidIndexNames = usePrevious(validIndexNames); + + useEffect(() => { + if (!isTimeRangeValid) { + return; + } + + validateIndices(); + }, [isTimeRangeValid, validateIndices]); + + useEffect(() => { + if (!isTimeRangeValid) { + return; + } + + if ( + startTime !== prevStartTime || + endTime !== prevEndTime || + !isEqual(validIndexNames, prevValidIndexNames) + ) { + validateDatasets(); + } + }, [ + endTime, + isTimeRangeValid, + prevEndTime, + prevStartTime, + prevValidIndexNames, + startTime, + validIndexNames, + validateDatasets, + ]); + + return { + cleanUpAndSetUp, + datasetFilter, + endTime, + isValidating, + selectedIndexNames, + setEndTime, + setStartTime, + setUp, + startTime, + validatedIndices, + setValidatedIndices, + validationErrors, + }; +}; diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module.tsx b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module.tsx new file mode 100644 index 0000000000000..9c065f3e91bc4 --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module.tsx @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import createContainer from 'constate'; +import { useMemo } from 'react'; +import { useInfraMLModule } from '../../infra_ml_module'; +import { useInfraMLModuleConfiguration } from '../../infra_ml_module_configuration'; +import { useInfraMLModuleDefinition } from '../../infra_ml_module_definition'; +import { ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { metricHostsModule } from './module_descriptor'; + +export const useMetricHostsModule = ({ + indexPattern, + sourceId, + spaceId, + timestampField, +}: { + indexPattern: string; + sourceId: string; + spaceId: string; + timestampField: string; +}) => { + const sourceConfiguration: ModuleSourceConfiguration = useMemo( + () => ({ + indices: indexPattern.split(','), + sourceId, + spaceId, + timestampField, + }), + [indexPattern, sourceId, spaceId, timestampField] + ); + + const infraMLModule = useInfraMLModule({ + moduleDescriptor: metricHostsModule, + sourceConfiguration, + }); + + const { getIsJobConfigurationOutdated } = useInfraMLModuleConfiguration({ + sourceConfiguration, + moduleDescriptor: metricHostsModule, + }); + + const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useInfraMLModuleDefinition({ + sourceConfiguration, + moduleDescriptor: metricHostsModule, + }); + + const hasOutdatedJobConfigurations = useMemo( + () => infraMLModule.jobSummaries.some(getIsJobConfigurationOutdated), + [getIsJobConfigurationOutdated, infraMLModule.jobSummaries] + ); + + const hasOutdatedJobDefinitions = useMemo( + () => infraMLModule.jobSummaries.some(getIsJobDefinitionOutdated), + [getIsJobDefinitionOutdated, infraMLModule.jobSummaries] + ); + + const hasStoppedJobs = useMemo( + () => + Object.values(infraMLModule.jobStatus).some( + (currentJobStatus) => currentJobStatus === 'stopped' + ), + [infraMLModule.jobStatus] + ); + + return { + ...infraMLModule, + fetchModuleDefinition, + 
hasOutdatedJobConfigurations,
+    hasOutdatedJobDefinitions,
+    hasStoppedJobs,
+  };
+};
+
+export const [MetricHostsModuleProvider, useMetricHostsModuleContext] = createContainer(
+  useMetricHostsModule
+);
diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts
new file mode 100644
index 0000000000000..cec87fb1144e3
--- /dev/null
+++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts
@@ -0,0 +1,126 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
+import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
+import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
+import { callGetMlModuleAPI } from '../../api/ml_get_module';
+import { callSetupMlModuleAPI } from '../../api/ml_setup_module_api';
+import { callValidateIndicesAPI } from '../../../logs/log_analysis/api/validate_indices';
+import { callValidateDatasetsAPI } from '../../../logs/log_analysis/api/validate_datasets';
+import {
+  metricsHostsJobTypes,
+  getJobId,
+  MetricsHostsJobType,
+  DatasetFilter,
+  bucketSpan,
+  partitionField,
+} from '../../../../../common/infra_ml';
+
+const moduleId = 'metrics_ui_hosts';
+const moduleName = i18n.translate('xpack.infra.ml.metricsModuleName', {
+  defaultMessage: 'Metrics anomaly detection',
+});
+const moduleDescription = i18n.translate('xpack.infra.ml.metricsHostModuleDescription', {
+  defaultMessage: 'Use Machine Learning to automatically detect anomalous log entry rates.',
+});
+
+const getJobIds = (spaceId: string, sourceId: string) =>
+  metricsHostsJobTypes.reduce(
+    (accumulatedJobIds, jobType) => ({
+      ...accumulatedJobIds,
+      [jobType]: getJobId(spaceId, sourceId, jobType),
+    }),
+    {} as Record
+  );
+
+const getJobSummary = async (spaceId: string, sourceId: string) => {
+  const response = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes);
+  const jobIds = Object.values(getJobIds(spaceId, sourceId));
+
+  return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
+};
+
+const getModuleDefinition = async () => {
+  return await callGetMlModuleAPI(moduleId);
+};
+
+const setUpModule = async (
+  start: number | undefined,
+  end: number | undefined,
+  datasetFilter: DatasetFilter,
+  { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
+  pField?: string
+) => {
+  const indexNamePattern = indices.join(',');
+  const jobIds = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out'];
+  const jobOverrides = jobIds.map((id) => ({
+    job_id: id,
+    data_description: {
+      time_field: timestampField,
+    },
+    custom_settings: {
+      metrics_source_config: {
+        indexPattern: indexNamePattern,
+        timestampField,
+        bucketSpan,
+      },
+    },
+  }));
+
+  return callSetupMlModuleAPI(
+    moduleId,
+    start,
+    end,
+    spaceId,
+    sourceId,
+    indexNamePattern,
+    jobOverrides,
+    []
+  );
+};
+
+const cleanUpModule = async (spaceId: string, sourceId: string) => {
+  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes);
+};
+
+const validateSetupIndices = async (indices: string[], timestampField: string) => {
+  return await
callValidateIndicesAPI(indices, [ + { + name: timestampField, + validTypes: ['date'], + }, + { + name: partitionField, + validTypes: ['keyword'], + }, + ]); +}; + +const validateSetupDatasets = async ( + indices: string[], + timestampField: string, + startTime: number, + endTime: number +) => { + return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); +}; + +export const metricHostsModule: ModuleDescriptor = { + moduleId, + moduleName, + moduleDescription, + jobTypes: metricsHostsJobTypes, + bucketSpan, + getJobIds, + getJobSummary, + getModuleDefinition, + setUpModule, + cleanUpModule, + validateSetupDatasets, + validateSetupIndices, +}; diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module.tsx b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module.tsx new file mode 100644 index 0000000000000..07c8ab02f17ee --- /dev/null +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module.tsx @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import createContainer from 'constate'; +import { useMemo } from 'react'; +import { useInfraMLModule } from '../../infra_ml_module'; +import { useInfraMLModuleConfiguration } from '../../infra_ml_module_configuration'; +import { useInfraMLModuleDefinition } from '../../infra_ml_module_definition'; +import { ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { metricHostsModule } from './module_descriptor'; + +export const useMetricK8sModule = ({ + indexPattern, + sourceId, + spaceId, + timestampField, +}: { + indexPattern: string; + sourceId: string; + spaceId: string; + timestampField: string; +}) => { + const sourceConfiguration: ModuleSourceConfiguration = useMemo( + () => ({ + indices: indexPattern.split(','), + sourceId, + spaceId, + timestampField, + }), + [indexPattern, sourceId, spaceId, timestampField] + ); + + const infraMLModule = useInfraMLModule({ + moduleDescriptor: metricHostsModule, + sourceConfiguration, + }); + + const { getIsJobConfigurationOutdated } = useInfraMLModuleConfiguration({ + sourceConfiguration, + moduleDescriptor: metricHostsModule, + }); + + const { fetchModuleDefinition, getIsJobDefinitionOutdated } = useInfraMLModuleDefinition({ + sourceConfiguration, + moduleDescriptor: metricHostsModule, + }); + + const hasOutdatedJobConfigurations = useMemo( + () => infraMLModule.jobSummaries.some(getIsJobConfigurationOutdated), + [getIsJobConfigurationOutdated, infraMLModule.jobSummaries] + ); + + const hasOutdatedJobDefinitions = useMemo( + () => infraMLModule.jobSummaries.some(getIsJobDefinitionOutdated), + [getIsJobDefinitionOutdated, infraMLModule.jobSummaries] + ); + + const hasStoppedJobs = useMemo( + () => + Object.values(infraMLModule.jobStatus).some( + (currentJobStatus) => currentJobStatus === 'stopped' + ), + [infraMLModule.jobStatus] + ); + + return { + ...infraMLModule, + fetchModuleDefinition, + hasOutdatedJobConfigurations, + hasOutdatedJobDefinitions, + hasStoppedJobs, + }; +}; + +export const [MetricK8sModuleProvider, useMetricK8sModuleContext] = createContainer( + useMetricK8sModule +); diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts new file mode 100644 index 
0000000000000..cbcff1c307af6
--- /dev/null
+++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types';
+import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup';
+import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api';
+import { callGetMlModuleAPI } from '../../api/ml_get_module';
+import { callSetupMlModuleAPI } from '../../api/ml_setup_module_api';
+import { callValidateIndicesAPI } from '../../../logs/log_analysis/api/validate_indices';
+import { callValidateDatasetsAPI } from '../../../logs/log_analysis/api/validate_datasets';
+import {
+  metricsK8SJobTypes,
+  getJobId,
+  MetricK8sJobType,
+  DatasetFilter,
+  bucketSpan,
+  partitionField,
+} from '../../../../../common/infra_ml';
+
+const moduleId = 'metrics_ui_k8s';
+const moduleName = i18n.translate('xpack.infra.ml.metricsModuleName', {
+  defaultMessage: 'Metrics anomaly detection',
+});
+const moduleDescription = i18n.translate('xpack.infra.ml.metricsHostModuleDescription', {
+  defaultMessage: 'Use Machine Learning to automatically detect anomalous log entry rates.',
+});
+
+const getJobIds = (spaceId: string, sourceId: string) =>
+  metricsK8SJobTypes.reduce(
+    (accumulatedJobIds, jobType) => ({
+      ...accumulatedJobIds,
+      [jobType]: getJobId(spaceId, sourceId, jobType),
+    }),
+    {} as Record
+  );
+
+const getJobSummary = async (spaceId: string, sourceId: string) => {
+  const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes);
+  const jobIds = Object.values(getJobIds(spaceId, sourceId));
+
+  return response.filter((jobSummary) => jobIds.includes(jobSummary.id));
+};
+
+const getModuleDefinition = async () => {
+  return await callGetMlModuleAPI(moduleId);
+};
+
+const setUpModule = async (
+  start: number | undefined,
+  end: number | undefined,
+  datasetFilter: DatasetFilter,
+  { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration,
+  pField?: string
+) => {
+  const indexNamePattern = indices.join(',');
+  const jobIds = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out'];
+  const jobOverrides = jobIds.map((id) => ({
+    job_id: id,
+    analysis_config: {
+      bucket_span: `${bucketSpan}ms`,
+    },
+    data_description: {
+      time_field: timestampField,
+    },
+    custom_settings: {
+      metrics_source_config: {
+        indexPattern: indexNamePattern,
+        timestampField,
+        bucketSpan,
+      },
+    },
+  }));
+
+  return callSetupMlModuleAPI(
+    moduleId,
+    start,
+    end,
+    spaceId,
+    sourceId,
+    indexNamePattern,
+    jobOverrides,
+    []
+  );
+};
+
+const cleanUpModule = async (spaceId: string, sourceId: string) => {
+  return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes);
+};
+
+const validateSetupIndices = async (indices: string[], timestampField: string) => {
+  return await callValidateIndicesAPI(indices, [
+    {
+      name: timestampField,
+      validTypes: ['date'],
+    },
+    {
+      name: partitionField,
+      validTypes: ['keyword'],
+    },
+  ]);
+};
+
+const validateSetupDatasets = async (
+  indices: string[],
+  timestampField: string,
+  startTime: number,
+  endTime: number
+) => {
+  return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime);
+};
+
+export const
metricHostsModule: ModuleDescriptor = { + moduleId, + moduleName, + moduleDescription, + jobTypes: metricsK8SJobTypes, + bucketSpan, + getJobIds, + getJobSummary, + getModuleDefinition, + setUpModule, + cleanUpModule, + validateSetupDatasets, + validateSetupIndices, +}; diff --git a/x-pack/plugins/infra/public/pages/metrics/index.tsx b/x-pack/plugins/infra/public/pages/metrics/index.tsx index 3b3ed80f9e731..ac2c87248ae77 100644 --- a/x-pack/plugins/infra/public/pages/metrics/index.tsx +++ b/x-pack/plugins/infra/public/pages/metrics/index.tsx @@ -38,6 +38,8 @@ import { MetricsAlertDropdown } from '../../alerting/metric_threshold/components import { SavedView } from '../../containers/saved_view/saved_view'; import { SourceConfigurationFields } from '../../graphql/types'; import { AlertPrefillProvider } from '../../alerting/use_alert_prefill'; +import { InfraMLCapabilitiesProvider } from '../../containers/ml/infra_ml_capabilities'; +import { AnomalyDetectionFlyout } from './inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout'; const ADD_DATA_LABEL = i18n.translate('xpack.infra.metricsHeaderAddDataButtonLabel', { defaultMessage: 'Add data', @@ -55,110 +57,118 @@ export const InfrastructurePage = ({ match }: RouteComponentProps) => { - - + + + - + -
+ - - - - - - - - - - - - {ADD_DATA_LABEL} - - - - - - - - ( - - {({ configuration, createDerivedIndexPattern }) => ( - - - {configuration ? ( - - ) : ( - - )} - - )} - + } )} - /> - - - + > + + + + + + + + + + + + + + {ADD_DATA_LABEL} + + + + + + + + ( + + {({ configuration, createDerivedIndexPattern }) => ( + + + {configuration ? ( + + ) : ( + + )} + + )} + + )} + /> + + + + diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout.tsx new file mode 100644 index 0000000000000..b063713fa2c97 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/anomoly_detection_flyout.tsx @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useState, useCallback } from 'react'; +import { EuiButtonEmpty, EuiFlyout } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { FlyoutHome } from './flyout_home'; +import { JobSetupScreen } from './job_setup_screen'; +import { useInfraMLCapabilities } from '../../../../../../containers/ml/infra_ml_capabilities'; +import { MetricHostsModuleProvider } from '../../../../../../containers/ml/modules/metrics_hosts/module'; +import { MetricK8sModuleProvider } from '../../../../../../containers/ml/modules/metrics_k8s/module'; +import { useSourceViaHttp } from '../../../../../../containers/source/use_source_via_http'; +import { useActiveKibanaSpace } from '../../../../../../hooks/use_kibana_space'; + +export const AnomalyDetectionFlyout = () => { + const { hasInfraMLSetupCapabilities } = useInfraMLCapabilities(); + const [showFlyout, setShowFlyout] = useState(false); + const [screenName, setScreenName] = useState<'home' | 'setup'>('home'); + const [screenParams, setScreenParams] = useState(null); + const { source } = useSourceViaHttp({ + sourceId: 'default', + type: 'metrics', + }); + + const { space } = useActiveKibanaSpace(); + + const openFlyout = useCallback(() => { + setScreenName('home'); + setShowFlyout(true); + }, []); + + const openJobSetup = useCallback( + (jobType: 'hosts' | 'kubernetes') => { + setScreenName('setup'); + setScreenParams({ jobType }); + }, + [setScreenName] + ); + + const closeFlyout = useCallback(() => { + setShowFlyout(false); + }, []); + + if (source?.configuration.metricAlias == null || space == null) { + return null; + } + + return ( + <> + + + + {showFlyout && ( + + + + {screenName === 'home' && ( + + )} + {screenName === 'setup' && ( + + )} + + + + )} + + ); +}; diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/flyout_home.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/flyout_home.tsx new file mode 100644 index 0000000000000..9cf898b684336 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/flyout_home.tsx @@ -0,0 +1,333 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React, { useState, useCallback, useEffect } from 'react'; +import { EuiFlyoutHeader, EuiTitle, EuiFlyoutBody, EuiTabs, EuiTab, EuiSpacer } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { EuiText, EuiFlexGroup, EuiFlexItem, EuiCard, EuiIcon } from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { EuiCallOut } from '@elastic/eui'; +import { EuiButton } from '@elastic/eui'; +import { EuiButtonEmpty } from '@elastic/eui'; +import moment from 'moment'; +import { useInfraMLCapabilitiesContext } from '../../../../../../containers/ml/infra_ml_capabilities'; +import { SubscriptionSplashContent } from './subscription_splash_content'; +import { + MissingResultsPrivilegesPrompt, + MissingSetupPrivilegesPrompt, +} from '../../../../../../components/logging/log_analysis_setup'; +import { useMetricHostsModuleContext } from '../../../../../../containers/ml/modules/metrics_hosts/module'; +import { useMetricK8sModuleContext } from '../../../../../../containers/ml/modules/metrics_k8s/module'; +import { LoadingPage } from '../../../../../../components/loading_page'; +import { useLinkProps } from '../../../../../../hooks/use_link_props'; + +interface Props { + hasSetupCapabilities: boolean; + goToSetup(type: 'hosts' | 'kubernetes'): void; +} + +export const FlyoutHome = (props: Props) => { + const [tab, setTab] = useState<'jobs' | 'anomalies'>('jobs'); + const { goToSetup } = props; + const { + fetchJobStatus: fetchHostJobStatus, + setupStatus: hostSetupStatus, + jobSummaries: hostJobSummaries, + } = useMetricHostsModuleContext(); + const { + fetchJobStatus: fetchK8sJobStatus, + setupStatus: k8sSetupStatus, + jobSummaries: k8sJobSummaries, + } = useMetricK8sModuleContext(); + const { + hasInfraMLCapabilites, + hasInfraMLReadCapabilities, + hasInfraMLSetupCapabilities, + } = useInfraMLCapabilitiesContext(); + + const createHosts = useCallback(() => { + goToSetup('hosts'); + }, [goToSetup]); + + const createK8s = useCallback(() => { + goToSetup('kubernetes'); + }, [goToSetup]); + + const goToJobs = useCallback(() => { + setTab('jobs'); + }, []); + + const jobIds = [ + ...(k8sJobSummaries || []).map((k) => k.id), + ...(hostJobSummaries || []).map((h) => h.id), + ]; + const anomaliesUrl = useLinkProps({ + app: 'ml', + pathname: `/explorer?_g=${createResultsUrl(jobIds)}`, + }); + + useEffect(() => { + if (hasInfraMLReadCapabilities) { + fetchHostJobStatus(); + fetchK8sJobStatus(); + } + }, [fetchK8sJobStatus, fetchHostJobStatus, hasInfraMLReadCapabilities]); + + if (!hasInfraMLCapabilites) { + return ; + } else if (!hasInfraMLReadCapabilities) { + return ; + } else if (hostSetupStatus.type === 'initializing' || k8sSetupStatus.type === 'initializing') { + return ( + + ); + } else if (!hasInfraMLSetupCapabilities) { + return ; + } else { + return ( + <> + + +

+ +

+
+
+ + + + + + + + + + + + {hostJobSummaries.length > 0 && ( + <> + 0} + hasK8sJobs={k8sJobSummaries.length > 0} + /> + + + )} + {tab === 'jobs' && ( + 0} + hasK8sJobs={k8sJobSummaries.length > 0} + hasSetupCapabilities={props.hasSetupCapabilities} + createHosts={createHosts} + createK8s={createK8s} + /> + )} + + + ); + } +}; + +interface CalloutProps { + hasHostJobs: boolean; + hasK8sJobs: boolean; +} +const JobsEnabledCallout = (props: CalloutProps) => { + let target = ''; + if (props.hasHostJobs && props.hasK8sJobs) { + target = `${i18n.translate('xpack.infra.ml.anomalyFlyout.create.hostTitle', { + defaultMessage: 'Hosts', + })} and ${i18n.translate('xpack.infra.ml.anomalyFlyout.create.k8sSuccessTitle', { + defaultMessage: 'Kubernetes', + })}`; + } else if (props.hasHostJobs) { + target = i18n.translate('xpack.infra.ml.anomalyFlyout.create.hostSuccessTitle', { + defaultMessage: 'Hosts', + }); + } else if (props.hasK8sJobs) { + target = i18n.translate('xpack.infra.ml.anomalyFlyout.create.k8sSuccessTitle', { + defaultMessage: 'Kubernetes', + }); + } + + const manageJobsLinkProps = useLinkProps({ + app: 'ml', + pathname: '/jobs', + }); + + return ( + <> + + } + iconType="check" + /> + + + + + + ); +}; + +interface CreateJobTab { + hasSetupCapabilities: boolean; + hasHostJobs: boolean; + hasK8sJobs: boolean; + createHosts(): void; + createK8s(): void; +} + +const CreateJobTab = (props: CreateJobTab) => { + return ( + <> +
+ +

+ +

+
+ +

+ +

+
+
+ + + + + } + // title="Hosts" + title={ + + } + description={ + + } + footer={ + <> + {props.hasHostJobs && ( + + + + )} + {!props.hasHostJobs && ( + + + + )} + + } + /> + + + } + title={ + + } + description={ + + } + footer={ + <> + {props.hasK8sJobs && ( + + + + )} + {!props.hasK8sJobs && ( + + + + )} + + } + /> + + + + ); +}; + +function createResultsUrl(jobIds: string[], mode = 'absolute') { + const idString = jobIds.map((j) => `'${j}'`).join(','); + let path = ''; + + const from = moment().subtract(4, 'weeks').toISOString(); + const to = moment().toISOString(); + + path += `(ml:(jobIds:!(${idString}))`; + path += `,refreshInterval:(display:Off,pause:!f,value:0),time:(from:'${from}'`; + path += `,to:'${to}'`; + if (mode === 'invalid') { + path += `,mode:invalid`; + } + path += "))&_a=(query:(query_string:(analyze_wildcard:!t,query:'*')))"; + + return path; +} diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/job_setup_screen.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/job_setup_screen.tsx new file mode 100644 index 0000000000000..730cd7b6e9ef5 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/job_setup_screen.tsx @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useState, useCallback, useMemo, useEffect } from 'react'; +import { EuiForm, EuiDescribedFormGroup, EuiFormRow } from '@elastic/eui'; +import { EuiText, EuiSpacer } from '@elastic/eui'; +import { EuiFlyoutHeader, EuiTitle, EuiFlyoutBody } from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; +import { EuiFlyoutFooter } from '@elastic/eui'; +import { EuiButton } from '@elastic/eui'; +import { EuiFlexGroup, EuiFlexItem, EuiButtonEmpty } from '@elastic/eui'; +import moment, { Moment } from 'moment'; +import { EuiComboBox } from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { EuiLoadingSpinner } from '@elastic/eui'; +import { useSourceViaHttp } from '../../../../../../containers/source/use_source_via_http'; +import { useMetricK8sModuleContext } from '../../../../../../containers/ml/modules/metrics_k8s/module'; +import { useMetricHostsModuleContext } from '../../../../../../containers/ml/modules/metrics_hosts/module'; +import { FixedDatePicker } from '../../../../../../components/fixed_datepicker'; + +interface Props { + jobType: 'hosts' | 'kubernetes'; + closeFlyout(): void; + goHome(): void; +} + +export const JobSetupScreen = (props: Props) => { + const [now] = useState(() => moment()); + const { goHome } = props; + const [startDate, setStartDate] = useState(now.clone().subtract(4, 'weeks')); + const [partitionField, setPartitionField] = useState(null); + const h = useMetricHostsModuleContext(); + const k = useMetricK8sModuleContext(); + const { createDerivedIndexPattern } = useSourceViaHttp({ + sourceId: 'default', + type: 'metrics', + }); + + const indicies = h.sourceConfiguration.indices; + + const setupStatus = useMemo(() => { + if (props.jobType === 'kubernetes') { + return k.setupStatus; + } else { + return h.setupStatus; + } + }, [props.jobType, k.setupStatus, h.setupStatus]); + + const cleanUpAndSetUpModule = useMemo(() => { + if (props.jobType === 'kubernetes') { + return k.cleanUpAndSetUpModule; + 
} else { + return h.cleanUpAndSetUpModule; + } + }, [props.jobType, k.cleanUpAndSetUpModule, h.cleanUpAndSetUpModule]); + + const setUpModule = useMemo(() => { + if (props.jobType === 'kubernetes') { + return k.setUpModule; + } else { + return h.setUpModule; + } + }, [props.jobType, k.setUpModule, h.setUpModule]); + + const hasSummaries = useMemo(() => { + if (props.jobType === 'kubernetes') { + return k.jobSummaries.length > 0; + } else { + return h.jobSummaries.length > 0; + } + }, [props.jobType, k.jobSummaries, h.jobSummaries]); + + const derivedIndexPattern = useMemo(() => createDerivedIndexPattern('metrics'), [ + createDerivedIndexPattern, + ]); + + const updateStart = useCallback((date: Moment) => { + setStartDate(date); + }, []); + + const createJobs = useCallback(() => { + if (hasSummaries) { + cleanUpAndSetUpModule( + indicies, + moment(startDate).toDate().getTime(), + undefined, + { type: 'includeAll' }, + partitionField ? partitionField[0] : undefined + ); + } else { + setUpModule( + indicies, + moment(startDate).toDate().getTime(), + undefined, + { type: 'includeAll' }, + partitionField ? partitionField[0] : undefined + ); + } + }, [cleanUpAndSetUpModule, setUpModule, hasSummaries, indicies, partitionField, startDate]); + + const onPartitionFieldChange = useCallback((value: Array<{ label: string }>) => { + setPartitionField(value.map((v) => v.label)); + }, []); + + useEffect(() => { + if (props.jobType === 'kubernetes') { + setPartitionField(['kubernetes.namespace']); + } + }, [props.jobType]); + + useEffect(() => { + if (setupStatus.type === 'succeeded') { + goHome(); + } + }, [setupStatus, goHome]); + + return ( + <> + + +

+ +

+
+
+ + {setupStatus.type === 'pending' ? ( + + + + + + + + + ) : setupStatus.type === 'failed' ? ( + <> + + + + + + + ) : ( + <> + +

+ +

+
+ + + + + + + } + description={ + + } + > + + } + > + + + + + + + + } + description={ + + } + > + + } + compressed + > + ({ label: p })) : undefined + } + options={derivedIndexPattern.fields + .filter((f) => f.aggregatable && f.type === 'string') + .map((f) => ({ label: f.name }))} + onChange={onPartitionFieldChange} + isClearable={true} + /> + + + + + )} +
+ + + + + + + + + + + + + + + + ); +}; diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/subscription_splash_content.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/subscription_splash_content.tsx new file mode 100644 index 0000000000000..f07c37f5e7ea2 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/ml/anomaly_detection/subscription_splash_content.tsx @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useEffect } from 'react'; +import { i18n } from '@kbn/i18n'; +import { + EuiPage, + EuiPageBody, + EuiPageContent, + EuiFlexGroup, + EuiFlexItem, + EuiSpacer, + EuiTitle, + EuiText, + EuiButton, + EuiButtonEmpty, + EuiImage, +} from '@elastic/eui'; +import { FormattedMessage } from '@kbn/i18n/react'; + +import { LoadingPage } from '../../../../../../components/loading_page'; +import { useTrialStatus } from '../../../../../../hooks/use_trial_status'; +import { useKibana } from '../../../../../../../../../../src/plugins/kibana_react/public'; +import { euiStyled } from '../../../../../../../../observability/public'; +import { HttpStart } from '../../../../../../../../../../src/core/public'; + +export const SubscriptionSplashContent: React.FC = () => { + const { services } = useKibana<{ http: HttpStart }>(); + const { loadState, isTrialAvailable, checkTrialAvailability } = useTrialStatus(); + + useEffect(() => { + checkTrialAvailability(); + }, [checkTrialAvailability]); + + if (loadState === 'pending') { + return ( + + ); + } + + const canStartTrial = isTrialAvailable && loadState === 'resolved'; + + let title; + let description; + let cta; + + if (canStartTrial) { + title = ( + + ); + + description = ( + + ); + + cta = ( + + + + ); + } else { + title = ( + + ); + + description = ( + + ); + + cta = ( + + + + ); + } + + return ( + + + + + + +

{title}

+
+ + +

{description}

+
+ +
{cta}
+
+ + + +
+ + +

+ +

+
+ + + +
+
+
+
+ ); +}; + +const SubscriptionPage = euiStyled(EuiPage)` + height: 100% +`; + +const SubscriptionPageContent = euiStyled(EuiPageContent)` + max-width: 768px !important; +`; + +const SubscriptionPageFooter = euiStyled.div` + background: ${(props) => props.theme.eui.euiColorLightestShade}; + margin: 0 -${(props) => props.theme.eui.paddingSizes.l} -${(props) => + props.theme.eui.paddingSizes.l}; + padding: ${(props) => props.theme.eui.paddingSizes.l}; +`; diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/waffle/interval_label.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/waffle/interval_label.tsx index 6e031c8396f07..dbbfb0f49c0e9 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/waffle/interval_label.tsx +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/waffle/interval_label.tsx @@ -22,7 +22,7 @@ export const IntervalLabel = ({ intervalAsString }: Props) => {

diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts new file mode 100644 index 0000000000000..f755057d0b76d --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts @@ -0,0 +1,318 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { + INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, + Sort, + Pagination, + PaginationCursor, + getMetricsHostsAnomaliesRequestPayloadRT, + MetricsHostsAnomaly, + getMetricsHostsAnomaliesSuccessReponsePayloadRT, +} from '../../../../../common/http_api/infra_ml'; +import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; +import { npStart } from '../../../../legacy_singletons'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +export type SortOptions = Sort; +export type PaginationOptions = Pick; +export type Page = number; +export type FetchNextPage = () => void; +export type FetchPreviousPage = () => void; +export type ChangeSortOptions = (sortOptions: Sort) => void; +export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void; +export type MetricsHostsAnomalies = MetricsHostsAnomaly[]; +interface PaginationCursors { + previousPageCursor: PaginationCursor; + nextPageCursor: PaginationCursor; +} + +interface ReducerState { + page: number; + lastReceivedCursors: PaginationCursors | undefined; + paginationCursor: Pagination['cursor'] | undefined; + hasNextPage: boolean; + paginationOptions: PaginationOptions; + sortOptions: Sort; + timeRange: { + start: number; + end: number; + }; + filteredDatasets?: string[]; +} + +type ReducerStateDefaults = Pick< + ReducerState, + 'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage' +>; + +type ReducerAction = + | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } } + | { type: 'changeSortOptions'; payload: { sortOptions: Sort } } + | { type: 'fetchNextPage' } + | { type: 'fetchPreviousPage' } + | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } } + | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } } + | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } } + | { type: 'changeFilteredDatasets'; payload: { filteredDatasets?: string[] } }; + +const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => { + const resetPagination = { + page: 1, + paginationCursor: undefined, + }; + switch (action.type) { + case 'changePaginationOptions': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeSortOptions': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeHasNextPage': + return { + ...state, + ...action.payload, + }; + case 'changeLastReceivedCursors': + return { + ...state, + ...action.payload, + }; + case 'fetchNextPage': + return state.lastReceivedCursors + ? { + ...state, + page: state.page + 1, + paginationCursor: { searchAfter: state.lastReceivedCursors.nextPageCursor }, + } + : state; + case 'fetchPreviousPage': + return state.lastReceivedCursors + ? 
{ + ...state, + page: state.page - 1, + paginationCursor: { searchBefore: state.lastReceivedCursors.previousPageCursor }, + } + : state; + case 'changeTimeRange': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeFilteredDatasets': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + default: + return state; + } +}; + +const STATE_DEFAULTS: ReducerStateDefaults = { + // NOTE: This piece of state is purely for the client side, it could be extracted out of the hook. + page: 1, + // Cursor from the last request + lastReceivedCursors: undefined, + // Cursor to use for the next request. For the first request, and therefore not paging, this will be undefined. + paginationCursor: undefined, + hasNextPage: false, +}; + +export const useMetricsHostsAnomaliesResults = ({ + endTime, + startTime, + sourceId, + defaultSortOptions, + defaultPaginationOptions, + onGetMetricsHostsAnomaliesDatasetsError, + filteredDatasets, +}: { + endTime: number; + startTime: number; + sourceId: string; + defaultSortOptions: Sort; + defaultPaginationOptions: Pick; + onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; + filteredDatasets?: string[]; +}) => { + const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { + return { + ...stateDefaults, + paginationOptions: defaultPaginationOptions, + sortOptions: defaultSortOptions, + filteredDatasets, + timeRange: { + start: startTime, + end: endTime, + }, + }; + }; + + const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer); + + const [metricsHostsAnomalies, setMetricsHostsAnomalies] = useState([]); + + const [getMetricsHostsAnomaliesRequest, getMetricsHostsAnomalies] = useTrackedPromise( + { + cancelPreviousOn: 'creation', + createPromise: async () => { + const { + timeRange: { start: queryStartTime, end: queryEndTime }, + sortOptions, + paginationOptions, + paginationCursor, + } = reducerState; + return await callGetMetricHostsAnomaliesAPI( + sourceId, + queryStartTime, + queryEndTime, + sortOptions, + { + ...paginationOptions, + cursor: paginationCursor, + } + ); + }, + onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { + const { paginationCursor } = reducerState; + if (requestCursors) { + dispatch({ + type: 'changeLastReceivedCursors', + payload: { lastReceivedCursors: requestCursors }, + }); + } + // Check if we have more "next" entries. "Page" covers the "previous" scenario, + // since we need to know the page we're on anyway. + if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) { + dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } }); + } else if (paginationCursor && 'searchBefore' in paginationCursor) { + // We've requested a previous page, therefore there is a next page. 
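+          // (A "searchBefore" request walks backwards, so hasMoreEntries refers to older
+          // entries and says nothing about the next page; the page we navigated back from
+          // is guaranteed to exist, hence hasNextPage: true.)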
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } }); + } + setMetricsHostsAnomalies(anomalies); + }, + }, + [ + sourceId, + dispatch, + reducerState.timeRange, + reducerState.sortOptions, + reducerState.paginationOptions, + reducerState.paginationCursor, + reducerState.filteredDatasets, + ] + ); + + const changeSortOptions = useCallback( + (nextSortOptions: Sort) => { + dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } }); + }, + [dispatch] + ); + + const changePaginationOptions = useCallback( + (nextPaginationOptions: PaginationOptions) => { + dispatch({ + type: 'changePaginationOptions', + payload: { paginationOptions: nextPaginationOptions }, + }); + }, + [dispatch] + ); + + // Time range has changed + useEffect(() => { + dispatch({ + type: 'changeTimeRange', + payload: { timeRange: { start: startTime, end: endTime } }, + }); + }, [startTime, endTime]); + + // Selected datasets have changed + useEffect(() => { + dispatch({ + type: 'changeFilteredDatasets', + payload: { filteredDatasets }, + }); + }, [filteredDatasets]); + + useEffect(() => { + getMetricsHostsAnomalies(); + }, [getMetricsHostsAnomalies]); // TODO: FIgure out the deps here. + + const handleFetchNextPage = useCallback(() => { + if (reducerState.lastReceivedCursors) { + dispatch({ type: 'fetchNextPage' }); + } + }, [dispatch, reducerState]); + + const handleFetchPreviousPage = useCallback(() => { + if (reducerState.lastReceivedCursors) { + dispatch({ type: 'fetchPreviousPage' }); + } + }, [dispatch, reducerState]); + + const isLoadingMetricsHostsAnomalies = useMemo( + () => getMetricsHostsAnomaliesRequest.state === 'pending', + [getMetricsHostsAnomaliesRequest.state] + ); + + const hasFailedLoadingMetricsHostsAnomalies = useMemo( + () => getMetricsHostsAnomaliesRequest.state === 'rejected', + [getMetricsHostsAnomaliesRequest.state] + ); + + return { + metricsHostsAnomalies, + getMetricsHostsAnomalies, + isLoadingMetricsHostsAnomalies, + hasFailedLoadingMetricsHostsAnomalies, + changeSortOptions, + sortOptions: reducerState.sortOptions, + changePaginationOptions, + paginationOptions: reducerState.paginationOptions, + fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined, + fetchNextPage: reducerState.hasNextPage ? handleFetchNextPage : undefined, + page: reducerState.page, + }; +}; + +export const callGetMetricHostsAnomaliesAPI = async ( + sourceId: string, + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) => { + const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { + method: 'POST', + body: JSON.stringify( + getMetricsHostsAnomaliesRequestPayloadRT.encode({ + data: { + sourceId, + timeRange: { + startTime, + endTime, + }, + sort, + pagination, + }, + }) + ), + }); + + return decodeOrThrow(getMetricsHostsAnomaliesSuccessReponsePayloadRT)(response); +}; diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts new file mode 100644 index 0000000000000..4a7b78e1fdf92 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts @@ -0,0 +1,322 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { + Sort, + Pagination, + PaginationCursor, + INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, + getMetricsK8sAnomaliesSuccessReponsePayloadRT, + getMetricsK8sAnomaliesRequestPayloadRT, + MetricsK8sAnomaly, +} from '../../../../../common/http_api/infra_ml'; +import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; +import { npStart } from '../../../../legacy_singletons'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +export type SortOptions = Sort; +export type PaginationOptions = Pick; +export type Page = number; +export type FetchNextPage = () => void; +export type FetchPreviousPage = () => void; +export type ChangeSortOptions = (sortOptions: Sort) => void; +export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void; +export type MetricsK8sAnomalies = MetricsK8sAnomaly[]; +interface PaginationCursors { + previousPageCursor: PaginationCursor; + nextPageCursor: PaginationCursor; +} + +interface ReducerState { + page: number; + lastReceivedCursors: PaginationCursors | undefined; + paginationCursor: Pagination['cursor'] | undefined; + hasNextPage: boolean; + paginationOptions: PaginationOptions; + sortOptions: Sort; + timeRange: { + start: number; + end: number; + }; + filteredDatasets?: string[]; +} + +type ReducerStateDefaults = Pick< + ReducerState, + 'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage' +>; + +type ReducerAction = + | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } } + | { type: 'changeSortOptions'; payload: { sortOptions: Sort } } + | { type: 'fetchNextPage' } + | { type: 'fetchPreviousPage' } + | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } } + | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } } + | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } } + | { type: 'changeFilteredDatasets'; payload: { filteredDatasets?: string[] } }; + +const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => { + const resetPagination = { + page: 1, + paginationCursor: undefined, + }; + switch (action.type) { + case 'changePaginationOptions': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeSortOptions': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeHasNextPage': + return { + ...state, + ...action.payload, + }; + case 'changeLastReceivedCursors': + return { + ...state, + ...action.payload, + }; + case 'fetchNextPage': + return state.lastReceivedCursors + ? { + ...state, + page: state.page + 1, + paginationCursor: { searchAfter: state.lastReceivedCursors.nextPageCursor }, + } + : state; + case 'fetchPreviousPage': + return state.lastReceivedCursors + ? { + ...state, + page: state.page - 1, + paginationCursor: { searchBefore: state.lastReceivedCursors.previousPageCursor }, + } + : state; + case 'changeTimeRange': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + case 'changeFilteredDatasets': + return { + ...state, + ...resetPagination, + ...action.payload, + }; + default: + return state; + } +}; + +const STATE_DEFAULTS: ReducerStateDefaults = { + // NOTE: This piece of state is purely for the client side, it could be extracted out of the hook. + page: 1, + // Cursor from the last request + lastReceivedCursors: undefined, + // Cursor to use for the next request. 
For the first request, and therefore not paging, this will be undefined. + paginationCursor: undefined, + hasNextPage: false, +}; + +export const useMetricsK8sAnomaliesResults = ({ + endTime, + startTime, + sourceId, + defaultSortOptions, + defaultPaginationOptions, + onGetMetricsHostsAnomaliesDatasetsError, + filteredDatasets, +}: { + endTime: number; + startTime: number; + sourceId: string; + defaultSortOptions: Sort; + defaultPaginationOptions: Pick; + onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; + filteredDatasets?: string[]; +}) => { + const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { + return { + ...stateDefaults, + paginationOptions: defaultPaginationOptions, + sortOptions: defaultSortOptions, + filteredDatasets, + timeRange: { + start: startTime, + end: endTime, + }, + }; + }; + + const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer); + + const [metricsK8sAnomalies, setMetricsK8sAnomalies] = useState([]); + + const [getMetricsK8sAnomaliesRequest, getMetricsK8sAnomalies] = useTrackedPromise( + { + cancelPreviousOn: 'creation', + createPromise: async () => { + const { + timeRange: { start: queryStartTime, end: queryEndTime }, + sortOptions, + paginationOptions, + paginationCursor, + filteredDatasets: queryFilteredDatasets, + } = reducerState; + return await callGetMetricsK8sAnomaliesAPI( + sourceId, + queryStartTime, + queryEndTime, + sortOptions, + { + ...paginationOptions, + cursor: paginationCursor, + }, + queryFilteredDatasets + ); + }, + onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { + const { paginationCursor } = reducerState; + if (requestCursors) { + dispatch({ + type: 'changeLastReceivedCursors', + payload: { lastReceivedCursors: requestCursors }, + }); + } + // Check if we have more "next" entries. "Page" covers the "previous" scenario, + // since we need to know the page we're on anyway. + if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) { + dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } }); + } else if (paginationCursor && 'searchBefore' in paginationCursor) { + // We've requested a previous page, therefore there is a next page. 
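+          // (Same reasoning as in the hosts hook: paging backwards implies the next page
+          // exists, regardless of what hasMoreEntries reports for this backwards search.)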
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } }); + } + setMetricsK8sAnomalies(anomalies); + }, + }, + [ + sourceId, + dispatch, + reducerState.timeRange, + reducerState.sortOptions, + reducerState.paginationOptions, + reducerState.paginationCursor, + reducerState.filteredDatasets, + ] + ); + + const changeSortOptions = useCallback( + (nextSortOptions: Sort) => { + dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } }); + }, + [dispatch] + ); + + const changePaginationOptions = useCallback( + (nextPaginationOptions: PaginationOptions) => { + dispatch({ + type: 'changePaginationOptions', + payload: { paginationOptions: nextPaginationOptions }, + }); + }, + [dispatch] + ); + + // Time range has changed + useEffect(() => { + dispatch({ + type: 'changeTimeRange', + payload: { timeRange: { start: startTime, end: endTime } }, + }); + }, [startTime, endTime]); + + // Selected datasets have changed + useEffect(() => { + dispatch({ + type: 'changeFilteredDatasets', + payload: { filteredDatasets }, + }); + }, [filteredDatasets]); + + useEffect(() => { + getMetricsK8sAnomalies(); + }, [getMetricsK8sAnomalies]); + + const handleFetchNextPage = useCallback(() => { + if (reducerState.lastReceivedCursors) { + dispatch({ type: 'fetchNextPage' }); + } + }, [dispatch, reducerState]); + + const handleFetchPreviousPage = useCallback(() => { + if (reducerState.lastReceivedCursors) { + dispatch({ type: 'fetchPreviousPage' }); + } + }, [dispatch, reducerState]); + + const isLoadingMetricsK8sAnomalies = useMemo( + () => getMetricsK8sAnomaliesRequest.state === 'pending', + [getMetricsK8sAnomaliesRequest.state] + ); + + const hasFailedLoadingMetricsK8sAnomalies = useMemo( + () => getMetricsK8sAnomaliesRequest.state === 'rejected', + [getMetricsK8sAnomaliesRequest.state] + ); + + return { + metricsK8sAnomalies, + getMetricsK8sAnomalies, + isLoadingMetricsK8sAnomalies, + hasFailedLoadingMetricsK8sAnomalies, + changeSortOptions, + sortOptions: reducerState.sortOptions, + changePaginationOptions, + paginationOptions: reducerState.paginationOptions, + fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined, + fetchNextPage: reducerState.hasNextPage ? 
handleFetchNextPage : undefined, + page: reducerState.page, + }; +}; + +export const callGetMetricsK8sAnomaliesAPI = async ( + sourceId: string, + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination, + datasets?: string[] +) => { + const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { + method: 'POST', + body: JSON.stringify( + getMetricsK8sAnomaliesRequestPayloadRT.encode({ + data: { + sourceId, + timeRange: { + startTime, + endTime, + }, + sort, + pagination, + datasets, + }, + }) + ), + }); + + return decodeOrThrow(getMetricsK8sAnomaliesSuccessReponsePayloadRT)(response); +}; diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_timeline.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_timeline.ts index 650eda0362d9e..acf9011ac7ddd 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_timeline.ts +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_timeline.ts @@ -28,21 +28,28 @@ const ONE_MINUTE = 60; const ONE_HOUR = ONE_MINUTE * 60; const ONE_DAY = ONE_HOUR * 24; const ONE_WEEK = ONE_DAY * 7; +const ONE_MONTH = ONE_DAY * 30; + +const getDisplayInterval = (interval: string | undefined) => { + if (interval) { + const intervalInSeconds = getIntervalInSeconds(interval); + if (intervalInSeconds < 300) return '5m'; + } + return interval; +}; const getTimeLengthFromInterval = (interval: string | undefined) => { if (interval) { const intervalInSeconds = getIntervalInSeconds(interval); - const multiplier = - intervalInSeconds < ONE_MINUTE - ? ONE_HOUR / intervalInSeconds - : intervalInSeconds < ONE_HOUR - ? 60 - : intervalInSeconds < ONE_DAY - ? 7 - : intervalInSeconds < ONE_WEEK - ? 30 - : 1; - const timeLength = intervalInSeconds * multiplier; + // Get up to 288 datapoints based on interval + const timeLength = + intervalInSeconds <= ONE_MINUTE * 15 + ? ONE_DAY + : intervalInSeconds <= ONE_MINUTE * 35 + ? ONE_DAY * 3 + : intervalInSeconds <= ONE_HOUR * 2.5 + ? ONE_WEEK + : ONE_MONTH; return { timeLength, intervalInSeconds }; } else { return { timeLength: 0, intervalInSeconds: 0 }; @@ -67,15 +74,19 @@ export function useTimeline( ); }; - const timeLengthResult = useMemo(() => getTimeLengthFromInterval(interval), [interval]); + const displayInterval = useMemo(() => getDisplayInterval(interval), [interval]); + + const timeLengthResult = useMemo(() => getTimeLengthFromInterval(displayInterval), [ + displayInterval, + ]); const { timeLength, intervalInSeconds } = timeLengthResult; const timerange: InfraTimerangeInput = { - interval: interval ?? '', + interval: displayInterval ?? 
'', to: currentTime + intervalInSeconds * 1000, from: currentTime - timeLength * 1000, - lookbackSize: 0, ignoreLookback: true, + forceInterval: true, }; const { error, loading, response, makeRequest } = useHTTPRequest( diff --git a/x-pack/plugins/infra/server/infra_server.ts b/x-pack/plugins/infra/server/infra_server.ts index a72e40e25b479..206fffdd2e188 100644 --- a/x-pack/plugins/infra/server/infra_server.ts +++ b/x-pack/plugins/infra/server/infra_server.ts @@ -21,6 +21,8 @@ import { initGetLogEntryAnomaliesRoute, initGetLogEntryAnomaliesDatasetsRoute, } from './routes/log_analysis'; +import { initGetK8sAnomaliesRoute } from './routes/infra_ml'; +import { initGetHostsAnomaliesRoute } from './routes/infra_ml'; import { initMetricExplorerRoute } from './routes/metrics_explorer'; import { initMetadataRoute } from './routes/metadata'; import { initSnapshotRoute } from './routes/snapshot'; @@ -56,6 +58,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => { initGetLogEntryRateRoute(libs); initGetLogEntryAnomaliesRoute(libs); initGetLogEntryAnomaliesDatasetsRoute(libs); + initGetK8sAnomaliesRoute(libs); + initGetHostsAnomaliesRoute(libs); initSnapshotRoute(libs); initNodeDetailsRoute(libs); initSourceRoute(libs); diff --git a/x-pack/plugins/infra/server/lib/infra_ml/common.ts b/x-pack/plugins/infra/server/lib/infra_ml/common.ts new file mode 100644 index 0000000000000..4d2be94c7cd62 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/common.ts @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import type { MlAnomalyDetectors, MlSystem } from '../../types'; +import { NoLogAnalysisMlJobError } from './errors'; + +import { + CompositeDatasetKey, + createLogEntryDatasetsQuery, + LogEntryDatasetBucket, + logEntryDatasetsResponseRT, +} from './queries/log_entry_data_sets'; +import { decodeOrThrow } from '../../../common/runtime_types'; +import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing'; + +export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) { + const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES'); + const { + jobs: [mlJob], + } = await mlAnomalyDetectors.jobs(jobId); + + const mlGetJobSpan = finalizeMlGetJobSpan(); + + if (mlJob == null) { + throw new NoLogAnalysisMlJobError(`Failed to find ml job ${jobId}.`); + } + + return { + mlJob, + timing: { + spans: [mlGetJobSpan], + }, + }; +} + +const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; + +// Finds datasets related to ML job ids +export async function getLogEntryDatasets( + mlSystem: MlSystem, + startTime: number, + endTime: number, + jobIds: string[] +) { + const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets'); + + let logEntryDatasetBuckets: LogEntryDatasetBucket[] = []; + let afterLatestBatchKey: CompositeDatasetKey | undefined; + let esSearchSpans: TracingSpan[] = []; + + while (true) { + const finalizeEsSearchSpan = startTracingSpan('fetch log entry dataset batch from ES'); + + const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)( + await mlSystem.mlAnomalySearch( + createLogEntryDatasetsQuery( + jobIds, + startTime, + endTime, + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey + ) + ) + ); + + const { after_key: afterKey, buckets: latestBatchBuckets = [] } = + 
logEntryDatasetsResponse.aggregations?.dataset_buckets ?? {}; + + logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; + esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()]; + + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; + } + } + + const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan(); + + return { + data: logEntryDatasetBuckets.map((logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset), + timing: { + spans: [logEntryDatasetsSpan, ...esSearchSpans], + }, + }; +} diff --git a/x-pack/plugins/infra/server/lib/infra_ml/errors.ts b/x-pack/plugins/infra/server/lib/infra_ml/errors.ts new file mode 100644 index 0000000000000..ad46ebf710266 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/errors.ts @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/* eslint-disable max-classes-per-file */ + +import { + UnknownMLCapabilitiesError, + InsufficientMLCapabilities, + MLPrivilegesUninitialized, +} from '../../../../ml/server'; + +export class NoLogAnalysisMlJobError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export class InsufficientLogAnalysisMlJobConfigurationError extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export class UnknownCategoryError extends Error { + constructor(categoryId: number) { + super(`Unknown ml category ${categoryId}`); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export class InsufficientAnomalyMlJobsConfigured extends Error { + constructor(message?: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} + +export const isMlPrivilegesError = (error: any) => { + return ( + error instanceof UnknownMLCapabilitiesError || + error instanceof InsufficientMLCapabilities || + error instanceof MLPrivilegesUninitialized + ); +}; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/index.ts b/x-pack/plugins/infra/server/lib/infra_ml/index.ts new file mode 100644 index 0000000000000..536f0a44d5890 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './errors'; +export * from './metrics_hosts_anomalies'; +export * from './metrics_k8s_anomalies'; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/metrics_hosts_anomalies.ts b/x-pack/plugins/infra/server/lib/infra_ml/metrics_hosts_anomalies.ts new file mode 100644 index 0000000000000..e0afa458aac88 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/metrics_hosts_anomalies.ts @@ -0,0 +1,289 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { RequestHandlerContext } from 'src/core/server'; +import { InfraRequestHandlerContext } from '../../types'; +import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing'; +import { fetchMlJob, getLogEntryDatasets } from './common'; +import { getJobId, metricsHostsJobTypes } from '../../../common/infra_ml'; +import { Sort, Pagination } from '../../../common/http_api/infra_ml'; +import type { MlSystem, MlAnomalyDetectors } from '../../types'; +import { InsufficientAnomalyMlJobsConfigured, isMlPrivilegesError } from './errors'; +import { decodeOrThrow } from '../../../common/runtime_types'; +import { + metricsHostsAnomaliesResponseRT, + createMetricsHostsAnomaliesQuery, +} from './queries/metrics_hosts_anomalies'; + +interface MappedAnomalyHit { + id: string; + anomalyScore: number; + dataset: string; + typical: number; + actual: number; + jobId: string; + startTime: number; + duration: number; + hostName: string[]; + categoryId?: string; +} + +async function getCompatibleAnomaliesJobIds( + spaceId: string, + sourceId: string, + mlAnomalyDetectors: MlAnomalyDetectors +) { + const metricsHostsJobIds = metricsHostsJobTypes.map((jt) => getJobId(spaceId, sourceId, jt)); + + const jobIds: string[] = []; + let jobSpans: TracingSpan[] = []; + + try { + await Promise.all( + metricsHostsJobIds.map((id) => { + return (async () => { + const { + timing: { spans }, + } = await fetchMlJob(mlAnomalyDetectors, id); + jobIds.push(id); + jobSpans = [...jobSpans, ...spans]; + })(); + }) + ); + } catch (e) { + if (isMlPrivilegesError(e)) { + throw e; + } + // An error is also thrown when no jobs are found + } + + return { + jobIds, + timing: { spans: jobSpans }, + }; +} + +export async function getMetricsHostsAnomalies( + context: RequestHandlerContext & { infra: Required }, + sourceId: string, + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) { + const finalizeMetricsHostsAnomaliesSpan = startTracingSpan('get metrics hosts entry anomalies'); + + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + + if (jobIds.length === 0) { + throw new InsufficientAnomalyMlJobsConfigured( + 'Metrics Hosts ML jobs need to be configured to search anomalies' + ); + } + + try { + const { + anomalies, + paginationCursors, + hasMoreEntries, + timing: { spans: fetchLogEntryAnomaliesSpans }, + } = await fetchMetricsHostsAnomalies( + context.infra.mlSystem, + jobIds, + startTime, + endTime, + sort, + pagination + ); + + const data = anomalies.map((anomaly) => { + const { jobId } = anomaly; + + return parseAnomalyResult(anomaly, jobId); + }); + + const metricsHostsAnomaliesSpan = finalizeMetricsHostsAnomaliesSpan(); + + return { + data, + paginationCursors, + hasMoreEntries, + timing: { + spans: [metricsHostsAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans], + }, + }; + } catch (e) { + throw new Error(e); + } +} + +const parseAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => { + const { + id, + anomalyScore, + dataset, + typical, + actual, + duration, + hostName, + startTime: anomalyStartTime, + } = anomaly; + + return { + id, + anomalyScore, + dataset, + typical, + actual, + duration, + hostName, + startTime: anomalyStartTime, + type: 'metrics_hosts' as const, + jobId, + }; +}; + +async function fetchMetricsHostsAnomalies( + mlSystem: MlSystem, + jobIds: string[], + startTime: number, + endTime: number, + sort: Sort, + 
pagination: Pagination +) { + // We'll request 1 extra entry on top of our pageSize to determine if there are + // more entries to be fetched. This avoids scenarios where the client side can't + // determine if entries.length === pageSize actually means there are more entries / next page + // or not. + const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 }; + + const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch metrics hosts anomalies'); + + // console.log( + // 'data', + // JSON.stringify( + // await mlSystem.mlAnomalySearch( + // createMetricsHostsAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination) + // ), + // null, + // 2 + // ) + // ); + const results = decodeOrThrow(metricsHostsAnomaliesResponseRT)( + await mlSystem.mlAnomalySearch( + createMetricsHostsAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination) + ) + ); + + const { + hits: { hits }, + } = results; + const hasMoreEntries = hits.length > pagination.pageSize; + + // An extra entry was found and hasMoreEntries has been determined, the extra entry can be removed. + if (hasMoreEntries) { + hits.pop(); + } + + // To "search_before" the sort order will have been reversed for ES. + // The results are now reversed back, to match the requested sort. + if (pagination.cursor && 'searchBefore' in pagination.cursor) { + hits.reverse(); + } + + const paginationCursors = + hits.length > 0 + ? { + previousPageCursor: hits[0].sort, + nextPageCursor: hits[hits.length - 1].sort, + } + : undefined; + + const anomalies = hits.map((result) => { + const { + // eslint-disable-next-line @typescript-eslint/naming-convention + job_id, + record_score: anomalyScore, + typical, + actual, + bucket_span: duration, + timestamp: anomalyStartTime, + by_field_value: categoryId, + } = result._source; + + return { + id: result._id, + anomalyScore, + dataset: '', + typical: typical[0], + actual: actual[0], + jobId: job_id, + hostName: result._source['host.name'], + startTime: anomalyStartTime, + duration: duration * 1000, + categoryId, + }; + }); + + const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan(); + + return { + anomalies, + paginationCursors, + hasMoreEntries, + timing: { + spans: [fetchLogEntryAnomaliesSpan], + }, + }; +} + +// TODO: FIgure out why we need datasets +export async function getMetricsHostsAnomaliesDatasets( + context: { + infra: { + mlSystem: MlSystem; + mlAnomalyDetectors: MlAnomalyDetectors; + spaceId: string; + }; + }, + sourceId: string, + startTime: number, + endTime: number +) { + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + + if (jobIds.length === 0) { + throw new InsufficientAnomalyMlJobsConfigured( + 'Log rate or categorisation ML jobs need to be configured to search for anomaly datasets' + ); + } + + const { + data: datasets, + timing: { spans: datasetsSpans }, + } = await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds); + + return { + datasets, + timing: { + spans: [...jobSpans, ...datasetsSpans], + }, + }; +} diff --git a/x-pack/plugins/infra/server/lib/infra_ml/metrics_k8s_anomalies.ts b/x-pack/plugins/infra/server/lib/infra_ml/metrics_k8s_anomalies.ts new file mode 100644 index 0000000000000..29507900e1847 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/metrics_k8s_anomalies.ts @@ -0,0 +1,272 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { RequestHandlerContext } from 'src/core/server'; +import { InfraRequestHandlerContext } from '../../types'; +import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing'; +import { fetchMlJob, getLogEntryDatasets } from './common'; +import { getJobId, metricsK8SJobTypes } from '../../../common/infra_ml'; +import { Sort, Pagination } from '../../../common/http_api/infra_ml'; +import type { MlSystem, MlAnomalyDetectors } from '../../types'; +import { InsufficientAnomalyMlJobsConfigured, isMlPrivilegesError } from './errors'; +import { decodeOrThrow } from '../../../common/runtime_types'; +import { + metricsK8sAnomaliesResponseRT, + createMetricsK8sAnomaliesQuery, +} from './queries/metrics_k8s_anomalies'; + +interface MappedAnomalyHit { + id: string; + anomalyScore: number; + // dataset: string; + typical: number; + actual: number; + jobId: string; + startTime: number; + duration: number; + categoryId?: string; +} + +async function getCompatibleAnomaliesJobIds( + spaceId: string, + sourceId: string, + mlAnomalyDetectors: MlAnomalyDetectors +) { + const metricsK8sJobIds = metricsK8SJobTypes.map((jt) => getJobId(spaceId, sourceId, jt)); + + const jobIds: string[] = []; + let jobSpans: TracingSpan[] = []; + + try { + await Promise.all( + metricsK8sJobIds.map((id) => { + return (async () => { + const { + timing: { spans }, + } = await fetchMlJob(mlAnomalyDetectors, id); + jobIds.push(id); + jobSpans = [...jobSpans, ...spans]; + })(); + }) + ); + } catch (e) { + if (isMlPrivilegesError(e)) { + throw e; + } + // An error is also thrown when no jobs are found + } + + return { + jobIds, + timing: { spans: jobSpans }, + }; +} + +export async function getMetricK8sAnomalies( + context: RequestHandlerContext & { infra: Required }, + sourceId: string, + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) { + const finalizeMetricsK8sAnomaliesSpan = startTracingSpan('get metrics k8s entry anomalies'); + + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + + if (jobIds.length === 0) { + throw new InsufficientAnomalyMlJobsConfigured( + 'Log rate or categorisation ML jobs need to be configured to search anomalies' + ); + } + + const { + anomalies, + paginationCursors, + hasMoreEntries, + timing: { spans: fetchLogEntryAnomaliesSpans }, + } = await fetchMetricK8sAnomalies( + context.infra.mlSystem, + jobIds, + startTime, + endTime, + sort, + pagination + ); + + const data = anomalies.map((anomaly) => { + const { jobId } = anomaly; + + return parseAnomalyResult(anomaly, jobId); + }); + + const metricsK8sAnomaliesSpan = finalizeMetricsK8sAnomaliesSpan(); + + return { + data, + paginationCursors, + hasMoreEntries, + timing: { + spans: [metricsK8sAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans], + }, + }; +} + +const parseAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => { + const { + id, + anomalyScore, + // dataset, + typical, + actual, + duration, + startTime: anomalyStartTime, + } = anomaly; + + return { + id, + anomalyScore, + // dataset, + typical, + actual, + duration, + startTime: anomalyStartTime, + type: 'metrics_k8s' as const, + jobId, + }; +}; + +async function fetchMetricK8sAnomalies( + mlSystem: MlSystem, + jobIds: string[], + 
startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) { + // We'll request 1 extra entry on top of our pageSize to determine if there are + // more entries to be fetched. This avoids scenarios where the client side can't + // determine if entries.length === pageSize actually means there are more entries / next page + // or not. + const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 }; + + const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch metrics k8s anomalies'); + + const results = decodeOrThrow(metricsK8sAnomaliesResponseRT)( + await mlSystem.mlAnomalySearch( + createMetricsK8sAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination) + ) + ); + + const { + hits: { hits }, + } = results; + const hasMoreEntries = hits.length > pagination.pageSize; + + // An extra entry was found and hasMoreEntries has been determined, the extra entry can be removed. + if (hasMoreEntries) { + hits.pop(); + } + + // To "search_before" the sort order will have been reversed for ES. + // The results are now reversed back, to match the requested sort. + if (pagination.cursor && 'searchBefore' in pagination.cursor) { + hits.reverse(); + } + + const paginationCursors = + hits.length > 0 + ? { + previousPageCursor: hits[0].sort, + nextPageCursor: hits[hits.length - 1].sort, + } + : undefined; + + const anomalies = hits.map((result) => { + const { + // eslint-disable-next-line @typescript-eslint/naming-convention + job_id, + record_score: anomalyScore, + typical, + actual, + // partition_field_value: dataset, + bucket_span: duration, + timestamp: anomalyStartTime, + by_field_value: categoryId, + } = result._source; + + return { + id: result._id, + anomalyScore, + // dataset, + typical: typical[0], + actual: actual[0], + jobId: job_id, + startTime: anomalyStartTime, + duration: duration * 1000, + categoryId, + }; + }); + + const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan(); + + return { + anomalies, + paginationCursors, + hasMoreEntries, + timing: { + spans: [fetchLogEntryAnomaliesSpan], + }, + }; +} + +// TODO: FIgure out why we need datasets +export async function getMetricK8sAnomaliesDatasets( + context: { + infra: { + mlSystem: MlSystem; + mlAnomalyDetectors: MlAnomalyDetectors; + spaceId: string; + }; + }, + sourceId: string, + startTime: number, + endTime: number +) { + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + + if (jobIds.length === 0) { + throw new InsufficientAnomalyMlJobsConfigured( + 'Log rate or categorisation ML jobs need to be configured to search for anomaly datasets' + ); + } + + const { + data: datasets, + timing: { spans: datasetsSpans }, + } = await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds); + + return { + datasets, + timing: { + spans: [...jobSpans, ...datasetsSpans], + }, + }; +} diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/common.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/common.ts new file mode 100644 index 0000000000000..63e39ef022392 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/common.ts @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +export const defaultRequestParameters = { + allowNoIndices: true, + ignoreUnavailable: true, + trackScores: false, + trackTotalHits: false, +}; + +export const createJobIdFilters = (jobId: string) => [ + { + term: { + job_id: { + value: jobId, + }, + }, + }, +]; + +export const createJobIdsFilters = (jobIds: string[]) => [ + { + terms: { + job_id: jobIds, + }, + }, +]; + +export const createTimeRangeFilters = (startTime: number, endTime: number) => [ + { + range: { + timestamp: { + gte: startTime, + lte: endTime, + }, + }, + }, +]; + +export const createResultTypeFilters = (resultTypes: Array<'model_plot' | 'record'>) => [ + { + terms: { + result_type: resultTypes, + }, + }, +]; + +export const createCategoryIdFilters = (categoryIds: number[]) => [ + { + terms: { + category_id: categoryIds, + }, + }, +]; + +export const createDatasetsFilters = (datasets?: string[]) => + datasets && datasets.length > 0 + ? [ + { + terms: { + partition_field_value: datasets, + }, + }, + ] + : []; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/index.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/index.ts new file mode 100644 index 0000000000000..5a42011e1cea1 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +export * from './metrics_k8s_anomalies'; +export * from './metrics_hosts_anomalies'; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/log_entry_data_sets.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/log_entry_data_sets.ts new file mode 100644 index 0000000000000..53971a91d86b1 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/log_entry_data_sets.ts @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import * as rt from 'io-ts'; +import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; +import { + createJobIdsFilters, + createResultTypeFilters, + createTimeRangeFilters, + defaultRequestParameters, +} from './common'; + +export const createLogEntryDatasetsQuery = ( + jobIds: string[], + startTime: number, + endTime: number, + size: number, + afterKey?: CompositeDatasetKey +) => ({ + ...defaultRequestParameters, + body: { + query: { + bool: { + filter: [ + ...createJobIdsFilters(jobIds), + ...createTimeRangeFilters(startTime, endTime), + ...createResultTypeFilters(['model_plot']), + ], + }, + }, + aggs: { + dataset_buckets: { + composite: { + after: afterKey, + size, + sources: [ + { + dataset: { + terms: { + field: 'partition_field_value', + order: 'asc', + }, + }, + }, + ], + }, + }, + }, + }, + size: 0, +}); + +const compositeDatasetKeyRT = rt.type({ + dataset: rt.string, +}); + +export type CompositeDatasetKey = rt.TypeOf; + +const logEntryDatasetBucketRT = rt.type({ + key: compositeDatasetKeyRT, +}); + +export type LogEntryDatasetBucket = rt.TypeOf; + +export const logEntryDatasetsResponseRT = rt.intersection([ + commonSearchSuccessResponseFieldsRT, + rt.partial({ + aggregations: rt.type({ + dataset_buckets: rt.intersection([ + rt.type({ + buckets: rt.array(logEntryDatasetBucketRT), + }), + rt.partial({ + after_key: compositeDatasetKeyRT, + }), + ]), + }), + }), +]); + +export type LogEntryDatasetsResponse = rt.TypeOf; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_hosts_anomalies.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_hosts_anomalies.ts new file mode 100644 index 0000000000000..b61119b60bc18 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_hosts_anomalies.ts @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import * as rt from 'io-ts'; +import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; +import { + createJobIdsFilters, + createTimeRangeFilters, + createResultTypeFilters, + defaultRequestParameters, +} from './common'; +import { Sort, Pagination } from '../../../../common/http_api/infra_ml'; + +// TODO: Reassess validity of this against ML docs +const TIEBREAKER_FIELD = '_doc'; + +const sortToMlFieldMap = { + dataset: 'partition_field_value', + anomalyScore: 'record_score', + startTime: 'timestamp', +}; + +export const createMetricsHostsAnomaliesQuery = ( + jobIds: string[], + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) => { + const { field } = sort; + const { pageSize } = pagination; + + const filters = [ + ...createJobIdsFilters(jobIds), + ...createTimeRangeFilters(startTime, endTime), + ...createResultTypeFilters(['record']), + ]; + + const sourceFields = [ + 'job_id', + 'record_score', + 'typical', + 'actual', + 'partition_field_value', + 'timestamp', + 'bucket_span', + 'by_field_value', + 'host.name', + 'influencers.influencer_field_name', + 'influencers.influencer_field_values', + ]; + + const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination); + + const sortOptions = [ + { [sortToMlFieldMap[field]]: querySortDirection }, + { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker + ]; + + const resultsQuery = { + ...defaultRequestParameters, + body: { + query: { + bool: { + filter: filters, + }, + }, + search_after: queryCursor, + sort: sortOptions, + size: pageSize, + _source: sourceFields, + }, + }; + + return resultsQuery; +}; + +export const metricsHostsAnomalyHitRT = rt.type({ + _id: rt.string, + _source: rt.intersection([ + rt.type({ + job_id: rt.string, + record_score: rt.number, + typical: rt.array(rt.number), + actual: rt.array(rt.number), + 'host.name': rt.array(rt.string), + bucket_span: rt.number, + timestamp: rt.number, + }), + rt.partial({ + by_field_value: rt.string, + }), + ]), + sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]), +}); + +export type MetricsHostsAnomalyHit = rt.TypeOf; + +export const metricsHostsAnomaliesResponseRT = rt.intersection([ + commonSearchSuccessResponseFieldsRT, + rt.type({ + hits: rt.type({ + hits: rt.array(metricsHostsAnomalyHitRT), + }), + }), +]); + +export type MetricsHostsAnomaliesResponseRT = rt.TypeOf; + +const parsePaginationCursor = (sort: Sort, pagination: Pagination) => { + const { cursor } = pagination; + const { direction } = sort; + + if (!cursor) { + return { querySortDirection: direction, queryCursor: undefined }; + } + + // We will always use ES's search_after to paginate, to mimic "search_before" behaviour we + // need to reverse the user's chosen search direction for the ES query. + if ('searchBefore' in cursor) { + return { + querySortDirection: direction === 'desc' ? 'asc' : 'desc', + queryCursor: cursor.searchBefore, + }; + } else { + return { querySortDirection: direction, queryCursor: cursor.searchAfter }; + } +}; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_k8s_anomalies.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_k8s_anomalies.ts new file mode 100644 index 0000000000000..84ed8b064c5ca --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/metrics_k8s_anomalies.ts @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; +import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; +import { + createJobIdsFilters, + createTimeRangeFilters, + createResultTypeFilters, + defaultRequestParameters, +} from './common'; +import { Sort, Pagination } from '../../../../common/http_api/infra_ml'; + +// TODO: Reassess validity of this against ML docs +const TIEBREAKER_FIELD = '_doc'; + +const sortToMlFieldMap = { + dataset: 'partition_field_value', + anomalyScore: 'record_score', + startTime: 'timestamp', +}; + +export const createMetricsK8sAnomaliesQuery = ( + jobIds: string[], + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination +) => { + const { field } = sort; + const { pageSize } = pagination; + + const filters = [ + ...createJobIdsFilters(jobIds), + ...createTimeRangeFilters(startTime, endTime), + ...createResultTypeFilters(['record']), + ]; + + const sourceFields = [ + 'job_id', + 'record_score', + 'typical', + 'actual', + 'partition_field_value', + 'timestamp', + 'bucket_span', + 'by_field_value', + ]; + + const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination); + + const sortOptions = [ + { [sortToMlFieldMap[field]]: querySortDirection }, + { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker + ]; + + const resultsQuery = { + ...defaultRequestParameters, + body: { + query: { + bool: { + filter: filters, + }, + }, + search_after: queryCursor, + sort: sortOptions, + size: pageSize, + _source: sourceFields, + }, + }; + + return resultsQuery; +}; + +export const metricsK8sAnomalyHitRT = rt.type({ + _id: rt.string, + _source: rt.intersection([ + rt.type({ + job_id: rt.string, + record_score: rt.number, + typical: rt.array(rt.number), + actual: rt.array(rt.number), + // partition_field_value: rt.string, + bucket_span: rt.number, + timestamp: rt.number, + }), + rt.partial({ + by_field_value: rt.string, + }), + ]), + sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]), +}); + +export type MetricsK8sAnomalyHit = rt.TypeOf; + +export const metricsK8sAnomaliesResponseRT = rt.intersection([ + commonSearchSuccessResponseFieldsRT, + rt.type({ + hits: rt.type({ + hits: rt.array(metricsK8sAnomalyHitRT), + }), + }), +]); + +export type MetricsK8sAnomaliesResponseRT = rt.TypeOf; + +const parsePaginationCursor = (sort: Sort, pagination: Pagination) => { + const { cursor } = pagination; + const { direction } = sort; + + if (!cursor) { + return { querySortDirection: direction, queryCursor: undefined }; + } + + // We will always use ES's search_after to paginate, to mimic "search_before" behaviour we + // need to reverse the user's chosen search direction for the ES query. + if ('searchBefore' in cursor) { + return { + querySortDirection: direction === 'desc' ? 'asc' : 'desc', + queryCursor: cursor.searchBefore, + }; + } else { + return { querySortDirection: direction, queryCursor: cursor.searchAfter }; + } +}; diff --git a/x-pack/plugins/infra/server/lib/infra_ml/queries/ml_jobs.ts b/x-pack/plugins/infra/server/lib/infra_ml/queries/ml_jobs.ts new file mode 100644 index 0000000000000..ee4ccbfaeb5a7 --- /dev/null +++ b/x-pack/plugins/infra/server/lib/infra_ml/queries/ml_jobs.ts @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +export const createMlJobsQuery = (jobIds: string[]) => ({ + method: 'GET', + path: `/_ml/anomaly_detectors/${jobIds.join(',')}`, + query: { + allow_no_jobs: true, + }, +}); + +export const mlJobRT = rt.type({ + job_id: rt.string, + custom_settings: rt.unknown, +}); + +export const mlJobsResponseRT = rt.type({ + jobs: rt.array(mlJobRT), +}); diff --git a/x-pack/plugins/infra/server/routes/infra_ml/index.ts b/x-pack/plugins/infra/server/routes/infra_ml/index.ts new file mode 100644 index 0000000000000..38684cb22e237 --- /dev/null +++ b/x-pack/plugins/infra/server/routes/infra_ml/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './results'; diff --git a/x-pack/plugins/infra/server/routes/infra_ml/results/index.ts b/x-pack/plugins/infra/server/routes/infra_ml/results/index.ts new file mode 100644 index 0000000000000..82e30291faa20 --- /dev/null +++ b/x-pack/plugins/infra/server/routes/infra_ml/results/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './metrics_hosts_anomalies'; +export * from './metrics_k8s_anomalies'; diff --git a/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_hosts_anomalies.ts b/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_hosts_anomalies.ts new file mode 100644 index 0000000000000..29122ae159cdc --- /dev/null +++ b/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_hosts_anomalies.ts @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import Boom from 'boom'; +import { InfraBackendLibs } from '../../../lib/infra_types'; +import { + INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, + getMetricsHostsAnomaliesSuccessReponsePayloadRT, + getMetricsHostsAnomaliesRequestPayloadRT, + GetMetricsHostsAnomaliesRequestPayload, + Sort, + Pagination, +} from '../../../../common/http_api/infra_ml'; +import { createValidationFunction } from '../../../../common/runtime_types'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; + +import { isMlPrivilegesError } from '../../../lib/infra_ml/errors'; +import { getMetricsHostsAnomalies } from '../../../lib/infra_ml'; + +export const initGetHostsAnomaliesRoute = ({ framework }: InfraBackendLibs) => { + framework.registerRoute( + { + method: 'post', + path: INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, + validate: { + body: createValidationFunction(getMetricsHostsAnomaliesRequestPayloadRT), + }, + }, + framework.router.handleLegacyErrors(async (requestContext, request, response) => { + const { + data: { + sourceId, + timeRange: { startTime, endTime }, + sort: sortParam, + pagination: paginationParam, + }, + } = request.body; + + const { sort, pagination } = getSortAndPagination(sortParam, paginationParam); + + try { + assertHasInfraMlPlugins(requestContext); + + const { + data: anomalies, + paginationCursors, + hasMoreEntries, + timing, + } = await getMetricsHostsAnomalies( + requestContext, + sourceId, + startTime, + endTime, + sort, + pagination + ); + + // console.log('---- anomalies', anomalies); + + return response.ok({ + body: getMetricsHostsAnomaliesSuccessReponsePayloadRT.encode({ + data: { + anomalies, + hasMoreEntries, + paginationCursors, + }, + timing, + }), + }); + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } + + if (isMlPrivilegesError(error)) { + return response.customError({ + statusCode: 403, + body: { + message: error.message, + }, + }); + } + + return response.customError({ + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 'An unexpected error occurred', + }, + }); + } + }) + ); +}; + +const getSortAndPagination = ( + sort: Partial = {}, + pagination: Partial = {} +): { + sort: Sort; + pagination: Pagination; +} => { + const sortDefaults = { + field: 'anomalyScore' as const, + direction: 'desc' as const, + }; + + const sortWithDefaults = { + ...sortDefaults, + ...sort, + }; + + const paginationDefaults = { + pageSize: 50, + }; + + const paginationWithDefaults = { + ...paginationDefaults, + ...pagination, + }; + + return { sort: sortWithDefaults, pagination: paginationWithDefaults }; +}; diff --git a/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_k8s_anomalies.ts b/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_k8s_anomalies.ts new file mode 100644 index 0000000000000..5260c55836c59 --- /dev/null +++ b/x-pack/plugins/infra/server/routes/infra_ml/results/metrics_k8s_anomalies.ts @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import Boom from 'boom'; +import { InfraBackendLibs } from '../../../lib/infra_types'; +import { + INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, + getMetricsK8sAnomaliesSuccessReponsePayloadRT, + getMetricsK8sAnomaliesRequestPayloadRT, + GetMetricsK8sAnomaliesRequestPayload, + Sort, + Pagination, +} from '../../../../common/http_api/infra_ml'; +import { createValidationFunction } from '../../../../common/runtime_types'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; +import { getMetricK8sAnomalies } from '../../../lib/infra_ml'; +import { isMlPrivilegesError } from '../../../lib/infra_ml/errors'; + +export const initGetK8sAnomaliesRoute = ({ framework }: InfraBackendLibs) => { + framework.registerRoute( + { + method: 'post', + path: INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, + validate: { + body: createValidationFunction(getMetricsK8sAnomaliesRequestPayloadRT), + }, + }, + framework.router.handleLegacyErrors(async (requestContext, request, response) => { + const { + data: { + sourceId, + timeRange: { startTime, endTime }, + sort: sortParam, + pagination: paginationParam, + }, + } = request.body; + + const { sort, pagination } = getSortAndPagination(sortParam, paginationParam); + + try { + assertHasInfraMlPlugins(requestContext); + + const { + data: anomalies, + paginationCursors, + hasMoreEntries, + timing, + } = await getMetricK8sAnomalies( + requestContext, + sourceId, + startTime, + endTime, + sort, + pagination + ); + + return response.ok({ + body: getMetricsK8sAnomaliesSuccessReponsePayloadRT.encode({ + data: { + anomalies, + hasMoreEntries, + paginationCursors, + }, + timing, + }), + }); + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } + + if (isMlPrivilegesError(error)) { + return response.customError({ + statusCode: 403, + body: { + message: error.message, + }, + }); + } + + return response.customError({ + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 'An unexpected error occurred', + }, + }); + } + }) + ); +}; + +const getSortAndPagination = ( + sort: Partial = {}, + pagination: Partial = {} +): { + sort: Sort; + pagination: Pagination; +} => { + const sortDefaults = { + field: 'anomalyScore' as const, + direction: 'desc' as const, + }; + + const sortWithDefaults = { + ...sortDefaults, + ...sort, + }; + + const paginationDefaults = { + pageSize: 50, + }; + + const paginationWithDefaults = { + ...paginationDefaults, + ...pagination, + }; + + return { sort: sortWithDefaults, pagination: paginationWithDefaults }; +}; diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts new file mode 100644 index 0000000000000..5e84e3a50bb44 --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { TestScheduler } from 'rxjs/testing'; +import { createRateLimiter } from './rxjs_utils'; + +describe('createRateLimiter', () => { + it('should rate limit correctly with 1 request per 10ms', async () => { + const scheduler = new TestScheduler((actual, expected) => { + expect(actual).toEqual(expected); + }); + + scheduler.run(({ expectObservable, cold }) => { + const source = cold('a-b-c-d-e-f|'); + const rateLimiter = createRateLimiter(10, 1, 2, scheduler); + const obs = source.pipe(rateLimiter()); + const results = 'a 9ms b 9ms c 9ms d 9ms e 9ms (f|)'; + expectObservable(obs).toBe(results); + }); + }); +}); diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts index dddade6841460..3bbfbbd4ec1bf 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts @@ -5,6 +5,7 @@ */ import * as Rx from 'rxjs'; +import { concatMap, delay } from 'rxjs/operators'; export class AbortError extends Error {} @@ -45,63 +46,35 @@ export const toPromiseAbortable = ( export function createRateLimiter( ratelimitIntervalMs: number, - ratelimitRequestPerInterval: number + ratelimitRequestPerInterval: number, + maxDelay: number, + scheduler = Rx.asyncScheduler ) { - function createCurrentInterval() { - return { - startedAt: Rx.asyncScheduler.now(), - numRequests: 0, - }; - } + let intervalEnd = 0; + let countInCurrentInterval = 0; - let currentInterval: { startedAt: number; numRequests: number } = createCurrentInterval(); - let observers: Array<[Rx.Subscriber, any]> = []; - let timerSubscription: Rx.Subscription | undefined; + function createRateLimitOperator(): Rx.OperatorFunction { + return Rx.pipe( + concatMap(function rateLimit(value: T) { + const now = scheduler.now(); + if (intervalEnd <= now) { + countInCurrentInterval = 1; + intervalEnd = now + ratelimitIntervalMs; + return Rx.of(value); + } else if (intervalEnd >= now + maxDelay) { + // Re-apply the rate limit after maxDelay rather than scheduling too far into the future, since observers may unsubscribe in the meantime + return Rx.of(value).pipe(delay(maxDelay, scheduler), createRateLimitOperator()); + } else { + if (++countInCurrentInterval > ratelimitRequestPerInterval) { + countInCurrentInterval = 1; + intervalEnd += ratelimitIntervalMs; + } - function createTimeout() { - if (timerSubscription) { - return; - } - timerSubscription = Rx.asyncScheduler.schedule(() => { - timerSubscription = undefined; - currentInterval = createCurrentInterval(); - for (const [waitingObserver, value] of observers) { - if (currentInterval.numRequests >= ratelimitRequestPerInterval) { - createTimeout(); - continue; + const wait = intervalEnd - ratelimitIntervalMs - now; + return wait > 0 ?
Rx.of(value).pipe(delay(wait, scheduler)) : Rx.of(value); } - currentInterval.numRequests++; - waitingObserver.next(value); - } - }, ratelimitIntervalMs); + }) + ); } - - return function limit(): Rx.MonoTypeOperatorFunction { - return (observable) => - new Rx.Observable((observer) => { - const subscription = observable.subscribe({ - next(value) { - if (currentInterval.numRequests < ratelimitRequestPerInterval) { - currentInterval.numRequests++; - observer.next(value); - return; - } - - observers = [...observers, [observer, value]]; - createTimeout(); - }, - error(err) { - observer.error(err); - }, - complete() { - observer.complete(); - }, - }); - - return () => { - observers = observers.filter((o) => o[0] !== observer); - subscription.unsubscribe(); - }; - }); - }; + return createRateLimitOperator; } diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts index 4122677a615ca..fbbed87b031e2 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts @@ -10,7 +10,6 @@ import { shareReplay, distinctUntilKeyChanged, switchMap, - mergeMap, merge, filter, timeout, @@ -33,6 +32,8 @@ import { import { appContextService } from '../../app_context'; import { toPromiseAbortable, AbortError, createRateLimiter } from './rxjs_utils'; +const RATE_LIMIT_MAX_DELAY_MS = 5 * 60 * 1000; // 5 minutes + function getInternalUserSOClient() { const fakeRequest = ({ headers: {}, @@ -135,11 +136,19 @@ export function agentCheckinStateNewActionsFactory() { const agentPolicies$ = new Map>(); const newActions$ = createNewActionsSharedObservable(); // Rx operators - const rateLimiter = createRateLimiter( + const pollingTimeoutMs = appContextService.getConfig()?.fleet.pollingRequestTimeout ?? 0; + const rateLimiterIntervalMs = appContextService.getConfig()?.fleet.agentPolicyRolloutRateLimitIntervalMs ?? - AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS, + AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS; + const rateLimiterRequestPerInterval = appContextService.getConfig()?.fleet.agentPolicyRolloutRateLimitRequestPerInterval ?? - AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL + AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL; + const rateLimiterMaxDelay = Math.min(RATE_LIMIT_MAX_DELAY_MS, pollingTimeoutMs); + + const rateLimiter = createRateLimiter( + rateLimiterIntervalMs, + rateLimiterRequestPerInterval, + rateLimiterMaxDelay ); async function subscribeToNewActions( @@ -162,7 +171,7 @@ export function agentCheckinStateNewActionsFactory() { const stream$ = agentPolicy$.pipe( timeout( // Set a timeout 3s before the real timeout to have a chance to respond an empty response before socket timeout - Math.max((appContextService.getConfig()?.fleet.pollingRequestTimeout ?? 
0) - 3000, 3000) + Math.max(pollingTimeoutMs - 3000, 3000) ), filter( (action) => @@ -173,9 +182,9 @@ export function agentCheckinStateNewActionsFactory() { (!agent.policy_revision || action.policy_revision > agent.policy_revision) ), rateLimiter(), - mergeMap((policyAction) => createAgentActionFromPolicyAction(soClient, agent, policyAction)), + switchMap((policyAction) => createAgentActionFromPolicyAction(soClient, agent, policyAction)), merge(newActions$), - mergeMap(async (data) => { + switchMap(async (data) => { if (!data) { return; } diff --git a/x-pack/plugins/lens/common/constants.ts b/x-pack/plugins/lens/common/constants.ts index ea2331a577743..9099554cb8c7e 100644 --- a/x-pack/plugins/lens/common/constants.ts +++ b/x-pack/plugins/lens/common/constants.ts @@ -17,6 +17,6 @@ export function getEditPath(id: string) { return `#/edit/${encodeURIComponent(id)}`; } -export function getFullPath(id: string) { - return `/app/${PLUGIN_ID}${getEditPath(id)}`; +export function getFullPath(id?: string) { + return `/app/${PLUGIN_ID}${id ? getEditPath(id) : getBasePath()}`; } diff --git a/x-pack/plugins/lens/kibana.json b/x-pack/plugins/lens/kibana.json index 67d9d5ef64483..f5fba766e60ee 100644 --- a/x-pack/plugins/lens/kibana.json +++ b/x-pack/plugins/lens/kibana.json @@ -13,7 +13,7 @@ "dashboard", "charts" ], - "optionalPlugins": ["embeddable", "usageCollection", "taskManager", "uiActions"], + "optionalPlugins": ["embeddable", "usageCollection", "taskManager", "uiActions", "globalSearch"], "configPath": ["xpack", "lens"], "extraPublicDirs": ["common/constants"], "requiredBundles": ["savedObjects", "kibanaUtils", "kibanaReact", "embeddable"] diff --git a/x-pack/plugins/lens/public/plugin.ts b/x-pack/plugins/lens/public/plugin.ts index f9c63f54d6713..60c7011d55300 100644 --- a/x-pack/plugins/lens/public/plugin.ts +++ b/x-pack/plugins/lens/public/plugin.ts @@ -11,6 +11,7 @@ import { ExpressionsSetup, ExpressionsStart } from 'src/plugins/expressions/publ import { VisualizationsSetup } from 'src/plugins/visualizations/public'; import { NavigationPublicPluginStart } from 'src/plugins/navigation/public'; import { UrlForwardingSetup } from 'src/plugins/url_forwarding/public'; +import { GlobalSearchPluginSetup } from '../../global_search/public'; import { ChartsPluginSetup } from '../../../../src/plugins/charts/public'; import { EditorFrameService } from './editor_frame_service'; import { @@ -31,6 +32,7 @@ import { UiActionsStart } from '../../../../src/plugins/ui_actions/public'; import { NOT_INTERNATIONALIZED_PRODUCT_NAME } from '../common'; import { EditorFrameStart } from './types'; import { getLensAliasConfig } from './vis_type_alias'; +import { getSearchProvider } from './search_provider'; import './index.scss'; @@ -41,6 +43,7 @@ export interface LensPluginSetupDependencies { embeddable?: EmbeddableSetup; visualizations: VisualizationsSetup; charts: ChartsPluginSetup; + globalSearch?: GlobalSearchPluginSetup; } export interface LensPluginStartDependencies { @@ -78,6 +81,7 @@ export class LensPlugin { embeddable, visualizations, charts, + globalSearch, }: LensPluginSetupDependencies ) { const editorFrameSetupInterface = this.editorFrameService.setup(core, { @@ -116,6 +120,20 @@ export class LensPlugin { }, }); + if (globalSearch) { + globalSearch.registerResultProvider( + getSearchProvider( + core.getStartServices().then( + ([ + { + application: { capabilities }, + }, + ]) => capabilities + ) + ) + ); + } + urlForwarding.forwardApp('lens', 'lens'); } diff --git 
a/x-pack/plugins/lens/public/search_provider.ts b/x-pack/plugins/lens/public/search_provider.ts new file mode 100644 index 0000000000000..c19e7970b45ae --- /dev/null +++ b/x-pack/plugins/lens/public/search_provider.ts @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import levenshtein from 'js-levenshtein'; +import { ApplicationStart } from 'kibana/public'; +import { from } from 'rxjs'; +import { i18n } from '@kbn/i18n'; +import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public'; +import { GlobalSearchResultProvider } from '../../global_search/public'; +import { getFullPath } from '../common'; + +/** + * Global search provider adding a Lens entry. + * This is necessary because Lens does not show up in the nav bar and is filtered out by the + * default app provider. + * + * It is inlining the same search term matching logic as the application search provider. + * + * TODO: This is a workaround and can be removed once there is a generic way to register sub features + * of apps. In this case, Lens should be considered a feature of Visualize. + */ +export const getSearchProvider: ( + uiCapabilities: Promise +) => GlobalSearchResultProvider = (uiCapabilities) => ({ + id: 'lens', + find: (term) => { + return from( + uiCapabilities.then(({ navLinks: { visualize: visualizeNavLink } }) => { + if (!visualizeNavLink) { + return []; + } + const title = i18n.translate('xpack.lens.searchTitle', { + defaultMessage: 'Lens: create visualizations', + description: 'Lens is a product name and should not be translated', + }); + const searchableTitle = title.toLowerCase(); + + term = term.toLowerCase(); + let score = 0; + + // shortcuts to avoid calculating the distance when there is an exact match somewhere. 
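+        // Summary of the checks below: an exact title match scores 100, a prefix match 90, and a substring match 75; otherwise a Levenshtein-based ratio of at least 60 is used, and anything weaker returns no result.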
+ if (searchableTitle === term) { + score = 100; + } else if (searchableTitle.startsWith(term)) { + score = 90; + } else if (searchableTitle.includes(term)) { + score = 75; + } else { + const length = Math.max(term.length, searchableTitle.length); + const distance = levenshtein(term, searchableTitle); + + // maximum lev distance is length, we compute the match ratio (lower distance is better) + const ratio = Math.floor((1 - distance / length) * 100); + if (ratio >= 60) { + score = ratio; + } + } + if (score === 0) return []; + return [ + { + id: 'lens', + title, + type: 'application', + icon: 'logoKibana', + meta: { + categoryId: DEFAULT_APP_CATEGORIES.kibana.id, + categoryLabel: DEFAULT_APP_CATEGORIES.kibana.label, + }, + score, + url: getFullPath(), + }, + ]; + }) + ); + }, +}); diff --git a/x-pack/plugins/maps/common/constants.ts b/x-pack/plugins/maps/common/constants.ts index d72d04d2a1843..be891b6e59608 100644 --- a/x-pack/plugins/maps/common/constants.ts +++ b/x-pack/plugins/maps/common/constants.ts @@ -37,6 +37,7 @@ export const FONTS_API_PATH = `${GIS_API_PATH}/fonts`; export const API_ROOT_PATH = `/${GIS_API_PATH}`; export const MVT_GETTILE_API_PATH = 'mvt/getTile'; +export const MVT_GETGRIDTILE_API_PATH = 'mvt/getGridTile'; export const MVT_SOURCE_LAYER_NAME = 'source_layer'; export const KBN_TOO_MANY_FEATURES_PROPERTY = '__kbn_too_many_features__'; export const KBN_TOO_MANY_FEATURES_IMAGE_ID = '__kbn_too_many_features_image_id__'; @@ -165,8 +166,13 @@ export enum GRID_RESOLUTION { COARSE = 'COARSE', FINE = 'FINE', MOST_FINE = 'MOST_FINE', + SUPER_FINE = 'SUPER_FINE', } +export const SUPER_FINE_ZOOM_DELTA = 7; // (2 ^ SUPER_FINE_ZOOM_DELTA) ^ 2 = number of cells in a given tile +export const GEOTILE_GRID_AGG_NAME = 'gridSplit'; +export const GEOCENTROID_AGG_NAME = 'gridCentroid'; + export const TOP_TERM_PERCENTAGE_SUFFIX = '__percentage'; export const COUNT_PROP_LABEL = i18n.translate('xpack.maps.aggs.defaultCountLabel', { @@ -230,8 +236,6 @@ export enum SCALING_TYPES { MVT = 'MVT', } -export const RGBA_0000 = 'rgba(0,0,0,0)'; - export enum MVT_FIELD_TYPE { STRING = 'String', NUMBER = 'Number', diff --git a/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.d.ts b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.d.ts new file mode 100644 index 0000000000000..b1c1b181d8130 --- /dev/null +++ b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.d.ts @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Feature } from 'geojson'; +import { RENDER_AS } from '../constants'; + +export function convertCompositeRespToGeoJson(esResponse: any, renderAs: RENDER_AS): Feature[]; +export function convertRegularRespToGeoJson(esResponse: any, renderAs: RENDER_AS): Feature[]; diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.js b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.js similarity index 79% rename from x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.js rename to x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.js index 35dbebdfd3c8a..a8f32bb4e7f5d 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.js +++ b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.js @@ -5,12 +5,12 @@ */ import _ from 'lodash'; -import { RENDER_AS } from '../../../../common/constants'; -import { getTileBoundingBox } from './geo_tile_utils'; -import { extractPropertiesFromBucket } from '../../util/es_agg_utils'; -import { clamp } from '../../../../common/elasticsearch_geo_utils'; +import { RENDER_AS, GEOTILE_GRID_AGG_NAME, GEOCENTROID_AGG_NAME } from '../constants'; +import { getTileBoundingBox } from '../geo_tile_utils'; +import { extractPropertiesFromBucket } from './es_agg_utils'; +import { clamp } from './elasticsearch_geo_utils'; -const GRID_BUCKET_KEYS_TO_IGNORE = ['key', 'gridCentroid']; +const GRID_BUCKET_KEYS_TO_IGNORE = ['key', GEOCENTROID_AGG_NAME]; export function convertCompositeRespToGeoJson(esResponse, renderAs) { return convertToGeoJson( @@ -20,7 +20,7 @@ export function convertCompositeRespToGeoJson(esResponse, renderAs) { return _.get(esResponse, 'aggregations.compositeSplit.buckets', []); }, (gridBucket) => { - return gridBucket.key.gridSplit; + return gridBucket.key[GEOTILE_GRID_AGG_NAME]; } ); } @@ -30,7 +30,7 @@ export function convertRegularRespToGeoJson(esResponse, renderAs) { esResponse, renderAs, (esResponse) => { - return _.get(esResponse, 'aggregations.gridSplit.buckets', []); + return _.get(esResponse, `aggregations.${GEOTILE_GRID_AGG_NAME}.buckets`, []); }, (gridBucket) => { return gridBucket.key; @@ -49,7 +49,7 @@ function convertToGeoJson(esResponse, renderAs, pluckGridBuckets, pluckGridKey) type: 'Feature', geometry: rowToGeometry({ gridKey, - gridCentroid: gridBucket.gridCentroid, + [GEOCENTROID_AGG_NAME]: gridBucket[GEOCENTROID_AGG_NAME], renderAs, }), id: gridKey, diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.test.ts b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.test.ts similarity index 97% rename from x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.test.ts rename to x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.test.ts index 523cc86915010..ee40a1f2fc751 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/convert_to_geojson.test.ts +++ b/x-pack/plugins/maps/common/elasticsearch_util/convert_to_geojson.test.ts @@ -4,11 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -jest.mock('../../../kibana_services', () => {}); - // @ts-ignore import { convertCompositeRespToGeoJson, convertRegularRespToGeoJson } from './convert_to_geojson'; -import { RENDER_AS } from '../../../../common/constants'; +import { RENDER_AS } from '../constants'; describe('convertCompositeRespToGeoJson', () => { const esResponse = { diff --git a/x-pack/plugins/maps/common/elasticsearch_geo_utils.d.ts b/x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.d.ts similarity index 89% rename from x-pack/plugins/maps/common/elasticsearch_geo_utils.d.ts rename to x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.d.ts index e57efca94d95e..cff8ba119e1de 100644 --- a/x-pack/plugins/maps/common/elasticsearch_geo_utils.d.ts +++ b/x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.d.ts @@ -5,8 +5,8 @@ */ import { FeatureCollection, GeoJsonProperties } from 'geojson'; -import { MapExtent } from './descriptor_types'; -import { ES_GEO_FIELD_TYPE } from './constants'; +import { MapExtent } from '../descriptor_types'; +import { ES_GEO_FIELD_TYPE } from '../constants'; export function scaleBounds(bounds: MapExtent, scaleFactor: number): MapExtent; diff --git a/x-pack/plugins/maps/common/elasticsearch_geo_utils.js b/x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.js similarity index 98% rename from x-pack/plugins/maps/common/elasticsearch_geo_utils.js rename to x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.js index f2bf83ae18bb0..be214e3b01e67 100644 --- a/x-pack/plugins/maps/common/elasticsearch_geo_utils.js +++ b/x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.js @@ -15,9 +15,9 @@ import { POLYGON_COORDINATES_EXTERIOR_INDEX, LON_INDEX, LAT_INDEX, -} from '../common/constants'; -import { getEsSpatialRelationLabel } from './i18n_getters'; -import { FILTERS } from '../../../../src/plugins/data/common'; +} from '../constants'; +import { getEsSpatialRelationLabel } from '../i18n_getters'; +import { FILTERS } from '../../../../../src/plugins/data/common'; import turfCircle from '@turf/circle'; const SPATIAL_FILTER_TYPE = FILTERS.SPATIAL_FILTER; diff --git a/x-pack/plugins/maps/common/elasticsearch_geo_utils.test.js b/x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.test.js similarity index 100% rename from x-pack/plugins/maps/common/elasticsearch_geo_utils.test.js rename to x-pack/plugins/maps/common/elasticsearch_util/elasticsearch_geo_utils.test.js diff --git a/x-pack/plugins/maps/public/classes/util/es_agg_utils.test.ts b/x-pack/plugins/maps/common/elasticsearch_util/es_agg_utils.test.ts similarity index 100% rename from x-pack/plugins/maps/public/classes/util/es_agg_utils.test.ts rename to x-pack/plugins/maps/common/elasticsearch_util/es_agg_utils.test.ts diff --git a/x-pack/plugins/maps/public/classes/util/es_agg_utils.ts b/x-pack/plugins/maps/common/elasticsearch_util/es_agg_utils.ts similarity index 92% rename from x-pack/plugins/maps/public/classes/util/es_agg_utils.ts rename to x-pack/plugins/maps/common/elasticsearch_util/es_agg_utils.ts index 329a2a6fc64fb..7828c3cc6410b 100644 --- a/x-pack/plugins/maps/public/classes/util/es_agg_utils.ts +++ b/x-pack/plugins/maps/common/elasticsearch_util/es_agg_utils.ts @@ -5,8 +5,8 @@ */ import { i18n } from '@kbn/i18n'; import _ from 'lodash'; -import { IndexPattern, IFieldType } from '../../../../../../src/plugins/data/public'; -import { TOP_TERM_PERCENTAGE_SUFFIX } from '../../../common/constants'; +import { 
IndexPattern, IFieldType } from '../../../../../src/plugins/data/common'; +import { TOP_TERM_PERCENTAGE_SUFFIX } from '../constants'; export function getField(indexPattern: IndexPattern, fieldName: string) { const field = indexPattern.fields.getByName(fieldName); diff --git a/x-pack/plugins/maps/common/elasticsearch_util/index.ts b/x-pack/plugins/maps/common/elasticsearch_util/index.ts new file mode 100644 index 0000000000000..ffb4a542374fa --- /dev/null +++ b/x-pack/plugins/maps/common/elasticsearch_util/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './es_agg_utils'; +export * from './convert_to_geojson'; +export * from './elasticsearch_geo_utils'; diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.test.js b/x-pack/plugins/maps/common/geo_tile_utils.test.js similarity index 96% rename from x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.test.js rename to x-pack/plugins/maps/common/geo_tile_utils.test.js index 88a6ce048a178..ae2623e168766 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.test.js +++ b/x-pack/plugins/maps/common/geo_tile_utils.test.js @@ -4,8 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -jest.mock('../../../kibana_services', () => {}); - import { parseTileKey, getTileBoundingBox, expandToTileBoundaries } from './geo_tile_utils'; it('Should parse tile key', () => { diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.js b/x-pack/plugins/maps/common/geo_tile_utils.ts similarity index 65% rename from x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.js rename to x-pack/plugins/maps/common/geo_tile_utils.ts index 89b24522e4275..c6e35224458c5 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/geo_tile_utils.js +++ b/x-pack/plugins/maps/common/geo_tile_utils.ts @@ -5,18 +5,32 @@ */ import _ from 'lodash'; -import { DECIMAL_DEGREES_PRECISION } from '../../../../common/constants'; -import { clampToLatBounds } from '../../../../common/elasticsearch_geo_utils'; +import { DECIMAL_DEGREES_PRECISION } from './constants'; +import { clampToLatBounds } from './elasticsearch_util'; +import { MapExtent } from './descriptor_types'; const ZOOM_TILE_KEY_INDEX = 0; const X_TILE_KEY_INDEX = 1; const Y_TILE_KEY_INDEX = 2; -function getTileCount(zoom) { +function getTileCount(zoom: number): number { return Math.pow(2, zoom); } -export function parseTileKey(tileKey) { +export interface ESBounds { + top_left: { + lon: number; + lat: number; + }; + bottom_right: { + lon: number; + lat: number; + }; +} + +export function parseTileKey( + tileKey: string +): { x: number; y: number; zoom: number; tileCount: number } { const tileKeyParts = tileKey.split('/'); if (tileKeyParts.length !== 3) { @@ -42,7 +56,7 @@ export function parseTileKey(tileKey) { return { x, y, zoom, tileCount }; } -function sinh(x) { +function sinh(x: number): number { return (Math.exp(x) - Math.exp(-x)) / 2; } @@ -55,24 +69,52 @@ function sinh(x) { // We add one extra decimal level of precision because, at high zoom // levels rounding exactly can cause the boxes to render as uneven sizes // (some will be slightly larger and some slightly smaller) -function 
precisionRounding(v, minPrecision, binSize) { +function precisionRounding(v: number, minPrecision: number, binSize: number): number { let precision = Math.ceil(Math.abs(Math.log10(binSize))) + 1; precision = Math.max(precision, minPrecision); return _.round(v, precision); } -function tileToLatitude(y, tileCount) { +export function tile2long(x: number, z: number): number { + const tileCount = getTileCount(z); + return tileToLongitude(x, tileCount); +} + +export function tile2lat(y: number, z: number): number { + const tileCount = getTileCount(z); + return tileToLatitude(y, tileCount); +} + +export function tileToESBbox(x: number, y: number, z: number): ESBounds { + const wLon = tile2long(x, z); + const sLat = tile2lat(y + 1, z); + const eLon = tile2long(x + 1, z); + const nLat = tile2lat(y, z); + + return { + top_left: { + lon: wLon, + lat: nLat, + }, + bottom_right: { + lon: eLon, + lat: sLat, + }, + }; +} + +export function tileToLatitude(y: number, tileCount: number) { const radians = Math.atan(sinh(Math.PI - (2 * Math.PI * y) / tileCount)); const lat = (180 / Math.PI) * radians; return precisionRounding(lat, DECIMAL_DEGREES_PRECISION, 180 / tileCount); } -function tileToLongitude(x, tileCount) { +export function tileToLongitude(x: number, tileCount: number) { const lon = (x / tileCount) * 360 - 180; return precisionRounding(lon, DECIMAL_DEGREES_PRECISION, 360 / tileCount); } -export function getTileBoundingBox(tileKey) { +export function getTileBoundingBox(tileKey: string) { const { x, y, tileCount } = parseTileKey(tileKey); return { @@ -83,22 +125,22 @@ export function getTileBoundingBox(tileKey) { }; } -function sec(value) { +function sec(value: number): number { return 1 / Math.cos(value); } -function latitudeToTile(lat, tileCount) { +function latitudeToTile(lat: number, tileCount: number) { const radians = (clampToLatBounds(lat) * Math.PI) / 180; const y = ((1 - Math.log(Math.tan(radians) + sec(radians)) / Math.PI) / 2) * tileCount; return Math.floor(y); } -function longitudeToTile(lon, tileCount) { +function longitudeToTile(lon: number, tileCount: number) { const x = ((lon + 180) / 360) * tileCount; return Math.floor(x); } -export function expandToTileBoundaries(extent, zoom) { +export function expandToTileBoundaries(extent: MapExtent, zoom: number): MapExtent { const tileCount = getTileCount(zoom); const upperLeftX = longitudeToTile(extent.minLon, tileCount); diff --git a/x-pack/plugins/maps/public/actions/data_request_actions.ts b/x-pack/plugins/maps/public/actions/data_request_actions.ts index 2876f3d668a69..14d8196900506 100644 --- a/x-pack/plugins/maps/public/actions/data_request_actions.ts +++ b/x-pack/plugins/maps/public/actions/data_request_actions.ts @@ -40,7 +40,7 @@ import { ILayer } from '../classes/layers/layer'; import { IVectorLayer } from '../classes/layers/vector_layer/vector_layer'; import { DataMeta, MapExtent, MapFilters } from '../../common/descriptor_types'; import { DataRequestAbortError } from '../classes/util/data_request'; -import { scaleBounds, turfBboxToBounds } from '../../common/elasticsearch_geo_utils'; +import { scaleBounds, turfBboxToBounds } from '../../common/elasticsearch_util'; import { IVectorStyle } from '../classes/styles/vector/vector_style'; const FIT_TO_BOUNDS_SCALE_FACTOR = 0.1; diff --git a/x-pack/plugins/maps/public/actions/map_actions.ts b/x-pack/plugins/maps/public/actions/map_actions.ts index b00594cb7fb23..09491e5c3a7b3 100644 --- a/x-pack/plugins/maps/public/actions/map_actions.ts +++ 
b/x-pack/plugins/maps/public/actions/map_actions.ts @@ -54,7 +54,7 @@ import { MapRefreshConfig, } from '../../common/descriptor_types'; import { INITIAL_LOCATION } from '../../common/constants'; -import { scaleBounds } from '../../common/elasticsearch_geo_utils'; +import { scaleBounds } from '../../common/elasticsearch_util'; export function setMapInitError(errorMessage: string) { return { diff --git a/x-pack/plugins/maps/public/classes/fields/es_agg_field.ts b/x-pack/plugins/maps/public/classes/fields/es_agg_field.ts index 7b184819b839b..8cff98205186f 100644 --- a/x-pack/plugins/maps/public/classes/fields/es_agg_field.ts +++ b/x-pack/plugins/maps/public/classes/fields/es_agg_field.ts @@ -12,7 +12,7 @@ import { IVectorSource } from '../sources/vector_source'; import { ESDocField } from './es_doc_field'; import { AGG_TYPE, FIELD_ORIGIN } from '../../../common/constants'; import { isMetricCountable } from '../util/is_metric_countable'; -import { getField, addFieldToDSL } from '../util/es_agg_utils'; +import { getField, addFieldToDSL } from '../../../common/elasticsearch_util'; import { TopTermPercentageField } from './top_term_percentage_field'; import { ITooltipProperty, TooltipProperty } from '../tooltips/tooltip_property'; import { ESAggTooltipProperty } from '../tooltips/es_agg_tooltip_property'; @@ -30,6 +30,7 @@ export class ESAggField implements IESAggField { private readonly _label?: string; private readonly _aggType: AGG_TYPE; private readonly _esDocField?: IField | undefined; + private readonly _canReadFromGeoJson: boolean; constructor({ label, @@ -37,18 +38,21 @@ export class ESAggField implements IESAggField { aggType, esDocField, origin, + canReadFromGeoJson = true, }: { label?: string; source: IESAggSource; aggType: AGG_TYPE; esDocField?: IField; origin: FIELD_ORIGIN; + canReadFromGeoJson?: boolean; }) { this._source = source; this._origin = origin; this._label = label; this._aggType = aggType; this._esDocField = esDocField; + this._canReadFromGeoJson = canReadFromGeoJson; } getSource(): IVectorSource { @@ -132,18 +136,19 @@ export class ESAggField implements IESAggField { } supportsAutoDomain(): boolean { - return true; + return this._canReadFromGeoJson ? 
true : this.supportsFieldMeta(); } canReadFromGeoJson(): boolean { - return true; + return this._canReadFromGeoJson; } } export function esAggFieldsFactory( aggDescriptor: AggDescriptor, source: IESAggSource, - origin: FIELD_ORIGIN + origin: FIELD_ORIGIN, + canReadFromGeoJson: boolean = true ): IESAggField[] { const aggField = new ESAggField({ label: aggDescriptor.label, @@ -153,12 +158,13 @@ export function esAggFieldsFactory( aggType: aggDescriptor.type, source, origin, + canReadFromGeoJson, }); const aggFields: IESAggField[] = [aggField]; if (aggDescriptor.field && aggDescriptor.type === AGG_TYPE.TERMS) { - aggFields.push(new TopTermPercentageField(aggField)); + aggFields.push(new TopTermPercentageField(aggField, canReadFromGeoJson)); } return aggFields; diff --git a/x-pack/plugins/maps/public/classes/fields/field.ts b/x-pack/plugins/maps/public/classes/fields/field.ts index 2c190d54f0265..658c2bba87847 100644 --- a/x-pack/plugins/maps/public/classes/fields/field.ts +++ b/x-pack/plugins/maps/public/classes/fields/field.ts @@ -21,12 +21,12 @@ export interface IField { getOrdinalFieldMetaRequest(): Promise; getCategoricalFieldMetaRequest(size: number): Promise; - // Determines whether Maps-app can automatically determine the domain of the field-values + // Whether Maps-app can automatically determine the domain of the field-values // if this is not the case (e.g. for .mvt tiled data), // then styling properties that require the domain to be known cannot use this property. supportsAutoDomain(): boolean; - // Determinse wheter Maps-app can automatically deterime the domain of the field-values + // Whether Maps-app can automatically determine the domain of the field-values // _without_ having to retrieve the data as GeoJson // e.g. for ES-sources, this would use the extended_stats API supportsFieldMeta(): boolean; diff --git a/x-pack/plugins/maps/public/classes/fields/top_term_percentage_field.ts b/x-pack/plugins/maps/public/classes/fields/top_term_percentage_field.ts index fc931b13619ef..50db04d08b2aa 100644 --- a/x-pack/plugins/maps/public/classes/fields/top_term_percentage_field.ts +++ b/x-pack/plugins/maps/public/classes/fields/top_term_percentage_field.ts @@ -6,16 +6,17 @@ import { IESAggField } from './es_agg_field'; import { IVectorSource } from '../sources/vector_source'; -// @ts-ignore import { ITooltipProperty, TooltipProperty } from '../tooltips/tooltip_property'; import { TOP_TERM_PERCENTAGE_SUFFIX } from '../../../common/constants'; import { FIELD_ORIGIN } from '../../../common/constants'; export class TopTermPercentageField implements IESAggField { private readonly _topTermAggField: IESAggField; + private readonly _canReadFromGeoJson: boolean; - constructor(topTermAggField: IESAggField) { + constructor(topTermAggField: IESAggField, canReadFromGeoJson: boolean = true) { this._topTermAggField = topTermAggField; + this._canReadFromGeoJson = canReadFromGeoJson; } getSource(): IVectorSource { @@ -61,7 +62,7 @@ export class TopTermPercentageField implements IESAggField { } supportsAutoDomain(): boolean { - return true; + return this._canReadFromGeoJson; } supportsFieldMeta(): boolean { @@ -81,6 +82,6 @@ export class TopTermPercentageField implements IESAggField { } canReadFromGeoJson(): boolean { - return true; + return this._canReadFromGeoJson; } } diff --git a/x-pack/plugins/maps/public/classes/layers/layer.tsx b/x-pack/plugins/maps/public/classes/layers/layer.tsx index 8026f48fe6093..cd720063c6703 100644 --- a/x-pack/plugins/maps/public/classes/layers/layer.tsx +++ 
b/x-pack/plugins/maps/public/classes/layers/layer.tsx @@ -423,7 +423,7 @@ export class AbstractLayer implements ILayer { renderSourceSettingsEditor({ onChange }: SourceEditorArgs) { const source = this.getSourceForEditing(); - return source.renderSourceSettingsEditor({ onChange }); + return source.renderSourceSettingsEditor({ onChange, currentLayerType: this._descriptor.type }); } getPrevRequestToken(dataId: string): symbol | undefined { diff --git a/x-pack/plugins/maps/public/classes/sources/es_agg_source/es_agg_source.ts b/x-pack/plugins/maps/public/classes/sources/es_agg_source/es_agg_source.ts index a9c886617d3af..be947d79f4e39 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_agg_source/es_agg_source.ts +++ b/x-pack/plugins/maps/public/classes/sources/es_agg_source/es_agg_source.ts @@ -31,14 +31,25 @@ export interface IESAggSource extends IESSource { export class AbstractESAggSource extends AbstractESSource { private readonly _metricFields: IESAggField[]; + private readonly _canReadFromGeoJson: boolean; - constructor(descriptor: AbstractESAggSourceDescriptor, inspectorAdapters: Adapters) { + constructor( + descriptor: AbstractESAggSourceDescriptor, + inspectorAdapters: Adapters, + canReadFromGeoJson = true + ) { super(descriptor, inspectorAdapters); this._metricFields = []; + this._canReadFromGeoJson = canReadFromGeoJson; if (descriptor.metrics) { descriptor.metrics.forEach((aggDescriptor: AggDescriptor) => { this._metricFields.push( - ...esAggFieldsFactory(aggDescriptor, this, this.getOriginForField()) + ...esAggFieldsFactory( + aggDescriptor, + this, + this.getOriginForField(), + this._canReadFromGeoJson + ) ); }); } @@ -72,7 +83,12 @@ export class AbstractESAggSource extends AbstractESSource { const metrics = this._metricFields.filter((esAggField) => esAggField.isValid()); // Handle case where metrics is empty because older saved object state is empty array or there are no valid aggs. return metrics.length === 0 - ? esAggFieldsFactory({ type: AGG_TYPE.COUNT }, this, this.getOriginForField()) + ? esAggFieldsFactory( + { type: AGG_TYPE.COUNT }, + this, + this.getOriginForField(), + this._canReadFromGeoJson + ) : metrics; } diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/resolution_editor.test.tsx.snap b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/resolution_editor.test.tsx.snap new file mode 100644 index 0000000000000..ca9775594a9d7 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/resolution_editor.test.tsx.snap @@ -0,0 +1,73 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`resolution editor should add super-fine option 1`] = ` + + + +`; + +exports[`resolution editor should omit super-fine option 1`] = ` + + + +`; diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/update_source_editor.test.tsx.snap b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/update_source_editor.test.tsx.snap new file mode 100644 index 0000000000000..dfce6b36396a7 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/__snapshots__/update_source_editor.test.tsx.snap @@ -0,0 +1,121 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`source editor geo_grid_source default vector layer config should allow super-fine option 1`] = ` + + + +
+ +
+
+ + +
+ + + +
+ +
+
+ + + +
+ +
+`; + +exports[`source editor geo_grid_source should put limitations based on heatmap-rendering selection should not allow super-fine option for heatmaps and should not allow multiple metrics 1`] = ` + + + +
+ +
+
+ + +
+ + + +
+ +
+
+ + + +
+ +
+`; diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.d.ts b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.d.ts index 2ce4353fca13c..ada76b8e4e674 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.d.ts +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.d.ts @@ -5,11 +5,17 @@ */ import { AbstractESAggSource } from '../es_agg_source'; -import { ESGeoGridSourceDescriptor } from '../../../../common/descriptor_types'; +import { + ESGeoGridSourceDescriptor, + MapFilters, + MapQuery, + VectorSourceSyncMeta, +} from '../../../../common/descriptor_types'; import { GRID_RESOLUTION } from '../../../../common/constants'; import { IField } from '../../fields/field'; +import { ITiledSingleLayerVectorSource } from '../vector_source'; -export class ESGeoGridSource extends AbstractESAggSource { +export class ESGeoGridSource extends AbstractESAggSource implements ITiledSingleLayerVectorSource { static createDescriptor({ indexPatternId, geoField, @@ -19,8 +25,27 @@ export class ESGeoGridSource extends AbstractESAggSource { constructor(sourceDescriptor: ESGeoGridSourceDescriptor, inspectorAdapters: unknown); + readonly _descriptor: ESGeoGridSourceDescriptor; + getFieldNames(): string[]; getGridResolution(): GRID_RESOLUTION; getGeoGridPrecision(zoom: number): number; createField({ fieldName }: { fieldName: string }): IField; + + getLayerName(): string; + + getUrlTemplateWithMeta( + searchFilters: MapFilters & { + applyGlobalQuery: boolean; + fieldNames: string[]; + geogridPrecision?: number; + sourceQuery: MapQuery; + sourceMeta: VectorSourceSyncMeta; + } + ): Promise<{ + layerName: string; + urlTemplate: string; + minSourceZoom: number; + maxSourceZoom: number; + }>; } diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js index aa167cb577672..89258f04612fd 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js @@ -7,7 +7,11 @@ import React from 'react'; import uuid from 'uuid/v4'; -import { convertCompositeRespToGeoJson, convertRegularRespToGeoJson } from './convert_to_geojson'; +import { + convertCompositeRespToGeoJson, + convertRegularRespToGeoJson, + makeESBbox, +} from '../../../../common/elasticsearch_util'; import { UpdateSourceEditor } from './update_source_editor'; import { SOURCE_TYPES, @@ -15,13 +19,20 @@ import { RENDER_AS, GRID_RESOLUTION, VECTOR_SHAPE_TYPE, + MVT_SOURCE_LAYER_NAME, + GIS_API_PATH, + MVT_GETGRIDTILE_API_PATH, + GEOTILE_GRID_AGG_NAME, + GEOCENTROID_AGG_NAME, } from '../../../../common/constants'; import { i18n } from '@kbn/i18n'; import { getDataSourceLabel } from '../../../../common/i18n_getters'; import { AbstractESAggSource, DEFAULT_METRIC } from '../es_agg_source'; import { DataRequestAbortError } from '../../util/data_request'; import { registerSource } from '../source_registry'; -import { makeESBbox } from '../../../../common/elasticsearch_geo_utils'; + +import rison from 'rison-node'; +import { getHttp } from '../../../kibana_services'; export const MAX_GEOTILE_LEVEL = 29; @@ -48,9 +59,14 @@ export class ESGeoGridSource extends AbstractESAggSource { }; } - renderSourceSettingsEditor({ onChange }) { + constructor(descriptor, inspectorAdapters) { + 
super(descriptor, inspectorAdapters, descriptor.resolution !== GRID_RESOLUTION.SUPER_FINE); + } + + renderSourceSettingsEditor({ onChange, currentLayerType }) { return ( { @@ -96,59 +99,67 @@ describe('ESGeoGridSource', () => { }; }; - describe('getGeoJsonWithMeta', () => { - let mockSearchSource: unknown; - beforeEach(async () => { - mockSearchSource = new MockSearchSource(); - const mockSearchService = { - searchSource: { - async create() { - return mockSearchSource as SearchSource; - }, - createEmpty() { - return mockSearchSource as SearchSource; - }, + let mockSearchSource: unknown; + beforeEach(async () => { + mockSearchSource = new MockSearchSource(); + const mockSearchService = { + searchSource: { + async create() { + return mockSearchSource as SearchSource; }, - }; + createEmpty() { + return mockSearchSource as SearchSource; + }, + }, + }; - // @ts-expect-error - getIndexPatternService.mockReturnValue(mockIndexPatternService); - // @ts-expect-error - getSearchService.mockReturnValue(mockSearchService); + // @ts-expect-error + getIndexPatternService.mockReturnValue(mockIndexPatternService); + // @ts-expect-error + getSearchService.mockReturnValue(mockSearchService); + // @ts-expect-error + getHttp.mockReturnValue({ + basePath: { + prepend(path: string) { + return `rootdir${path};`; + }, + }, }); + }); - const extent: MapExtent = { - minLon: -160, - minLat: -80, - maxLon: 160, - maxLat: 80, - }; + const extent: MapExtent = { + minLon: -160, + minLat: -80, + maxLon: 160, + maxLat: 80, + }; - const mapFilters: VectorSourceRequestMeta = { - geogridPrecision: 4, - filters: [], - timeFilters: { - from: 'now', - to: '15m', - mode: 'relative', - }, - extent, - applyGlobalQuery: true, - fieldNames: [], - buffer: extent, - sourceQuery: { - query: '', - language: 'KQL', - queryLastTriggeredAt: '2019-04-25T20:53:22.331Z', - }, - sourceMeta: null, - zoom: 0, - }; + const vectorSourceRequestMeta: VectorSourceRequestMeta = { + geogridPrecision: 4, + filters: [], + timeFilters: { + from: 'now', + to: '15m', + mode: 'relative', + }, + extent, + applyGlobalQuery: true, + fieldNames: [], + buffer: extent, + sourceQuery: { + query: '', + language: 'KQL', + queryLastTriggeredAt: '2019-04-25T20:53:22.331Z', + }, + sourceMeta: null, + zoom: 0, + }; + describe('getGeoJsonWithMeta', () => { it('Should configure the SearchSource correctly', async () => { const { data, meta } = await geogridSource.getGeoJsonWithMeta( 'foobarLayer', - mapFilters, + vectorSourceRequestMeta, () => {} ); @@ -215,5 +226,48 @@ describe('ESGeoGridSource', () => { it('should use heuristic to derive precision', () => { expect(geogridSource.getGeoGridPrecision(10)).toBe(12); }); + + it('Should not return valid precision for super-fine resolution', () => { + const superFineSource = new ESGeoGridSource( + { + id: 'foobar', + indexPatternId: 'fooIp', + geoField: geoFieldName, + metrics: [], + resolution: GRID_RESOLUTION.SUPER_FINE, + type: SOURCE_TYPES.ES_GEO_GRID, + requestType: RENDER_AS.HEATMAP, + }, + {} + ); + expect(superFineSource.getGeoGridPrecision(10)).toBe(NaN); + }); + }); + + describe('ITiledSingleLayerVectorSource', () => { + it('getLayerName', () => { + expect(geogridSource.getLayerName()).toBe('source_layer'); + }); + + it('getMinZoom', () => { + expect(geogridSource.getMinZoom()).toBe(0); + }); + + it('getMaxZoom', () => { + expect(geogridSource.getMaxZoom()).toBe(24); + }); + + it('getUrlTemplateWithMeta', async () => { + const urlTemplateWithMeta = await geogridSource.getUrlTemplateWithMeta( + vectorSourceRequestMeta + 
); + + expect(urlTemplateWithMeta.layerName).toBe('source_layer'); + expect(urlTemplateWithMeta.minSourceZoom).toBe(0); + expect(urlTemplateWithMeta.maxSourceZoom).toBe(24); + expect(urlTemplateWithMeta.urlTemplate).toBe( + "rootdir/api/maps/mvt/getGridTile;?x={x}&y={y}&z={z}&geometryFieldName=bar&index=undefined&requestBody=(foobar:ES_DSL_PLACEHOLDER,params:('0':('0':index,'1':(fields:())),'1':('0':size,'1':0),'2':('0':filter,'1':!((geo_bounding_box:(bar:(bottom_right:!(180,-82.67628),top_left:!(-180,82.67628)))))),'3':('0':query),'4':('0':index,'1':(fields:())),'5':('0':query,'1':(language:KQL,query:'',queryLastTriggeredAt:'2019-04-25T20:53:22.331Z')),'6':('0':aggs,'1':(gridSplit:(aggs:(gridCentroid:(geo_centroid:(field:bar))),geotile_grid:(bounds:!n,field:bar,precision:!n,shard_size:65535,size:65535))))))&requestType=heatmap&geoFieldType=geo_point" + ); + }); }); }); diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.js b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.js index 28c24f58a0efc..71133cb25280c 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.js +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.js @@ -9,7 +9,7 @@ import { GRID_RESOLUTION } from '../../../../common/constants'; import { EuiSelect, EuiFormRow } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -const OPTIONS = [ +const BASE_OPTIONS = [ { value: GRID_RESOLUTION.COARSE, text: i18n.translate('xpack.maps.source.esGrid.coarseDropdownOption', { @@ -30,7 +30,18 @@ const OPTIONS = [ }, ]; -export function ResolutionEditor({ resolution, onChange }) { +export function ResolutionEditor({ resolution, onChange, includeSuperFine }) { + const options = [...BASE_OPTIONS]; + + if (includeSuperFine) { + options.push({ + value: GRID_RESOLUTION.SUPER_FINE, + text: i18n.translate('xpack.maps.source.esGrid.superFineDropDownOption', { + defaultMessage: 'super fine (beta)', + }), + }); + } + return ( onChange(e.target.value)} compressed diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.test.tsx b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.test.tsx new file mode 100644 index 0000000000000..369203dbe16c0 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/resolution_editor.test.tsx @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import React from 'react'; +import { shallow } from 'enzyme'; + +// @ts-expect-error +import { ResolutionEditor } from './resolution_editor'; +import { GRID_RESOLUTION } from '../../../../common/constants'; + +const defaultProps = { + resolution: GRID_RESOLUTION.COARSE, + onChange: () => {}, + includeSuperFine: false, +}; + +describe('resolution editor', () => { + test('should omit super-fine option', () => { + const component = shallow(); + expect(component).toMatchSnapshot(); + }); + test('should add super-fine option', () => { + const component = shallow(); + expect(component).toMatchSnapshot(); + }); +}); diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.js b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.js index ac7d809c40f61..7e885c291b952 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.js +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.js @@ -6,7 +6,7 @@ import React, { Fragment, Component } from 'react'; -import { RENDER_AS } from '../../../../common/constants'; +import { GRID_RESOLUTION, LAYER_TYPE } from '../../../../common/constants'; import { MetricsEditor } from '../../../components/metrics_editor'; import { getIndexPatternService } from '../../../kibana_services'; import { ResolutionEditor } from './resolution_editor'; @@ -62,8 +62,25 @@ export class UpdateSourceEditor extends Component { this.props.onChange({ propName: 'metrics', value: metrics }); }; - _onResolutionChange = (e) => { - this.props.onChange({ propName: 'resolution', value: e }); + _onResolutionChange = (resolution) => { + let newLayerType; + if ( + this.props.currentLayerType === LAYER_TYPE.VECTOR || + this.props.currentLayerType === LAYER_TYPE.TILED_VECTOR + ) { + newLayerType = + resolution === GRID_RESOLUTION.SUPER_FINE ? LAYER_TYPE.TILED_VECTOR : LAYER_TYPE.VECTOR; + } else if (this.props.currentLayerType === LAYER_TYPE.HEATMAP) { + if (resolution === GRID_RESOLUTION.SUPER_FINE) { + throw new Error('Heatmap does not support SUPER_FINE resolution'); + } else { + newLayerType = LAYER_TYPE.HEATMAP; + } + } else { + throw new Error('Unexpected layer-type'); + } + + this.props.onChange({ propName: 'resolution', value: resolution, newLayerType }); }; _onRequestTypeSelect = (requestType) => { @@ -72,13 +89,13 @@ export class UpdateSourceEditor extends Component { _renderMetricsPanel() { const metricsFilter = - this.props.renderAs === RENDER_AS.HEATMAP + this.props.currentLayerType === LAYER_TYPE.HEATMAP ? (metric) => { //these are countable metrics, where blending heatmap color blobs make sense return isMetricCountable(metric.value); } : null; - const allowMultipleMetrics = this.props.renderAs !== RENDER_AS.HEATMAP; + const allowMultipleMetrics = this.props.currentLayerType !== LAYER_TYPE.HEATMAP; return ( @@ -115,6 +132,7 @@ export class UpdateSourceEditor extends Component { diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.test.tsx b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.test.tsx new file mode 100644 index 0000000000000..ceb79230bc832 --- /dev/null +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/update_source_editor.test.tsx @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import { shallow } from 'enzyme'; + +// @ts-expect-error +import { UpdateSourceEditor } from './update_source_editor'; +import { GRID_RESOLUTION, LAYER_TYPE, RENDER_AS } from '../../../../common/constants'; + +const defaultProps = { + currentLayerType: LAYER_TYPE.VECTOR, + indexPatternId: 'foobar', + onChange: () => {}, + metrics: [], + renderAs: RENDER_AS.POINT, + resolution: GRID_RESOLUTION.COARSE, +}; + +describe('source editor geo_grid_source', () => { + describe('default vector layer config', () => { + test('should allow super-fine option', async () => { + const component = shallow(); + expect(component).toMatchSnapshot(); + }); + }); + + describe('should put limitations based on heatmap-rendering selection', () => { + test('should not allow super-fine option for heatmaps and should not allow multiple metrics', async () => { + const component = shallow( + + ); + expect(component).toMatchSnapshot(); + }); + }); +}); diff --git a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/convert_to_lines.js b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/convert_to_lines.js index 96a7f50cdf523..24ac6d31bc645 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/convert_to_lines.js +++ b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/convert_to_lines.js @@ -5,7 +5,7 @@ */ import _ from 'lodash'; -import { extractPropertiesFromBucket } from '../../util/es_agg_utils'; +import { extractPropertiesFromBucket } from '../../../../common/elasticsearch_util'; const LAT_INDEX = 0; const LON_INDEX = 1; diff --git a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js index 9ec54335d4e78..0360208ef8370 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js @@ -16,7 +16,7 @@ import { getDataSourceLabel } from '../../../../common/i18n_getters'; import { convertToLines } from './convert_to_lines'; import { AbstractESAggSource, DEFAULT_METRIC } from '../es_agg_source'; import { registerSource } from '../source_registry'; -import { turfBboxToBounds } from '../../../../common/elasticsearch_geo_utils'; +import { turfBboxToBounds } from '../../../../common/elasticsearch_util'; import { DataRequestAbortError } from '../../util/data_request'; const MAX_GEOTILE_LEVEL = 29; diff --git a/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js b/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js index df83bd1cf5e60..edcafae54d54c 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js @@ -10,7 +10,7 @@ import rison from 'rison-node'; import { AbstractESSource } from '../es_source'; import { getSearchService, getHttp } from '../../../kibana_services'; -import { hitsToGeoJson } from '../../../../common/elasticsearch_geo_utils'; +import { hitsToGeoJson, getField, addFieldToDSL } from '../../../../common/elasticsearch_util'; import { UpdateSourceEditor } from './update_source_editor'; import { SOURCE_TYPES, @@ -31,7 +31,7 @@ import uuid from 'uuid/v4'; import { DEFAULT_FILTER_BY_MAP_BOUNDS } from './constants'; 
import { ESDocField } from '../../fields/es_doc_field'; -import { getField, addFieldToDSL } from '../../util/es_agg_utils'; + import { registerSource } from '../source_registry'; export const sourceTitle = i18n.translate('xpack.maps.source.esSearchTitle', { diff --git a/x-pack/plugins/maps/public/classes/sources/es_source/es_source.js b/x-pack/plugins/maps/public/classes/sources/es_source/es_source.js index d51ca46fd98ff..ab56ceeab4e77 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_source/es_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_source/es_source.js @@ -11,14 +11,14 @@ import { getTimeFilter, getSearchService, } from '../../../kibana_services'; -import { createExtentFilter } from '../../../../common/elasticsearch_geo_utils'; +import { createExtentFilter } from '../../../../common/elasticsearch_util'; import _ from 'lodash'; import { i18n } from '@kbn/i18n'; import uuid from 'uuid/v4'; import { copyPersistentState } from '../../../reducers/util'; import { DataRequestAbortError } from '../../util/data_request'; -import { expandToTileBoundaries } from '../es_geo_grid_source/geo_tile_utils'; +import { expandToTileBoundaries } from '../../../../common/geo_tile_utils'; import { search } from '../../../../../../../src/plugins/data/public'; export class AbstractESSource extends AbstractVectorSource { diff --git a/x-pack/plugins/maps/public/classes/sources/es_term_source/es_term_source.js b/x-pack/plugins/maps/public/classes/sources/es_term_source/es_term_source.js index b4ad256c1598a..359d22d2c44ce 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_term_source/es_term_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_term_source/es_term_source.js @@ -16,7 +16,11 @@ import { import { getJoinAggKey } from '../../../../common/get_agg_key'; import { ESDocField } from '../../fields/es_doc_field'; import { AbstractESAggSource } from '../es_agg_source'; -import { getField, addFieldToDSL, extractPropertiesFromBucket } from '../../util/es_agg_utils'; +import { + getField, + addFieldToDSL, + extractPropertiesFromBucket, +} from '../../../../common/elasticsearch_util'; const TERMS_AGG_NAME = 'join'; diff --git a/x-pack/plugins/maps/public/classes/sources/source.ts b/x-pack/plugins/maps/public/classes/sources/source.ts index 7e7a7bd8f049d..946381817b8fc 100644 --- a/x-pack/plugins/maps/public/classes/sources/source.ts +++ b/x-pack/plugins/maps/public/classes/sources/source.ts @@ -18,6 +18,7 @@ import { OnSourceChangeArgs } from '../../connected_components/layer_panel/view' export type SourceEditorArgs = { onChange: (...args: OnSourceChangeArgs[]) => void; + currentLayerType?: string; }; export type ImmutableSourceProperty = { @@ -50,7 +51,7 @@ export interface ISource { getImmutableProperties(): Promise; getAttributions(): Promise; isESSource(): boolean; - renderSourceSettingsEditor({ onChange }: SourceEditorArgs): ReactElement | null; + renderSourceSettingsEditor(sourceEditorArgs: SourceEditorArgs): ReactElement | null; supportsFitToBounds(): Promise; showJoinEditor(): boolean; getJoinsDisabledReason(): string | null; @@ -126,7 +127,7 @@ export class AbstractSource implements ISource { return []; } - renderSourceSettingsEditor({ onChange }: SourceEditorArgs): ReactElement | null { + renderSourceSettingsEditor(sourceEditorArgs: SourceEditorArgs): ReactElement | null { return null; } diff --git a/x-pack/plugins/maps/public/classes/styles/heatmap/heatmap_style.tsx b/x-pack/plugins/maps/public/classes/styles/heatmap/heatmap_style.tsx index 
a300225178526..c75698805225f 100644 --- a/x-pack/plugins/maps/public/classes/styles/heatmap/heatmap_style.tsx +++ b/x-pack/plugins/maps/public/classes/styles/heatmap/heatmap_style.tsx @@ -86,6 +86,7 @@ export class HeatmapStyle implements IStyle { } else if (resolution === GRID_RESOLUTION.MOST_FINE) { radius = 32; } else { + // SUPER_FINE or any other is not supported. const errorMessage = i18n.translate('xpack.maps.style.heatmap.resolutionStyleErrorMessage', { defaultMessage: `Resolution param not recognized: {resolution}`, values: { resolution }, diff --git a/x-pack/plugins/maps/public/classes/styles/vector/style_util.ts b/x-pack/plugins/maps/public/classes/styles/vector/style_util.ts index d190a62e6f300..49d6ccdeb9316 100644 --- a/x-pack/plugins/maps/public/classes/styles/vector/style_util.ts +++ b/x-pack/plugins/maps/public/classes/styles/vector/style_util.ts @@ -5,7 +5,7 @@ */ import { i18n } from '@kbn/i18n'; -import { MB_LOOKUP_FUNCTION, VECTOR_SHAPE_TYPE } from '../../../../common/constants'; +import { MB_LOOKUP_FUNCTION, VECTOR_SHAPE_TYPE, VECTOR_STYLES } from '../../../../common/constants'; import { Category } from '../../../../common/descriptor_types'; export function getOtherCategoryLabel() { @@ -14,8 +14,8 @@ export function getOtherCategoryLabel() { }); } -export function getComputedFieldName(styleName: string, fieldName: string) { - return `${getComputedFieldNamePrefix(fieldName)}__${styleName}`; +export function getComputedFieldName(styleName: VECTOR_STYLES, fieldName: string) { + return `${getComputedFieldNamePrefix(fieldName)}__${styleName as string}`; } export function getComputedFieldNamePrefix(fieldName: string) { diff --git a/x-pack/plugins/maps/public/connected_components/map/features_tooltip/feature_geometry_filter_form.js b/x-pack/plugins/maps/public/connected_components/map/features_tooltip/feature_geometry_filter_form.js index 98267965fd30f..33a0f1c5bf088 100644 --- a/x-pack/plugins/maps/public/connected_components/map/features_tooltip/feature_geometry_filter_form.js +++ b/x-pack/plugins/maps/public/connected_components/map/features_tooltip/feature_geometry_filter_form.js @@ -8,7 +8,7 @@ import React, { Component } from 'react'; import { i18n } from '@kbn/i18n'; import { URL_MAX_LENGTH } from '../../../../../../../src/core/public'; -import { createSpatialFilterWithGeometry } from '../../../../common/elasticsearch_geo_utils'; +import { createSpatialFilterWithGeometry } from '../../../../common/elasticsearch_util'; import { GEO_JSON_TYPE } from '../../../../common/constants'; import { GeometryFilterForm } from '../../../components/geometry_filter_form'; diff --git a/x-pack/plugins/maps/public/connected_components/map/mb/draw_control/draw_control.js b/x-pack/plugins/maps/public/connected_components/map/mb/draw_control/draw_control.js index 49675ac6a3924..0356a8267c18a 100644 --- a/x-pack/plugins/maps/public/connected_components/map/mb/draw_control/draw_control.js +++ b/x-pack/plugins/maps/public/connected_components/map/mb/draw_control/draw_control.js @@ -15,7 +15,7 @@ import { createSpatialFilterWithGeometry, getBoundingBoxGeometry, roundCoordinates, -} from '../../../../../common/elasticsearch_geo_utils'; +} from '../../../../../common/elasticsearch_util'; import { DrawTooltip } from './draw_tooltip'; const DRAW_RECTANGLE = 'draw_rectangle'; diff --git a/x-pack/plugins/maps/public/connected_components/map/mb/view.js b/x-pack/plugins/maps/public/connected_components/map/mb/view.js index eede1edf40cc4..ddc48cfc9c329 100644 --- 
a/x-pack/plugins/maps/public/connected_components/map/mb/view.js +++ b/x-pack/plugins/maps/public/connected_components/map/mb/view.js @@ -23,7 +23,7 @@ import sprites1 from '@elastic/maki/dist/sprite@1.png'; import sprites2 from '@elastic/maki/dist/sprite@2.png'; import { DrawControl } from './draw_control'; import { TooltipControl } from './tooltip_control'; -import { clampToLatBounds, clampToLonBounds } from '../../../../common/elasticsearch_geo_utils'; +import { clampToLatBounds, clampToLonBounds } from '../../../../common/elasticsearch_util'; import { getInitialView } from './get_initial_view'; import { getPreserveDrawingBuffer } from '../../../kibana_services'; diff --git a/x-pack/plugins/maps/public/selectors/map_selectors.ts b/x-pack/plugins/maps/public/selectors/map_selectors.ts index 03e0f753812c9..db4371e9cd590 100644 --- a/x-pack/plugins/maps/public/selectors/map_selectors.ts +++ b/x-pack/plugins/maps/public/selectors/map_selectors.ts @@ -31,7 +31,7 @@ import { SPATIAL_FILTERS_LAYER_ID, } from '../../common/constants'; // @ts-ignore -import { extractFeaturesFromFilters } from '../../common/elasticsearch_geo_utils'; +import { extractFeaturesFromFilters } from '../../common/elasticsearch_util'; import { MapStoreState } from '../reducers/store'; import { DataRequestDescriptor, diff --git a/x-pack/plugins/maps/server/mvt/__tests__/json/0_0_0_gridagg.json b/x-pack/plugins/maps/server/mvt/__tests__/json/0_0_0_gridagg.json new file mode 100644 index 0000000000000..0945dc57fa512 --- /dev/null +++ b/x-pack/plugins/maps/server/mvt/__tests__/json/0_0_0_gridagg.json @@ -0,0 +1 @@ +{"took":2,"timed_out":false,"_shards":{"total":1,"successful":1,"skipped":0,"failed":0},"hits":{"total":{"value":10000,"relation":"gte"},"max_score":null,"hits":[]},"aggregations":{"gridSplit":{"buckets":[{"key":"7/37/48","doc_count":42637,"avg_of_TOTAL_AV":{"value":5398920.390458991},"gridCentroid":{"location":{"lat":40.77936432658204,"lon":-73.96795676049909},"count":42637}}]}}} diff --git a/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0.pbf b/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_docs.pbf similarity index 100% rename from x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0.pbf rename to x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_docs.pbf diff --git a/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_asgrid.pbf b/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_asgrid.pbf new file mode 100644 index 0000000000000..f2289865b8022 Binary files /dev/null and b/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_asgrid.pbf differ diff --git a/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_aspoint.pbf b/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_aspoint.pbf new file mode 100644 index 0000000000000..54b0791ccd136 Binary files /dev/null and b/x-pack/plugins/maps/server/mvt/__tests__/pbf/0_0_0_grid_aspoint.pbf differ diff --git a/x-pack/plugins/maps/server/mvt/__tests__/tile_searches.ts b/x-pack/plugins/maps/server/mvt/__tests__/tile_es_responses.ts similarity index 57% rename from x-pack/plugins/maps/server/mvt/__tests__/tile_searches.ts rename to x-pack/plugins/maps/server/mvt/__tests__/tile_es_responses.ts index 317d6434cf81e..9fbaba21e71d5 100644 --- a/x-pack/plugins/maps/server/mvt/__tests__/tile_searches.ts +++ b/x-pack/plugins/maps/server/mvt/__tests__/tile_es_responses.ts @@ -7,10 +7,6 @@ import * as path from 'path'; import * as fs from 'fs'; -const search000path = path.resolve(__dirname, './json/0_0_0_search.json'); -const search000raw = 
fs.readFileSync(search000path); -const search000json = JSON.parse((search000raw as unknown) as string); - export const TILE_SEARCHES = { '0.0.0': { countResponse: { @@ -22,7 +18,18 @@ export const TILE_SEARCHES = { failed: 0, }, }, - searchResponse: search000json, + searchResponse: loadJson('./json/0_0_0_search.json'), + }, +}; + +export const TILE_GRIDAGGS = { + '0.0.0': { + gridAggResponse: loadJson('./json/0_0_0_gridagg.json'), }, - '1.1.0': {}, }; + +function loadJson(filePath: string) { + const absolutePath = path.resolve(__dirname, filePath); + const rawContents = fs.readFileSync(absolutePath); + return JSON.parse((rawContents as unknown) as string); +} diff --git a/x-pack/plugins/maps/server/mvt/get_tile.test.ts b/x-pack/plugins/maps/server/mvt/get_tile.test.ts index b9c928d594539..76c1741ab2ad0 100644 --- a/x-pack/plugins/maps/server/mvt/get_tile.test.ts +++ b/x-pack/plugins/maps/server/mvt/get_tile.test.ts @@ -4,11 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ -import { getTile } from './get_tile'; -import { TILE_SEARCHES } from './__tests__/tile_searches'; +import { getGridTile, getTile } from './get_tile'; +import { TILE_GRIDAGGS, TILE_SEARCHES } from './__tests__/tile_es_responses'; import { Logger } from 'src/core/server'; import * as path from 'path'; import * as fs from 'fs'; +import { ES_GEO_FIELD_TYPE, RENDER_AS } from '../../common/constants'; describe('getTile', () => { const mockCallElasticsearch = jest.fn(); @@ -51,13 +52,84 @@ describe('getTile', () => { callElasticsearch: mockCallElasticsearch, }); - if (tile === null) { - throw new Error('Tile should be created'); - } + compareTiles('./__tests__/pbf/0_0_0_docs.pbf', tile); + }); +}); + +describe('getGridTile', () => { + const mockCallElasticsearch = jest.fn(); + + const geometryFieldName = 'geometry'; + + // For mock-purposes only. 
The ES-call response is mocked in 0_0_0_gridagg.json file + const requestBody = { + _source: { excludes: [] }, + aggs: { + gridSplit: { + aggs: { + // eslint-disable-next-line @typescript-eslint/naming-convention + avg_of_TOTAL_AV: { avg: { field: 'TOTAL_AV' } }, + gridCentroid: { geo_centroid: { field: geometryFieldName } }, + }, + geotile_grid: { + bounds: null, + field: geometryFieldName, + precision: null, + shard_size: 65535, + size: 65535, + }, + }, + }, + docvalue_fields: [], + query: { + bool: { + filter: [], + }, + }, + script_fields: {}, + size: 0, + stored_fields: ['*'], + }; - const expectedPath = path.resolve(__dirname, './__tests__/pbf/0_0_0.pbf'); - const expectedBin = fs.readFileSync(expectedPath, 'binary'); - const expectedTile = Buffer.from(expectedBin, 'binary'); - expect(expectedTile.equals(tile)).toBe(true); + beforeEach(() => { + mockCallElasticsearch.mockReset(); + mockCallElasticsearch.mockImplementation((type) => { + return TILE_GRIDAGGS['0.0.0'].gridAggResponse; + }); + }); + + const defaultParams = { + x: 0, + y: 0, + z: 0, + index: 'manhattan', + requestBody, + geometryFieldName, + logger: ({ + info: () => {}, + } as unknown) as Logger, + callElasticsearch: mockCallElasticsearch, + requestType: RENDER_AS.POINT, + geoFieldType: ES_GEO_FIELD_TYPE.GEO_POINT, + }; + + test('0.0.0 tile (clusters)', async () => { + const tile = await getGridTile(defaultParams); + compareTiles('./__tests__/pbf/0_0_0_grid_aspoint.pbf', tile); + }); + + test('0.0.0 tile (grids)', async () => { + const tile = await getGridTile({ ...defaultParams, requestType: RENDER_AS.GRID }); + compareTiles('./__tests__/pbf/0_0_0_grid_asgrid.pbf', tile); }); }); + +function compareTiles(expectedRelativePath: string, actualTile: Buffer | null) { + if (actualTile === null) { + throw new Error('Tile should be created'); + } + const expectedPath = path.resolve(__dirname, expectedRelativePath); + const expectedBin = fs.readFileSync(expectedPath, 'binary'); + const expectedTile = Buffer.from(expectedBin, 'binary'); + expect(expectedTile.equals(actualTile)).toBe(true); +} diff --git a/x-pack/plugins/maps/server/mvt/get_tile.ts b/x-pack/plugins/maps/server/mvt/get_tile.ts index 9621f7f174a30..dd88be7f80c2e 100644 --- a/x-pack/plugins/maps/server/mvt/get_tile.ts +++ b/x-pack/plugins/maps/server/mvt/get_tile.ts @@ -13,22 +13,89 @@ import { Feature, FeatureCollection, Polygon } from 'geojson'; import { ES_GEO_FIELD_TYPE, FEATURE_ID_PROPERTY_NAME, + GEOTILE_GRID_AGG_NAME, KBN_TOO_MANY_FEATURES_PROPERTY, + MAX_ZOOM, MVT_SOURCE_LAYER_NAME, + RENDER_AS, + SUPER_FINE_ZOOM_DELTA, } from '../../common/constants'; -import { hitsToGeoJson } from '../../common/elasticsearch_geo_utils'; +import { hitsToGeoJson } from '../../common/elasticsearch_util'; import { flattenHit } from './util'; +import { convertRegularRespToGeoJson } from '../../common/elasticsearch_util'; +import { ESBounds, tile2lat, tile2long, tileToESBbox } from '../../common/geo_tile_utils'; -interface ESBounds { - top_left: { - lon: number; - lat: number; - }; - bottom_right: { - lon: number; - lat: number; - }; +export async function getGridTile({ + logger, + callElasticsearch, + index, + geometryFieldName, + x, + y, + z, + requestBody = {}, + requestType = RENDER_AS.POINT, + geoFieldType = ES_GEO_FIELD_TYPE.GEO_POINT, +}: { + x: number; + y: number; + z: number; + geometryFieldName: string; + index: string; + callElasticsearch: (type: string, ...args: any[]) => Promise; + logger: Logger; + requestBody: any; + requestType: RENDER_AS; + geoFieldType: 
ES_GEO_FIELD_TYPE; +}): Promise { + const esBbox: ESBounds = tileToESBbox(x, y, z); + try { + let bboxFilter; + if (geoFieldType === ES_GEO_FIELD_TYPE.GEO_POINT) { + bboxFilter = { + geo_bounding_box: { + [geometryFieldName]: esBbox, + }, + }; + } else if (geoFieldType === ES_GEO_FIELD_TYPE.GEO_SHAPE) { + const geojsonPolygon = tileToGeoJsonPolygon(x, y, z); + bboxFilter = { + geo_shape: { + [geometryFieldName]: { + shape: geojsonPolygon, + relation: 'INTERSECTS', + }, + }, + }; + } else { + throw new Error(`${geoFieldType} is not valid geo field-type`); + } + requestBody.query.bool.filter.push(bboxFilter); + + requestBody.aggs[GEOTILE_GRID_AGG_NAME].geotile_grid.precision = Math.min( + z + SUPER_FINE_ZOOM_DELTA, + MAX_ZOOM + ); + requestBody.aggs[GEOTILE_GRID_AGG_NAME].geotile_grid.bounds = esBbox; + + const esGeotileGridQuery = { + index, + body: requestBody, + }; + + const gridAggResult = await callElasticsearch('search', esGeotileGridQuery); + const features: Feature[] = convertRegularRespToGeoJson(gridAggResult, requestType); + const featureCollection: FeatureCollection = { + features, + type: 'FeatureCollection', + }; + + return createMvtTile(featureCollection, z, x, y); + } catch (e) { + logger.warn(`Cannot generate grid-tile for ${z}/${x}/${y}: ${e.message}`); + return null; + } } export async function getTile({ @@ -149,26 +216,7 @@ export async function getTile({ type: 'FeatureCollection', }; - const tileIndex = geojsonvt(featureCollection, { - maxZoom: 24, // max zoom to preserve detail on; can't be higher than 24 - tolerance: 3, // simplification tolerance (higher means simpler) - extent: 4096, // tile extent (both width and height) - buffer: 64, // tile buffer on each side - debug: 0, // logging level (0 to disable, 1 or 2) - lineMetrics: false, // whether to enable line metrics tracking for LineString/MultiLineString features - promoteId: null, // name of a feature property to promote to feature.id. Cannot be used with `generateId` - generateId: false, // whether to generate feature ids. 
Cannot be used with `promoteId` - indexMaxZoom: 5, // max zoom in the initial tile index - indexMaxPoints: 100000, // max number of points per tile in the index - }); - const tile = tileIndex.getTile(z, x, y); - - if (tile) { - const pbf = vtpbf.fromGeojsonVt({ [MVT_SOURCE_LAYER_NAME]: tile }, { version: 2 }); - return Buffer.from(pbf); - } else { - return null; - } + return createMvtTile(featureCollection, z, x, y); } catch (e) { logger.warn(`Cannot generate tile for ${z}/${x}/${y}: ${e.message}`); return null; @@ -195,15 +243,6 @@ function tileToGeoJsonPolygon(x: number, y: number, z: number): Polygon { }; } -function tile2long(x: number, z: number): number { - return (x / Math.pow(2, z)) * 360 - 180; -} - -function tile2lat(y: number, z: number): number { - const n = Math.PI - (2 * Math.PI * y) / Math.pow(2, z); - return (180 / Math.PI) * Math.atan(0.5 * (Math.exp(n) - Math.exp(-n))); -} - function esBboxToGeoJsonPolygon(esBounds: ESBounds): Polygon { let minLon = esBounds.top_left.lon; const maxLon = esBounds.bottom_right.lon; @@ -224,3 +263,31 @@ function esBboxToGeoJsonPolygon(esBounds: ESBounds): Polygon { ], }; } + +function createMvtTile( + featureCollection: FeatureCollection, + z: number, + x: number, + y: number +): Buffer | null { + const tileIndex = geojsonvt(featureCollection, { + maxZoom: 24, // max zoom to preserve detail on; can't be higher than 24 + tolerance: 3, // simplification tolerance (higher means simpler) + extent: 4096, // tile extent (both width and height) + buffer: 64, // tile buffer on each side + debug: 0, // logging level (0 to disable, 1 or 2) + lineMetrics: false, // whether to enable line metrics tracking for LineString/MultiLineString features + promoteId: null, // name of a feature property to promote to feature.id. Cannot be used with `generateId` + generateId: false, // whether to generate feature ids. Cannot be used with `promoteId` + indexMaxZoom: 5, // max zoom in the initial tile index + indexMaxPoints: 100000, // max number of points per tile in the index + }); + const tile = tileIndex.getTile(z, x, y); + + if (tile) { + const pbf = vtpbf.fromGeojsonVt({ [MVT_SOURCE_LAYER_NAME]: tile }, { version: 2 }); + return Buffer.from(pbf); + } else { + return null; + } +} diff --git a/x-pack/plugins/maps/server/mvt/mvt_routes.ts b/x-pack/plugins/maps/server/mvt/mvt_routes.ts index 32c14a355ba2a..266a240b53017 100644 --- a/x-pack/plugins/maps/server/mvt/mvt_routes.ts +++ b/x-pack/plugins/maps/server/mvt/mvt_routes.ts @@ -6,10 +6,21 @@ import rison from 'rison-node'; import { schema } from '@kbn/config-schema'; -import { Logger } from 'src/core/server'; +import { + KibanaRequest, + KibanaResponseFactory, + Logger, + RequestHandlerContext, +} from 'src/core/server'; import { IRouter } from 'src/core/server'; -import { MVT_GETTILE_API_PATH, API_ROOT_PATH } from '../../common/constants'; -import { getTile } from './get_tile'; +import { + MVT_GETTILE_API_PATH, + API_ROOT_PATH, + MVT_GETGRIDTILE_API_PATH, + ES_GEO_FIELD_TYPE, + RENDER_AS, +} from '../../common/constants'; +import { getGridTile, getTile } from './get_tile'; const CACHE_TIMEOUT = 0; // Todo. determine good value. Unsure about full-implications (e.g. wrt. time-based data). 
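One detail of the new `getGridTile` worth calling out is how the `geotile_grid` precision is chosen: the requested tile's zoom plus a fixed delta, clamped to the maximum zoom. A small sketch of that calculation with assumed constant values (the real `SUPER_FINE_ZOOM_DELTA` and `MAX_ZOOM` come from `common/constants` and are not shown in this excerpt):

```ts
// Sketch of the precision clamp used by getGridTile. The numbers below are
// illustrative assumptions, not the PR's actual constant values.
const SUPER_FINE_ZOOM_DELTA = 7; // assumed
const MAX_ZOOM = 24; // assumed

function geotileGridPrecision(z: number): number {
  // Each tile is aggregated several geotile levels finer than its own zoom,
  // but never beyond the supported maximum.
  return Math.min(z + SUPER_FINE_ZOOM_DELTA, MAX_ZOOM);
}

// With the assumed constants: a z=3 tile aggregates at precision 10,
// while a z=20 tile is clamped to 24.
console.log(geotileGridPrecision(3), geotileGridPrecision(20));
```

Clamping keeps the aggregation request inside the supported precision range while still giving each tile buckets finer than its own zoom level; the same request also gets its `geotile_grid.bounds` restricted to the tile's bounding box, so only data inside the tile is bucketed.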
@@ -28,46 +39,93 @@ export function initMVTRoutes({ router, logger }: { logger: Logger; router: IRou }), }, }, - async (context, request, response) => { + async ( + context: RequestHandlerContext, + request: KibanaRequest, unknown>, + response: KibanaResponseFactory + ) => { const { query } = request; + const requestBodyDSL = rison.decode(query.requestBody as string); - const callElasticsearch = async (type: string, ...args: any[]): Promise => { - return await context.core.elasticsearch.legacy.client.callAsCurrentUser(type, ...args); - }; + const tile = await getTile({ + logger, + callElasticsearch: makeCallElasticsearch(context), + geometryFieldName: query.geometryFieldName as string, + x: query.x as number, + y: query.y as number, + z: query.z as number, + index: query.index as string, + requestBody: requestBodyDSL as any, + }); - const requestBodyDSL = rison.decode(query.requestBody); + return sendResponse(response, tile); + } + ); - const tile = await getTile({ + router.get( + { + path: `${API_ROOT_PATH}/${MVT_GETGRIDTILE_API_PATH}`, + validate: { + query: schema.object({ + x: schema.number(), + y: schema.number(), + z: schema.number(), + geometryFieldName: schema.string(), + requestBody: schema.string(), + index: schema.string(), + requestType: schema.string(), + geoFieldType: schema.string(), + }), + }, + }, + async ( + context: RequestHandlerContext, + request: KibanaRequest, unknown>, + response: KibanaResponseFactory + ) => { + const { query } = request; + const requestBodyDSL = rison.decode(query.requestBody as string); + + const tile = await getGridTile({ logger, - callElasticsearch, - geometryFieldName: query.geometryFieldName, - x: query.x, - y: query.y, - z: query.z, - index: query.index, - requestBody: requestBodyDSL, + callElasticsearch: makeCallElasticsearch(context), + geometryFieldName: query.geometryFieldName as string, + x: query.x as number, + y: query.y as number, + z: query.z as number, + index: query.index as string, + requestBody: requestBodyDSL as any, + requestType: query.requestType as RENDER_AS, + geoFieldType: query.geoFieldType as ES_GEO_FIELD_TYPE, }); - if (tile) { - return response.ok({ - body: tile, - headers: { - 'content-disposition': 'inline', - 'content-length': `${tile.length}`, - 'Content-Type': 'application/x-protobuf', - 'Cache-Control': `max-age=${CACHE_TIMEOUT}`, - }, - }); - } else { - return response.ok({ - headers: { - 'content-disposition': 'inline', - 'content-length': '0', - 'Content-Type': 'application/x-protobuf', - 'Cache-Control': `max-age=${CACHE_TIMEOUT}`, - }, - }); - } + return sendResponse(response, tile); } ); } + +function sendResponse(response: KibanaResponseFactory, tile: any) { + const headers = { + 'content-disposition': 'inline', + 'content-length': tile ? 
`${tile.length}` : `0`, + 'Content-Type': 'application/x-protobuf', + 'Cache-Control': `max-age=${CACHE_TIMEOUT}`, + }; + + if (tile) { + return response.ok({ + body: tile, + headers, + }); + } else { + return response.ok({ + headers, + }); + } +} + +function makeCallElasticsearch(context: RequestHandlerContext) { + return async (type: string, ...args: any[]): Promise => { + return context.core.elasticsearch.legacy.client.callAsCurrentUser(type, ...args); + }; +} diff --git a/x-pack/plugins/monitoring/public/angular/app_modules.ts b/x-pack/plugins/monitoring/public/angular/app_modules.ts index 499610045d771..4ef905fd35fc4 100644 --- a/x-pack/plugins/monitoring/public/angular/app_modules.ts +++ b/x-pack/plugins/monitoring/public/angular/app_modules.ts @@ -41,10 +41,6 @@ import { licenseProvider } from '../services/license'; // @ts-ignore import { titleProvider } from '../services/title'; // @ts-ignore -import { monitoringBeatsBeatProvider } from '../directives/beats/beat'; -// @ts-ignore -import { monitoringBeatsOverviewProvider } from '../directives/beats/overview'; -// @ts-ignore import { monitoringMlListingProvider } from '../directives/elasticsearch/ml_job_listing'; // @ts-ignore import { monitoringMainProvider } from '../directives/main'; @@ -153,8 +149,6 @@ function createMonitoringAppServices() { function createMonitoringAppDirectives() { angular .module('monitoring/directives', []) - .directive('monitoringBeatsBeat', monitoringBeatsBeatProvider) - .directive('monitoringBeatsOverview', monitoringBeatsOverviewProvider) .directive('monitoringMlListing', monitoringMlListingProvider) .directive('monitoringMain', monitoringMainProvider); } diff --git a/x-pack/plugins/monitoring/public/components/apm/instance/instance.js b/x-pack/plugins/monitoring/public/components/apm/instance/instance.js index 396d2258edd0c..eec24e741ac41 100644 --- a/x-pack/plugins/monitoring/public/components/apm/instance/instance.js +++ b/x-pack/plugins/monitoring/public/components/apm/instance/instance.js @@ -42,9 +42,7 @@ export function ApmServerInstance({ summary, metrics, ...props }) { const charts = seriesToShow.map((data, index) => ( - - - + )); @@ -55,15 +53,15 @@ export function ApmServerInstance({ summary, metrics, ...props }) {
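For the `MVT_GETGRIDTILE_API_PATH` route registered above in `mvt_routes.ts`, a client supplies the tile coordinates plus a rison-encoded search body as query parameters. A hedged sketch of such a request follows; the URL path, index name, field name, and enum strings are placeholders, since the concrete constant values are not part of this excerpt:

```ts
import rison from 'rison-node';

// All concrete values below are illustrative assumptions.
async function fetchGridTile(): Promise<Uint8Array> {
  const params = new URLSearchParams({
    x: '0',
    y: '0',
    z: '0',
    index: 'my_geo_index', // placeholder index name
    geometryFieldName: 'location', // placeholder geo field
    requestType: 'point', // a RENDER_AS value
    geoFieldType: 'geo_point', // an ES_GEO_FIELD_TYPE value
    requestBody: rison.encode({ query: { bool: { filter: [] } }, aggs: {} }),
  });
  // '/api/maps/mvt/getGridTile' stands in for `${API_ROOT_PATH}/${MVT_GETGRIDTILE_API_PATH}`.
  const resp = await fetch(`/api/maps/mvt/getGridTile?${params.toString()}`);
  return new Uint8Array(await resp.arrayBuffer()); // application/x-protobuf body
}
```

The server decodes `requestBody` with `rison.decode` and replies through `sendResponse`, which returns either the protobuf tile or an empty body, always with the same content-type and cache headers.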

+ + + + - - - - {charts} diff --git a/x-pack/plugins/monitoring/public/components/apm/instances/instances.js b/x-pack/plugins/monitoring/public/components/apm/instances/instances.js index 6dcfa6dd043aa..e05ba1878caed 100644 --- a/x-pack/plugins/monitoring/public/components/apm/instances/instances.js +++ b/x-pack/plugins/monitoring/public/components/apm/instances/instances.js @@ -156,11 +156,11 @@ export function ApmServerInstances({ apms, setupMode }) { /> + + + + - - - - {setupModeCallout} ( - - - + )); @@ -51,15 +49,15 @@ export function ApmOverview({ stats, metrics, ...props }) {

+ + + + - - - - {charts} diff --git a/x-pack/plugins/monitoring/public/components/beats/beat/beat.js b/x-pack/plugins/monitoring/public/components/beats/beat/beat.js index 3fe211c0f2edc..f489271659bfe 100644 --- a/x-pack/plugins/monitoring/public/components/beats/beat/beat.js +++ b/x-pack/plugins/monitoring/public/components/beats/beat/beat.js @@ -135,6 +135,9 @@ export function Beat({ summary, metrics, ...props }) { + + + diff --git a/x-pack/plugins/monitoring/public/components/beats/listing/listing.js b/x-pack/plugins/monitoring/public/components/beats/listing/listing.js index be8595e8e6bbe..60a35e00a4c63 100644 --- a/x-pack/plugins/monitoring/public/components/beats/listing/listing.js +++ b/x-pack/plugins/monitoring/public/components/beats/listing/listing.js @@ -13,6 +13,7 @@ import { EuiSpacer, EuiLink, EuiScreenReaderOnly, + EuiPanel, } from '@elastic/eui'; import { Stats } from '../../beats'; import { formatMetric } from '../../../lib/format_number'; @@ -153,9 +154,11 @@ export class Listing extends PureComponent { /> - + - + + + {setupModeCallOut} - + + + + - - -

- -

-
- - -
+ +

+ +

+
+ +
- - -

- -

-
- - -
+ +

+ +

+
+ +
- - -

- -

-
- - -
+ +

+ +

+
+ +
- +
+ + @@ -212,18 +213,25 @@ exports[`Overview that overview page shows a message if there is no beats data 1 /> - + + + + - + + + diff --git a/x-pack/plugins/monitoring/public/components/beats/overview/overview.js b/x-pack/plugins/monitoring/public/components/beats/overview/overview.js index 83f92ea1b481c..897f017f44f41 100644 --- a/x-pack/plugins/monitoring/public/components/beats/overview/overview.js +++ b/x-pack/plugins/monitoring/public/components/beats/overview/overview.js @@ -30,46 +30,40 @@ function renderLatestActive(latestActive, latestTypes, latestVersions) { return ( - - -

- -

-
- - -
+ +

+ +

+
+ +
- - -

- -

-
- - -
+ +

+ +

+
+ +
- - -

- -

-
- - -
+ +

+ +

+
+ +
); @@ -118,10 +112,13 @@ export function BeatsOverview({ /> - + - {renderLatestActive(latestActive, latestTypes, latestVersions)} - + + + {renderLatestActive(latestActive, latestTypes, latestVersions)} + + {charts}
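A pattern worth noting across the monitoring component changes in this stretch of the diff: content that previously sat bare inside flex items (tables, charts, the `renderLatestActive` summary) is now wrapped in `EuiPanel` — `listing.js` adds the `EuiPanel` import explicitly. A rough sketch of that wrapper shape, with a placeholder child since the surrounding JSX is not reproduced here:

```tsx
import React from 'react';
import { EuiFlexGroup, EuiFlexItem, EuiPanel } from '@elastic/eui';

// Sketch only: `chart` stands in for the real monitoring chart/table/summary
// content rendered by these pages.
export function PanelWrappedContent({ chart }: { chart: React.ReactNode }) {
  return (
    <EuiFlexGroup>
      <EuiFlexItem>
        <EuiPanel>{chart}</EuiPanel>
      </EuiFlexItem>
    </EuiFlexGroup>
  );
}
```

The net effect is that each monitoring table or chart gets its own panel container rather than sitting directly on the page background.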
diff --git a/x-pack/plugins/monitoring/public/components/cluster/overview/apm_panel.js b/x-pack/plugins/monitoring/public/components/cluster/overview/apm_panel.js index ccbf0b0ec711d..4bf07710393ea 100644 --- a/x-pack/plugins/monitoring/public/components/cluster/overview/apm_panel.js +++ b/x-pack/plugins/monitoring/public/components/cluster/overview/apm_panel.js @@ -55,7 +55,7 @@ export function ApmPanel(props) { {...props} url="apm" title={i18n.translate('xpack.monitoring.cluster.overview.apmPanel.apmTitle', { - defaultMessage: 'APM', + defaultMessage: 'APM server', })} > @@ -70,21 +70,21 @@ export function ApmPanel(props) { aria-label={i18n.translate( 'xpack.monitoring.cluster.overview.apmPanel.overviewLinkAriaLabel', { - defaultMessage: 'APM Overview', + defaultMessage: 'APM server overview', } )} data-test-subj="apmOverview" > - + {formatMetric(props.totalEvents, '0.[0]a')} - + {apmsTotal} }} /> @@ -144,7 +144,7 @@ export function ApmPanel(props) { - + - + {formatMetric(props.totalEvents, '0.[0]a')} - + {props.logs.types.map((log, index) => ( - + - + @@ -276,7 +277,7 @@ export function ElasticsearchPanel(props) { - + - + - + - + {showMlJobs()} - + - + - + - + - + {formatNumber(get(indices, 'docs.count'), 'int_commas')} - + - + - + - + diff --git a/x-pack/plugins/monitoring/public/components/cluster/overview/kibana_panel.js b/x-pack/plugins/monitoring/public/components/cluster/overview/kibana_panel.js index 6fa533302db48..7df0a3ca7138e 100644 --- a/x-pack/plugins/monitoring/public/components/cluster/overview/kibana_panel.js +++ b/x-pack/plugins/monitoring/public/components/cluster/overview/kibana_panel.js @@ -111,7 +111,7 @@ export function KibanaPanel(props) { data-test-subj="kibana_overview" data-overview-status={props.status} > - + {props.requests_total} - + - + {formatNumber(props.concurrent_connections, 'int_commas')} - + - + {formatNumber(props.events_in_total, '0.[0]a')} - + - + {props.max_uptime ? formatNumber(props.max_uptime, 'time_since') : 0} - + - + {queueTypes[LOGSTASH.QUEUE_TYPES.MEMORY] || 0} - + } checked={showSystemIndices} diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/__snapshots__/cells.test.js.snap b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/__snapshots__/cells.test.js.snap index c7081dc439085..b0b5ceb46d16c 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/__snapshots__/cells.test.js.snap +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/__snapshots__/cells.test.js.snap @@ -10,30 +10,46 @@ exports[`Node Listing Metric Cell should format N/A as the metric for an offline exports[`Node Listing Metric Cell should format a non-percentage metric 1`] = `
+
- - 206.3 GB  - - -
- 206.5 GB max -
- 206.3 GB min +
+
+
+
+
+
+
+
+
+ 206.3 GB +
+
@@ -41,30 +57,46 @@ exports[`Node Listing Metric Cell should format a non-percentage metric 1`] = ` exports[`Node Listing Metric Cell should format a percentage metric 1`] = `
+
- - 0%  - - -
- 2% max -
- 0% min +
+
+
+
+
+
+
+
+
+ 0% +
+
diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/cells.test.js b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/cells.test.js index 0c4b4b2b3c3f4..f0b131b65433c 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/cells.test.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/__tests__/cells.test.js @@ -27,6 +27,7 @@ describe('Node Listing Metric Cell', () => { }, summary: { minVal: 0, maxVal: 2, lastVal: 0, slope: -1 }, }, + 'data-test-subj': 'testCell', }; expect(renderWithIntl()).toMatchSnapshot(); }); @@ -54,6 +55,7 @@ describe('Node Listing Metric Cell', () => { slope: -1, }, }, + 'data-test-subj': 'testCell2', }; expect(renderWithIntl()).toMatchSnapshot(); }); diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/cells.js b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/cells.js index 4c3b642213d99..9956dd4da7d8a 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/cells.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/cells.js @@ -4,19 +4,42 @@ * you may not use this file except in compliance with the Elastic License. */ -import React from 'react'; +import React, { useState } from 'react'; import { get } from 'lodash'; import { formatMetric } from '../../../lib/format_number'; -import { EuiText, EuiTitle, EuiFlexGroup, EuiFlexItem } from '@elastic/eui'; +import { + EuiText, + EuiPopover, + EuiIcon, + EuiDescriptionList, + EuiSpacer, + EuiKeyboardAccessible, + EuiFlexGroup, + EuiFlexItem, +} from '@elastic/eui'; import { i18n } from '@kbn/i18n'; +const TRENDING_DOWN = i18n.translate('xpack.monitoring.elasticsearch.node.cells.trendingDownText', { + defaultMessage: 'down', +}); +const TRENDING_UP = i18n.translate('xpack.monitoring.elasticsearch.node.cells.trendingUpText', { + defaultMessage: 'up', +}); + function OfflineCell() { return
N/A
; } -const getSlopeArrow = (slope) => { +const getDirection = (slope) => { + if (slope || slope === 0) { + return slope > 0 ? TRENDING_UP : TRENDING_DOWN; + } + return null; +}; + +const getIcon = (slope) => { if (slope || slope === 0) { - return slope > 0 ? 'up' : 'down'; + return slope > 0 ? 'arrowUp' : 'arrowDown'; } return null; }; @@ -28,40 +51,82 @@ const metricVal = (metric, format, isPercent, units) => { return formatMetric(metric, format, units); }; -const noWrapStyle = { overflowX: 'hidden', whiteSpace: 'nowrap' }; - function MetricCell({ isOnline, metric = {}, isPercent, ...props }) { + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + const onButtonClick = () => setIsPopoverOpen((isPopoverOpen) => !isPopoverOpen); + const closePopover = () => setIsPopoverOpen(false); + if (isOnline) { const { lastVal, maxVal, minVal, slope } = get(metric, 'summary', {}); const format = get(metric, 'metric.format'); const units = get(metric, 'metric.units'); + const tooltipItems = [ + { + title: i18n.translate('xpack.monitoring.elasticsearch.node.cells.tooltip.trending', { + defaultMessage: 'Trending', + }), + description: getDirection(slope), + }, + { + title: i18n.translate('xpack.monitoring.elasticsearch.node.cells.tooltip.max', { + defaultMessage: 'Max value', + }), + description: metricVal(maxVal, format, isPercent, units), + }, + { + title: i18n.translate('xpack.monitoring.elasticsearch.node.cells.tooltip.min', { + defaultMessage: 'Min value', + }), + description: metricVal(minVal, format, isPercent, units), + }, + ]; + + const button = ( + + + + ); + return ( - + + - - - {metricVal(lastVal, format, isPercent)} -   - - - - - {i18n.translate('xpack.monitoring.elasticsearch.nodes.cells.maxText', { - defaultMessage: '{metric} max', - values: { - metric: metricVal(maxVal, format, isPercent, units), - }, - })} - - - {i18n.translate('xpack.monitoring.elasticsearch.nodes.cells.minText', { - defaultMessage: '{metric} min', - values: { - metric: metricVal(minVal, format, isPercent, units), - }, - })} - + + + +
+ + + + {i18n.translate('xpack.monitoring.elasticsearch.node.cells.tooltip.preface', { + defaultMessage: 'Applies to current time period', + })} + +
+
+
+ + {metricVal(lastVal, format, isPercent)} + +
); diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/nodes.js b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/nodes.js index 43512f8e528f6..f088f7c0d348a 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/nodes.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/nodes/nodes.js @@ -73,7 +73,6 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.nameColumnTitle', { defaultMessage: 'Name', }), - width: '20%', field: 'name', sortable: true, render: (value, node) => { @@ -131,7 +130,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler defaultMessage: 'Alerts', }), field: 'alerts', - width: '175px', + // width: '175px', sortable: true, render: (_field, node) => { return ( @@ -148,6 +147,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.statusColumnTitle', { defaultMessage: 'Status', }), + dataType: 'boolean', field: 'isOnline', sortable: true, render: (value) => { @@ -181,22 +181,18 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.shardsColumnTitle', { defaultMessage: 'Shards', }), + dataType: 'number', field: 'shardCount', sortable: true, render: (value, node) => { - return node.isOnline ? ( -
- {value} -
- ) : ( - - ); + return node.isOnline ? {value} : ; }, }); if (showCgroupMetricsElasticsearch) { cols.push({ name: cpuUsageColumnTitle, + dataType: 'number', field: 'node_cgroup_quota', sortable: getSortHandler('node_cgroup_quota'), render: (value, node) => ( @@ -213,6 +209,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.cpuThrottlingColumnTitle', { defaultMessage: 'CPU Throttling', }), + dataType: 'number', field: 'node_cgroup_throttled', sortable: getSortHandler('node_cgroup_throttled'), render: (value, node) => ( @@ -227,6 +224,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler } else { cols.push({ name: cpuUsageColumnTitle, + dataType: 'number', field: 'node_cpu_utilization', sortable: getSortHandler('node_cpu_utilization'), render: (value, node) => { @@ -245,6 +243,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.loadAverageColumnTitle', { defaultMessage: 'Load Average', }), + dataType: 'number', field: 'node_load_average', sortable: getSortHandler('node_load_average'), render: (value, node) => ( @@ -265,6 +264,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler javaVirtualMachine: 'JVM', }, }), + dataType: 'number', field: 'node_jvm_mem_percent', sortable: getSortHandler('node_jvm_mem_percent'), render: (value, node) => ( @@ -281,6 +281,7 @@ const getColumns = (showCgroupMetricsElasticsearch, setupMode, clusterUuid, aler name: i18n.translate('xpack.monitoring.elasticsearch.nodes.diskFreeSpaceColumnTitle', { defaultMessage: 'Disk Free Space', }), + dataType: 'number', field: 'node_free_space', sortable: getSortHandler('node_free_space'), render: (value, node) => ( diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/shard_allocation/components/table_head.js b/x-pack/plugins/monitoring/public/components/elasticsearch/shard_allocation/components/table_head.js index fd5f28ea02039..3c875667fe04c 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/shard_allocation/components/table_head.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/shard_allocation/components/table_head.js @@ -5,6 +5,7 @@ */ import React from 'react'; +import { i18n } from '@kbn/i18n'; import { EuiFlexGroup, EuiFlexItem, EuiSwitch } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; @@ -37,7 +38,12 @@ class IndexLabel extends React.Component { $el && $el[0] && unmountComponentAtNode($el[0])); - - scope.$watch('data', (data = {}) => { - render( - , - $el[0] - ); - }); - }, - }; -} diff --git a/x-pack/plugins/monitoring/public/directives/beats/overview/index.js b/x-pack/plugins/monitoring/public/directives/beats/overview/index.js deleted file mode 100644 index 4faf69e13d02c..0000000000000 --- a/x-pack/plugins/monitoring/public/directives/beats/overview/index.js +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import React from 'react'; -import { render, unmountComponentAtNode } from 'react-dom'; -import { BeatsOverview } from '../../../components/beats/overview'; - -export function monitoringBeatsOverviewProvider() { - return { - restrict: 'E', - scope: { - data: '=', - onBrush: '<', - zoomInfo: '<', - }, - link(scope, $el) { - scope.$on('$destroy', () => $el && $el[0] && unmountComponentAtNode($el[0])); - - scope.$watch('data', (data = {}) => { - render( - , - $el[0] - ); - }); - }, - }; -} diff --git a/x-pack/plugins/monitoring/public/directives/main/index.html b/x-pack/plugins/monitoring/public/directives/main/index.html index fabd207d72b1f..fb24d9e678d56 100644 --- a/x-pack/plugins/monitoring/public/directives/main/index.html +++ b/x-pack/plugins/monitoring/public/directives/main/index.html @@ -1,19 +1,32 @@
-
- - +
+
+
+
+
+
+

{{pageTitle || monitoringMain.instance}}

+
+
+
+
+
+ + +
+
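A note on the two directive files deleted earlier in this diff (`directives/beats/beat/index.js` and `directives/beats/overview/index.js`): both used the same AngularJS-to-React bridge, re-rendering a React component whenever the bound `data` changed and unmounting it when the scope was destroyed. A minimal sketch of that bridge, with a placeholder component standing in for `Beat`/`BeatsOverview` since the rendered element is not shown above:

```tsx
import React from 'react';
import { render, unmountComponentAtNode } from 'react-dom';

// Placeholder for the real Beat / BeatsOverview components.
function PlaceholderOverview({ data }: { data?: unknown }) {
  return <pre>{JSON.stringify(data, null, 2)}</pre>;
}

export function exampleMonitoringDirectiveProvider() {
  return {
    restrict: 'E',
    scope: { data: '=', onBrush: '<', zoomInfo: '<' },
    link(scope: any, $el: any) {
      // Tear down the React tree when the Angular scope is destroyed.
      scope.$on('$destroy', () => $el && $el[0] && unmountComponentAtNode($el[0]));
      // Re-render on every change to the bound `data`.
      scope.$watch('data', (data: unknown = {}) => {
        render(<PlaceholderOverview data={data} />, $el[0]);
      });
    },
  };
}
```

With the Beats pages now rendered directly as React components, these wrappers are no longer needed, which is why `app_modules.ts` keeps only the ML job listing and main directives registered.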