From 0abfd3c72fb627232f56b959895e1b51fa8e70fd Mon Sep 17 00:00:00 2001 From: Greg Thompson Date: Fri, 14 Feb 2020 13:06:31 -0600 Subject: [PATCH 1/7] Upgrade EUI to v19.0.0 (#57284) * eui to v19.0.0 * typescript updates; idAria removal * src snapshot updates * mock euicode and euicodeblock for jest * x-pack snapshot updates * mock euicode for jest * more euicode snapshots * mock euicode in storyshots * types/enzyme yarn.lock * sidenav type update --- package.json | 2 +- packages/kbn-ui-shared-deps/package.json | 2 +- .../__snapshots__/no_results.test.js.snap | 64 +- .../np_ready/angular/directives/histogram.tsx | 2 +- .../angular/directives/no_results.test.js | 25 + src/legacy/server/sass/build.test.js | 69 +- .../field/__snapshots__/field.test.tsx.snap | 180 ---- .../management_app/components/field/field.tsx | 2 - .../static/forms/components/field.tsx | 1 - .../static/forms/components/form_row.tsx | 16 +- .../public/components/share_context_menu.tsx | 3 +- src/plugins/share/public/types.ts | 8 +- .../plugins/kbn_tp_run_pipeline/package.json | 2 +- .../kbn_tp_custom_visualizations/package.json | 2 +- .../kbn_tp_embeddable_explorer/package.json | 2 +- .../kbn_tp_sample_panel_action/package.json | 2 +- typings/@elastic/eui/index.d.ts | 2 - .../__test__/__snapshots__/List.test.tsx.snap | 6 - .../canvas/.storybook/storyshots.test.js | 13 + .../keyboard_shortcuts_doc.stories.storyshot | 792 ++++-------------- .../dense_vector_type.tsx | 1 - .../scaled_float_type.tsx | 1 - .../token_count_type.tsx | 1 - .../template_form/steps/step_logistics.tsx | 20 +- .../analysis_setup_indices_form.tsx | 9 +- .../analysis_setup_timerange_form.tsx | 17 +- .../fields_configuration_panel.tsx | 10 - .../indices_configuration_panel.tsx | 4 - .../name_configuration_panel.tsx | 2 - x-pack/legacy/plugins/infra/types/eui.d.ts | 17 - .../dimension_panel/dimension_panel.test.tsx | 29 +- .../common/model_memory_limit/description.tsx | 8 +- .../components/frequency/description.tsx | 8 +- .../components/query_delay/description.tsx | 8 +- .../components/scroll_size/description.tsx | 8 +- .../components/time_field/description.tsx | 3 +- .../components/calendars/description.tsx | 3 +- .../components/custom_urls/description.tsx | 3 +- .../dedicated_index/description.tsx | 3 +- .../components/model_plot/description.tsx | 3 +- .../components/groups/description.tsx | 8 +- .../job_description/description.tsx | 3 +- .../components/job_id/description.tsx | 9 +- .../advanced_detector_modal/descriptions.tsx | 21 +- .../components/bucket_span/description.tsx | 8 +- .../categorization_field/description.tsx | 3 +- .../components/influencers/description.tsx | 3 +- .../components/sparse_data/description.tsx | 3 +- .../components/split_field/description.tsx | 6 +- .../summary_count_field/description.tsx | 3 +- .../components/job_settings_form.tsx | 7 +- .../collection_enabled.test.js.snap | 26 +- .../collection_interval.test.js.snap | 52 +- .../__snapshots__/exporters.test.js.snap | 64 +- .../exporters/__tests__/exporters.test.js | 13 + .../__snapshots__/plugin_enabled.test.js.snap | 24 +- .../__tests__/plugin_enabled.test.js | 13 + .../__snapshots__/reason_found.test.js.snap | 104 +-- .../reasons/__tests__/reason_found.test.js | 13 + .../remote_cluster_form.test.js.snap | 75 +- .../report_listing.test.tsx.snap | 6 - .../note_card_body.test.tsx.snap | 63 +- .../__snapshots__/index.test.tsx.snap | 63 +- .../policy_form/steps/step_logistics.tsx | 8 - .../policy_form/steps/step_retention.tsx | 5 - .../policy_form/steps/step_settings.tsx | 20 
+- .../components/repository_form/step_one.tsx | 5 +- .../type_settings/azure_settings.tsx | 18 - .../type_settings/fs_settings.tsx | 12 - .../type_settings/gcs_settings.tsx | 16 - .../type_settings/hdfs_settings.tsx | 22 +- .../type_settings/readonly_settings.tsx | 4 +- .../type_settings/s3_settings.tsx | 24 - .../steps/step_logistics.tsx | 23 +- .../steps/step_settings.tsx | 15 +- .../__snapshots__/monitor_list.test.tsx.snap | 3 - x-pack/package.json | 2 +- .../elasticsearch_privileges.test.tsx.snap | 6 - .../simple_privilege_section.test.tsx.snap | 3 - .../json_watch_edit_simulate.tsx | 4 - x-pack/typings/@elastic/eui/index.d.ts | 1 - yarn.lock | 18 +- 82 files changed, 616 insertions(+), 1536 deletions(-) diff --git a/package.json b/package.json index 26e1112ead697..3156e87e763b2 100644 --- a/package.json +++ b/package.json @@ -120,7 +120,7 @@ "@elastic/charts": "^17.0.2", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "7.6.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "8.1.1-kibana2", "@elastic/numeral": "2.3.5", diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index 0b1a31619fdf9..4b4db9d7f37f3 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -11,7 +11,7 @@ "devDependencies": { "@elastic/charts": "^17.0.2", "abort-controller": "^3.0.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@kbn/dev-utils": "1.0.0", "@kbn/i18n": "1.0.0", "@yarnpkg/lockfile": "^1.1.0", diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap index 98cb3ccf6dd91..4126bd9d27ffd 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap @@ -77,12 +77,8 @@ Array [
- - + + 200 @@ -101,12 +97,8 @@ Array [
- - + + status:200 @@ -125,12 +117,8 @@ Array [
- - + + status:[400 TO 499] @@ -149,12 +137,8 @@ Array [
- - + + status:[400 TO 499] AND extension:PHP @@ -173,12 +157,8 @@ Array [
- - + + status:[400 TO 499] AND (extension:php OR extension:html) @@ -291,15 +271,9 @@ Array [
-
-
-              
+          
+
+              
                 {"reason":"Awful error"}
               
             
@@ -320,15 +294,9 @@ Array [
-
-
-              
+          
+
+              
                 {"reason":"Bad error"}
               
             
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx index 77bbab97d95c7..8db3c77ba0f47 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx @@ -41,7 +41,7 @@ import { } from '@elastic/charts'; import { i18n } from '@kbn/i18n'; -import { EuiChartThemeType } from '@elastic/eui/src/themes/charts/themes'; +import { EuiChartThemeType } from '@elastic/eui/dist/eui_charts_theme'; import { Subscription } from 'rxjs'; import { getServices, timezoneProvider } from '../../../kibana_services'; diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js index 7de792c612993..98a4a926a282e 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js @@ -36,6 +36,31 @@ jest.mock('../../../kibana_services', () => { }; }); +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code_block', () => { + const React = require.requireActual('react'); + return { + EuiCodeBlock: ({ children }) => ( +
+
+          {children}
+        
+
+ ), + }; +}); +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + beforeEach(() => { jest.clearAllMocks(); }); diff --git a/src/legacy/server/sass/build.test.js b/src/legacy/server/sass/build.test.js index 7092f6ad12921..46a898c30f84e 100644 --- a/src/legacy/server/sass/build.test.js +++ b/src/legacy/server/sass/build.test.js @@ -47,28 +47,7 @@ it('builds light themed SASS', async () => { expect(readFileSync(targetPath, 'utf8').replace(/(\/\*# sourceMappingURL=).*( \*\/)/, '$1...$2')) .toMatchInlineSnapshot(` - "/* 1 */ - /* 1 */ - /** - * 1. Extend beta badges to at least 40% of the container's width - * 2. Fix for IE to ensure badges are visible outside of a
+ } + > + xpack.monitoring.collection.enabled + - xpack.monitoring.collection.enabled - + /> @@ -214,15 +221,22 @@ exports[`ExplainCollectionEnabled should explain about xpack.monitoring.collecti paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap index ac3dce3bfaef6..3cf35609acd07 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap @@ -366,15 +366,22 @@ exports[`ExplainCollectionInterval collection interval setting updates should sh paddingSize="l" transparentBackground={false} > + + xpack.monitoring.collection.interval +
+ } + > + xpack.monitoring.collection.interval + - xpack.monitoring.collection.interval - + /> @@ -387,15 +394,22 @@ exports[`ExplainCollectionInterval collection interval setting updates should sh paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> @@ -682,15 +696,22 @@ exports[`ExplainCollectionInterval should explain about xpack.monitoring.collect paddingSize="l" transparentBackground={false} > + + xpack.monitoring.collection.interval +
+ } + > + xpack.monitoring.collection.interval + - xpack.monitoring.collection.interval - + /> @@ -703,15 +724,22 @@ exports[`ExplainCollectionInterval should explain about xpack.monitoring.collect paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap index 89cd3e5852f82..fb06ff2d866bb 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap @@ -26,32 +26,20 @@ Array [ >

We checked the - - + + esProd001 settings for - - + + xpack.monitoring.exporters , and found the reason: - - + + myMonitoringClusterExporter1 @@ -59,32 +47,20 @@ Array [

Using monitoring exporters to ship the monitoring data to a remote monitoring cluster is highly recommended as it keeps the integrity of the monitoring data safe no matter what the state of the production cluster. However, as this instance of Kibana could not find any monitoring data, there seems to be a problem with the - - + + xpack.monitoring.exporters configuration, or the - - + + xpack.monitoring.elasticsearch settings in - - + + kibana.yml @@ -92,22 +68,14 @@ Array [

Check that the intended exporters are enabled for sending statistics to the monitoring cluster, and that the monitoring cluster host matches the - - + + xpack.monitoring.elasticsearch setting in - - + + kibana.yml diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js index bdeb469daee46..c9147037f0022 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../../test_utils/enzyme_helpers'; import { ExplainExporters, ExplainExportersCloud } from '../exporters'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + describe('ExplainExporters', () => { test('should explain about xpack.monitoring.exporters setting', () => { const reason = { diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap index 8871d8caadd1c..63053c3f7c0cd 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap @@ -26,32 +26,20 @@ Array [ >

We checked the cluster settings and found that - - + + xpack.monitoring.enabled is set to - - + + false set, which disables monitoring. Removing the - - + + xpack.monitoring.enabled: false diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js index b962d136ba642..56536a8e4270b 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../../test_utils/enzyme_helpers'; import { ExplainPluginEnabled } from '../plugin_enabled'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + describe('ExplainPluginEnabled', () => { test('should explain about xpack.monitoring.enabled setting', () => { const reason = { diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap index fadf7c5757bf8..898be82b139d1 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap @@ -26,22 +26,14 @@ Array [ >

We checked the cluster settings and found that - - + + xpack.monitoring.collection.interval is set to - - + + -1 @@ -109,32 +101,20 @@ Array [ >

We checked the - - + + node001foo settings for - - + + xpack.monitoring.exporters , and found the reason: - - + + myMonitoringClusterExporter1 @@ -142,32 +122,20 @@ Array [

Using monitoring exporters to ship the monitoring data to a remote monitoring cluster is highly recommended as it keeps the integrity of the monitoring data safe no matter what the state of the production cluster. However, as this instance of Kibana could not find any monitoring data, there seems to be a problem with the - - + + xpack.monitoring.exporters configuration, or the - - + + xpack.monitoring.elasticsearch settings in - - + + kibana.yml @@ -175,22 +143,14 @@ Array [

Check that the intended exporters are enabled for sending statistics to the monitoring cluster, and that the monitoring cluster host matches the - - + + xpack.monitoring.elasticsearch setting in - - + + kibana.yml @@ -277,32 +237,20 @@ Array [ >

We checked the node001foo settings and found that - - + + xpack.monitoring.enabled is set to - - + + false set, which disables monitoring. Removing the - - + + xpack.monitoring.enabled: false diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js index a51817db324b7..e9b2ff11538ab 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../test_utils/enzyme_helpers'; import { ReasonFound } from '../'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + const enabler = {}; describe('ReasonFound', () => { diff --git a/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap b/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap index 65fc455417fe3..45751997eb0d5 100644 --- a/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap +++ b/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap @@ -5,27 +5,28 @@ Array [

-
+
-

- Name -

+ + +
-
-
+
+
-

- Seed nodes for cluster discovery -

+ + +
-
-
+
+
-

- Make remote cluster optional -

+ + +
-
+
,
= ({ defaultMessage="A unique identifier for this policy." /> } - idAria="nameDescription" fullWidth > = ({ defaultMessage="Name" /> } - describedByIds={['nameDescription']} isInvalid={touched.name && Boolean(errors.name)} error={errors.name} fullWidth @@ -158,7 +156,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The repository where you want to store the snapshots." /> } - idAria="policyRepositoryDescription" fullWidth > = ({ defaultMessage="Repository" /> } - describedByIds={['policyRepositoryDescription']} isInvalid={touched.repository && Boolean(errors.repository)} error={errors.repository} fullWidth @@ -307,7 +303,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The name for the snapshots. A unique identifier is automatically added to each name." /> } - idAria="policySnapshotNameDescription" fullWidth > = ({ defaultMessage="Snapshot name" /> } - describedByIds={['policySnapshotNameDescription']} isInvalid={touched.snapshotName && Boolean(errors.snapshotName)} error={errors.snapshotName} helpText={ @@ -389,7 +383,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The frequency at which to take the snapshots." /> } - idAria="policyScheduleDescription" fullWidth > {isAdvancedCronVisible ? ( @@ -401,7 +394,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="Schedule" /> } - describedByIds={['policyScheduleDescription']} isInvalid={touched.schedule && Boolean(errors.schedule)} error={errors.schedule} helpText={ diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx index df7e2c8807d9f..ec01885e76ff1 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx @@ -73,7 +73,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="The time to wait before deleting snapshots." /> } - idAria="expirationDescription" fullWidth > = ({ defaultMessage="Delete after" /> } - describedByIds={['expirationDescription']} isInvalid={touched.expireAfterValue && Boolean(errors.expireAfterValue)} error={errors.expireAfterValue} fullWidth @@ -140,7 +138,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="The minimum and maximum number of snapshots to store in your cluster." 
/> } - idAria="countDescription" fullWidth > @@ -152,7 +149,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="Mininum count" /> } - describedByIds={['countDescription']} isInvalid={touched.minCount && Boolean(errors.minCount)} error={errors.minCount} fullWidth @@ -180,7 +176,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="Maximum count" /> } - describedByIds={['countDescription']} isInvalid={touched.maxCount && Boolean(errors.maxCount)} error={errors.maxCount} fullWidth diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx index 0e3b6e030d1c6..552dbff8e7441 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx @@ -126,10 +126,9 @@ export const PolicyStepSettings: React.FunctionComponent = ({ defaultMessage="Indices to back up." /> } - idAria="indicesDescription" fullWidth > - + {isManagedPolicy ? ( = ({ defaultMessage="Ignores indices that are unavailable when taking the snapshot. Otherwise, the entire snapshot will fail." /> } - idAria="policyIgnoreUnavailableDescription" fullWidth > - + = ({ defaultMessage="Allows snapshots of indices with primary shards that are unavailable. Otherwise, the entire snapshot will fail." /> } - idAria="policyPartialDescription" fullWidth > - + = ({ defaultMessage="Stores the global state of the cluster as part of the snapshot." /> } - idAria="policyIncludeGlobalStateDescription" fullWidth > - + = ({ defaultMessage="A unique name for the repository." /> } - idAria="repositoryNameDescription" fullWidth > = ({ defaultMessage="Name" /> } - describedByIds={['repositoryNameDescription']} isInvalid={Boolean(hasValidationErrors && validation.errors.name)} error={validation.errors.name} fullWidth @@ -303,10 +301,9 @@ export const RepositoryFormStepOne: React.FunctionComponent = ({ /> } - idAria="sourceOnlyDescription" fullWidth > - + = ({ defaultMessage="The name of the Azure client." /> } - idAria="azureRepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -123,7 +121,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The name of the Azure container to use for snapshots." /> } - idAria="azureRepositoryContainerDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryContainerDescription']} isInvalid={Boolean(hasErrors && settingErrors.container)} error={settingErrors.container} > @@ -169,7 +165,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The container path to the repository data." /> } - idAria="azureRepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -215,13 +209,11 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." 
/> } - idAria="azureRepositoryCompressDescription" fullWidth > @@ -261,7 +253,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." /> } - idAria="azureRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -308,7 +298,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="azureRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -355,7 +343,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="azureRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -402,7 +388,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The primary or secondary location. If secondary, read-only is true." /> } - idAria="azureRepositoryLocationModeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryLocationModeDescription']} isInvalid={Boolean(hasErrors && settingErrors.locationMode)} error={settingErrors.locationMode} > @@ -450,13 +434,11 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="azureRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx index 2e2238ac93e3c..711db1ee300cb 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx @@ -73,7 +73,6 @@ export const FSSettings: React.FunctionComponent = ({ /> } - idAria="fsRepositoryLocationDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryLocationDescription']} isInvalid={Boolean(hasErrors && settingErrors.location)} error={settingErrors.location} > @@ -119,13 +117,11 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="fsRepositoryCompressDescription" fullWidth > @@ -165,7 +161,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="fsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -212,7 +206,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="fsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -259,7 +251,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="fsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -306,13 +296,11 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="fsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx index d15e0043b8c81..5a34d3aac6f6b 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx @@ -64,7 +64,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The name of the Google Cloud Storage client." /> } - idAria="gcsRepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -110,7 +108,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The name of the Google Cloud Storage bucket to use for snapshots." /> } - idAria="gcsRepositoryBucketDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryBucketDescription']} isInvalid={Boolean(hasErrors && settingErrors.bucket)} error={settingErrors.bucket} > @@ -156,7 +152,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The bucket path to the repository data." /> } - idAria="gcsRepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -202,13 +196,11 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="gcsRepositoryCompressDescription" fullWidth > @@ -248,7 +240,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="gcsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -295,7 +285,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="gcsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -342,7 +330,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="gcsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -389,13 +375,11 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="gcsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx index ae42b810bf059..4ef662d645bea 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx @@ -79,7 +79,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The URI address for HDFS." /> } - idAria="hdfsRepositoryUriDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryUriDescription']} isInvalid={Boolean(hasErrors && settingErrors.uri)} error={settingErrors.uri} > @@ -108,7 +106,7 @@ export const HDFSSettings: React.FunctionComponent = ({ uri: e.target.value ? `hdfs://${e.target.value}` : '', }); }} - aria-describedby="hdfsRepositoryUriDescription hdfsRepositoryUriProtocolDescription" + aria-describedby="hdfsRepositoryUriProtocolDescription" data-test-subj="uriInput" /> @@ -132,7 +130,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The file path where data is stored." /> } - idAria="hdfsRepositoryPathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryPathDescription']} isInvalid={Boolean(hasErrors && settingErrors.path)} error={settingErrors.path} > @@ -178,13 +174,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Loads the default Hadoop configuration." /> } - idAria="hdfsRepositoryLoadDefaultsDescription" fullWidth > @@ -224,13 +218,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="hdfsRepositoryCompressDescription" fullWidth > @@ -270,7 +262,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="hdfsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -317,7 +307,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The Kerberos principal to use when connecting to a secured HDFS cluster." /> } - idAria="hdfsRepositorySecurityPrincipalDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositorySecurityPrincipalDescription']} isInvalid={Boolean(hasErrors && settingErrors.securityPrincipal)} error={settingErrors.securityPrincipal} > @@ -365,7 +353,6 @@ export const HDFSSettings: React.FunctionComponent = ({ /> } - idAria="hdfsRepositoryConfigurationDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryConfigurationDescription']} isInvalid={isConfInvalid} error={ = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="hdfsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -510,7 +494,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="hdfsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -557,13 +539,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="hdfsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx index 5241a55455395..a0cc076465990 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx @@ -116,7 +116,6 @@ export const ReadonlySettings: React.FunctionComponent = ({ /> } - idAria="readonlyRepositoryUrlDescription" fullWidth >
@@ -130,7 +129,6 @@ export const ReadonlySettings: React.FunctionComponent = ({ /> } fullWidth - describedByIds={['readonlyRepositoryUrlDescription']} > = ({ /> } fullWidth - describedByIds={['readonlyRepositoryUrlDescription readonlyRepositoryUrlHelp']} + describedByIds={['readonlyRepositoryUrlHelp']} isInvalid={Boolean(hasErrors && settingErrors.url)} error={settingErrors.url} > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx index a897368ae7ca3..1a9902b42a931 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx @@ -93,7 +93,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The name of the AWS S3 client." /> } - idAria="s3RepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -139,7 +137,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The name of the AWS S3 bucket to use for snapshots." /> } - idAria="s3RepositoryBucketDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBucketDescription']} isInvalid={Boolean(hasErrors && settingErrors.bucket)} error={settingErrors.bucket} > @@ -185,7 +181,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The bucket path to the repository data." /> } - idAria="s3RepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -231,13 +225,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="s3RepositoryCompressDescription" fullWidth > @@ -277,7 +269,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." /> } - idAria="s3RepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -324,13 +314,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Encrypts files on the server using AES256 algorithm." /> } - idAria="s3RepositoryServerSideEncryptionDescription" fullWidth > @@ -371,7 +359,6 @@ export const S3Settings: React.FunctionComponent = ({ to split the chunk into several parts and upload each in its own request." /> } - idAria="s3RepositoryBufferSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBufferSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.bufferSize)} error={settingErrors.bufferSize} helpText={textService.getSizeNotationHelpText()} @@ -418,7 +404,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The canned ACL to add to new S3 buckets and objects." 
/> } - idAria="s3RepositoryCannedAclDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryCannedAclDescription']} isInvalid={Boolean(hasErrors && settingErrors.cannedAcl)} error={settingErrors.cannedAcl} > @@ -465,7 +449,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The storage class for new objects in the S3 repository." /> } - idAria="s3RepositoryStorageClassDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryStorageClassDescription']} isInvalid={Boolean(hasErrors && settingErrors.storageClass)} error={settingErrors.storageClass} > @@ -512,7 +494,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="s3RepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -559,7 +539,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="s3RepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -606,13 +584,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="s3RepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx index f5a3180adbd6e..bd8a0650c087f 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx @@ -141,14 +141,9 @@ export const RestoreSnapshotStepLogistics: React.FunctionComponent = if they are closed and have the same number of shards as the snapshot index." /> } - idAria="stepLogisticsIndicesDescription" fullWidth > - + = defaultMessage="Renames indices on restore." /> } - idAria="stepLogisticsRenameIndicesDescription" fullWidth > - + = defaultMessage="Allows restore of indices that don’t have snapshots of all shards." /> } - idAria="stepLogisticsPartialDescription" fullWidth > - + = templates with the same name. Also restores persistent settings." 
/> } - idAria="stepLogisticsIncludeGlobalStateDescription" fullWidth > = ( }} /> } - idAria="stepSettingsIndexSettingsDescription" fullWidth > - + = ( /> } fullWidth - describedByIds={['stepSettingsIndexSettingsDescription']} isInvalid={Boolean(errors.indexSettings)} error={errors.indexSettings} helpText={ @@ -235,14 +229,9 @@ export const RestoreSnapshotStepSettings: React.FunctionComponent = ( }} /> } - idAria="stepSettingsIgnoreIndexSettingsDescription" fullWidth > - + diff --git a/x-pack/package.json b/x-pack/package.json index 43df763c22bdc..e3bc8aa36373d 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -176,7 +176,7 @@ "@elastic/apm-rum-react": "^0.3.2", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "7.6.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@elastic/filesaver": "1.1.2", "@elastic/maki": "6.1.0", "@elastic/node-crypto": "^1.0.0", diff --git a/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap b/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap index 323629de7578d..2a00c7ca5c347 100644 --- a/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap +++ b/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap @@ -26,8 +26,6 @@ exports[`it renders without crashing 1`] = `

} - fullWidth={false} - gutterSize="l" title={

} - titleSize="xs" >

} - fullWidth={false} - gutterSize="l" title={

} - titleSize="xs" > renders without crashing 1`] = ` />

} - fullWidth={false} - gutterSize="l" title={

renders without crashing 1`] = ` />

} - titleSize="xs" > {i18n.translate( @@ -323,7 +322,6 @@ export const JsonWatchEditSimulate = ({ } > {i18n.translate( @@ -361,7 +358,6 @@ export const JsonWatchEditSimulate = ({ > ; export const EuiCodeEditor: React.FC; export const Query: any; } diff --git a/yarn.lock b/yarn.lock index 0a55e3d7c7850..cac39dfd4c352 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1955,15 +1955,17 @@ tabbable "^1.1.0" uuid "^3.1.0" -"@elastic/eui@18.3.0": - version "18.3.0" - resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-18.3.0.tgz#e21c6246624f694e2ae1c7c1f1a11b612faf260a" - integrity sha512-Rkj1rTtDa6iZMUF7pxYRojku1sLXzTU0FK1D9i0XE3H//exy3VyTV6qUlbdkiKXjO7emrgQqfzKDeXT+ZYztgg== +"@elastic/eui@19.0.0": + version "19.0.0" + resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-19.0.0.tgz#cf7d644945c95997d442585cf614e853f173746e" + integrity sha512-8/USz56MYhu6bV4oecJct7tsdi0ktErOIFLobNmQIKdxDOni/KpttX6IHqxM7OuIWi1AEMXoIozw68+oyL/uKQ== dependencies: "@types/chroma-js" "^1.4.3" + "@types/enzyme" "^3.1.13" "@types/lodash" "^4.14.116" "@types/numeral" "^0.0.25" "@types/react-beautiful-dnd" "^10.1.0" + "@types/react-virtualized" "^9.18.7" chroma-js "^2.0.4" classnames "^2.2.5" highlight.js "^9.12.0" @@ -4384,10 +4386,10 @@ resolved "https://registry.yarnpkg.com/@types/elasticsearch/-/elasticsearch-5.0.33.tgz#b0fd37dc674f498223b6d68c313bdfd71f4d812b" integrity sha512-n/g9pqJEpE4fyUE8VvHNGtl7E2Wv8TCroNwfgAeJKRV4ghDENahtrAo1KMsFNIejBD2gDAlEUa4CM4oEEd8p9Q== -"@types/enzyme@^3.9.0": - version "3.9.3" - resolved "https://registry.yarnpkg.com/@types/enzyme/-/enzyme-3.9.3.tgz#d1029c0edd353d7b00f3924803eb88216460beed" - integrity sha512-jDKoZiiMA3lGO3skSO7dfqEHNvmiTLLV+PHD9EBQVlJANJvpY6qq1zzjRI24ZOtG7F+CS7BVWDXKewRmN8PjHQ== +"@types/enzyme@^3.1.13", "@types/enzyme@^3.9.0": + version "3.10.5" + resolved "https://registry.yarnpkg.com/@types/enzyme/-/enzyme-3.10.5.tgz#fe7eeba3550369eed20e7fb565bfb74eec44f1f0" + integrity sha512-R+phe509UuUYy9Tk0YlSbipRpfVtIzb/9BHn5pTEtjJTF5LXvUjrIQcZvNyANNEyFrd2YGs196PniNT1fgvOQA== dependencies: "@types/cheerio" "*" "@types/react" "*" From 6bd09d616ffb93124d9fd36db77da11c883eb9e1 Mon Sep 17 00:00:00 2001 From: Josh Dover Date: Fri, 14 Feb 2020 12:06:59 -0700 Subject: [PATCH 2/7] Fix maybe behavior with object type (#55932) --- .../src/types/duration_type.test.ts | 6 +-- .../src/types/maybe_type.test.ts | 38 +++++++++++++++++++ .../kbn-config-schema/src/types/maybe_type.ts | 2 +- .../src/types/object_type.test.ts | 22 ++++++++++- .../src/types/object_type.ts | 6 +-- .../event_log/server/event_logger.test.ts | 2 - x-pack/plugins/security/server/config.ts | 2 +- 7 files changed, 67 insertions(+), 11 deletions(-) diff --git a/packages/kbn-config-schema/src/types/duration_type.test.ts b/packages/kbn-config-schema/src/types/duration_type.test.ts index 09e92ce727f2a..57e917dc99b2b 100644 --- a/packages/kbn-config-schema/src/types/duration_type.test.ts +++ b/packages/kbn-config-schema/src/types/duration_type.test.ts @@ -101,7 +101,7 @@ describe('#defaultValue', () => { source: duration({ defaultValue: 600 }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - }).validate(undefined, { val: momentDuration(700, 'ms') }) + }).validate({}, { val: momentDuration(700, 'ms') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT0.7S", @@ -115,7 +115,7 @@ Object { source: duration({ defaultValue: '1h' }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - 
}).validate(undefined, { val: momentDuration(2, 'hour') }) + }).validate({}, { val: momentDuration(2, 'hour') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT2H", @@ -129,7 +129,7 @@ Object { source: duration({ defaultValue: momentDuration(1, 'hour') }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - }).validate(undefined, { val: momentDuration(2, 'hour') }) + }).validate({}, { val: momentDuration(2, 'hour') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT2H", diff --git a/packages/kbn-config-schema/src/types/maybe_type.test.ts b/packages/kbn-config-schema/src/types/maybe_type.test.ts index ecc1d218e186d..c35fa18593520 100644 --- a/packages/kbn-config-schema/src/types/maybe_type.test.ts +++ b/packages/kbn-config-schema/src/types/maybe_type.test.ts @@ -60,3 +60,41 @@ test('includes namespace in failure', () => { const type = schema.maybe(schema.string()); expect(() => type.validate(null, {}, 'foo-namespace')).toThrowErrorMatchingSnapshot(); }); + +describe('maybe + object', () => { + test('returns undefined if undefined object', () => { + const type = schema.maybe(schema.object({})); + expect(type.validate(undefined)).toEqual(undefined); + }); + + test('returns undefined if undefined object with no defaults', () => { + const type = schema.maybe( + schema.object({ + type: schema.string(), + id: schema.string(), + }) + ); + + expect(type.validate(undefined)).toEqual(undefined); + }); + + test('returns empty object if maybe keys', () => { + const type = schema.object({ + name: schema.maybe(schema.string()), + }); + expect(type.validate({})).toEqual({}); + }); + + test('returns empty object if maybe nested object', () => { + const type = schema.object({ + name: schema.maybe( + schema.object({ + type: schema.string(), + id: schema.string(), + }) + ), + }); + + expect(type.validate({})).toEqual({}); + }); +}); diff --git a/packages/kbn-config-schema/src/types/maybe_type.ts b/packages/kbn-config-schema/src/types/maybe_type.ts index 06a9369110203..415f6315c5723 100644 --- a/packages/kbn-config-schema/src/types/maybe_type.ts +++ b/packages/kbn-config-schema/src/types/maybe_type.ts @@ -25,7 +25,7 @@ export class MaybeType extends Type { type .getSchema() .optional() - .default() + .default(() => undefined, 'undefined') ); } } diff --git a/packages/kbn-config-schema/src/types/object_type.test.ts b/packages/kbn-config-schema/src/types/object_type.test.ts index 5786984cf7ebd..64739d7a4c4da 100644 --- a/packages/kbn-config-schema/src/types/object_type.test.ts +++ b/packages/kbn-config-schema/src/types/object_type.test.ts @@ -30,6 +30,11 @@ test('returns value by default', () => { expect(type.validate(value)).toEqual({ name: 'test' }); }); +test('returns empty object if undefined', () => { + const type = schema.object({}); + expect(type.validate(undefined)).toEqual({}); +}); + test('properly parse the value if input is a string', () => { const type = schema.object({ name: schema.string(), @@ -112,14 +117,26 @@ test('undefined object within object', () => { }), }); + expect(type.validate(undefined)).toEqual({ + foo: { + bar: 'hello world', + }, + }); + expect(type.validate({})).toEqual({ foo: { bar: 'hello world', }, }); + + expect(type.validate({ foo: {} })).toEqual({ + foo: { + bar: 'hello world', + }, + }); }); -test('object within object with required', () => { +test('object within object with key without defaultValue', () => { const type = schema.object({ foo: schema.object({ bar: schema.string(), @@ -127,6 +144,9 
@@ test('object within object with required', () => {
   });
 
   const value = { foo: {} };
+  expect(() => type.validate(undefined)).toThrowErrorMatchingInlineSnapshot(
+    `"[foo.bar]: expected value of type [string] but got [undefined]"`
+  );
   expect(() => type.validate(value)).toThrowErrorMatchingInlineSnapshot(
     `"[foo.bar]: expected value of type [string] but got [undefined]"`
   );
diff --git a/packages/kbn-config-schema/src/types/object_type.ts b/packages/kbn-config-schema/src/types/object_type.ts
index d2e6c708c263c..4f3d68a6bac97 100644
--- a/packages/kbn-config-schema/src/types/object_type.ts
+++ b/packages/kbn-config-schema/src/types/object_type.ts
@@ -33,23 +33,23 @@ export type ObjectResultType

<P extends Props> = Readonly<{ [K in keyof P]: TypeO
 
 export type ObjectTypeOptions<P extends Props = any> = TypeOptions<
   { [K in keyof P]: TypeOf<P[K]> }
 > & {
+  /** Should uknown keys not be defined in the schema be allowed. Defaults to `false` */
   allowUnknowns?: boolean;
 };
 
 export class ObjectType<P extends Props = any> extends Type<ObjectResultType<P>> {
   private props: Record<string, AnySchema>;
 
-  constructor(props: P, options: ObjectTypeOptions<P> = {}) {
+  constructor(props: P, { allowUnknowns = false, ...typeOptions }: ObjectTypeOptions<P>

= {}) { const schemaKeys = {} as Record; for (const [key, value] of Object.entries(props)) { schemaKeys[key] = value.getSchema(); } - const { allowUnknowns, ...typeOptions } = options; const schema = internals .object() .keys(schemaKeys) - .optional() .default() + .optional() .unknown(Boolean(allowUnknowns)); super(schema, typeOptions); diff --git a/x-pack/plugins/event_log/server/event_logger.test.ts b/x-pack/plugins/event_log/server/event_logger.test.ts index c2de8d4dfd12b..97e52ad04dd08 100644 --- a/x-pack/plugins/event_log/server/event_logger.test.ts +++ b/x-pack/plugins/event_log/server/event_logger.test.ts @@ -57,8 +57,6 @@ describe('EventLogger', () => { kibana: { server_uuid: '424-24-2424', }, - error: {}, - user: {}, }); const $timeStamp = event!['@timestamp']!; diff --git a/x-pack/plugins/security/server/config.ts b/x-pack/plugins/security/server/config.ts index 4f1c25702ae97..db8c48f314d7c 100644 --- a/x-pack/plugins/security/server/config.ts +++ b/x-pack/plugins/security/server/config.ts @@ -41,7 +41,7 @@ export const ConfigSchema = schema.object( secureCookies: schema.boolean({ defaultValue: false }), authc: schema.object({ providers: schema.arrayOf(schema.string(), { defaultValue: ['basic'], minSize: 1 }), - oidc: providerOptionsSchema('oidc', schema.maybe(schema.object({ realm: schema.string() }))), + oidc: providerOptionsSchema('oidc', schema.object({ realm: schema.string() })), saml: providerOptionsSchema( 'saml', schema.object({ From 73cb0aa840622aa6e48afc03f5bf0fede6f54786 Mon Sep 17 00:00:00 2001 From: Pete Harverson Date: Fri, 14 Feb 2020 19:08:02 +0000 Subject: [PATCH 3/7] [ML] New Platform server shim: update filters routes to use new platform router (#57597) * [ML] Update filters routes to use new platform router * [ML] Edits to filters route following review * [ML] Edits following review and fix job service api docs Co-authored-by: Elastic Machine --- .../{detector_rule.js => detector_rule.ts} | 38 +-- .../plugins/ml/common/types/detector_rules.ts | 26 ++ .../{filter_manager.js => filter_manager.ts} | 115 ++++++--- .../plugins/ml/server/models/filter/index.ts | 7 + .../ml/server/new_platform/filters_schema.ts | 19 ++ .../plugins/ml/server/new_platform/plugin.ts | 1 - .../plugins/ml/server/routes/apidoc.json | 11 +- .../plugins/ml/server/routes/filters.js | 126 ---------- .../plugins/ml/server/routes/filters.ts | 227 ++++++++++++++++++ .../plugins/ml/server/routes/job_service.ts | 40 +-- 10 files changed, 410 insertions(+), 200 deletions(-) rename x-pack/legacy/plugins/ml/common/constants/{detector_rule.js => detector_rule.ts} (53%) create mode 100644 x-pack/legacy/plugins/ml/common/types/detector_rules.ts rename x-pack/legacy/plugins/ml/server/models/filter/{filter_manager.js => filter_manager.ts} (57%) create mode 100644 x-pack/legacy/plugins/ml/server/models/filter/index.ts create mode 100644 x-pack/legacy/plugins/ml/server/new_platform/filters_schema.ts delete mode 100644 x-pack/legacy/plugins/ml/server/routes/filters.js create mode 100644 x-pack/legacy/plugins/ml/server/routes/filters.ts diff --git a/x-pack/legacy/plugins/ml/common/constants/detector_rule.js b/x-pack/legacy/plugins/ml/common/constants/detector_rule.ts similarity index 53% rename from x-pack/legacy/plugins/ml/common/constants/detector_rule.js rename to x-pack/legacy/plugins/ml/common/constants/detector_rule.ts index f07b82f4d2d3e..81120d01318ab 100644 --- a/x-pack/legacy/plugins/ml/common/constants/detector_rule.js +++ b/x-pack/legacy/plugins/ml/common/constants/detector_rule.ts @@ -8,28 +8,28 
@@ * Contains values for ML job detector rules. */ -export const ACTION = { - SKIP_MODEL_UPDATE: 'skip_model_update', - SKIP_RESULT: 'skip_result', -}; +export enum ACTION { + SKIP_MODEL_UPDATE = 'skip_model_update', + SKIP_RESULT = 'skip_result', +} -export const FILTER_TYPE = { - EXCLUDE: 'exclude', - INCLUDE: 'include', -}; +export enum FILTER_TYPE { + EXCLUDE = 'exclude', + INCLUDE = 'include', +} -export const APPLIES_TO = { - ACTUAL: 'actual', - DIFF_FROM_TYPICAL: 'diff_from_typical', - TYPICAL: 'typical', -}; +export enum APPLIES_TO { + ACTUAL = 'actual', + DIFF_FROM_TYPICAL = 'diff_from_typical', + TYPICAL = 'typical', +} -export const OPERATOR = { - LESS_THAN: 'lt', - LESS_THAN_OR_EQUAL: 'lte', - GREATER_THAN: 'gt', - GREATER_THAN_OR_EQUAL: 'gte', -}; +export enum OPERATOR { + LESS_THAN = 'lt', + LESS_THAN_OR_EQUAL = 'lte', + GREATER_THAN = 'gt', + GREATER_THAN_OR_EQUAL = 'gte', +} // List of detector functions which don't support rules with numeric conditions. export const CONDITIONS_NOT_SUPPORTED_FUNCTIONS = ['freq_rare', 'lat_long', 'metric', 'rare']; diff --git a/x-pack/legacy/plugins/ml/common/types/detector_rules.ts b/x-pack/legacy/plugins/ml/common/types/detector_rules.ts new file mode 100644 index 0000000000000..c94e5d1327363 --- /dev/null +++ b/x-pack/legacy/plugins/ml/common/types/detector_rules.ts @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { ACTION, FILTER_TYPE, APPLIES_TO, OPERATOR } from '../constants/detector_rule'; + +export interface DetectorRuleScope { + [id: string]: { + filter_id: string; + filter_type: FILTER_TYPE; + }; +} + +export interface DetectorRuleCondition { + applies_to: APPLIES_TO; + operator: OPERATOR; + value: number; +} + +export interface DetectorRule { + actions: ACTION[]; + scope?: DetectorRuleScope; + conditions?: DetectorRuleCondition[]; +} diff --git a/x-pack/legacy/plugins/ml/server/models/filter/filter_manager.js b/x-pack/legacy/plugins/ml/server/models/filter/filter_manager.ts similarity index 57% rename from x-pack/legacy/plugins/ml/server/models/filter/filter_manager.js rename to x-pack/legacy/plugins/ml/server/models/filter/filter_manager.ts index d644494c20606..f40663a5eb6b2 100644 --- a/x-pack/legacy/plugins/ml/server/models/filter/filter_manager.js +++ b/x-pack/legacy/plugins/ml/server/models/filter/filter_manager.ts @@ -5,22 +5,75 @@ */ import Boom from 'boom'; +import { IScopedClusterClient } from 'src/core/server'; + +import { DetectorRule, DetectorRuleScope } from '../../../common/types/detector_rules'; + +export interface Filter { + filter_id: string; + description?: string; + items: string[]; +} + +export interface FormFilter { + filterId: string; + description?: string; + addItems?: string[]; + removeItems?: string[]; +} + +export interface FilterRequest { + filter_id: string; + description?: string; + add_items?: string[]; + remove_items?: string[]; +} + +interface FilterUsage { + jobs: string[]; + detectors: string[]; +} + +interface FilterStats { + filter_id: string; + description?: string; + item_count: number; + used_by: FilterUsage; +} + +interface FiltersInUse { + [id: string]: FilterUsage; +} + +interface PartialDetector { + detector_description: string; + custom_rules: DetectorRule[]; +} + +interface PartialJob { + job_id: string; + analysis_config: { + detectors: PartialDetector[]; + }; +} 
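// Illustrative sketch (assumes the FormFilter and FilterRequest interfaces declared above;
// the concrete filter id and items are invented for illustration): the camelCased FormFilter
// is the shape the Kibana-side routes pass in, while FilterRequest mirrors the snake_cased
// body sent to the ML filter endpoints, matching the mapping performed by updateFilter() below.
const exampleForm: FormFilter = {
  filterId: 'safe_domains',
  description: 'Domains excluded from custom rules',
  addItems: ['elastic.co'],
  removeItems: ['example.com'],
};
const exampleRequest: FilterRequest = {
  filter_id: exampleForm.filterId,
  description: exampleForm.description,
  add_items: exampleForm.addItems,
  remove_items: exampleForm.removeItems,
};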
export class FilterManager { - constructor(callWithRequest) { - this.callWithRequest = callWithRequest; + private _client: IScopedClusterClient['callAsCurrentUser']; + + constructor(client: IScopedClusterClient['callAsCurrentUser']) { + this._client = client; } - async getFilter(filterId) { + async getFilter(filterId: string) { try { const [JOBS, FILTERS] = [0, 1]; const results = await Promise.all([ - this.callWithRequest('ml.jobs'), - this.callWithRequest('ml.filters', { filterId }), + this._client('ml.jobs'), + this._client('ml.filters', { filterId }), ]); if (results[FILTERS] && results[FILTERS].filters.length) { - let filtersInUse = {}; + let filtersInUse: FiltersInUse = {}; if (results[JOBS] && results[JOBS].jobs) { filtersInUse = this.buildFiltersInUse(results[JOBS].jobs); } @@ -38,7 +91,7 @@ export class FilterManager { async getAllFilters() { try { - const filtersResp = await this.callWithRequest('ml.filters'); + const filtersResp = await this._client('ml.filters'); return filtersResp.filters; } catch (error) { throw Boom.badRequest(error); @@ -48,13 +101,10 @@ export class FilterManager { async getAllFilterStats() { try { const [JOBS, FILTERS] = [0, 1]; - const results = await Promise.all([ - this.callWithRequest('ml.jobs'), - this.callWithRequest('ml.filters'), - ]); + const results = await Promise.all([this._client('ml.jobs'), this._client('ml.filters')]); // Build a map of filter_ids against jobs and detectors using that filter. - let filtersInUse = {}; + let filtersInUse: FiltersInUse = {}; if (results[JOBS] && results[JOBS].jobs) { filtersInUse = this.buildFiltersInUse(results[JOBS].jobs); } @@ -64,10 +114,10 @@ export class FilterManager { // description // item_count // jobs using the filter - const filterStats = []; + const filterStats: FilterStats[] = []; if (results[FILTERS] && results[FILTERS].filters) { - results[FILTERS].filters.forEach(filter => { - const stats = { + results[FILTERS].filters.forEach((filter: Filter) => { + const stats: FilterStats = { filter_id: filter.filter_id, description: filter.description, item_count: filter.items.length, @@ -83,32 +133,32 @@ export class FilterManager { } } - async newFilter(filter) { + async newFilter(filter: FormFilter) { const filterId = filter.filterId; delete filter.filterId; try { // Returns the newly created filter. - return await this.callWithRequest('ml.addFilter', { filterId, body: filter }); + return await this._client('ml.addFilter', { filterId, body: filter }); } catch (error) { throw Boom.badRequest(error); } } - async updateFilter(filterId, description, addItems, removeItems) { + async updateFilter(filterId: string, filter: FormFilter) { try { - const body = {}; - if (description !== undefined) { - body.description = description; + const body: FilterRequest = { filter_id: filterId }; + if (filter.description !== undefined) { + body.description = filter.description; } - if (addItems !== undefined) { - body.add_items = addItems; + if (filter.addItems !== undefined) { + body.add_items = filter.addItems; } - if (removeItems !== undefined) { - body.remove_items = removeItems; + if (filter.removeItems !== undefined) { + body.remove_items = filter.removeItems; } // Returns the newly updated filter. 
- return await this.callWithRequest('ml.updateFilter', { + return await this._client('ml.updateFilter', { filterId, body, }); @@ -117,13 +167,13 @@ export class FilterManager { } } - async deleteFilter(filterId) { - return this.callWithRequest('ml.deleteFilter', { filterId }); + async deleteFilter(filterId: string) { + return this._client('ml.deleteFilter', { filterId }); } - buildFiltersInUse(jobsList) { + buildFiltersInUse(jobsList: PartialJob[]) { // Build a map of filter_ids against jobs and detectors using that filter. - const filtersInUse = {}; + const filtersInUse: FiltersInUse = {}; jobsList.forEach(job => { const detectors = job.analysis_config.detectors; detectors.forEach(detector => { @@ -131,9 +181,10 @@ export class FilterManager { const rules = detector.custom_rules; rules.forEach(rule => { if (rule.scope) { - const scopeFields = Object.keys(rule.scope); + const ruleScope: DetectorRuleScope = rule.scope; + const scopeFields = Object.keys(ruleScope); scopeFields.forEach(scopeField => { - const filter = rule.scope[scopeField]; + const filter = ruleScope[scopeField]; const filterId = filter.filter_id; if (filtersInUse[filterId] === undefined) { filtersInUse[filterId] = { jobs: [], detectors: [] }; diff --git a/x-pack/legacy/plugins/ml/server/models/filter/index.ts b/x-pack/legacy/plugins/ml/server/models/filter/index.ts new file mode 100644 index 0000000000000..ed4802f6d5ee6 --- /dev/null +++ b/x-pack/legacy/plugins/ml/server/models/filter/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { FilterManager, Filter, FormFilter } from './filter_manager'; diff --git a/x-pack/legacy/plugins/ml/server/new_platform/filters_schema.ts b/x-pack/legacy/plugins/ml/server/new_platform/filters_schema.ts new file mode 100644 index 0000000000000..dffee56565c73 --- /dev/null +++ b/x-pack/legacy/plugins/ml/server/new_platform/filters_schema.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { schema } from '@kbn/config-schema'; + +export const createFilterSchema = { + filterId: schema.string(), + description: schema.maybe(schema.string()), + items: schema.arrayOf(schema.string()), +}; + +export const updateFilterSchema = { + description: schema.maybe(schema.string()), + addItems: schema.maybe(schema.arrayOf(schema.string())), + removeItems: schema.maybe(schema.arrayOf(schema.string())), +}; diff --git a/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts b/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts index 68ab88744278e..068bfc40f53e1 100644 --- a/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts +++ b/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts @@ -48,7 +48,6 @@ import { dataVisualizerRoutes } from '../routes/data_visualizer'; import { calendars } from '../routes/calendars'; // @ts-ignore: could not find declaration file for module import { fieldsService } from '../routes/fields_service'; -// @ts-ignore: could not find declaration file for module import { filtersRoutes } from '../routes/filters'; // @ts-ignore: could not find declaration file for module import { resultsServiceRoutes } from '../routes/results_service'; diff --git a/x-pack/legacy/plugins/ml/server/routes/apidoc.json b/x-pack/legacy/plugins/ml/server/routes/apidoc.json index 3fac715fef85a..be1554bf55f78 100644 --- a/x-pack/legacy/plugins/ml/server/routes/apidoc.json +++ b/x-pack/legacy/plugins/ml/server/routes/apidoc.json @@ -57,7 +57,7 @@ "DeleteJobs", "CloseJobs", "JobsSummary", - "JobsWithTimerange", + "JobsWithTimeRange", "CreateFullJobsList", "GetAllGroups", "UpdateGroups", @@ -69,6 +69,13 @@ "GetAllJobAndGroupIds", "GetLookBackProgress", "ValidateCategoryExamples", - "TopCategories" + "TopCategories", + "Filters", + "GetFilters", + "GetFilterById", + "CreateFilter", + "UpdateFilter", + "DeleteFilter", + "GetFiltersStats" ] } diff --git a/x-pack/legacy/plugins/ml/server/routes/filters.js b/x-pack/legacy/plugins/ml/server/routes/filters.js deleted file mode 100644 index b09566c6adfbe..0000000000000 --- a/x-pack/legacy/plugins/ml/server/routes/filters.js +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { callWithRequestFactory } from '../client/call_with_request_factory'; -import { wrapError } from '../client/errors'; -import { FilterManager } from '../models/filter'; - -// TODO - add function for returning a list of just the filter IDs. -// TODO - add function for returning a list of filter IDs plus item count. 
-function getAllFilters(callWithRequest) { - const mgr = new FilterManager(callWithRequest); - return mgr.getAllFilters(); -} - -function getAllFilterStats(callWithRequest) { - const mgr = new FilterManager(callWithRequest); - return mgr.getAllFilterStats(); -} - -function getFilter(callWithRequest, filterId) { - const mgr = new FilterManager(callWithRequest); - return mgr.getFilter(filterId); -} - -function newFilter(callWithRequest, filter) { - const mgr = new FilterManager(callWithRequest); - return mgr.newFilter(filter); -} - -function updateFilter(callWithRequest, filterId, description, addItems, removeItems) { - const mgr = new FilterManager(callWithRequest); - return mgr.updateFilter(filterId, description, addItems, removeItems); -} - -function deleteFilter(callWithRequest, filterId) { - const mgr = new FilterManager(callWithRequest); - return mgr.deleteFilter(filterId); -} - -export function filtersRoutes({ commonRouteConfig, elasticsearchPlugin, route }) { - route({ - method: 'GET', - path: '/api/ml/filters', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - return getAllFilters(callWithRequest).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'GET', - path: '/api/ml/filters/_stats', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - return getAllFilterStats(callWithRequest).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'GET', - path: '/api/ml/filters/{filterId}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const filterId = request.params.filterId; - return getFilter(callWithRequest, filterId).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'PUT', - path: '/api/ml/filters', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const body = request.payload; - return newFilter(callWithRequest, body).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'PUT', - path: '/api/ml/filters/{filterId}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const filterId = request.params.filterId; - const payload = request.payload; - return updateFilter( - callWithRequest, - filterId, - payload.description, - payload.addItems, - payload.removeItems - ).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'DELETE', - path: '/api/ml/filters/{filterId}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const filterId = request.params.filterId; - return deleteFilter(callWithRequest, filterId).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); -} diff --git a/x-pack/legacy/plugins/ml/server/routes/filters.ts b/x-pack/legacy/plugins/ml/server/routes/filters.ts new file mode 100644 index 0000000000000..a06f8d4f8b727 --- /dev/null +++ b/x-pack/legacy/plugins/ml/server/routes/filters.ts @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { RequestHandlerContext } from 'src/core/server'; +import { schema } from '@kbn/config-schema'; +import { licensePreRoutingFactory } from '../new_platform/licence_check_pre_routing_factory'; +import { wrapError } from '../client/error_wrapper'; +import { RouteInitialization } from '../new_platform/plugin'; +import { createFilterSchema, updateFilterSchema } from '../new_platform/filters_schema'; +import { FilterManager, FormFilter } from '../models/filter'; + +// TODO - add function for returning a list of just the filter IDs. +// TODO - add function for returning a list of filter IDs plus item count. +function getAllFilters(context: RequestHandlerContext) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.getAllFilters(); +} + +function getAllFilterStats(context: RequestHandlerContext) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.getAllFilterStats(); +} + +function getFilter(context: RequestHandlerContext, filterId: string) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.getFilter(filterId); +} + +function newFilter(context: RequestHandlerContext, filter: FormFilter) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.newFilter(filter); +} + +function updateFilter(context: RequestHandlerContext, filterId: string, filter: FormFilter) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.updateFilter(filterId, filter); +} + +function deleteFilter(context: RequestHandlerContext, filterId: string) { + const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + return mgr.deleteFilter(filterId); +} + +export function filtersRoutes({ xpackMainPlugin, router }: RouteInitialization) { + /** + * @apiGroup Filters + * + * @api {get} /api/ml/filters Gets filters + * @apiName GetFilters + * @apiDescription Retrieves the list of filters which are used for custom rules in anomaly detection. + * + * @apiSuccess {Boolean} success + * @apiSuccess {Object[]} filters list of filters + */ + router.get( + { + path: '/api/ml/filters', + validate: false, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const resp = await getAllFilters(context); + + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup Filters + * + * @api {get} /api/ml/filters/:filterId Gets filter by ID + * @apiName GetFilterById + * @apiDescription Retrieves the filter with the specified ID. + * + * @apiSuccess {Boolean} success + * @apiSuccess {Object} filter the filter with the specified ID + */ + router.get( + { + path: '/api/ml/filters/{filterId}', + validate: { + params: schema.object({ filterId: schema.string() }), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const resp = await getFilter(context, request.params.filterId); + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup Filters + * + * @api {put} /api/ml/filters Creates a filter + * @apiName CreateFilter + * @apiDescription Instantiates a filter, for use by custom rules in anomaly detection. 
+ * + * @apiSuccess {Boolean} success + * @apiSuccess {Object} filter created filter + */ + router.put( + { + path: '/api/ml/filters', + validate: { + body: schema.object(createFilterSchema), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const body = request.body; + const resp = await newFilter(context, body); + + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup Filters + * + * @api {put} /api/ml/filters/:filterId Updates a filter + * @apiName UpdateFilter + * @apiDescription Updates the description of a filter, adds items or removes items. + * + * @apiSuccess {Boolean} success + * @apiSuccess {Object} filter updated filter + */ + router.put( + { + path: '/api/ml/filters/{filterId}', + validate: { + params: schema.object({ filterId: schema.string() }), + body: schema.object(updateFilterSchema), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const { filterId } = request.params; + const body = request.body; + const resp = await updateFilter(context, filterId, body); + + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup Filters + * + * @api {delete} /api/ml/filters/:filterId Delete filter + * @apiName DeleteFilter + * @apiDescription Deletes the filter with the specified ID. + * + * @apiParam {String} filterId the ID of the filter to delete + */ + router.delete( + { + path: '/api/ml/filters/{filterId}', + validate: { + params: schema.object({ filterId: schema.string() }), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const { filterId } = request.params; + const resp = await deleteFilter(context, filterId); + + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup Filters + * + * @api {get} /api/ml/filters/_stats Gets filters stats + * @apiName GetFiltersStats + * @apiDescription Retrieves the list of filters which are used for custom rules in anomaly detection, + * with stats on the list of jobs and detectors which are using each filter. 
+ * + * @apiSuccess {Boolean} success + * @apiSuccess {Object[]} filters list of filters with stats on usage + */ + router.get( + { + path: '/api/ml/filters/_stats', + validate: false, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const resp = await getAllFilterStats(context); + + return response.ok({ + body: resp, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); +} diff --git a/x-pack/legacy/plugins/ml/server/routes/job_service.ts b/x-pack/legacy/plugins/ml/server/routes/job_service.ts index 3af651c92353b..9aa3960e59e4c 100644 --- a/x-pack/legacy/plugins/ml/server/routes/job_service.ts +++ b/x-pack/legacy/plugins/ml/server/routes/job_service.ts @@ -29,7 +29,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/force_start_datafeeds + * @api {post} /api/ml/jobs/force_start_datafeeds Start datafeeds * @apiName ForceStartDatafeeds * @apiDescription Starts one or more datafeeds */ @@ -58,7 +58,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/stop_datafeeds + * @api {post} /api/ml/jobs/stop_datafeeds Stop datafeeds * @apiName StopDatafeeds * @apiDescription Stops one or more datafeeds */ @@ -87,7 +87,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/delete_jobs + * @api {post} /api/ml/jobs/delete_jobs Delete jobs * @apiName DeleteJobs * @apiDescription Deletes an existing anomaly detection job */ @@ -116,7 +116,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/close_jobs + * @api {post} /api/ml/jobs/close_jobs Close jobs * @apiName CloseJobs * @apiDescription Closes one or more anomaly detection jobs */ @@ -145,7 +145,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/jobs_summary + * @api {post} /api/ml/jobs/jobs_summary Jobs summary * @apiName JobsSummary * @apiDescription Creates a summary jobs list. Jobs include job stats, datafeed stats, and calendars. 
*/ @@ -174,9 +174,9 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/jobs_with_time_range - * @apiName JobsWithTimerange - * @apiDescription Creates a list of jobs with data about the job's timerange + * @api {post} /api/ml/jobs/jobs_with_time_range Jobs with time range + * @apiName JobsWithTimeRange + * @apiDescription Creates a list of jobs with data about the job's time range */ router.post( { @@ -203,7 +203,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/jobs + * @api {post} /api/ml/jobs/jobs Create jobs list * @apiName CreateFullJobsList * @apiDescription Creates a list of jobs */ @@ -232,7 +232,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {get} /api/ml/jobs/groups + * @api {get} /api/ml/jobs/groups Get job groups * @apiName GetAllGroups * @apiDescription Returns array of group objects with job ids listed for each group */ @@ -258,7 +258,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/update_groups + * @api {post} /api/ml/jobs/update_groups Update job groups * @apiName UpdateGroups * @apiDescription Updates 'groups' property of an anomaly detection job */ @@ -287,7 +287,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {get} /api/ml/jobs/deleting_jobs_tasks + * @api {get} /api/ml/jobs/deleting_jobs_tasks Get deleting job tasks * @apiName DeletingJobTasks * @apiDescription Gets the ids of deleting anomaly detection jobs */ @@ -313,7 +313,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/jobs_exist + * @api {post} /api/ml/jobs/jobs_exist Check if jobs exist * @apiName JobsExist * @apiDescription Checks if each of the jobs in the specified list of IDs exist */ @@ -342,7 +342,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {get} /api/ml/jobs/new_job_caps/:indexPattern + * @api {get} /api/ml/jobs/new_job_caps/:indexPattern Get new job capabilities * @apiName NewJobCaps * @apiDescription Retrieve the capabilities of fields for indices */ @@ -374,7 +374,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/new_job_line_chart + * @api {post} /api/ml/jobs/new_job_line_chart Get job line chart data * @apiName NewJobLineChart * @apiDescription Returns line chart data for anomaly detection job */ @@ -427,7 +427,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/new_job_population_chart + * @api {post} /api/ml/jobs/new_job_population_chart Get population job chart data * @apiName NewJobPopulationChart * @apiDescription Returns population job chart data */ @@ -477,7 +477,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {get} /api/ml/jobs/all_jobs_and_group_ids + * @api {get} /api/ml/jobs/all_jobs_and_group_ids Get all job and group IDs * @apiName GetAllJobAndGroupIds * @apiDescription Returns a list of all job IDs and all group IDs */ @@ -503,7 
+503,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/look_back_progress + * @api {post} /api/ml/jobs/look_back_progress Get lookback progress * @apiName GetLookBackProgress * @apiDescription Returns current progress of anomaly detection job */ @@ -532,7 +532,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/categorization_field_examples + * @api {post} /api/ml/jobs/categorization_field_examples Get categorization field examples * @apiName ValidateCategoryExamples * @apiDescription Validates category examples */ @@ -582,7 +582,7 @@ export function jobServiceRoutes({ xpackMainPlugin, router }: RouteInitializatio /** * @apiGroup JobService * - * @api {post} /api/ml/jobs/top_categories + * @api {post} /api/ml/jobs/top_categories Get top categories * @apiName TopCategories * @apiDescription Returns list of top categories */ From fe21356020ee9567e3164869bae768c1220db130 Mon Sep 17 00:00:00 2001 From: Paul Tavares <56442535+paul-tavares@users.noreply.github.com> Date: Fri, 14 Feb 2020 14:14:39 -0500 Subject: [PATCH 4/7] [Endpoint] Policy List UI route and initial view (#56918) * Initial Policy List view * Add `endpoint/policy` route and displays Policy List * test cases (both unit and functional) Does not yet interact with API (Ingest). --- x-pack/plugins/endpoint/common/types.ts | 2 +- .../endpoint/components/truncate_text.ts | 13 + .../public/applications/endpoint/index.tsx | 2 + .../applications/endpoint/store/action.ts | 3 +- .../applications/endpoint/store/index.ts | 5 + .../endpoint/store/policy_list/action.ts | 27 ++ .../endpoint/store/policy_list/fake_data.ts | 53 ++++ .../endpoint/store/policy_list/index.test.ts | 74 ++++++ .../endpoint/store/policy_list/index.ts | 9 + .../endpoint/store/policy_list/middleware.ts | 45 ++++ .../endpoint/store/policy_list/reducer.ts | 48 ++++ .../endpoint/store/policy_list/selectors.ts | 17 ++ .../applications/endpoint/store/reducer.ts | 2 + .../endpoint/store/routing/action.ts | 7 +- .../public/applications/endpoint/types.ts | 29 +++ .../endpoint/view/policy/index.ts | 7 + .../endpoint/view/policy/policy_hooks.ts | 12 + .../endpoint/view/policy/policy_list.tsx | 232 ++++++++++++++++++ .../applications/endpoint/view/use_page_id.ts | 10 +- x-pack/test/functional/apps/endpoint/index.ts | 1 + .../functional/apps/endpoint/policy_list.ts | 47 ++++ .../functional/page_objects/endpoint_page.ts | 19 ++ 22 files changed, 660 insertions(+), 4 deletions(-) create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts create mode 100644 
x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts create mode 100644 x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx create mode 100644 x-pack/test/functional/apps/endpoint/policy_list.ts diff --git a/x-pack/plugins/endpoint/common/types.ts b/x-pack/plugins/endpoint/common/types.ts index 0dc3fc29ca805..5ef9d22e4dd7b 100644 --- a/x-pack/plugins/endpoint/common/types.ts +++ b/x-pack/plugins/endpoint/common/types.ts @@ -118,4 +118,4 @@ export interface EndpointMetadata { /** * The PageId type is used for the payload when firing userNavigatedToPage actions */ -export type PageId = 'alertsPage' | 'managementPage'; +export type PageId = 'alertsPage' | 'managementPage' | 'policyListPage'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts b/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts new file mode 100644 index 0000000000000..83f4bc1e79317 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import styled from 'styled-components'; + +export const TruncateText = styled.div` + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +`; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx index a86c647e771d4..7bb3b13525914 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx @@ -14,6 +14,7 @@ import { Store } from 'redux'; import { appStoreFactory } from './store'; import { AlertIndex } from './view/alerts'; import { ManagementList } from './view/managing'; +import { PolicyList } from './view/policy'; /** * This module will be loaded asynchronously to reduce the bundle size of your plugin's main bundle. 
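// Note: the JSX added by the next hunk is garbled here. Based on the PolicyList import above
// and the functional test that navigates to '/policy', it presumably registers a route roughly
// like the following sketch; the `exact` prop is an assumption, not taken from the patch:
//   <Route path="/policy" exact component={PolicyList} />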
@@ -51,6 +52,7 @@ const AppRoot: React.FunctionComponent = React.memo(({ basename, st /> + ( diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts index 04c6cf7fc4634..d099c81317090 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts @@ -7,5 +7,6 @@ import { ManagementAction } from './managing'; import { AlertAction } from './alerts'; import { RoutingAction } from './routing'; +import { PolicyListAction } from './policy_list'; -export type AppAction = ManagementAction | AlertAction | RoutingAction; +export type AppAction = ManagementAction | AlertAction | RoutingAction | PolicyListAction; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts index 3bbcc3f25a6d8..8fe61ae01d319 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts @@ -17,6 +17,7 @@ import { CoreStart } from 'kibana/public'; import { appReducer } from './reducer'; import { alertMiddlewareFactory } from './alerts/middleware'; import { managementMiddlewareFactory } from './managing'; +import { policyListMiddlewareFactory } from './policy_list'; import { GlobalState } from '../types'; import { AppAction } from './action'; @@ -56,6 +57,10 @@ export const appStoreFactory = (coreStart: CoreStart): Store => { substateMiddlewareFactory( globalState => globalState.managementList, managementMiddlewareFactory(coreStart) + ), + substateMiddlewareFactory( + globalState => globalState.policyList, + policyListMiddlewareFactory(coreStart) ) ) ) diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts new file mode 100644 index 0000000000000..5ac2a4328b00a --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { PolicyData } from '../../types'; + +interface ServerReturnedPolicyListData { + type: 'serverReturnedPolicyListData'; + payload: { + policyItems: PolicyData[]; + total: number; + pageSize: number; + pageIndex: number; + }; +} + +interface UserPaginatedPolicyListTable { + type: 'userPaginatedPolicyListTable'; + payload: { + pageSize: number; + pageIndex: number; + }; +} + +export type PolicyListAction = ServerReturnedPolicyListData | UserPaginatedPolicyListTable; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts new file mode 100644 index 0000000000000..62bdd28f30be1 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +// !!!! 
Should be deleted when https://github.com/elastic/endpoint-app-team/issues/150 +// is implemented + +const dateOffsets = [ + 0, + 1000, + 300000, // 5 minutes + 3.6e6, // 1 hour + 86340000, // 23h, 59m + 9e7, // 25h + 9e7 * 5, // 5d +]; + +const randomNumbers = [5, 50, 500, 5000, 50000]; + +const getRandomDateIsoString = () => { + const randomIndex = Math.floor(Math.random() * Math.floor(dateOffsets.length)); + return new Date(Date.now() - dateOffsets[randomIndex]).toISOString(); +}; + +const getRandomNumber = () => { + const randomIndex = Math.floor(Math.random() * Math.floor(randomNumbers.length)); + return randomNumbers[randomIndex]; +}; + +export const getFakeDatasourceApiResponse = async (page: number, pageSize: number) => { + await new Promise(resolve => setTimeout(resolve, 500)); + + // Emulates the API response - see PR: + // https://github.com/elastic/kibana/pull/56567/files#diff-431549a8739efe0c56763f164c32caeeR25 + return { + items: Array.from({ length: pageSize }, (x, i) => ({ + name: `policy with some protections ${i + 1}`, + total: getRandomNumber(), + pending: getRandomNumber(), + failed: getRandomNumber(), + created_by: `admin ABC`, + created: getRandomDateIsoString(), + updated_by: 'admin 123', + updated: getRandomDateIsoString(), + })), + success: true, + total: pageSize * 10, + page, + perPage: pageSize, + }; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts new file mode 100644 index 0000000000000..ae4a0868a68fe --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { PolicyListState } from '../../types'; +import { applyMiddleware, createStore, Dispatch, Store } from 'redux'; +import { AppAction } from '../action'; +import { policyListReducer } from './reducer'; +import { policyListMiddlewareFactory } from './middleware'; +import { coreMock } from '../../../../../../../../src/core/public/mocks'; +import { CoreStart } from 'kibana/public'; +import { selectIsLoading } from './selectors'; + +describe('policy list store concerns', () => { + const sleep = () => new Promise(resolve => setTimeout(resolve, 1000)); + let fakeCoreStart: jest.Mocked; + let store: Store; + let getState: typeof store['getState']; + let dispatch: Dispatch; + + beforeEach(() => { + fakeCoreStart = coreMock.createStart({ basePath: '/mock' }); + store = createStore( + policyListReducer, + applyMiddleware(policyListMiddlewareFactory(fakeCoreStart)) + ); + getState = store.getState; + dispatch = store.dispatch; + }); + + test('it sets `isLoading` when `userNavigatedToPage`', async () => { + expect(selectIsLoading(getState())).toBe(false); + dispatch({ type: 'userNavigatedToPage', payload: 'policyListPage' }); + expect(selectIsLoading(getState())).toBe(true); + await sleep(); + expect(selectIsLoading(getState())).toBe(false); + }); + + test('it sets `isLoading` when `userPaginatedPolicyListTable`', async () => { + expect(selectIsLoading(getState())).toBe(false); + dispatch({ + type: 'userPaginatedPolicyListTable', + payload: { + pageSize: 10, + pageIndex: 1, + }, + }); + expect(selectIsLoading(getState())).toBe(true); + await sleep(); + expect(selectIsLoading(getState())).toBe(false); + }); + + test('it resets state on `userNavigatedFromPage` action', async () => { + dispatch({ + type: 'serverReturnedPolicyListData', + payload: { + policyItems: [], + pageIndex: 20, + pageSize: 50, + total: 200, + }, + }); + dispatch({ type: 'userNavigatedFromPage', payload: 'policyListPage' }); + expect(getState()).toEqual({ + policyItems: [], + isLoading: false, + pageIndex: 0, + pageSize: 10, + total: 0, + }); + }); +}); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts new file mode 100644 index 0000000000000..8086acc41d2bd --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { policyListReducer } from './reducer'; +export { PolicyListAction } from './action'; +export { policyListMiddlewareFactory } from './middleware'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts new file mode 100644 index 0000000000000..f8e2b7d07c389 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { MiddlewareFactory, PolicyListState } from '../../types'; + +export const policyListMiddlewareFactory: MiddlewareFactory = coreStart => { + return ({ getState, dispatch }) => next => async action => { + next(action); + + if ( + (action.type === 'userNavigatedToPage' && action.payload === 'policyListPage') || + action.type === 'userPaginatedPolicyListTable' + ) { + const state = getState(); + let pageSize: number; + let pageIndex: number; + + if (action.type === 'userPaginatedPolicyListTable') { + pageSize = action.payload.pageSize; + pageIndex = action.payload.pageIndex; + } else { + pageSize = state.pageSize; + pageIndex = state.pageIndex; + } + + // Need load data from API and remove fake data below + // Refactor tracked via: https://github.com/elastic/endpoint-app-team/issues/150 + const { getFakeDatasourceApiResponse } = await import('./fake_data'); + const { items: policyItems, total } = await getFakeDatasourceApiResponse(pageIndex, pageSize); + + dispatch({ + type: 'serverReturnedPolicyListData', + payload: { + policyItems, + pageIndex, + pageSize, + total, + }, + }); + } + }; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts new file mode 100644 index 0000000000000..77f536d413ae3 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Reducer } from 'redux'; +import { PolicyListState } from '../../types'; +import { AppAction } from '../action'; + +const initialPolicyListState = (): PolicyListState => { + return { + policyItems: [], + isLoading: false, + pageIndex: 0, + pageSize: 10, + total: 0, + }; +}; + +export const policyListReducer: Reducer = ( + state = initialPolicyListState(), + action +) => { + if (action.type === 'serverReturnedPolicyListData') { + return { + ...state, + ...action.payload, + isLoading: false, + }; + } + + if ( + action.type === 'userPaginatedPolicyListTable' || + (action.type === 'userNavigatedToPage' && action.payload === 'policyListPage') + ) { + return { + ...state, + isLoading: true, + }; + } + + if (action.type === 'userNavigatedFromPage' && action.payload === 'policyListPage') { + return initialPolicyListState(); + } + + return state; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts new file mode 100644 index 0000000000000..b9c2edbf5d55b --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { PolicyListState } from '../../types'; + +export const selectPolicyItems = (state: PolicyListState) => state.policyItems; + +export const selectPageIndex = (state: PolicyListState) => state.pageIndex; + +export const selectPageSize = (state: PolicyListState) => state.pageSize; + +export const selectTotal = (state: PolicyListState) => state.total; + +export const selectIsLoading = (state: PolicyListState) => state.isLoading; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts index 7d738c266fae0..3d9d21c0da9c3 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts @@ -8,8 +8,10 @@ import { managementListReducer } from './managing'; import { AppAction } from './action'; import { alertListReducer } from './alerts'; import { GlobalState } from '../types'; +import { policyListReducer } from './policy_list'; export const appReducer: Reducer = combineReducers({ managementList: managementListReducer, alertList: alertListReducer, + policyList: policyListReducer, }); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts index 263a3f72d57d5..9080af8c91817 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts @@ -11,4 +11,9 @@ interface UserNavigatedToPage { readonly payload: PageId; } -export type RoutingAction = UserNavigatedToPage; +interface UserNavigatedFromPage { + readonly type: 'userNavigatedFromPage'; + readonly payload: PageId; +} + +export type RoutingAction = UserNavigatedToPage | UserNavigatedFromPage; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts index 02a7793fc38b0..6b20012592fd9 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts @@ -29,9 +29,38 @@ export interface ManagementListPagination { pageSize: number; } +// REFACTOR to use Types from Ingest Manager - see: https://github.com/elastic/endpoint-app-team/issues/150 +export interface PolicyData { + name: string; + total: number; + pending: number; + failed: number; + created_by: string; + created: string; + updated_by: string; + updated: string; +} + +/** + * Policy list store state + */ +export interface PolicyListState { + /** Array of policy items */ + policyItems: PolicyData[]; + /** total number of policies */ + total: number; + /** Number of policies per page */ + pageSize: number; + /** page number (zero based) */ + pageIndex: number; + /** data is being retrieved from server */ + isLoading: boolean; +} + export interface GlobalState { readonly managementList: ManagementListState; readonly alertList: AlertListState; + readonly policyList: PolicyListState; } export type AlertListData = AlertResultList; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts new file mode 100644 index 0000000000000..d561da7574de0 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './policy_list'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts new file mode 100644 index 0000000000000..14558fb6504bb --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useSelector } from 'react-redux'; +import { GlobalState, PolicyListState } from '../../types'; + +export function usePolicyListSelector(selector: (state: PolicyListState) => TSelected) { + return useSelector((state: GlobalState) => selector(state.policyList)); +} diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx new file mode 100644 index 0000000000000..75ffa5e8806e9 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx @@ -0,0 +1,232 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useCallback, useMemo } from 'react'; +import { + EuiPage, + EuiPageBody, + EuiPageContent, + EuiPageContentBody, + EuiPageContentHeader, + EuiPageContentHeaderSection, + EuiTitle, + EuiBasicTable, + EuiText, + EuiTableFieldDataColumnType, + EuiToolTip, +} from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { + FormattedMessage, + FormattedDate, + FormattedTime, + FormattedNumber, + FormattedRelative, +} from '@kbn/i18n/react'; +import { useDispatch } from 'react-redux'; +import styled from 'styled-components'; +import { usePageId } from '../use_page_id'; +import { + selectIsLoading, + selectPageIndex, + selectPageSize, + selectPolicyItems, + selectTotal, +} from '../../store/policy_list/selectors'; +import { usePolicyListSelector } from './policy_hooks'; +import { PolicyListAction } from '../../store/policy_list'; +import { PolicyData } from '../../types'; +import { TruncateText } from '../../components/truncate_text'; + +interface TableChangeCallbackArguments { + page: { index: number; size: number }; +} + +const TruncateTooltipText = styled(TruncateText)` + .euiToolTipAnchor { + display: block; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + } +`; + +const FormattedDateAndTime: React.FC<{ date: Date }> = ({ date }) => { + // If date is greater than or equal to 24h (ago), then show it as a date + // else, show it as relative to "now" + return Date.now() - date.getTime() >= 8.64e7 ? 
( + <> + + {' @'} + + + ) : ( + <> + + + ); +}; + +const renderDate = (date: string, _item: PolicyData) => ( + + + + + +); + +const renderFormattedNumber = (value: number, _item: PolicyData) => ( + + + +); + +export const PolicyList = React.memo(() => { + usePageId('policyListPage'); + + const dispatch = useDispatch<(action: PolicyListAction) => void>(); + const policyItems = usePolicyListSelector(selectPolicyItems); + const pageIndex = usePolicyListSelector(selectPageIndex); + const pageSize = usePolicyListSelector(selectPageSize); + const totalItemCount = usePolicyListSelector(selectTotal); + const loading = usePolicyListSelector(selectIsLoading); + + const paginationSetup = useMemo(() => { + return { + pageIndex, + pageSize, + totalItemCount, + pageSizeOptions: [10, 20, 50], + hidePerPageOptions: false, + }; + }, [pageIndex, pageSize, totalItemCount]); + + const handleTableChange = useCallback( + ({ page: { index, size } }: TableChangeCallbackArguments) => { + dispatch({ + type: 'userPaginatedPolicyListTable', + payload: { + pageIndex: index, + pageSize: size, + }, + }); + }, + [dispatch] + ); + + const columns: Array> = useMemo( + () => [ + { + field: 'name', + name: i18n.translate('xpack.endpoint.policyList.nameField', { + defaultMessage: 'Policy Name', + }), + truncateText: true, + }, + { + field: 'total', + name: i18n.translate('xpack.endpoint.policyList.totalField', { + defaultMessage: 'Total', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'pending', + name: i18n.translate('xpack.endpoint.policyList.pendingField', { + defaultMessage: 'Pending', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'failed', + name: i18n.translate('xpack.endpoint.policyList.failedField', { + defaultMessage: 'Failed', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'created_by', + name: i18n.translate('xpack.endpoint.policyList.createdByField', { + defaultMessage: 'Created By', + }), + truncateText: true, + }, + { + field: 'created', + name: i18n.translate('xpack.endpoint.policyList.createdField', { + defaultMessage: 'Created', + }), + render: renderDate, + truncateText: true, + }, + { + field: 'updated_by', + name: i18n.translate('xpack.endpoint.policyList.updatedByField', { + defaultMessage: 'Last Updated By', + }), + truncateText: true, + }, + { + field: 'updated', + name: i18n.translate('xpack.endpoint.policyList.updatedField', { + defaultMessage: 'Last Updated', + }), + render: renderDate, + truncateText: true, + }, + ], + [] + ); + + return ( + + + + + + +

[Garbled in extraction: the JSX markup added here evidently renders the EUI page layout (EuiPage / EuiPageBody / EuiPageContent) with the policy list title, the total policy count, and the EuiBasicTable wired to policyItems, columns, loading, pagination and handleTableChange; see the imports above and the functional test expectations below.]
+ + + + + + + + + ); +}); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts index 9e241af4c0445..49c39064c8d9a 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts @@ -10,11 +10,19 @@ import { PageId } from '../../../../common/types'; import { RoutingAction } from '../store/routing'; /** - * Dispatches a 'userNavigatedToPage' action with the given 'pageId' as the action payload + * Dispatches a 'userNavigatedToPage' action with the given 'pageId' as the action payload. + * When the component is un-mounted, a `userNavigatedFromPage` action will be dispatched + * with the given `pageId`. + * + * @param pageId A page id */ export function usePageId(pageId: PageId) { const dispatch: (action: RoutingAction) => unknown = useDispatch(); useEffect(() => { dispatch({ type: 'userNavigatedToPage', payload: pageId }); + + return () => { + dispatch({ type: 'userNavigatedFromPage', payload: pageId }); + }; }, [dispatch, pageId]); } diff --git a/x-pack/test/functional/apps/endpoint/index.ts b/x-pack/test/functional/apps/endpoint/index.ts index 5fdf54b98cda6..0ea9344a67aba 100644 --- a/x-pack/test/functional/apps/endpoint/index.ts +++ b/x-pack/test/functional/apps/endpoint/index.ts @@ -12,5 +12,6 @@ export default function({ loadTestFile }: FtrProviderContext) { loadTestFile(require.resolve('./feature_controls')); loadTestFile(require.resolve('./landing_page')); loadTestFile(require.resolve('./management')); + loadTestFile(require.resolve('./policy_list')); }); } diff --git a/x-pack/test/functional/apps/endpoint/policy_list.ts b/x-pack/test/functional/apps/endpoint/policy_list.ts new file mode 100644 index 0000000000000..1fe2492bed5a0 --- /dev/null +++ b/x-pack/test/functional/apps/endpoint/policy_list.ts @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +import expect from '@kbn/expect'; +import { FtrProviderContext } from '../../ftr_provider_context'; + +export default function({ getPageObjects, getService }: FtrProviderContext) { + const pageObjects = getPageObjects(['common', 'endpoint']); + const testSubjects = getService('testSubjects'); + + describe('Endpoint Policy List', function() { + this.tags(['ciGroup7']); + before(async () => { + await pageObjects.common.navigateToUrlWithBrowserHistory('endpoint', '/policy'); + }); + + it('loads the Policy List Page', async () => { + await testSubjects.existOrFail('policyListPage'); + }); + it('displays page title', async () => { + const policyTitle = await testSubjects.getVisibleText('policyViewTitle'); + expect(policyTitle).to.equal('Policies'); + }); + it('shows policy count total', async () => { + const policyTotal = await testSubjects.getVisibleText('policyTotalCount'); + expect(policyTotal).to.equal('0 Policies'); + }); + it('includes policy list table', async () => { + await testSubjects.existOrFail('policyTable'); + }); + it('has correct table headers', async () => { + const allHeaderCells = await pageObjects.endpoint.tableHeaderVisibleText('policyTable'); + expect(allHeaderCells).to.eql([ + 'Policy Name', + 'Total', + 'Pending', + 'Failed', + 'Created By', + 'Created', + 'Last Updated By', + 'Last Updated', + ]); + }); + }); +} diff --git a/x-pack/test/functional/page_objects/endpoint_page.ts b/x-pack/test/functional/page_objects/endpoint_page.ts index a306a855a83eb..54f537dd0e8c3 100644 --- a/x-pack/test/functional/page_objects/endpoint_page.ts +++ b/x-pack/test/functional/page_objects/endpoint_page.ts @@ -11,6 +11,25 @@ export function EndpointPageProvider({ getService }: FtrProviderContext) { const table = getService('table'); return { + /** + * Finds the Table with the given `selector` (test subject) and returns + * back an array containing the table's header column text + * + * @param selector + * @returns Promise + */ + async tableHeaderVisibleText(selector: string) { + const $ = await (await testSubjects.find('policyTable')).parseDomContent(); + return $('thead tr th') + .toArray() + .map(th => + $(th) + .text() + .replace(/ /g, '') + .trim() + ); + }, + async welcomeEndpointTitle() { return await testSubjects.getVisibleText('welcomeTitle'); }, From 52a566d095756883168fc54fe9afa502ff695af2 Mon Sep 17 00:00:00 2001 From: gchaps <33642766+gchaps@users.noreply.github.com> Date: Fri, 14 Feb 2020 11:33:25 -0800 Subject: [PATCH 5/7] [DOCS] Adds link to migration guide (#57702) --- docs/developer/plugin/development-plugin-resources.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/developer/plugin/development-plugin-resources.asciidoc b/docs/developer/plugin/development-plugin-resources.asciidoc index 71c442aaf52e8..a2fd0e23d0be4 100644 --- a/docs/developer/plugin/development-plugin-resources.asciidoc +++ b/docs/developer/plugin/development-plugin-resources.asciidoc @@ -66,3 +66,8 @@ To enable TypeScript support, create a `tsconfig.json` file at the root of your TypeScript code is automatically converted into JavaScript during development, but not in the distributable version of Kibana. If you use the {repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build your plugin, then your `.ts` and `.tsx` files will be permanently transpiled before your plugin is archived. 
If you have your own build process, make sure to run the TypeScript compiler on your source files and ship the compilation output so that your plugin will work with the distributable version of Kibana. + +==== {kib} platform migration guide + +{repo}blob/{branch}/src/core/MIGRATION.md#migrating-legacy-plugins-to-the-new-platform[This guide] +provides an action plan for moving a legacy plugin to the new platform. From 34ae99b516f7aede7e108cad7cb1fb8d82a7de0f Mon Sep 17 00:00:00 2001 From: Tim Sullivan Date: Fri, 14 Feb 2020 12:36:08 -0700 Subject: [PATCH 6/7] [Reporting/New Platform] Provide async access to server-side dependencies (#56824) * [Reporting/New Platform] Provide async access to server-side * consistent name for reportingPlugin * Prettier changes * simplify reporting usage collector setup * add more tests * extract internals access to separate core class * fix tests * fix imports for jest and build Co-authored-by: Elastic Machine --- .../execute_job/decrypt_job_headers.test.ts | 2 +- .../get_conditional_headers.test.ts | 59 ++- .../execute_job/get_custom_logo.test.ts | 29 +- .../common/execute_job/get_custom_logo.ts | 18 +- .../common/execute_job/get_full_urls.test.ts | 2 +- .../common/lib/screenshots/index.ts | 18 +- .../export_types/csv/server/create_job.ts | 9 +- .../csv/server/execute_job.test.js | 354 +++++++++++------- .../export_types/csv/server/execute_job.ts | 133 +++---- .../csv/server/lib/field_format_map.ts | 10 +- .../server/create_job/create_job.ts | 4 +- .../server/execute_job.ts | 8 +- .../server/lib/generate_csv.ts | 11 +- .../server/lib/generate_csv_search.ts | 47 +-- .../png/server/create_job/index.ts | 13 +- .../png/server/execute_job/index.test.js | 47 ++- .../png/server/execute_job/index.ts | 16 +- .../printable_pdf/server/create_job/index.ts | 11 +- .../server/execute_job/index.test.js | 60 +-- .../printable_pdf/server/execute_job/index.ts | 18 +- x-pack/legacy/plugins/reporting/index.ts | 2 +- .../legacy/plugins/reporting/reporting.d.ts | 2 +- .../legacy/plugins/reporting/server/core.ts | 122 ++++++ .../legacy/plugins/reporting/server/index.ts | 3 + .../legacy/plugins/reporting/server/legacy.ts | 9 +- .../reporting/server/lib/create_queue.ts | 31 +- .../server/lib/create_worker.test.ts | 38 +- .../reporting/server/lib/create_worker.ts | 36 +- .../reporting/server/lib/enqueue_job.ts | 21 +- .../plugins/reporting/server/lib/get_user.ts | 2 +- .../reporting/server/lib/validate/index.ts | 5 +- .../legacy/plugins/reporting/server/plugin.ts | 109 ++---- .../server/routes/generate_from_jobparams.ts | 2 +- .../routes/generate_from_savedobject.ts | 2 +- .../generate_from_savedobject_immediate.ts | 10 +- .../reporting/server/routes/generation.ts | 26 +- .../plugins/reporting/server/routes/index.ts | 16 +- .../reporting/server/routes/jobs.test.js | 128 +++++-- .../plugins/reporting/server/routes/jobs.ts | 6 +- .../routes/lib/authorized_user_pre_routing.ts | 2 +- .../lib/reporting_feature_pre_routing.ts | 2 +- .../routes/lib/route_config_factories.ts | 2 +- .../plugins/reporting/server/types.d.ts | 50 +++ .../server/usage/decorate_range_stats.ts | 2 +- .../usage/reporting_usage_collector.test.js | 230 +++++++----- .../server/usage/reporting_usage_collector.ts | 25 +- .../create_mock_reportingplugin.ts | 53 +++ .../test_helpers/create_mock_server.ts | 7 - .../plugins/reporting/test_helpers/index.ts | 8 + x-pack/legacy/plugins/reporting/types.d.ts | 36 +- 50 files changed, 1109 insertions(+), 747 deletions(-) create mode 100644 
x-pack/legacy/plugins/reporting/server/core.ts create mode 100644 x-pack/legacy/plugins/reporting/server/types.d.ts create mode 100644 x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts create mode 100644 x-pack/legacy/plugins/reporting/test_helpers/index.ts diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts index 1b7ba3c90bab1..468caf93ec5dd 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/decrypt_job_headers.test.ts @@ -5,7 +5,7 @@ */ import { cryptoFactory } from '../../../server/lib/crypto'; -import { createMockServer } from '../../../test_helpers/create_mock_server'; +import { createMockServer } from '../../../test_helpers'; import { Logger } from '../../../types'; import { decryptJobHeaders } from './decrypt_job_headers'; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts index 070bdb4314af9..eedb742ad7597 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_conditional_headers.test.ts @@ -4,13 +4,16 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createMockServer } from '../../../test_helpers/create_mock_server'; -import { getConditionalHeaders, getCustomLogo } from './index'; +import { createMockReportingCore, createMockServer } from '../../../test_helpers'; +import { ReportingCore } from '../../../server'; import { JobDocPayload } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; +import { getConditionalHeaders, getCustomLogo } from './index'; +let mockReportingPlugin: ReportingCore; let mockServer: any; -beforeEach(() => { +beforeEach(async () => { + mockReportingPlugin = await createMockReportingCore(); mockServer = createMockServer(''); }); @@ -148,56 +151,76 @@ describe('conditions', () => { }); test('uses basePath from job when creating saved object service', async () => { + const mockGetSavedObjectsClient = jest.fn(); + mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const permittedHeaders = { foo: 'bar', baz: 'quix', }; - const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, server: mockServer, }); - - const logo = 'custom-logo'; - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - const jobBasePath = '/sbp/s/marketing'; await getCustomLogo({ + reporting: mockReportingPlugin, job: { basePath: jobBasePath } as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.mock.calls[0][0].getBasePath()).toBe( - jobBasePath - ); + const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; + expect(getBasePath()).toBe(jobBasePath); }); test(`uses basePath from server if job doesn't have a basePath when creating saved object service`, async () => { + const mockGetSavedObjectsClient = jest.fn(); + mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const permittedHeaders = { foo: 'bar', baz: 'quix', }; - const conditionalHeaders 
= await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, server: mockServer, }); - const logo = 'custom-logo'; - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - await getCustomLogo({ + reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.mock.calls[0][0].getBasePath()).toBe( - '/sbp' - ); + const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; + expect(getBasePath()).toBe(`/sbp`); + expect(mockGetSavedObjectsClient.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + Object { + "getBasePath": [Function], + "headers": Object { + "baz": "quix", + "foo": "bar", + }, + "path": "/", + "raw": Object { + "req": Object { + "url": "/", + }, + }, + "route": Object { + "settings": Object {}, + }, + "url": Object { + "href": "/", + }, + }, + ] + `); }); describe('config formatting', () => { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts index ff2c44026315d..fa53f474dfba7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts @@ -4,12 +4,16 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createMockServer } from '../../../test_helpers/create_mock_server'; -import { getConditionalHeaders, getCustomLogo } from './index'; +import { ReportingCore } from '../../../server'; +import { createMockReportingCore, createMockServer } from '../../../test_helpers'; +import { ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; +import { getConditionalHeaders, getCustomLogo } from './index'; -let mockServer: any; -beforeEach(() => { +let mockReportingPlugin: ReportingCore; +let mockServer: ServerFacade; +beforeEach(async () => { + mockReportingPlugin = await createMockReportingCore(); mockServer = createMockServer(''); }); @@ -19,6 +23,17 @@ test(`gets logo from uiSettings`, async () => { baz: 'quix', }; + const mockGet = jest.fn(); + mockGet.mockImplementationOnce((...args: any[]) => { + if (args[0] === 'xpackReporting:customPdfLogo') { + return 'purple pony'; + } + throw new Error('wrong caller args!'); + }); + mockReportingPlugin.getUiSettingsServiceFactory = jest.fn().mockResolvedValue({ + get: mockGet, + }); + const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayloadPDF, filteredHeaders: permittedHeaders, @@ -26,12 +41,12 @@ test(`gets logo from uiSettings`, async () => { }); const { logo } = await getCustomLogo({ + reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - - expect(mockServer.uiSettingsServiceFactory().get).toBeCalledWith('xpackReporting:customPdfLogo'); + expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo'); + expect(logo).toBe('purple pony'); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts index 0059276f6df71..7af5edab41ab7 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts +++ 
b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts @@ -5,14 +5,17 @@ */ import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { ConditionalHeaders, ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only export const getCustomLogo = async ({ + reporting, server, job, conditionalHeaders, }: { + reporting: ReportingCore; server: ServerFacade; job: JobDocPayloadPDF; conditionalHeaders: ConditionalHeaders; @@ -27,19 +30,12 @@ export const getCustomLogo = async ({ getBasePath: () => job.basePath || serverBasePath, path: '/', route: { settings: {} }, - url: { - href: '/', - }, - raw: { - req: { - url: '/', - }, - }, + url: { href: '/' }, + raw: { req: { url: '/' } }, }; - const savedObjects = server.savedObjects; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(fakeRequest); - const uiSettings = server.uiSettingsServiceFactory({ savedObjectsClient }); + const savedObjectsClient = await reporting.getSavedObjectsClient(fakeRequest); + const uiSettings = await reporting.getUiSettingsServiceFactory(savedObjectsClient); const logo: string = await uiSettings.get(UI_SETTINGS_CUSTOM_PDF_LOGO); return { conditionalHeaders, logo }; }; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts index 9b2a065427f70..27e772195f726 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { createMockServer } from '../../../test_helpers/create_mock_server'; +import { createMockServer } from '../../../test_helpers'; import { ServerFacade } from '../../../types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts index 9fd3ee391ddbb..62b5e29e88ecf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts @@ -5,19 +5,19 @@ */ import * as Rx from 'rxjs'; -import { first, concatMap, take, toArray, mergeMap } from 'rxjs/operators'; -import { ServerFacade, CaptureConfig, HeadlessChromiumDriverFactory } from '../../../../types'; -import { ScreenshotResults, ScreenshotObservableOpts } from './types'; -import { injectCustomCss } from './inject_css'; -import { openUrl } from './open_url'; -import { waitForRenderComplete } from './wait_for_render'; -import { getNumberOfItems } from './get_number_of_items'; -import { waitForElementsToBeInDOM } from './wait_for_dom_elements'; -import { getTimeRange } from './get_time_range'; +import { concatMap, first, mergeMap, take, toArray } from 'rxjs/operators'; +import { CaptureConfig, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; import { getElementPositionAndAttributes } from './get_element_position_data'; +import { getNumberOfItems } from './get_number_of_items'; import { getScreenshots } from './get_screenshots'; +import { getTimeRange } from './get_time_range'; +import { injectCustomCss } from './inject_css'; +import { openUrl } from './open_url'; import { scanPage } from './scan_page'; import { skipTelemetry } from './skip_telemetry'; +import { ScreenshotObservableOpts, ScreenshotResults } from './types'; +import { waitForElementsToBeInDOM } from './wait_for_dom_elements'; +import { waitForRenderComplete } from './wait_for_render'; export function screenshotsObservableFactory( server: ServerFacade, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts index 063ac7f77704c..7ea67277015ab 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts @@ -4,19 +4,20 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib/crypto'; import { - CreateJobFactory, ConditionalHeaders, - ServerFacade, - RequestFacade, + CreateJobFactory, ESQueueCreateJobFn, + RequestFacade, + ServerFacade, } from '../../../types'; import { JobParamsDiscoverCsv } from '../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index b21d628332027..f12916b734dbf 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -9,6 +9,7 @@ import sinon from 'sinon'; import nodeCrypto from '@elastic/node-crypto'; import { CancellationToken } from '../../../common/cancellation_token'; import { fieldFormats } from '../../../../../../../src/plugins/data/server'; +import { createMockReportingCore } from '../../../test_helpers'; import { LevelLogger } from '../../../server/lib/level_logger'; import { executeJobFactory } from './execute_job'; import { setFieldFormats } from '../../../server/services'; @@ -36,16 +37,19 @@ describe('CSV Execute Job', function() { let encryptedHeaders; let cancellationToken; + let mockReportingPlugin; let mockServer; let clusterStub; let callAsCurrentUserStub; - let uiSettingsGetStub; const mockElasticsearch = { dataClient: { asScoped: () => clusterStub, }, }; + const mockUiSettingsClient = { + get: sinon.stub(), + }; beforeAll(async function() { const crypto = nodeCrypto({ encryptionKey }); @@ -53,6 +57,8 @@ describe('CSV Execute Job', function() { }); beforeEach(async function() { + mockReportingPlugin = await createMockReportingCore(); + mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; cancellationToken = new CancellationToken(); defaultElasticsearchResponse = { @@ -70,9 +76,8 @@ describe('CSV Execute Job', function() { .resolves(defaultElasticsearchResponse); const configGetStub = sinon.stub(); - uiSettingsGetStub = sinon.stub(); - uiSettingsGetStub.withArgs('csv:separator').returns(','); - uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); + mockUiSettingsClient.get.withArgs('csv:separator').returns(','); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); setFieldFormats({ fieldFormatServiceFactory: function() { @@ -90,26 +95,11 @@ describe('CSV Execute Job', function() { }); mockServer = { - expose: function() {}, - plugins: { - elasticsearch: { - getCluster: function() { - return clusterStub; - }, - }, - }, config: function() { return { get: configGetStub, }; }, - savedObjects: { - getScopedSavedObjectsClient: sinon.stub(), - }, - uiSettingsServiceFactory: sinon.stub().returns({ - get: uiSettingsGetStub, - }), - log: function() {}, }; mockServer .config() @@ -125,83 +115,14 @@ describe('CSV Execute Job', function() { .returns({}); }); - describe('calls getScopedSavedObjectsClient with request', function() { - it('containing decrypted headers', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - 
cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].headers).toEqual( - headers - ); - }); - - it(`containing getBasePath() returning server's basePath if the job doesn't have one`, async function() { - const serverBasePath = '/foo-server/basePath/'; - mockServer - .config() - .get.withArgs('server.basePath') - .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect( - mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).toEqual(serverBasePath); - }); - - it(`containing getBasePath() returning job's basePath if the job has one`, async function() { - const serverBasePath = '/foo-server/basePath/'; - mockServer - .config() - .get.withArgs('server.basePath') - .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - const jobBasePath = 'foo-job/basePath/'; - await executeJob( - 'job789', - { - headers: encryptedHeaders, - fields: [], - searchRequest: { index: null, body: null }, - basePath: jobBasePath, - }, - cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect( - mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).toEqual(jobBasePath); - }); - }); - - describe('uiSettings', function() { - it('passed scoped SavedObjectsClient to uiSettingsServiceFactory', async function() { - const returnValue = Symbol(); - mockServer.savedObjects.getScopedSavedObjectsClient.returns(returnValue); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - cancellationToken - ); - expect(mockServer.uiSettingsServiceFactory.calledOnce).toBe(true); - expect(mockServer.uiSettingsServiceFactory.firstCall.args[0].savedObjectsClient).toBe( - returnValue - ); - }); - }); - describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -217,7 +138,12 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const job = { headers: encryptedHeaders, fields: [], @@ -244,7 +170,12 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: 
encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -258,7 +189,12 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -288,7 +224,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -323,7 +264,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -351,7 +297,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -381,7 +332,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -409,7 +365,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -437,7 +398,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -465,7 +431,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -485,7 +456,12 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callAsCurrentUserStub.rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await 
executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -504,7 +480,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callAsCurrentUserStub.onSecondCall().rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -525,7 +506,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -546,7 +532,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -574,7 +565,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -602,7 +598,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -638,7 +639,12 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -653,7 +659,12 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -667,7 +678,12 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -685,7 +701,12 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async 
function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -696,8 +717,13 @@ describe('CSV Execute Job', function() { }); it('should use custom uiSettings csv:separator for header', async function() { - uiSettingsGetStub.withArgs('csv:separator').returns(';'); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -708,8 +734,13 @@ describe('CSV Execute Job', function() { }); it('should escape column headers if uiSettings csv:quoteValues is true', async function() { - uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -720,8 +751,13 @@ describe('CSV Execute Job', function() { }); it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { - uiSettingsGetStub.withArgs('csv:quoteValues').returns(false); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -732,7 +768,12 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -752,7 +793,12 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -773,7 +819,12 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -801,7 +852,12 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async 
function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -846,7 +902,12 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(1); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -879,7 +940,12 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(9); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -919,7 +985,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -948,6 +1019,7 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { + mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; mockServer .config() .get.withArgs('xpack.reporting.csv.maxSizeBytes') @@ -960,7 +1032,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1000,7 +1077,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1029,7 +1111,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1058,7 +1145,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index 9f94a755cf655..1579985891053 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ 
b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -4,20 +4,26 @@ * you may not use this file except in compliance with the Elastic License. */ -import Hapi from 'hapi'; import { i18n } from '@kbn/i18n'; -import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../src/core/server'; +import Hapi from 'hapi'; +import { + ElasticsearchServiceSetup, + IUiSettingsClient, + KibanaRequest, +} from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; +import { getFieldFormats } from '../../../server/services'; import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; import { JobDocPayloadDiscoverCsv } from '../types'; import { fieldFormatMapFactory } from './lib/field_format_map'; import { createGenerateCsv } from './lib/generate_csv'; -import { getFieldFormats } from '../../../server/services'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn( +>> = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger @@ -40,83 +46,78 @@ export const executeJobFactory: ExecuteJobFactory { + let decryptedHeaders; + try { + decryptedHeaders = await crypto.decrypt(headers); + } catch (err) { + logger.error(err); + throw new Error( + i18n.translate( + 'xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage', + { + defaultMessage: 'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report. {err}', + values: { encryptionKey: 'xpack.reporting.encryptionKey', err: err.toString() }, + } + ) + ); // prettier-ignore + } + return decryptedHeaders; + }; - const fakeRequest = { - headers: decryptedHeaders, + const fakeRequest = KibanaRequest.from({ + headers: await decryptHeaders(), // This is used by the spaces SavedObjectClientWrapper to determine the existing space. 
// We use the basePath from the saved job, which we'll have post spaces being implemented; // or we use the server base path, which uses the default space getBasePath: () => basePath || serverBasePath, path: '/', route: { settings: {} }, - url: { - href: '/', - }, - raw: { - req: { - url: '/', - }, - }, - }; + url: { href: '/' }, + raw: { req: { url: '/' } }, + } as Hapi.Request); + + const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(fakeRequest); + const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => + callAsCurrentUser(endpoint, clientParams, options); - const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( - KibanaRequest.from(fakeRequest as Hapi.Request) - ); - const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => { - return callAsCurrentUser(endpoint, clientParams, options); + const savedObjectsClient = await reporting.getSavedObjectsClient(fakeRequest); + const uiSettingsClient = await reporting.getUiSettingsServiceFactory(savedObjectsClient); + + const getFormatsMap = async (client: IUiSettingsClient) => { + const fieldFormats = await getFieldFormats().fieldFormatServiceFactory(client); + return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats); }; - const savedObjects = server.savedObjects; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( - (fakeRequest as unknown) as KibanaRequest - ); - const uiConfig = server.uiSettingsServiceFactory({ - savedObjectsClient, - }); + const getUiSettings = async (client: IUiSettingsClient) => { + const [separator, quoteValues, timezone] = await Promise.all([ + client.get('csv:separator'), + client.get('csv:quoteValues'), + client.get('dateFormat:tz'), + ]); - const [formatsMap, uiSettings] = await Promise.all([ - (async () => { - const fieldFormats = await getFieldFormats().fieldFormatServiceFactory(uiConfig); - return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats); - })(), - (async () => { - const [separator, quoteValues, timezone] = await Promise.all([ - uiConfig.get('csv:separator'), - uiConfig.get('csv:quoteValues'), - uiConfig.get('dateFormat:tz'), - ]); + if (timezone === 'Browser') { + logger.warn( + i18n.translate('xpack.reporting.exportTypes.csv.executeJob.dateFormateSetting', { + defaultMessage: 'Kibana Advanced Setting "{dateFormatTimezone}" is set to "Browser". Dates will be formatted as UTC to avoid ambiguity.', + values: { dateFormatTimezone: 'dateFormat:tz' } + }) + ); // prettier-ignore + } - if (timezone === 'Browser') { - jobLogger.warn( - `Kibana Advanced Setting "dateFormat:tz" is set to "Browser". 
Dates will be formatted as UTC to avoid ambiguity.` - ); - } + return { + separator, + quoteValues, + timezone, + }; + }; - return { - separator, - quoteValues, - timezone, - }; - })(), + const [formatsMap, uiSettings] = await Promise.all([ + getFormatsMap(uiSettingsClient), + getUiSettings(uiSettingsClient), ]); const generateCsv = createGenerateCsv(jobLogger); diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts index e1459e195d9f6..dac963635c469 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts @@ -9,15 +9,7 @@ import { FieldFormatConfig, IFieldFormatsRegistry, } from '../../../../../../../../src/plugins/data/server'; - -interface IndexPatternSavedObject { - attributes: { - fieldFormatMap: string; - }; - id: string; - type: string; - version: string; -} +import { IndexPatternSavedObject } from '../../../../types'; /** * Create a map of FieldFormat instances for index pattern fields diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index ddef2aa0a6268..17072d311b35f 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -5,9 +5,10 @@ */ import { notFound, notImplemented } from 'boom'; -import { get } from 'lodash'; import { ElasticsearchServiceSetup } from 'kibana/server'; +import { get } from 'lodash'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; +import { ReportingCore } from '../../../../server'; import { cryptoFactory } from '../../../../server/lib'; import { CreateJobFactory, @@ -37,6 +38,7 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index b1b7b7d818200..6bb3e73fcfe84 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -7,6 +7,7 @@ import { i18n } from '@kbn/i18n'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; import { ExecuteJobFactory, @@ -22,13 +23,15 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn( +>> = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger ) { const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); + const generateCsv = createGenerateCsv(reporting, server, elasticsearch, parentLogger); return async function executeJob( jobId: 
string | null, @@ -86,11 +89,8 @@ export const executeJobFactory: ExecuteJobFactory { +const getEsQueryConfig = async (config: IUiSettingsClient) => { const configs = await Promise.all([ config.get('query:allowLeadingWildcards'), config.get('query:queryString:options'), @@ -49,7 +53,7 @@ const getEsQueryConfig = async (config: any) => { } as EsQueryConfig; }; -const getUiSettings = async (config: any) => { +const getUiSettings = async (config: IUiSettingsClient) => { const configs = await Promise.all([config.get('csv:separator'), config.get('csv:quoteValues')]); const [separator, quoteValues] = configs; return { separator, quoteValues }; @@ -57,14 +61,14 @@ const getUiSettings = async (config: any) => { export async function generateCsvSearch( req: RequestFacade, + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv ): Promise { - const { savedObjects, uiSettingsServiceFactory } = server; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( + const savedObjectsClient = await reporting.getSavedObjectsClient( KibanaRequest.from(req.getRawRequest()) ); const { indexPatternSavedObjectId, timerange } = searchPanel; @@ -73,7 +77,8 @@ export async function generateCsvSearch( savedObjectsClient, indexPatternSavedObjectId ); - const uiConfig = uiSettingsServiceFactory({ savedObjectsClient }); + + const uiConfig = await reporting.getUiSettingsServiceFactory(savedObjectsClient); const esQueryConfig = await getEsQueryConfig(uiConfig); const { diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts index 3f03246106d3e..a6911e1f14704 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts @@ -4,20 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { validateUrls } from '../../../../common/validate_urls'; +import { ReportingCore } from '../../../../server'; +import { cryptoFactory } from '../../../../server/lib/crypto'; import { + ConditionalHeaders, CreateJobFactory, - ServerFacade, - RequestFacade, ESQueueCreateJobFn, - ConditionalHeaders, + RequestFacade, + ServerFacade, } from '../../../../types'; -import { validateUrls } from '../../../../common/validate_urls'; -import { cryptoFactory } from '../../../../server/lib/crypto'; import { JobParamsPNG } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index bb33ef9c19a1d..c0c21119e1d53 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -6,6 +6,7 @@ import * as Rx from 'rxjs'; import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; import { generatePngObservableFactory } from '../lib/generate_png'; @@ -19,7 +20,11 @@ const cancellationToken = { let config; let mockServer; -beforeEach(() => { +let mockReporting; + +beforeEach(async () => { + mockReporting = await createMockReportingCore(); + config = { 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', @@ -27,18 +32,11 @@ beforeEach(() => { 'server.port': 5601, }; mockServer = { - expose: () => {}, // NOTE: this is for oncePerServer config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), - log: jest.fn(), }; - mockServer.config().get.mockImplementation(key => { return config[key]; }); @@ -67,9 +65,12 @@ test(`passes browserTimezone to generatePng`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -87,9 +88,15 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -109,9 +116,15 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const 
executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index c9f370197da66..5cde245080914 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -7,14 +7,9 @@ import * as Rx from 'rxjs'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; +import { ReportingCore } from '../../../../server'; import { PNG_JOB_TYPE } from '../../../../common/constants'; -import { - ServerFacade, - ExecuteJobFactory, - ESQueueWorkerExecuteFn, - HeadlessChromiumDriverFactory, - Logger, -} from '../../../../types'; +import { ServerFacade, ExecuteJobFactory, ESQueueWorkerExecuteFn, Logger } from '../../../../types'; import { decryptJobHeaders, omitBlacklistedHeaders, @@ -26,12 +21,13 @@ import { generatePngObservableFactory } from '../lib/generate_png'; type QueuedPngExecutorFactory = ExecuteJobFactory>; -export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFactoryFn( +export const executeJobFactory: QueuedPngExecutorFactory = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory } + parentLogger: Logger ) { + const browserDriverFactory = await reporting.getBrowserDriverFactory(); const generatePngObservable = generatePngObservableFactory(server, browserDriverFactory); const logger = parentLogger.clone([PNG_JOB_TYPE, 'execute']); diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts index a8cc71175cffe..656c99991e1f6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts @@ -4,20 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { validateUrls } from '../../../../common/validate_urls'; +import { ReportingCore } from '../../../../server'; +import { cryptoFactory } from '../../../../server/lib/crypto'; import { + ConditionalHeaders, CreateJobFactory, ESQueueCreateJobFn, - ServerFacade, RequestFacade, - ConditionalHeaders, + ServerFacade, } from '../../../../types'; -import { validateUrls } from '../../../../common/validate_urls'; -import { cryptoFactory } from '../../../../server/lib/crypto'; import { JobParamsPDF } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJobFn( diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index c21d39f4922cb..cc6b298bebdc5 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -6,6 +6,7 @@ import * as Rx from 'rxjs'; import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; import { generatePdfObservableFactory } from '../lib/generate_pdf'; @@ -19,7 +20,11 @@ const cancellationToken = { let config; let mockServer; -beforeEach(() => { +let mockReporting; + +beforeEach(async () => { + mockReporting = await createMockReportingCore(); + config = { 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', @@ -27,18 +32,11 @@ beforeEach(() => { 'server.port': 5601, }; mockServer = { - expose: jest.fn(), - log: jest.fn(), config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), }; - mockServer.config().get.mockImplementation(key => { return config[key]; }); @@ -60,38 +58,13 @@ const encryptHeaders = async headers => { return await crypto.encrypt(headers); }; -test(`passes browserTimezone to generatePdf`, async () => { - const encryptedHeaders = await encryptHeaders({}); - - const generatePdfObservable = generatePdfObservableFactory(); - generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(''))); - - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); - const browserTimezone = 'UTC'; - await executeJob( - 'pdfJobId', - { relativeUrls: [], browserTimezone, headers: encryptedHeaders }, - cancellationToken - ); - - expect(mockServer.uiSettingsServiceFactory().get).toBeCalledWith('xpackReporting:customPdfLogo'); - expect(generatePdfObservable).toBeCalledWith( - expect.any(LevelLogger), - undefined, - [], - browserTimezone, - expect.anything(), - undefined, - undefined - ); -}); - test(`returns content_type of application/pdf`, async () => { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = 
generatePdfObservableFactory(); @@ -111,9 +84,12 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index 162376e31216e..e8461862bee82 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -7,13 +7,8 @@ import * as Rx from 'rxjs'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; -import { - ServerFacade, - ExecuteJobFactory, - ESQueueWorkerExecuteFn, - HeadlessChromiumDriverFactory, - Logger, -} from '../../../../types'; +import { ReportingCore } from '../../../../server'; +import { ServerFacade, ExecuteJobFactory, ESQueueWorkerExecuteFn, Logger } from '../../../../types'; import { JobDocPayloadPDF } from '../../types'; import { PDF_JOB_TYPE } from '../../../../common/constants'; import { generatePdfObservableFactory } from '../lib/generate_pdf'; @@ -27,12 +22,13 @@ import { type QueuedPdfExecutorFactory = ExecuteJobFactory>; -export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFactoryFn( +export const executeJobFactory: QueuedPdfExecutorFactory = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory } + parentLogger: Logger ) { + const browserDriverFactory = await reporting.getBrowserDriverFactory(); const generatePdfObservable = generatePdfObservableFactory(server, browserDriverFactory); const logger = parentLogger.clone([PDF_JOB_TYPE, 'execute']); @@ -43,7 +39,7 @@ export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFa mergeMap(() => decryptJobHeaders({ server, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), - mergeMap(conditionalHeaders => getCustomLogo({ server, job, conditionalHeaders })), + mergeMap(conditionalHeaders => getCustomLogo({ reporting, server, job, conditionalHeaders })), mergeMap(({ logo, conditionalHeaders }) => { const urls = getFullUrls({ server, job }); diff --git a/x-pack/legacy/plugins/reporting/index.ts b/x-pack/legacy/plugins/reporting/index.ts index cbafc4b1ecc4b..9ce4e807f8ef8 100644 --- a/x-pack/legacy/plugins/reporting/index.ts +++ b/x-pack/legacy/plugins/reporting/index.ts @@ -10,7 +10,7 @@ import { resolve } from 'path'; import { PLUGIN_ID, UI_SETTINGS_CUSTOM_PDF_LOGO } from './common/constants'; import { config as reportingConfig } from './config'; import { legacyInit } from './server/legacy'; -import { ReportingConfigOptions, ReportingPluginSpecOptions } from './types.d'; +import { 
ReportingConfigOptions, ReportingPluginSpecOptions } from './types'; const kbToBase64Length = (kb: number) => { return Math.floor((kb * 1024 * 8) / 6); diff --git a/x-pack/legacy/plugins/reporting/reporting.d.ts b/x-pack/legacy/plugins/reporting/reporting.d.ts index d4a7943f6d067..ec65c15f53864 100644 --- a/x-pack/legacy/plugins/reporting/reporting.d.ts +++ b/x-pack/legacy/plugins/reporting/reporting.d.ts @@ -4,4 +4,4 @@ * you may not use this file except in compliance with the Elastic License. */ -export { ReportingPlugin } from './types'; +export { ReportingPlugin } from './server/plugin'; diff --git a/x-pack/legacy/plugins/reporting/server/core.ts b/x-pack/legacy/plugins/reporting/server/core.ts new file mode 100644 index 0000000000000..4506d41e4f5c3 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/core.ts @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as Rx from 'rxjs'; +import { first, mapTo } from 'rxjs/operators'; +import { + IUiSettingsClient, + KibanaRequest, + SavedObjectsClient, + SavedObjectsServiceStart, + UiSettingsServiceStart, +} from 'src/core/server'; +// @ts-ignore no module definition +import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; +import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; +import { PLUGIN_ID } from '../common/constants'; +import { EnqueueJobFn, ESQueueInstance, ReportingPluginSpecOptions, ServerFacade } from '../types'; +import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; +import { checkLicenseFactory, getExportTypesRegistry, LevelLogger } from './lib'; +import { registerRoutes } from './routes'; +import { ReportingSetupDeps } from './types'; + +interface ReportingInternalSetup { + browserDriverFactory: HeadlessChromiumDriverFactory; +} +interface ReportingInternalStart { + savedObjects: SavedObjectsServiceStart; + uiSettings: UiSettingsServiceStart; + esqueue: ESQueueInstance; + enqueueJob: EnqueueJobFn; +} + +export class ReportingCore { + private pluginSetupDeps?: ReportingInternalSetup; + private pluginStartDeps?: ReportingInternalStart; + private readonly pluginSetup$ = new Rx.ReplaySubject(); + private readonly pluginStart$ = new Rx.ReplaySubject(); + private exportTypesRegistry = getExportTypesRegistry(); + + constructor(private logger: LevelLogger) {} + + legacySetup( + xpackMainPlugin: XPackMainPlugin, + reporting: ReportingPluginSpecOptions, + __LEGACY: ServerFacade, + plugins: ReportingSetupDeps + ) { + mirrorPluginStatus(xpackMainPlugin, reporting); + const checkLicense = checkLicenseFactory(this.exportTypesRegistry); + (xpackMainPlugin as any).status.once('green', () => { + // Register a function that is called whenever the xpack info changes, + // to re-compute the license check results for this plugin + xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); + }); + // Reporting routes + registerRoutes(this, __LEGACY, plugins, this.logger); + } + + public pluginSetup(reportingSetupDeps: ReportingInternalSetup) { + this.pluginSetup$.next(reportingSetupDeps); + } + + public pluginStart(reportingStartDeps: ReportingInternalStart) { + this.pluginStart$.next(reportingStartDeps); + } + + public pluginHasStarted(): Promise { + return this.pluginStart$.pipe(first(), mapTo(true)).toPromise(); + } + + 
/* + * Internal module dependencies + */ + public getExportTypesRegistry() { + return this.exportTypesRegistry; + } + + public async getEsqueue(): Promise { + return (await this.getPluginStartDeps()).esqueue; + } + + public async getEnqueueJob(): Promise { + return (await this.getPluginStartDeps()).enqueueJob; + } + + public async getBrowserDriverFactory(): Promise { + return (await this.getPluginSetupDeps()).browserDriverFactory; + } + + /* + * Kibana core module dependencies + */ + private async getPluginSetupDeps() { + if (this.pluginSetupDeps) { + return this.pluginSetupDeps; + } + return await this.pluginSetup$.pipe(first()).toPromise(); + } + + private async getPluginStartDeps() { + if (this.pluginStartDeps) { + return this.pluginStartDeps; + } + return await this.pluginStart$.pipe(first()).toPromise(); + } + + public async getSavedObjectsClient(fakeRequest: KibanaRequest): Promise { + const { savedObjects } = await this.getPluginStartDeps(); + return savedObjects.getScopedClient(fakeRequest) as SavedObjectsClient; + } + + public async getUiSettingsServiceFactory( + savedObjectsClient: SavedObjectsClient + ): Promise { + const { uiSettings: uiSettingsService } = await this.getPluginStartDeps(); + const scopedUiSettingsService = uiSettingsService.asScopedToClient(savedObjectsClient); + return scopedUiSettingsService; + } +} diff --git a/x-pack/legacy/plugins/reporting/server/index.ts b/x-pack/legacy/plugins/reporting/server/index.ts index 438a3fd595a10..24e2a954415d9 100644 --- a/x-pack/legacy/plugins/reporting/server/index.ts +++ b/x-pack/legacy/plugins/reporting/server/index.ts @@ -10,3 +10,6 @@ import { ReportingPlugin as Plugin } from './plugin'; export const plugin = (context: PluginInitializerContext) => { return new Plugin(context); }; + +export { ReportingCore } from './core'; +export { ReportingPlugin } from './plugin'; diff --git a/x-pack/legacy/plugins/reporting/server/legacy.ts b/x-pack/legacy/plugins/reporting/server/legacy.ts index c80aef06cf270..336ff5f4d2ee7 100644 --- a/x-pack/legacy/plugins/reporting/server/legacy.ts +++ b/x-pack/legacy/plugins/reporting/server/legacy.ts @@ -8,7 +8,7 @@ import { PluginInitializerContext } from 'src/core/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { ReportingPluginSpecOptions } from '../types'; import { plugin } from './index'; -import { LegacySetup, ReportingStartDeps } from './plugin'; +import { LegacySetup, ReportingStartDeps } from './types'; const buildLegacyDependencies = ( server: Legacy.Server, @@ -22,8 +22,6 @@ const buildLegacyDependencies = ( xpack_main: server.plugins.xpack_main, reporting: reportingPlugin, }, - savedObjects: server.savedObjects, - uiSettingsServiceFactory: server.uiSettingsServiceFactory, }); export const legacyInit = async ( @@ -33,17 +31,20 @@ export const legacyInit = async ( const coreSetup = server.newPlatform.setup.core; const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); + const __LEGACY = buildLegacyDependencies(server, reportingPlugin); await pluginInstance.setup(coreSetup, { elasticsearch: coreSetup.elasticsearch, security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, usageCollection: server.newPlatform.setup.plugins.usageCollection, - __LEGACY: buildLegacyDependencies(server, reportingPlugin), + __LEGACY, }); // Schedule to call the "start" hook only after start dependencies are ready coreSetup.getStartServices().then(([core, plugins]) => pluginInstance.start(core, { + elasticsearch: 
coreSetup.elasticsearch, data: (plugins as ReportingStartDeps).data, + __LEGACY, }) ); }; diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index c4e32b3ebcd99..d593e4625cdf4 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -5,29 +5,19 @@ */ import { ElasticsearchServiceSetup } from 'kibana/server'; -import { - ServerFacade, - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - QueueConfig, - Logger, -} from '../../types'; +import { ESQueueInstance, ServerFacade, QueueConfig, Logger } from '../../types'; +import { ReportingCore } from '../core'; // @ts-ignore import { Esqueue } from './esqueue'; import { createWorkerFactory } from './create_worker'; import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed -interface CreateQueueFactoryOpts { - exportTypesRegistry: ExportTypesRegistry; - browserDriverFactory: HeadlessChromiumDriverFactory; -} - -export function createQueueFactory( +export async function createQueueFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - { exportTypesRegistry, browserDriverFactory }: CreateQueueFactoryOpts -): Esqueue { + logger: Logger +): Promise { const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); const index = server.config().get('xpack.reporting.index'); @@ -39,15 +29,12 @@ export function createQueueFactory( logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']), }; - const queue: Esqueue = new Esqueue(index, queueOptions); + const queue: ESQueueInstance = new Esqueue(index, queueOptions); if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(server, elasticsearch, logger, { - exportTypesRegistry, - browserDriverFactory, - }); - createWorker(queue); + const createWorker = createWorkerFactory(reporting, server, elasticsearch, logger); + await createWorker(queue); } else { logger.info( 'xpack.reporting.queue.pollEnabled is set to false. This Kibana instance ' + diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index f5c42e5505cd1..d4d913243e18d 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -4,9 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import * as sinon from 'sinon'; import { ElasticsearchServiceSetup } from 'kibana/server'; -import { HeadlessChromiumDriverFactory, ServerFacade } from '../../types'; +import * as sinon from 'sinon'; +import { ReportingCore } from '../../server'; +import { createMockReportingCore } from '../../test_helpers'; +import { ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; @@ -33,34 +35,34 @@ const getMockLogger = jest.fn(); const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] -) => ({ - getAll: () => exportTypes, -}); +) => + ({ + getAll: () => exportTypes, + } as ExportTypesRegistry); describe('Create Worker', () => { let queue: Esqueue; let client: ClientMock; + let mockReporting: ReportingCore; - beforeEach(() => { + beforeEach(async () => { + mockReporting = await createMockReportingCore(); client = new ClientMock(); queue = new Esqueue('reporting-queue', { client }); executeJobFactoryStub.reset(); }); test('Creates a single Esqueue worker for Reporting', async () => { - const exportTypesRegistry = getMockExportTypesRegistry(); + mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); const createWorker = createWorkerFactory( + mockReporting, getMockServer(), {} as ElasticsearchServiceSetup, - getMockLogger(), - { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - } + getMockLogger() ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - createWorker(queue); + await createWorker(queue); sinon.assert.callCount(executeJobFactoryStub, 1); sinon.assert.callCount(registerWorkerSpy, 1); @@ -88,18 +90,16 @@ Object { { executeJobFactory: executeJobFactoryStub }, { executeJobFactory: executeJobFactoryStub }, ]); + mockReporting.getExportTypesRegistry = () => exportTypesRegistry; const createWorker = createWorkerFactory( + mockReporting, getMockServer(), {} as ElasticsearchServiceSetup, - getMockLogger(), - { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - } + getMockLogger() ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - createWorker(queue); + await createWorker(queue); sinon.assert.callCount(executeJobFactoryStub, 5); sinon.assert.callCount(registerWorkerSpy, 1); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 2ca638f641291..3567712367608 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -5,34 +5,29 @@ */ import { ElasticsearchServiceSetup } from 'kibana/server'; -import { PLUGIN_ID } from '../../common/constants'; -import { ExportTypesRegistry, HeadlessChromiumDriverFactory } from '../../types'; import { CancellationToken } from '../../common/cancellation_token'; +import { PLUGIN_ID } from '../../common/constants'; import { ESQueueInstance, - QueueConfig, - ExportTypeDefinition, ESQueueWorkerExecuteFn, - JobDocPayload, + ExportTypeDefinition, ImmediateExecuteFn, + JobDocPayload, JobSource, + Logger, + QueueConfig, RequestFacade, ServerFacade, - Logger, } from '../../types'; +import { ReportingCore } from '../core'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -interface CreateWorkerFactoryOpts { - exportTypesRegistry: 
ExportTypesRegistry; - browserDriverFactory: HeadlessChromiumDriverFactory; -} - export function createWorkerFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - { exportTypesRegistry, browserDriverFactory }: CreateWorkerFactoryOpts + logger: Logger ) { type JobDocPayloadType = JobDocPayload; const config = server.config(); @@ -41,20 +36,23 @@ export function createWorkerFactory( const kibanaId: string = config.get('server.uuid'); // Once more document types are added, this will need to be passed in - return function createWorker(queue: ESQueueInstance) { + return async function createWorker(queue: ESQueueInstance) { // export type / execute job map const jobExecutors: Map< string, ImmediateExecuteFn | ESQueueWorkerExecuteFn > = new Map(); - for (const exportType of exportTypesRegistry.getAll() as Array< - ExportTypeDefinition + for (const exportType of reporting.getExportTypesRegistry().getAll() as Array< + ExportTypeDefinition >) { // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = exportType.executeJobFactory(server, elasticsearch, logger, { - browserDriverFactory, - }); + const jobExecutor = await exportType.executeJobFactory( + reporting, + server, + elasticsearch, + logger + ); jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index 1da8a3795aacc..c215bdc398904 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -16,11 +16,11 @@ import { ServerFacade, RequestFacade, Logger, - ExportTypesRegistry, CaptureConfig, QueueConfig, ConditionalHeaders, } from '../../types'; +import { ReportingCore } from '../core'; interface ConfirmedJob { id: string; @@ -29,16 +29,11 @@ interface ConfirmedJob { _primary_term: number; } -interface EnqueueJobFactoryOpts { - exportTypesRegistry: ExportTypesRegistry; - esqueue: any; -} - export function enqueueJobFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { exportTypesRegistry, esqueue }: EnqueueJobFactoryOpts + parentLogger: Logger ): EnqueueJobFn { const logger = parentLogger.clone(['queue-job']); const config = server.config(); @@ -56,14 +51,20 @@ export function enqueueJobFactory( ): Promise { type CreateJobFn = ESQueueCreateJobFn | ImmediateCreateJobFn; - const exportType = exportTypesRegistry.getById(exportTypeId); + const esqueue = await reporting.getEsqueue(); + const exportType = reporting.getExportTypesRegistry().getById(exportTypeId); if (exportType == null) { throw new Error(`Export type ${exportTypeId} does not exist in the registry!`); } // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory(server, elasticsearch, logger) as CreateJobFn; + const createJob = exportType.createJobFactory( + reporting, + server, + elasticsearch, + logger + ) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index ab02dfe0743f0..49d5c568c3981 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -7,7 +7,7 
@@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../../src/core/server'; import { ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps } from '../types'; export function getUserFactory(server: ServerFacade, security: ReportingSetupDeps['security']) { /* diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 028d8fa143487..0fdbd858b8e3c 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -16,8 +16,8 @@ import { validateServerHost } from './validate_server_host'; export async function runValidations( server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - browserFactory: HeadlessChromiumDriverFactory + browserFactory: HeadlessChromiumDriverFactory, + logger: Logger ) { try { await Promise.all([ @@ -32,6 +32,7 @@ export async function runValidations( }) ); } catch (err) { + logger.error(err); logger.warning( i18n.translate('xpack.reporting.selfCheck.warning', { defaultMessage: `Reporting plugin self-check generated a warning: {err}`, diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index ef7b01f8e9c15..4f24cc16b2277 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -4,97 +4,66 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Legacy } from 'kibana'; -import { - CoreSetup, - CoreStart, - ElasticsearchServiceSetup, - Plugin, - PluginInitializerContext, -} from 'src/core/server'; -import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; -import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server'; -import { SecurityPluginSetup } from '../../../../plugins/security/server'; -// @ts-ignore -import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; -import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; -import { PLUGIN_ID } from '../common/constants'; +import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/server'; import { logConfiguration } from '../log_configuration'; -import { ReportingPluginSpecOptions } from '../types.d'; import { createBrowserDriverFactory } from './browsers'; -import { checkLicenseFactory, getExportTypesRegistry, LevelLogger, runValidations } from './lib'; -import { registerRoutes } from './routes'; +import { ReportingCore } from './core'; +import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } from './lib'; import { setFieldFormats } from './services'; +import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types'; import { registerReportingUsageCollector } from './usage'; +// @ts-ignore no module definition +import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; -export interface ReportingSetupDeps { - elasticsearch: ElasticsearchServiceSetup; - usageCollection: UsageCollectionSetup; - security: SecurityPluginSetup; - __LEGACY: LegacySetup; -} - -export interface ReportingStartDeps { - data: DataPluginStart; -} - -export interface LegacySetup { - config: Legacy.Server['config']; - info: Legacy.Server['info']; - plugins: { - elasticsearch: Legacy.Server['plugins']['elasticsearch']; - xpack_main: XPackMainPlugin & { - 
status?: any; - }; - reporting: ReportingPluginSpecOptions; - }; - route: Legacy.Server['route']; - savedObjects: Legacy.Server['savedObjects']; - uiSettingsServiceFactory: Legacy.Server['uiSettingsServiceFactory']; -} +export class ReportingPlugin + implements Plugin { + private logger: LevelLogger; + private reportingCore: ReportingCore; -export class ReportingPlugin implements Plugin { - constructor(private context: PluginInitializerContext) {} + constructor(context: PluginInitializerContext) { + this.logger = new LevelLogger(context.logger.get('reporting')); + this.reportingCore = new ReportingCore(this.logger); + } public async setup(core: CoreSetup, plugins: ReportingSetupDeps) { const { elasticsearch, usageCollection, __LEGACY } = plugins; - const exportTypesRegistry = getExportTypesRegistry(); - let isCollectorReady = false; + const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, this.logger); // required for validations :( + runValidations(__LEGACY, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults + + const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins; + this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, __LEGACY, plugins); // Register a function with server to manage the collection of usage stats - registerReportingUsageCollector( - usageCollection, - __LEGACY, - () => isCollectorReady, - exportTypesRegistry - ); + registerReportingUsageCollector(this.reportingCore, __LEGACY, usageCollection); - const logger = new LevelLogger(this.context.logger.get('reporting')); - const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, logger); + // regsister setup internals + this.reportingCore.pluginSetup({ browserDriverFactory }); - logConfiguration(__LEGACY, logger); - runValidations(__LEGACY, elasticsearch, logger, browserDriverFactory); + return {}; + } - const { xpack_main: xpackMainPlugin, reporting } = __LEGACY.plugins; - mirrorPluginStatus(xpackMainPlugin, reporting); + public async start(core: CoreStart, plugins: ReportingStartDeps) { + const { reportingCore, logger } = this; + const { elasticsearch, __LEGACY } = plugins; - const checkLicense = checkLicenseFactory(exportTypesRegistry); + const esqueue = await createQueueFactory(reportingCore, __LEGACY, elasticsearch, logger); + const enqueueJob = enqueueJobFactory(reportingCore, __LEGACY, elasticsearch, logger); - (xpackMainPlugin as any).status.once('green', () => { - // Register a function that is called whenever the xpack info changes, - // to re-compute the license check results for this plugin - xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); + this.reportingCore.pluginStart({ + savedObjects: core.savedObjects, + uiSettings: core.uiSettings, + esqueue, + enqueueJob, }); - // Post initialization of the above code, the collector is now ready to fetch its data - isCollectorReady = true; + setFieldFormats(plugins.data.fieldFormats); + logConfiguration(__LEGACY, this.logger); - // Reporting routes - registerRoutes(__LEGACY, plugins, exportTypesRegistry, browserDriverFactory, logger); + return {}; } - public start(core: CoreStart, plugins: ReportingStartDeps) { - setFieldFormats(plugins.data.fieldFormats); + public getReportingCore() { + return this.reportingCore; } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts index 
ed761b1e684ae..49868bb7ad5d5 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts @@ -10,7 +10,7 @@ import { Legacy } from 'kibana'; import rison from 'rison-node'; import { API_BASE_URL } from '../../common/constants'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { GetRouteConfigFactoryFn, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts index 8696f36a45c62..415b6b7d64366 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts @@ -9,7 +9,7 @@ import { get } from 'lodash'; import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants'; import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; import { HandlerErrorFunction, HandlerFunction, QueuedJobPayload } from './types'; diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index fd1d85fef0f21..5d17fa2e82b8c 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -10,14 +10,13 @@ import { createJobFactory, executeJobFactory } from '../../export_types/csv_from import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { JobDocPayloadPanelCsv } from '../../export_types/csv_from_savedobject/types'; import { - HeadlessChromiumDriverFactory, JobDocOutput, Logger, ReportingResponseToolkit, ResponseFacade, ServerFacade, } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; @@ -31,6 +30,7 @@ import { getRouteOptionsCsv } from './lib/route_config_factories'; * - local (transient) changes the user made to the saved object */ export function registerGenerateCsvFromSavedObjectImmediate( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, parentLogger: Logger @@ -58,10 +58,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( * * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here */ - const createJobFn = createJobFactory(server, elasticsearch, logger); - const executeJobFn = executeJobFactory(server, elasticsearch, logger, { - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - }); + const createJobFn = createJobFactory(reporting, server, elasticsearch, logger); + const 
executeJobFn = await executeJobFactory(reporting, server, elasticsearch, logger); const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( jobParams, request.headers, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 02a9541484bc6..096ba84b63d1a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -8,15 +8,8 @@ import boom from 'boom'; import { errors as elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; -import { - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - Logger, - ReportingResponseToolkit, - ServerFacade, -} from '../../types'; -import { createQueueFactory, enqueueJobFactory } from '../lib'; -import { ReportingSetupDeps } from '../plugin'; +import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerGenerateCsvFromSavedObject } from './generate_from_savedobject'; import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_savedobject_immediate'; @@ -25,23 +18,13 @@ import { makeRequestFacade } from './lib/make_request_facade'; const esErrors = elasticsearchErrors as Record; export function registerJobGenerationRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, - exportTypesRegistry: ExportTypesRegistry, - browserDriverFactory: HeadlessChromiumDriverFactory, logger: Logger ) { const config = server.config(); const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; - const { elasticsearch } = plugins; - const esqueue = createQueueFactory(server, elasticsearch, logger, { - exportTypesRegistry, - browserDriverFactory, - }); - const enqueueJob = enqueueJobFactory(server, elasticsearch, logger, { - exportTypesRegistry, - esqueue, - }); /* * Generates enqueued job details to use in responses @@ -56,6 +39,7 @@ export function registerJobGenerationRoutes( const user = request.pre.user; const headers = request.headers; + const enqueueJob = await reporting.getEnqueueJob(); const job = await enqueueJob(exportTypeId, jobParams, user, headers, request); // return the queue's job information @@ -87,6 +71,6 @@ export function registerJobGenerationRoutes( // Register beta panel-action download-related API's if (config.get('xpack.reporting.csv.enablePanelActionDownload')) { registerGenerateCsvFromSavedObject(server, plugins, handler, handleError, logger); - registerGenerateCsvFromSavedObjectImmediate(server, plugins, logger); + registerGenerateCsvFromSavedObjectImmediate(reporting, server, plugins, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/index.ts b/x-pack/legacy/plugins/reporting/server/routes/index.ts index 4cfa9dd465eab..610ab4907d369 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/index.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/index.ts @@ -4,23 +4,17 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - Logger, - ServerFacade, -} from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { Logger, ServerFacade } from '../../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { registerJobGenerationRoutes } from './generation'; import { registerJobInfoRoutes } from './jobs'; export function registerRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, - exportTypesRegistry: ExportTypesRegistry, - browserDriverFactory: HeadlessChromiumDriverFactory, logger: Logger ) { - registerJobGenerationRoutes(server, plugins, exportTypesRegistry, browserDriverFactory, logger); - registerJobInfoRoutes(server, plugins, exportTypesRegistry, logger); + registerJobGenerationRoutes(reporting, server, plugins, logger); + registerJobInfoRoutes(reporting, server, plugins, logger); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index 811c81c502b81..071b401d2321b 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -5,30 +5,30 @@ */ import Hapi from 'hapi'; -import { difference, memoize } from 'lodash'; -import { registerJobInfoRoutes } from './jobs'; +import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../test_helpers'; import { ExportTypesRegistry } from '../lib/export_types_registry'; -jest.mock('./lib/authorized_user_pre_routing', () => { - return { - authorizedUserPreRoutingFactory: () => () => ({}), - }; -}); -jest.mock('./lib/reporting_feature_pre_routing', () => { - return { - reportingFeaturePreRoutingFactory: () => () => () => ({ - jobTypes: ['unencodedJobType', 'base64EncodedJobType'], - }), - }; -}); + +jest.mock('./lib/authorized_user_pre_routing', () => ({ + authorizedUserPreRoutingFactory: () => () => ({}), +})); +jest.mock('./lib/reporting_feature_pre_routing', () => ({ + reportingFeaturePreRoutingFactory: () => () => () => ({ + jobTypes: ['unencodedJobType', 'base64EncodedJobType'], + }), +})); + +import { registerJobInfoRoutes } from './jobs'; let mockServer; let exportTypesRegistry; +let mockReportingPlugin; const mockLogger = { error: jest.fn(), debug: jest.fn(), }; -beforeEach(() => { +beforeEach(async () => { mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); mockServer.config = memoize(() => ({ get: jest.fn() })); exportTypesRegistry = new ExportTypesRegistry(); @@ -43,6 +43,8 @@ beforeEach(() => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); + mockReportingPlugin = await createMockReportingCore(); + mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; }); const mockPlugins = { @@ -60,12 +62,15 @@ const getHits = (...sources) => { }; }; +const getErrorsFromRequest = request => + request.logs.filter(log => log.tags.includes('error')).map(log => log.error); + test(`returns 404 if job not found`, async () => { mockPlugins.elasticsearch.adminClient = { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -84,7 +89,7 @@ test(`returns 401 if not valid job type`, async () => { .mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))), 
}; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -105,7 +110,7 @@ describe(`when job is incomplete`, () => { ), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -147,7 +152,7 @@ describe(`when job is failed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -192,7 +197,7 @@ describe(`when job is completed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -203,72 +208,115 @@ describe(`when job is completed`, () => { }; test(`sets statusCode to 200`, async () => { - const { statusCode } = await getCompletedResponse(); + const { statusCode, request } = await getCompletedResponse(); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(statusCode).toBe(200); }); test(`doesn't encode output content for not-specified jobTypes`, async () => { - const { payload } = await getCompletedResponse({ + const { payload, request } = await getCompletedResponse({ jobType: 'unencodedJobType', outputContent: 'test', }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(payload).toBe('test'); }); test(`base64 encodes output content for configured jobTypes`, async () => { - const { payload } = await getCompletedResponse({ + const { payload, request } = await getCompletedResponse({ jobType: 'base64EncodedJobType', outputContent: 'test', }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(payload).toBe(Buffer.from('test', 'base64').toString()); }); test(`specifies text/csv; charset=utf-8 contentType header from the job output`, async () => { - const { headers } = await getCompletedResponse({ outputContentType: 'text/csv' }); + const { headers, request } = await getCompletedResponse({ outputContentType: 'text/csv' }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(headers['content-type']).toBe('text/csv; charset=utf-8'); }); test(`specifies default filename in content-disposition header if no title`, async () => { - const { headers } = await getCompletedResponse({}); + const { headers, request } = await getCompletedResponse({}); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="report.csv"'); }); test(`specifies payload title in content-disposition header`, async () => { - const { headers } = await getCompletedResponse({ title: 'something' }); + const { headers, request } = await getCompletedResponse({ title: 'something' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="something.csv"'); }); test(`specifies jobContentExtension in content-disposition header`, async () => { - const { 
headers } = await getCompletedResponse({ jobType: 'base64EncodedJobType' }); + const { headers, request } = await getCompletedResponse({ jobType: 'base64EncodedJobType' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="report.pdf"'); }); test(`specifies application/pdf contentType header from the job output`, async () => { - const { headers } = await getCompletedResponse({ outputContentType: 'application/pdf' }); + const { headers, request } = await getCompletedResponse({ + outputContentType: 'application/pdf', + }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-type']).toBe('application/pdf'); }); describe(`when non-whitelisted contentType specified in job output`, () => { test(`sets statusCode to 500`, async () => { - const { statusCode } = await getCompletedResponse({ outputContentType: 'application/html' }); + const { statusCode, request } = await getCompletedResponse({ + outputContentType: 'application/html', + }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); expect(statusCode).toBe(500); }); test(`doesn't include job output content in payload`, async () => { - const { payload } = await getCompletedResponse({ outputContentType: 'application/html' }); - expect(payload).not.toMatch(/job output content/); + const { payload, request } = await getCompletedResponse({ + outputContentType: 'application/html', + }); + expect(payload).toMatchInlineSnapshot( + `"{\\"statusCode\\":500,\\"error\\":\\"Internal Server Error\\",\\"message\\":\\"An internal server error occurred\\"}"` + ); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); }); test(`logs error message about invalid content type`, async () => { - const { - request: { logs }, - } = await getCompletedResponse({ outputContentType: 'application/html' }); - const errorLogs = logs.filter( - log => difference(['internal', 'implementation', 'error'], log.tags).length === 0 - ); - expect(errorLogs).toHaveLength(1); - expect(errorLogs[0].error).toBeInstanceOf(Error); - expect(errorLogs[0].error.message).toMatch(/Unsupported content-type of application\/html/); + const { request } = await getCompletedResponse({ outputContentType: 'application/html' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); }); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index daabc2cf22f4e..2de420e6577c3 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -9,7 +9,6 @@ import { ResponseObject } from 'hapi'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { - ExportTypesRegistry, JobDocOutput, JobSource, ListQuery, @@ -18,7 +17,7 
@@ import { ServerFacade, } from '../../types'; import { jobsQueryFactory } from '../lib/jobs_query'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { jobResponseHandlerFactory } from './lib/job_response_handler'; import { makeRequestFacade } from './lib/make_request_facade'; import { @@ -33,9 +32,9 @@ function isResponse(response: Boom | ResponseObject): response is Response } export function registerJobInfoRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, - exportTypesRegistry: ExportTypesRegistry, logger: Logger ) { const { elasticsearch } = plugins; @@ -138,6 +137,7 @@ export function registerJobInfoRoutes( }); // trigger a download of the output from a job + const exportTypesRegistry = reporting.getExportTypesRegistry(); const jobResponseHandler = jobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); server.route({ path: `${MAIN_ENTRY}/download/{docId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index 57c3fcee222da..c5f8c78016f61 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -9,7 +9,7 @@ import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; import { Logger, ServerFacade } from '../../../types'; import { getUserFactory } from '../../lib/get_user'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; const superuserRole = 'superuser'; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 7367fceb50857..9e618ff1fe40a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -7,7 +7,7 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 931f642397bf8..82ba9ba22c706 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -7,7 +7,7 @@ import Joi from 'joi'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { GetReportingFeatureIdFn, diff --git a/x-pack/legacy/plugins/reporting/server/types.d.ts b/x-pack/legacy/plugins/reporting/server/types.d.ts new file mode 100644 index 0000000000000..20673423aa448 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/types.d.ts @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Legacy } from 'kibana'; +import { + ElasticsearchServiceSetup, + SavedObjectsServiceStart, + UiSettingsServiceStart, +} from 'src/core/server'; +import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server'; +import { SecurityPluginSetup } from '../../../../plugins/security/server'; +import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; +import { EnqueueJobFn, ESQueueInstance, ReportingPluginSpecOptions } from '../types'; +import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; + +export interface ReportingSetupDeps { + elasticsearch: ElasticsearchServiceSetup; + security: SecurityPluginSetup; + usageCollection: UsageCollectionSetup; + __LEGACY: LegacySetup; +} + +export interface ReportingStartDeps { + elasticsearch: ElasticsearchServiceSetup; + data: DataPluginStart; + __LEGACY: LegacySetup; +} + +export type ReportingSetup = object; + +export type ReportingStart = object; + +export interface LegacySetup { + config: Legacy.Server['config']; + info: Legacy.Server['info']; + plugins: { + elasticsearch: Legacy.Server['plugins']['elasticsearch']; + xpack_main: XPackMainPlugin & { + status?: any; + }; + reporting: ReportingPluginSpecOptions; + }; + route: Legacy.Server['route']; +} + +export { ReportingCore } from './core'; diff --git a/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts b/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts index 0118dea38d985..359bcc45230c3 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts @@ -6,7 +6,7 @@ import { uniq } from 'lodash'; import { CSV_JOB_TYPE, PDF_JOB_TYPE, PNG_JOB_TYPE } from '../../common/constants'; -import { AvailableTotal, FeatureAvailabilityMap, RangeStats, ExportType } from './types.d'; +import { AvailableTotal, FeatureAvailabilityMap, RangeStats, ExportType } from './types'; function getForFeature( range: Partial, diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js index f761f0d2d270b..a6d753f9b107a 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js @@ -3,9 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + import sinon from 'sinon'; +import { createMockReportingCore } from '../../test_helpers'; import { getExportTypesRegistry } from '../lib/export_types_registry'; -import { getReportingUsageCollector } from './reporting_usage_collector'; +import { + registerReportingUsageCollector, + getReportingUsageCollector, +} from './reporting_usage_collector'; const exportTypesRegistry = getExportTypesRegistry(); @@ -70,9 +75,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithBasicLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -101,9 +105,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithNoLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -132,9 +135,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithPlatinumLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -163,9 +165,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve({})); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithBasicLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -190,9 +191,8 @@ describe('data modeling', () => { .stub() .returns('platinum'); ({ fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithPlatinumLicenseMock, - () => {}, + usageCollection, exportTypesRegistry )); }); @@ -322,94 +322,124 @@ describe('data modeling', () => { const usageStats = await getReportingUsage(callClusterMock); expect(usageStats).toMatchInlineSnapshot(` -Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 54, - "available": true, - "browser_type": undefined, - "csv": Object { - "available": true, - "total": 27, - }, - "enabled": true, - "last7Days": Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 27, - "csv": Object { - "available": true, - "total": 10, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 13, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 3, - "print": 10, - }, - "total": 13, - }, - "status": Object { - "completed": 0, - "failed": 0, - "pending": 27, - }, - }, - "lastDay": Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 11, - "csv": Object { - "available": true, - "total": 5, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 2, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 0, - "print": 2, - }, - "total": 2, - }, - "status": Object { - "completed": 0, - "failed": 0, - "pending": 
11, - }, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 23, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 13, - "print": 10, - }, - "total": 23, - }, - "status": Object { - "completed": 20, - "failed": 0, - "pending": 33, - "processing": 1, - }, -} -`); + Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 54, + "available": true, + "browser_type": undefined, + "csv": Object { + "available": true, + "total": 27, + }, + "enabled": true, + "last7Days": Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 27, + "csv": Object { + "available": true, + "total": 10, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 13, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 3, + "print": 10, + }, + "total": 13, + }, + "status": Object { + "completed": 0, + "failed": 0, + "pending": 27, + }, + }, + "lastDay": Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 11, + "csv": Object { + "available": true, + "total": 5, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 2, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 0, + "print": 2, + }, + "total": 2, + }, + "status": Object { + "completed": 0, + "failed": 0, + "pending": 11, + }, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 23, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 13, + "print": 10, + }, + "total": 23, + }, + "status": Object { + "completed": 20, + "failed": 0, + "pending": 33, + "processing": 1, + }, + } + `); + }); +}); + +describe('Ready for collection observable', () => { + let mockReporting; + + beforeEach(async () => { + mockReporting = await createMockReportingCore(); + }); + + test('converts observable to promise', async () => { + const serverWithBasicLicenseMock = getServerMock(); + const makeCollectorSpy = sinon.spy(); + const usageCollection = { + makeUsageCollector: makeCollectorSpy, + registerCollector: sinon.stub(), + }; + registerReportingUsageCollector(mockReporting, serverWithBasicLicenseMock, usageCollection); + + const [args] = makeCollectorSpy.firstCall.args; + expect(args).toMatchInlineSnapshot(` + Object { + "fetch": [Function], + "formatForBulkUpload": [Function], + "isReady": [Function], + "type": "reporting", + } + `); + + await expect(args.isReady()).resolves.toBe(true); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts index 567838391d2e7..14202530fb6c7 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts @@ -5,8 +5,9 @@ */ import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; -import { ServerFacade, ExportTypesRegistry, ESCallCluster } from '../../types'; import { KIBANA_REPORTING_TYPE } from '../../common/constants'; +import { ReportingCore } from '../../server'; +import { ESCallCluster, ExportTypesRegistry, ServerFacade } from '../../types'; import { getReportingUsage } from './get_reporting_usage'; import { RangeStats } from './types'; @@ -18,16 +19,16 @@ const METATYPE = 'kibana_stats'; * @return {Object} kibana usage stats type collection object */ export function getReportingUsageCollector( - usageCollection: UsageCollectionSetup, server: 
ServerFacade, - isReady: () => boolean, - exportTypesRegistry: ExportTypesRegistry + usageCollection: UsageCollectionSetup, + exportTypesRegistry: ExportTypesRegistry, + isReady: () => Promise ) { return usageCollection.makeUsageCollector({ type: KIBANA_REPORTING_TYPE, - isReady, fetch: (callCluster: ESCallCluster) => getReportingUsage(server, callCluster, exportTypesRegistry), + isReady, /* * Format the response data into a model for internal upload @@ -50,16 +51,18 @@ export function getReportingUsageCollector( } export function registerReportingUsageCollector( - usageCollection: UsageCollectionSetup, + reporting: ReportingCore, server: ServerFacade, - isReady: () => boolean, - exportTypesRegistry: ExportTypesRegistry + usageCollection: UsageCollectionSetup ) { + const exportTypesRegistry = reporting.getExportTypesRegistry(); + const collectionIsReady = reporting.pluginHasStarted.bind(reporting); + const collector = getReportingUsageCollector( - usageCollection, server, - isReady, - exportTypesRegistry + usageCollection, + exportTypesRegistry, + collectionIsReady ); usageCollection.registerCollector(collector); } diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts new file mode 100644 index 0000000000000..2cd129d47b3f9 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +jest.mock('../server/routes'); +jest.mock('../server/usage'); +jest.mock('../server/browsers'); +jest.mock('../server/browsers'); +jest.mock('../server/lib/create_queue'); +jest.mock('../server/lib/enqueue_job'); +jest.mock('../server/lib/validate'); +jest.mock('../log_configuration'); + +import { EventEmitter } from 'events'; +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { coreMock } from 'src/core/server/mocks'; +import { ReportingPlugin, ReportingCore } from '../server'; +import { ReportingSetupDeps, ReportingStartDeps } from '../server/types'; + +export const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => ({ + elasticsearch: setupMock.elasticsearch, + security: setupMock.security, + usageCollection: {} as any, + __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, +}); + +export const createMockStartDeps = (startMock?: any): ReportingStartDeps => ({ + data: startMock.data, + elasticsearch: startMock.elasticsearch, + __LEGACY: {} as any, +}); + +const createMockReportingPlugin = async (config = {}): Promise => { + const plugin = new ReportingPlugin(coreMock.createPluginInitializerContext(config)); + const setupMock = coreMock.createSetup(); + const coreStartMock = coreMock.createStart(); + const startMock = { + ...coreStartMock, + data: { fieldFormats: {} }, + }; + + await plugin.setup(setupMock, createMockSetupDeps(setupMock)); + await plugin.start(startMock, createMockStartDeps(startMock)); + + return plugin; +}; + +export const createMockReportingCore = async (config = {}): Promise => { + const plugin = await createMockReportingPlugin(config); + return plugin.getReportingCore(); +}; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts index 
226355f5edc61..bb7851ba036a9 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts @@ -8,9 +8,6 @@ import { ServerFacade } from '../types'; export const createMockServer = ({ settings = {} }: any): ServerFacade => { const mockServer = { - expose: () => { - ' '; - }, config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', @@ -24,10 +21,6 @@ export const createMockServer = ({ settings = {} }: any): ServerFacade => { }), }, }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), }; const defaultSettings: any = { diff --git a/x-pack/legacy/plugins/reporting/test_helpers/index.ts b/x-pack/legacy/plugins/reporting/test_helpers/index.ts new file mode 100644 index 0000000000000..7fbc5661d5211 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/test_helpers/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { createMockServer } from './create_mock_server'; +export { createMockReportingCore } from './create_mock_reportingplugin'; diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index a4ff39b23747d..1549c173b3d6e 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -6,16 +6,15 @@ import { EventEmitter } from 'events'; import { ResponseObject } from 'hapi'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { Legacy } from 'kibana'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; import { BrowserType } from './server/browsers/types'; import { LevelLogger } from './server/lib/level_logger'; -import { LegacySetup, ReportingSetupDeps } from './server/plugin'; - -export type ReportingPlugin = object; // For Plugin contract +import { ReportingCore } from './server/core'; +import { LegacySetup, ReportingStartDeps, ReportingSetup, ReportingStart } from './server/types'; export type Job = EventEmitter & { id: string; @@ -65,6 +64,7 @@ interface GenerateExportTypePayload { /* * Legacy System + * TODO: move to server/types */ export type ServerFacade = LegacySetup; @@ -179,6 +179,15 @@ export interface CryptoFactory { decrypt: (headers?: string) => any; } +export interface IndexPatternSavedObject { + attributes: { + fieldFormatMap: string; + }; + id: string; + type: string; + version: string; +} + export interface TimeRangeParams { timezone: string; min: Date | string | number; @@ -214,10 +223,6 @@ export interface JobDocOutput { size: number; } -export interface ESQueue { - addJob: (type: string, payload: object, options: object) => Job; -} - export interface ESQueueWorker { on: (event: string, handler: any) => void; } @@ -267,8 +272,9 @@ type GenericWorkerFn = ( ...workerRestArgs: any[] ) => void | Promise; -export interface ESQueueInstance { - registerWorker: ( +export interface ESQueueInstance { + addJob: (type: string, payload: unknown, options: object) => Job; + registerWorker: ( pluginId: string, workerFn: 
GenericWorkerFn, workerOptions: ESQueueWorkerOptions @@ -276,18 +282,17 @@ export interface ESQueueInstance { } export type CreateJobFactory = ( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => CreateJobFnType; export type ExecuteJobFactory = ( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: LevelLogger, - opts: { - browserDriverFactory: HeadlessChromiumDriverFactory; - } -) => ExecuteJobFnType; + logger: LevelLogger +) => Promise; export interface ExportTypeDefinition< JobParamsType, @@ -309,7 +314,6 @@ export { CancellationToken } from './common/cancellation_token'; export { HeadlessChromiumDriver } from './server/browsers/chromium/driver'; export { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; export { ExportTypesRegistry } from './server/lib/export_types_registry'; - // Prefer to import this type using: `import { LevelLogger } from 'relative/path/server/lib';` export { LevelLogger as Logger }; From 918c0dec9f1f24ca9b347f33f1b832e132d44218 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mike=20C=C3=B4t=C3=A9?= Date: Fri, 14 Feb 2020 15:38:45 -0500 Subject: [PATCH 7/7] Increase stability when initializing the Elasticsearch index for the event log (#57465) * Fix ILM policy creation * Handle errors thrown in scenario multiple Kibana instances are started at the same time * Fix tests and cleanup * Start adding tests * Refactor tests, add index template failure test * Create cluster client adapter to facilitate testing and isolation * Fix places calling callEs still Co-authored-by: Elastic Machine --- .../server/es/cluster_client_adapter.mock.ts | 24 +++ .../server/es/cluster_client_adapter.test.ts | 196 ++++++++++++++++++ .../server/es/cluster_client_adapter.ts | 126 +++++++++++ .../event_log/server/es/context.mock.ts | 60 ++---- x-pack/plugins/event_log/server/es/context.ts | 30 +-- .../event_log/server/es/documents.test.ts | 14 +- .../plugins/event_log/server/es/documents.ts | 7 +- .../plugins/event_log/server/es/init.test.ts | 64 ++++++ x-pack/plugins/event_log/server/es/init.ts | 121 +++-------- .../plugins/event_log/server/es/names.mock.ts | 23 ++ .../server/event_log_service.test.ts | 8 +- .../event_log/server/event_logger.test.ts | 5 +- .../plugins/event_log/server/event_logger.ts | 2 +- 13 files changed, 495 insertions(+), 185 deletions(-) create mode 100644 x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts create mode 100644 x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts create mode 100644 x-pack/plugins/event_log/server/es/cluster_client_adapter.ts create mode 100644 x-pack/plugins/event_log/server/es/init.test.ts create mode 100644 x-pack/plugins/event_log/server/es/names.mock.ts diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts new file mode 100644 index 0000000000000..87e8fb0f521a9 --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { IClusterClientAdapter } from './cluster_client_adapter'; + +const createClusterClientMock = () => { + const mock: jest.Mocked = { + indexDocument: jest.fn(), + doesIlmPolicyExist: jest.fn(), + createIlmPolicy: jest.fn(), + doesIndexTemplateExist: jest.fn(), + createIndexTemplate: jest.fn(), + doesAliasExist: jest.fn(), + createIndex: jest.fn(), + }; + return mock; +}; + +export const clusterClientAdapterMock = { + create: createClusterClientMock, +}; diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts new file mode 100644 index 0000000000000..ecefd4bfa271e --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { ClusterClient, Logger } from '../../../../../src/core/server'; +import { elasticsearchServiceMock, loggingServiceMock } from '../../../../../src/core/server/mocks'; +import { ClusterClientAdapter, IClusterClientAdapter } from './cluster_client_adapter'; + +type EsClusterClient = Pick, 'callAsInternalUser' | 'asScoped'>; + +let logger: Logger; +let clusterClient: EsClusterClient; +let clusterClientAdapter: IClusterClientAdapter; + +beforeEach(() => { + logger = loggingServiceMock.createLogger(); + clusterClient = elasticsearchServiceMock.createClusterClient(); + clusterClientAdapter = new ClusterClientAdapter({ + logger, + clusterClient, + }); +}); + +describe('indexDocument', () => { + test('should call cluster client with given doc', async () => { + await clusterClientAdapter.indexDocument({ args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('index', { + args: true, + }); + }); + + test('should throw error when cluster client throws an error', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.indexDocument({ args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"Fail"`); + }); +}); + +describe('doesIlmPolicyExist', () => { + const notFoundError = new Error('Not found') as any; + notFoundError.statusCode = 404; + + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesIlmPolicyExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('transport.request', { + method: 'GET', + path: '_ilm/policy/foo', + }); + }); + + test('should return false when 404 error is returned by Elasticsearch', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(notFoundError); + await expect(clusterClientAdapter.doesIlmPolicyExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when error is not 404', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesIlmPolicyExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error checking existance of ilm policy: Fail"`); + }); + + test('should return true when no error is thrown', async () => { + await expect(clusterClientAdapter.doesIlmPolicyExist('foo')).resolves.toEqual(true); + }); +}); + +describe('createIlmPolicy', () => { + test('should call cluster client with given policy', async () => { + clusterClient.callAsInternalUser.mockResolvedValue({ success: true 
}); + await clusterClientAdapter.createIlmPolicy('foo', { args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('transport.request', { + method: 'PUT', + path: '_ilm/policy/foo', + body: { args: true }, + }); + }); + + test('should throw error when call cluster client throws', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.createIlmPolicy('foo', { args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating ilm policy: Fail"`); + }); +}); + +describe('doesIndexTemplateExist', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesIndexTemplateExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.existsTemplate', { + name: 'foo', + }); + }); + + test('should return true when call cluster returns true', async () => { + clusterClient.callAsInternalUser.mockResolvedValue(true); + await expect(clusterClientAdapter.doesIndexTemplateExist('foo')).resolves.toEqual(true); + }); + + test('should return false when call cluster returns false', async () => { + clusterClient.callAsInternalUser.mockResolvedValue(false); + await expect(clusterClientAdapter.doesIndexTemplateExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when call cluster throws an error', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesIndexTemplateExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"error checking existance of index template: Fail"` + ); + }); +}); + +describe('createIndexTemplate', () => { + test('should call cluster with given template', async () => { + await clusterClientAdapter.createIndexTemplate('foo', { args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.putTemplate', { + name: 'foo', + create: true, + body: { args: true }, + }); + }); + + test(`should throw error if index template still doesn't exist after error is thrown`, async () => { + clusterClient.callAsInternalUser.mockRejectedValueOnce(new Error('Fail')); + clusterClient.callAsInternalUser.mockResolvedValueOnce(false); + await expect( + clusterClientAdapter.createIndexTemplate('foo', { args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating index template: Fail"`); + }); + + test('should not throw error if index template exists after error is thrown', async () => { + clusterClient.callAsInternalUser.mockRejectedValueOnce(new Error('Fail')); + clusterClient.callAsInternalUser.mockResolvedValueOnce(true); + await clusterClientAdapter.createIndexTemplate('foo', { args: true }); + }); +}); + +describe('doesAliasExist', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesAliasExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.existsAlias', { + name: 'foo', + }); + }); + + test('should return true when call cluster returns true', async () => { + clusterClient.callAsInternalUser.mockResolvedValueOnce(true); + await expect(clusterClientAdapter.doesAliasExist('foo')).resolves.toEqual(true); + }); + + test('should return false when call cluster returns false', async () => { + clusterClient.callAsInternalUser.mockResolvedValueOnce(false); + await expect(clusterClientAdapter.doesAliasExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when call cluster throws an error', async () 
=> { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesAliasExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"error checking existance of initial index: Fail"` + ); + }); +}); + +describe('createIndex', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.createIndex('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.create', { + index: 'foo', + }); + }); + + test('should throw error when not getting an error of type resource_already_exists_exception', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.createIndex('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating initial index: Fail"`); + }); + + test(`shouldn't throw when an error of type resource_already_exists_exception is thrown`, async () => { + const err = new Error('Already exists') as any; + err.body = { + error: { + type: 'resource_already_exists_exception', + }, + }; + clusterClient.callAsInternalUser.mockRejectedValue(err); + await clusterClientAdapter.createIndex('foo'); + }); +}); diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts new file mode 100644 index 0000000000000..c74eeacc9bb19 --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Logger, ClusterClient } from '../../../../../src/core/server'; + +export type EsClusterClient = Pick; +export type IClusterClientAdapter = PublicMethodsOf; + +export interface ConstructorOpts { + logger: Logger; + clusterClient: EsClusterClient; +} + +export class ClusterClientAdapter { + private readonly logger: Logger; + private readonly clusterClient: EsClusterClient; + + constructor(opts: ConstructorOpts) { + this.logger = opts.logger; + this.clusterClient = opts.clusterClient; + } + + public async indexDocument(doc: any): Promise { + await this.callEs('index', doc); + } + + public async doesIlmPolicyExist(policyName: string): Promise { + const request = { + method: 'GET', + path: `_ilm/policy/${policyName}`, + }; + try { + await this.callEs('transport.request', request); + } catch (err) { + if (err.statusCode === 404) return false; + throw new Error(`error checking existance of ilm policy: ${err.message}`); + } + return true; + } + + public async createIlmPolicy(policyName: string, policy: any): Promise { + const request = { + method: 'PUT', + path: `_ilm/policy/${policyName}`, + body: policy, + }; + try { + await this.callEs('transport.request', request); + } catch (err) { + throw new Error(`error creating ilm policy: ${err.message}`); + } + } + + public async doesIndexTemplateExist(name: string): Promise { + let result; + try { + result = await this.callEs('indices.existsTemplate', { name }); + } catch (err) { + throw new Error(`error checking existance of index template: ${err.message}`); + } + return result as boolean; + } + + public async createIndexTemplate(name: string, template: any): Promise { + const addTemplateParams = { + name, + create: true, + body: template, + }; + try { + await this.callEs('indices.putTemplate', addTemplateParams); + } 
catch (err) { + // The error message doesn't have a type attribute we can look to guarantee it's due + // to the template already existing (only long message) so we'll check ourselves to see + // if the template now exists. This scenario would happen if you startup multiple Kibana + // instances at the same time. + const existsNow = await this.doesIndexTemplateExist(name); + if (!existsNow) { + throw new Error(`error creating index template: ${err.message}`); + } + } + } + + public async doesAliasExist(name: string): Promise { + let result; + try { + result = await this.callEs('indices.existsAlias', { name }); + } catch (err) { + throw new Error(`error checking existance of initial index: ${err.message}`); + } + return result as boolean; + } + + public async createIndex(name: string): Promise { + try { + await this.callEs('indices.create', { index: name }); + } catch (err) { + if (err.body?.error?.type !== 'resource_already_exists_exception') { + throw new Error(`error creating initial index: ${err.message}`); + } + } + } + + private async callEs(operation: string, body?: any): Promise { + try { + this.debug(`callEs(${operation}) calls:`, body); + const result = await this.clusterClient.callAsInternalUser(operation, body); + this.debug(`callEs(${operation}) result:`, result); + return result; + } catch (err) { + this.debug(`callEs(${operation}) error:`, { + message: err.message, + statusCode: err.statusCode, + }); + throw err; + } + } + + private debug(message: string, object?: any) { + const objectString = object == null ? '' : JSON.stringify(object); + this.logger.debug(`esContext: ${message} ${objectString}`); + } +} diff --git a/x-pack/plugins/event_log/server/es/context.mock.ts b/x-pack/plugins/event_log/server/es/context.mock.ts index fb894ce6e7787..6581cd689e43d 100644 --- a/x-pack/plugins/event_log/server/es/context.mock.ts +++ b/x-pack/plugins/event_log/server/es/context.mock.ts @@ -4,43 +4,25 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { Logger, ClusterClient } from '../../../../../src/core/server'; import { EsContext } from './context'; - -import { EsNames } from './names'; - -export type EsClusterClient = Pick; - -export interface EsError { - readonly statusCode: number; - readonly message: string; -} - -interface CreateMockEsContextParams { - logger: Logger; - esNames: EsNames; -} - -export function createMockEsContext(params: CreateMockEsContextParams): EsContext { - return new EsContextMock(params); -} - -class EsContextMock implements EsContext { - public logger: Logger; - public esNames: EsNames; - - constructor(params: CreateMockEsContextParams) { - this.logger = params.logger; - this.esNames = params.esNames; - } - - initialize() {} - - async waitTillReady(): Promise { - return true; - } - - async callEs(operation: string, body?: any): Promise { - return {}; - } -} +import { namesMock } from './names.mock'; +import { IClusterClientAdapter } from './cluster_client_adapter'; +import { loggingServiceMock } from '../../../../../src/core/server/mocks'; +import { clusterClientAdapterMock } from './cluster_client_adapter.mock'; + +const createContextMock = () => { + const mock: jest.Mocked & { + esAdapter: jest.Mocked; + } = { + logger: loggingServiceMock.createLogger(), + esNames: namesMock.create(), + initialize: jest.fn(), + waitTillReady: jest.fn(), + esAdapter: clusterClientAdapterMock.create(), + }; + return mock; +}; + +export const contextMock = { + create: createContextMock, +}; diff --git a/x-pack/plugins/event_log/server/es/context.ts b/x-pack/plugins/event_log/server/es/context.ts index b93c1892d0206..144f44ac8e5ea 100644 --- a/x-pack/plugins/event_log/server/es/context.ts +++ b/x-pack/plugins/event_log/server/es/context.ts @@ -8,6 +8,7 @@ import { Logger, ClusterClient } from 'src/core/server'; import { EsNames, getEsNames } from './names'; import { initializeEs } from './init'; +import { ClusterClientAdapter, IClusterClientAdapter } from './cluster_client_adapter'; import { createReadySignal, ReadySignal } from '../lib/ready_signal'; export type EsClusterClient = Pick; @@ -15,9 +16,9 @@ export type EsClusterClient = Pick; - callEs(operation: string, body?: any): Promise; } export interface EsError { @@ -38,16 +39,19 @@ export interface EsContextCtorParams { class EsContextImpl implements EsContext { public readonly logger: Logger; public readonly esNames: EsNames; - private readonly clusterClient: EsClusterClient; + public esAdapter: IClusterClientAdapter; private readonly readySignal: ReadySignal; private initialized: boolean; constructor(params: EsContextCtorParams) { this.logger = params.logger; this.esNames = getEsNames(params.indexNameRoot); - this.clusterClient = params.clusterClient; this.readySignal = createReadySignal(); this.initialized = false; + this.esAdapter = new ClusterClientAdapter({ + logger: params.logger, + clusterClient: params.clusterClient, + }); } initialize() { @@ -73,27 +77,7 @@ class EsContextImpl implements EsContext { return await this.readySignal.wait(); } - async callEs(operation: string, body?: any): Promise { - try { - this.debug(`callEs(${operation}) calls:`, body); - const result = await this.clusterClient.callAsInternalUser(operation, body); - this.debug(`callEs(${operation}) result:`, result); - return result; - } catch (err) { - this.debug(`callEs(${operation}) error:`, { - message: err.message, - statusCode: err.statusCode, - }); - throw err; - } - } - private async _initialize() { await initializeEs(this); } - - private debug(message: string, object?: 
any) { - const objectString = object == null ? '' : JSON.stringify(object); - this.logger.debug(`esContext: ${message} ${objectString}`); - } } diff --git a/x-pack/plugins/event_log/server/es/documents.test.ts b/x-pack/plugins/event_log/server/es/documents.test.ts index 2dec23c61de2f..7edca4b3943a6 100644 --- a/x-pack/plugins/event_log/server/es/documents.test.ts +++ b/x-pack/plugins/event_log/server/es/documents.test.ts @@ -21,23 +21,13 @@ describe('getIndexTemplate()', () => { const esNames = getEsNames('XYZ'); test('returns the correct details of the index template', () => { - const indexTemplate = getIndexTemplate(esNames, true); + const indexTemplate = getIndexTemplate(esNames); expect(indexTemplate.index_patterns).toEqual([esNames.indexPattern]); expect(indexTemplate.aliases[esNames.alias]).toEqual({}); expect(indexTemplate.settings.number_of_shards).toBeGreaterThanOrEqual(0); expect(indexTemplate.settings.number_of_replicas).toBeGreaterThanOrEqual(0); - expect(indexTemplate.mappings).toMatchObject({}); - }); - - test('returns correct index template bits for ilm when ilm is supported', () => { - const indexTemplate = getIndexTemplate(esNames, true); expect(indexTemplate.settings['index.lifecycle.name']).toBe(esNames.ilmPolicy); expect(indexTemplate.settings['index.lifecycle.rollover_alias']).toBe(esNames.alias); - }); - - test('returns correct index template bits for ilm when ilm is not supported', () => { - const indexTemplate = getIndexTemplate(esNames, false); - expect(indexTemplate.settings['index.lifecycle.name']).toBeUndefined(); - expect(indexTemplate.settings['index.lifecycle.rollover_alias']).toBeUndefined(); + expect(indexTemplate.mappings).toMatchObject({}); }); }); diff --git a/x-pack/plugins/event_log/server/es/documents.ts b/x-pack/plugins/event_log/server/es/documents.ts index dfc544f8a41cb..09dd7383c4c5e 100644 --- a/x-pack/plugins/event_log/server/es/documents.ts +++ b/x-pack/plugins/event_log/server/es/documents.ts @@ -8,7 +8,7 @@ import { EsNames } from './names'; import mappings from '../../generated/mappings.json'; // returns the body of an index template used in an ES indices.putTemplate call -export function getIndexTemplate(esNames: EsNames, ilmExists: boolean) { +export function getIndexTemplate(esNames: EsNames) { const indexTemplateBody: any = { index_patterns: [esNames.indexPattern], aliases: { @@ -23,11 +23,6 @@ export function getIndexTemplate(esNames: EsNames, ilmExists: boolean) { mappings, }; - if (!ilmExists) { - delete indexTemplateBody.settings['index.lifecycle.name']; - delete indexTemplateBody.settings['index.lifecycle.rollover_alias']; - } - return indexTemplateBody; } diff --git a/x-pack/plugins/event_log/server/es/init.test.ts b/x-pack/plugins/event_log/server/es/init.test.ts new file mode 100644 index 0000000000000..ad237e522c0a5 --- /dev/null +++ b/x-pack/plugins/event_log/server/es/init.test.ts @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { contextMock } from './context.mock'; +import { initializeEs } from './init'; + +describe('initializeEs', () => { + let esContext = contextMock.create(); + + beforeEach(() => { + esContext = contextMock.create(); + }); + + test(`should create ILM policy if it doesn't exist`, async () => { + esContext.esAdapter.doesIlmPolicyExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIlmPolicyExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIlmPolicy).toHaveBeenCalled(); + }); + + test(`shouldn't create ILM policy if it exists`, async () => { + esContext.esAdapter.doesIlmPolicyExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIlmPolicyExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIlmPolicy).not.toHaveBeenCalled(); + }); + + test(`should create index template if it doesn't exist`, async () => { + esContext.esAdapter.doesIndexTemplateExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIndexTemplateExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndexTemplate).toHaveBeenCalled(); + }); + + test(`shouldn't create index template if it already exists`, async () => { + esContext.esAdapter.doesIndexTemplateExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIndexTemplateExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndexTemplate).not.toHaveBeenCalled(); + }); + + test(`should create initial index if it doesn't exist`, async () => { + esContext.esAdapter.doesAliasExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesAliasExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndex).toHaveBeenCalled(); + }); + + test(`shouldn't create initial index if it already exists`, async () => { + esContext.esAdapter.doesAliasExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesAliasExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndex).not.toHaveBeenCalled(); + }); +}); diff --git a/x-pack/plugins/event_log/server/es/init.ts b/x-pack/plugins/event_log/server/es/init.ts index d87f5bce03475..7094277f7aa9f 100644 --- a/x-pack/plugins/event_log/server/es/init.ts +++ b/x-pack/plugins/event_log/server/es/init.ts @@ -23,25 +23,10 @@ export async function initializeEs(esContext: EsContext): Promise { async function initializeEsResources(esContext: EsContext) { const steps = new EsInitializationSteps(esContext); - let ilmExists: boolean; - // create the ilm policy, if required - ilmExists = await steps.doesIlmPolicyExist(); - if (!ilmExists) { - ilmExists = await steps.createIlmPolicy(); - } - - if (!(await steps.doesIndexTemplateExist())) { - await steps.createIndexTemplate({ ilmExists }); - } - - if (!(await steps.doesInitialIndexExist())) { - await steps.createInitialIndex(); - } -} - -interface AddTemplateOpts { - ilmExists: boolean; + await steps.createIlmPolicyIfNotExists(); + await steps.createIndexTemplateIfNotExists(); + await steps.createInitialIndexIfNotExists(); } class EsInitializationSteps { @@ -49,89 +34,35 @@ class EsInitializationSteps { this.esContext = esContext; } - async doesIlmPolicyExist(): Promise { - const request = { - method: 'GET', - path: `_ilm/policy/${this.esContext.esNames.ilmPolicy}`, - }; - try { - await this.esContext.callEs('transport.request', request); - } catch (err) { - if (err.statusCode === 404) return false; - // 
TODO: remove following once kibana user can access ilm - if (err.statusCode === 403) return false; - - throw new Error(`error checking existance of ilm policy: ${err.message}`); - } - return true; - } - - async createIlmPolicy(): Promise { - const request = { - method: 'PUT', - path: `_ilm/policy/${this.esContext.esNames.ilmPolicy}`, - body: getIlmPolicy(), - }; - try { - await this.esContext.callEs('transport.request', request); - } catch (err) { - // TODO: remove following once kibana user can access ilm - if (err.statusCode === 403) return false; - throw new Error(`error creating ilm policy: ${err.message}`); + async createIlmPolicyIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesIlmPolicyExist( + this.esContext.esNames.ilmPolicy + ); + if (!exists) { + await this.esContext.esAdapter.createIlmPolicy( + this.esContext.esNames.ilmPolicy, + getIlmPolicy() + ); } - return true; } - async doesIndexTemplateExist(): Promise { - const name = this.esContext.esNames.indexTemplate; - let result; - try { - result = await this.esContext.callEs('indices.existsTemplate', { name }); - } catch (err) { - throw new Error(`error checking existance of index template: ${err.message}`); + async createIndexTemplateIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesIndexTemplateExist( + this.esContext.esNames.indexTemplate + ); + if (!exists) { + const templateBody = getIndexTemplate(this.esContext.esNames); + await this.esContext.esAdapter.createIndexTemplate( + this.esContext.esNames.indexTemplate, + templateBody + ); } - return result as boolean; } - async createIndexTemplate(opts: AddTemplateOpts): Promise { - const templateBody = getIndexTemplate(this.esContext.esNames, opts.ilmExists); - const addTemplateParams = { - create: true, - name: this.esContext.esNames.indexTemplate, - body: templateBody, - }; - try { - await this.esContext.callEs('indices.putTemplate', addTemplateParams); - } catch (err) { - throw new Error(`error creating index template: ${err.message}`); + async createInitialIndexIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesAliasExist(this.esContext.esNames.alias); + if (!exists) { + await this.esContext.esAdapter.createIndex(this.esContext.esNames.initialIndex); } } - - async doesInitialIndexExist(): Promise { - const name = this.esContext.esNames.alias; - let result; - try { - result = await this.esContext.callEs('indices.existsAlias', { name }); - } catch (err) { - throw new Error(`error checking existance of initial index: ${err.message}`); - } - return result as boolean; - } - - async createInitialIndex(): Promise { - const index = this.esContext.esNames.initialIndex; - try { - await this.esContext.callEs('indices.create', { index }); - } catch (err) { - throw new Error(`error creating initial index: ${err.message}`); - } - } - - debug(message: string) { - this.esContext.logger.debug(message); - } - - warn(message: string) { - this.esContext.logger.warn(message); - } } diff --git a/x-pack/plugins/event_log/server/es/names.mock.ts b/x-pack/plugins/event_log/server/es/names.mock.ts new file mode 100644 index 0000000000000..7b013a0d263da --- /dev/null +++ b/x-pack/plugins/event_log/server/es/names.mock.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { EsNames } from './names'; + +const createNamesMock = () => { + const mock: jest.Mocked = { + base: '.kibana', + alias: '.kibana-event-log', + ilmPolicy: '.kibana-event-log-policy', + indexPattern: '.kibana-event-log-*', + initialIndex: '.kibana-event-log-000001', + indexTemplate: '.kibana-event-log-template', + }; + return mock; +}; + +export const namesMock = { + create: createNamesMock, +}; diff --git a/x-pack/plugins/event_log/server/event_log_service.test.ts b/x-pack/plugins/event_log/server/event_log_service.test.ts index c7e752d1a652b..3b250b7462009 100644 --- a/x-pack/plugins/event_log/server/event_log_service.test.ts +++ b/x-pack/plugins/event_log/server/event_log_service.test.ts @@ -6,18 +6,14 @@ import { IEventLogConfig } from './types'; import { EventLogService } from './event_log_service'; -import { getEsNames } from './es/names'; -import { createMockEsContext } from './es/context.mock'; +import { contextMock } from './es/context.mock'; import { loggingServiceMock } from '../../../../src/core/server/logging/logging_service.mock'; const loggingService = loggingServiceMock.create(); const systemLogger = loggingService.get(); describe('EventLogService', () => { - const esContext = createMockEsContext({ - esNames: getEsNames('ABC'), - logger: systemLogger, - }); + const esContext = contextMock.create(); function getService(config: IEventLogConfig) { const { enabled, logEntries, indexEntries } = config; diff --git a/x-pack/plugins/event_log/server/event_logger.test.ts b/x-pack/plugins/event_log/server/event_logger.test.ts index 97e52ad04dd08..673bac4f396e1 100644 --- a/x-pack/plugins/event_log/server/event_logger.test.ts +++ b/x-pack/plugins/event_log/server/event_logger.test.ts @@ -7,9 +7,8 @@ import { IEvent, IEventLogger, IEventLogService } from './index'; import { ECS_VERSION } from './types'; import { EventLogService } from './event_log_service'; -import { getEsNames } from './es/names'; import { EsContext } from './es/context'; -import { createMockEsContext } from './es/context.mock'; +import { contextMock } from './es/context.mock'; import { loggerMock, MockedLogger } from '../../../../src/core/server/logging/logger.mock'; import { delay } from './lib/delay'; import { EVENT_LOGGED_PREFIX } from './event_logger'; @@ -24,7 +23,7 @@ describe('EventLogger', () => { beforeEach(() => { systemLogger = loggerMock.create(); - esContext = createMockEsContext({ esNames: getEsNames('ABC'), logger: systemLogger }); + esContext = contextMock.create(); service = new EventLogService({ esContext, systemLogger, diff --git a/x-pack/plugins/event_log/server/event_logger.ts b/x-pack/plugins/event_log/server/event_logger.ts index 891abda947fc8..f5149da069953 100644 --- a/x-pack/plugins/event_log/server/event_logger.ts +++ b/x-pack/plugins/event_log/server/event_logger.ts @@ -171,7 +171,7 @@ function indexEventDoc(esContext: EsContext, doc: Doc): void { async function indexLogEventDoc(esContext: EsContext, doc: any) { esContext.logger.debug(`writing to event log: ${JSON.stringify(doc)}`); await esContext.waitTillReady(); - await esContext.callEs('index', doc); + await esContext.esAdapter.indexDocument(doc); esContext.logger.debug(`writing to event log complete`); }
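
The usage-collector portion of the reporting patch above reduces to one idea: the collector is now assembled from ReportingCore, so its readiness check is an async call bound to the plugin's own start lifecycle (reporting.pluginHasStarted) rather than a synchronous boolean handed in by the caller. A minimal sketch of that wiring follows; UsageCollectionSetupLike and ReportingCoreLike are pared-down stand-ins for the real Kibana contracts, and the literal 'reporting' type stands in for KIBANA_REPORTING_TYPE.

interface UsageCollectionSetupLike {
  makeUsageCollector(options: {
    type: string;
    isReady: () => Promise<boolean>;
    fetch: (callCluster: unknown) => Promise<unknown>;
  }): unknown;
  registerCollector(collector: unknown): void;
}

interface ReportingCoreLike {
  pluginHasStarted(): Promise<boolean>;
  getExportTypesRegistry(): unknown;
}

export function registerReportingUsageCollectorSketch(
  reporting: ReportingCoreLike,
  usageCollection: UsageCollectionSetupLike,
  fetchUsage: (exportTypesRegistry: unknown, callCluster: unknown) => Promise<unknown>
): void {
  const exportTypesRegistry = reporting.getExportTypesRegistry();
  // Readiness is derived from the reporting plugin itself, not passed in separately.
  const isReady = reporting.pluginHasStarted.bind(reporting);

  const collector = usageCollection.makeUsageCollector({
    type: 'reporting',
    isReady,
    fetch: (callCluster) => fetchUsage(exportTypesRegistry, callCluster),
  });

  usageCollection.registerCollector(collector);
}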
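
The core of the event-log patch is the stability fix: every raw callAsInternalUser call moves behind a ClusterClientAdapter, and initialization becomes safe to run from several Kibana instances at once. A failed indices.putTemplate only counts as an error if the template still does not exist afterwards, and indices.create swallows resource_already_exists_exception. Below is a condensed sketch of that create-if-missing flow, written against a pared-down EsClient stand-in rather than the real cluster client.

interface EsClient {
  callAsInternalUser(operation: string, params?: Record<string, unknown>): Promise<unknown>;
}

class EventLogEsInitializerSketch {
  constructor(private readonly es: EsClient) {}

  // Create the index template unless it is already there; if the create call fails,
  // re-check existence so that losing the race to another Kibana instance is not an error.
  async ensureIndexTemplate(name: string, body: Record<string, unknown>): Promise<void> {
    if (await this.templateExists(name)) {
      return;
    }
    try {
      await this.es.callAsInternalUser('indices.putTemplate', { name, create: true, body });
    } catch (err) {
      const existsNow = await this.templateExists(name);
      if (!existsNow) {
        throw new Error(`error creating index template: ${(err as Error).message}`);
      }
      // Another instance created the template first; treat this as success.
    }
  }

  // Create the initial index, ignoring the "already exists" error a concurrent instance may cause.
  async ensureInitialIndex(index: string): Promise<void> {
    try {
      await this.es.callAsInternalUser('indices.create', { index });
    } catch (err) {
      const type = (err as { body?: { error?: { type?: string } } }).body?.error?.type;
      if (type !== 'resource_already_exists_exception') {
        throw new Error(`error creating initial index: ${(err as Error).message}`);
      }
    }
  }

  private async templateExists(name: string): Promise<boolean> {
    return (await this.es.callAsInternalUser('indices.existsTemplate', { name })) as boolean;
  }
}

The ILM policy step in the patch follows the simpler check-then-create half of this pattern; only the template and index steps need the extra tolerance for concurrent creation.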
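
Both patches also standardize test setup around per-module mock factories: cluster_client_adapter.mock.ts, context.mock.ts, names.mock.ts and createMockReportingCore each expose a create() that returns a fully jest-mocked object, so tests compose mocks instead of hand-rolling fakes. A small sketch of that convention follows; EsResourceChecker is an illustrative interface invented for this example, not one of the real adapters, and a Jest test environment with @types/jest is assumed.

// Illustrative interface for the sketch only (assumption, not part of the patch).
export interface EsResourceChecker {
  doesIndexTemplateExist(name: string): Promise<boolean>;
  createIndexTemplate(name: string, body: object): Promise<void>;
}

// Factory returning a fully mocked implementation, mirroring the *.mock.ts convention above.
const createEsResourceCheckerMock = (): jest.Mocked<EsResourceChecker> => ({
  doesIndexTemplateExist: jest.fn(),
  createIndexTemplate: jest.fn(),
});

export const esResourceCheckerMock = {
  create: createEsResourceCheckerMock,
};

// In a test, the factory keeps setup declarative: create the mock, program only the
// behaviour the case needs, hand it to the code under test, and assert on the jest.fn calls.
//
//   const checker = esResourceCheckerMock.create();
//   checker.doesIndexTemplateExist.mockResolvedValue(false);
//   // ...run the code under test with `checker`...
//   expect(checker.createIndexTemplate).toHaveBeenCalled();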